Posted to commits@lucene.apache.org by da...@apache.org on 2018/08/25 03:09:38 UTC

[01/15] lucene-solr:jira/http2: SOLR-12690: Regularize LoggerFactory declarations

Repository: lucene-solr
Updated Branches:
  refs/heads/jira/http2 49d9b4988 -> 56c44139c
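
For context, the pattern this commit standardizes on is an SLF4J logger named in
lowercase and resolved through MethodHandles, matching the replacements in the diff
below. A minimal sketch (the class and method names here are illustrative only, not
taken from the patch):

  import java.lang.invoke.MethodHandles;

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class ExampleComponent {
    // Lowercase "log" field; MethodHandles.lookup().lookupClass() resolves the
    // declaring class without repeating its name in the declaration.
    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    void doWork(String path) {
      // Parameterized messages avoid string concatenation when the level is disabled.
      log.debug("Loading collection config from: [{}]", path);
    }
  }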


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index 2c1209e..6abfba8 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -72,7 +72,7 @@ import static org.apache.solr.common.util.Utils.fromJSON;
 
 public class ZkStateReader implements Closeable {
   public static final int STATE_UPDATE_DELAY = Integer.getInteger("solr.OverseerStateUpdateDelay", 2000);  // delay between cloud state updates
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String BASE_URL_PROP = "base_url";
   public static final String NODE_NAME_PROP = "node_name";
@@ -243,7 +243,7 @@ public class ZkStateReader implements Closeable {
     String configName = null;
 
     String path = COLLECTIONS_ZKNODE + "/" + collection;
-    LOG.debug("Loading collection config from: [{}]", path);
+    log.debug("Loading collection config from: [{}]", path);
 
     try {
       byte[] data = zkClient.getData(path, null, null, true);
@@ -256,10 +256,10 @@ public class ZkStateReader implements Closeable {
       if (configName != null) {
         String configPath = CONFIGS_ZKNODE + "/" + configName;
         if (!zkClient.exists(configPath, true)) {
-          LOG.error("Specified config=[{}] does not exist in ZooKeeper at location=[{}]", configName, configPath);
+          log.error("Specified config=[{}] does not exist in ZooKeeper at location=[{}]", configName, configPath);
           throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "Specified config does not exist in ZooKeeper: " + configName);
         } else {
-          LOG.debug("path=[{}] [{}]=[{}] specified config exists in ZooKeeper", configPath, CONFIGNAME_PROP, configName);
+          log.debug("path=[{}] [{}]=[{}] specified config exists in ZooKeeper", configPath, CONFIGNAME_PROP, configName);
         }
       } else {
         throw new ZooKeeperException(ErrorCode.INVALID_STATE, "No config data found at path: " + path);
@@ -300,12 +300,12 @@ public class ZkStateReader implements Closeable {
             try {
               ZkStateReader.this.createClusterStateWatchersAndUpdate();
             } catch (KeeperException e) {
-              LOG.error("A ZK error has occurred", e);
+              log.error("A ZK error has occurred", e);
               throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
             } catch (InterruptedException e) {
               // Restore the interrupted status
               Thread.currentThread().interrupt();
-              LOG.error("Interrupted", e);
+              log.error("Interrupted", e);
               throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "Interrupted", e);
             }
           }
@@ -359,7 +359,7 @@ public class ZkStateReader implements Closeable {
 
     synchronized (getUpdateLock()) {
       if (clusterState == null) {
-        LOG.warn("ClusterState watchers have not been initialized");
+        log.warn("ClusterState watchers have not been initialized");
         return;
       }
 
@@ -367,20 +367,20 @@ public class ZkStateReader implements Closeable {
       if (ref == null || legacyCollectionStates.containsKey(collection)) {
         // We either don't know anything about this collection (maybe it's new?) or it's legacy.
         // First update the legacy cluster state.
-        LOG.debug("Checking legacy cluster state for collection {}", collection);
+        log.debug("Checking legacy cluster state for collection {}", collection);
         refreshLegacyClusterState(null);
         if (!legacyCollectionStates.containsKey(collection)) {
           // No dice, see if a new collection just got created.
           LazyCollectionRef tryLazyCollection = new LazyCollectionRef(collection);
           if (tryLazyCollection.get() != null) {
             // What do you know, it exists!
-            LOG.debug("Adding lazily-loaded reference for collection {}", collection);
+            log.debug("Adding lazily-loaded reference for collection {}", collection);
             lazyCollectionStates.putIfAbsent(collection, tryLazyCollection);
             constructState(Collections.singleton(collection));
           }
         }
       } else if (ref.isLazilyLoaded()) {
-        LOG.debug("Refreshing lazily-loaded state for collection {}", collection);
+        log.debug("Refreshing lazily-loaded state for collection {}", collection);
         if (ref.get() != null) {
           return;
         }
@@ -388,13 +388,13 @@ public class ZkStateReader implements Closeable {
         refreshLegacyClusterState(null);
       } else if (watchedCollectionStates.containsKey(collection)) {
         // Exists as a watched collection, force a refresh.
-        LOG.debug("Forcing refresh of watched collection state for {}", collection);
+        log.debug("Forcing refresh of watched collection state for {}", collection);
         DocCollection newState = fetchCollectionState(collection, null);
         if (updateWatchedCollection(collection, newState)) {
           constructState(Collections.singleton(collection));
         }
       } else {
-        LOG.error("Collection {} is not lazy or watched!", collection);
+        log.error("Collection {} is not lazy or watched!", collection);
       }
     }
 
@@ -409,7 +409,7 @@ public class ZkStateReader implements Closeable {
     DocCollection collection = clusterState.getCollectionOrNull(coll);
     if (collection == null) return null;
     if (collection.getZNodeVersion() < version) {
-      LOG.debug("Server older than client {}<{}", collection.getZNodeVersion(), version);
+      log.debug("Server older than client {}<{}", collection.getZNodeVersion(), version);
       DocCollection nu = getCollectionLive(this, coll);
       if (nu == null) return -1 ;
       if (nu.getZNodeVersion() > collection.getZNodeVersion()) {
@@ -426,7 +426,7 @@ public class ZkStateReader implements Closeable {
       return null;
     }
     
-    LOG.debug("Wrong version from client [{}]!=[{}]", version, collection.getZNodeVersion());
+    log.debug("Wrong version from client [{}]!=[{}]", version, collection.getZNodeVersion());
     
     return collection.getZNodeVersion();
   }
@@ -435,7 +435,7 @@ public class ZkStateReader implements Closeable {
       InterruptedException {
     // We need to fetch the current cluster state and the set of live nodes
 
-    LOG.debug("Updating cluster state from ZooKeeper... ");
+    log.debug("Updating cluster state from ZooKeeper... ");
 
     // Sanity check ZK structure.
     if (!zkClient.exists(CLUSTER_STATE, true)) {
@@ -480,7 +480,7 @@ public class ZkStateReader implements Closeable {
             }
             try {
               synchronized (ZkStateReader.this.getUpdateLock()) {
-                LOG.debug("Updating [{}] ... ", SOLR_SECURITY_CONF_PATH);
+                log.debug("Updating [{}] ... ", SOLR_SECURITY_CONF_PATH);
 
                 // remake watch
                 final Watcher thisWatch = this;
@@ -489,18 +489,18 @@ public class ZkStateReader implements Closeable {
                 try {
                   callback.call(new Pair<>(data, stat));
                 } catch (Exception e) {
-                  LOG.error("Error running collections node listener", e);
+                  log.error("Error running collections node listener", e);
                 }
               }
             } catch (KeeperException.ConnectionLossException | KeeperException.SessionExpiredException e) {
-              LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+              log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
             } catch (KeeperException e) {
-              LOG.error("A ZK error has occurred", e);
+              log.error("A ZK error has occurred", e);
               throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "", e);
             } catch (InterruptedException e) {
               // Restore the interrupted status
               Thread.currentThread().interrupt();
-              LOG.warn("Interrupted", e);
+              log.warn("Interrupted", e);
             }
           }
 
@@ -534,15 +534,15 @@ public class ZkStateReader implements Closeable {
 
     this.clusterState = new ClusterState(liveNodes, result, legacyClusterStateVersion);
 
-    LOG.debug("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]",
+    log.debug("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]",
         legacyCollectionStates.keySet().size(),
         collectionWatches.keySet().size(),
         watchedCollectionStates.keySet().size(),
         lazyCollectionStates.keySet().size(),
         clusterState.getCollectionStates().size());
 
-    if (LOG.isTraceEnabled()) {
-      LOG.trace("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]",
+    if (log.isTraceEnabled()) {
+      log.trace("clusterStateSet: legacy [{}] interesting [{}] watched [{}] lazy [{}] total [{}]",
           legacyCollectionStates.keySet(),
           collectionWatches.keySet(),
           watchedCollectionStates.keySet(),
@@ -631,7 +631,7 @@ public class ZkStateReader implements Closeable {
       try {
         children = zkClient.getChildren(COLLECTIONS_ZKNODE, watcher, true);
       } catch (KeeperException.NoNodeException e) {
-        LOG.warn("Error fetching collection names: [{}]", e.getMessage());
+        log.warn("Error fetching collection names: [{}]", e.getMessage());
         // fall through
       }
       if (children == null || children.isEmpty()) {
@@ -783,10 +783,10 @@ public class ZkStateReader implements Closeable {
       }
     }
     if (oldLiveNodes.size() != newLiveNodes.size()) {
-      LOG.info("Updated live nodes from ZooKeeper... ({}) -> ({})", oldLiveNodes.size(), newLiveNodes.size());
+      log.info("Updated live nodes from ZooKeeper... ({}) -> ({})", oldLiveNodes.size(), newLiveNodes.size());
     }
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Updated live nodes from ZooKeeper... {} -> {}", oldLiveNodes, newLiveNodes);
+    if (log.isDebugEnabled()) {
+      log.debug("Updated live nodes from ZooKeeper... {} -> {}", oldLiveNodes, newLiveNodes);
     }
     if (!oldLiveNodes.equals(newLiveNodes)) { // fire listeners
       liveNodesListeners.forEach(listener ->
@@ -1002,11 +1002,11 @@ public class ZkStateReader implements Closeable {
         try {
           byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true);
           this.clusterProperties = (Map<String, Object>) Utils.fromJSON(data);
-          LOG.debug("Loaded cluster properties: {}", this.clusterProperties);
+          log.debug("Loaded cluster properties: {}", this.clusterProperties);
           return;
         } catch (KeeperException.NoNodeException e) {
           this.clusterProperties = Collections.emptyMap();
-          LOG.debug("Loaded empty cluster properties");
+          log.debug("Loaded empty cluster properties");
           // set an exists watch, and if the node has been created since the last call,
           // read the data again
           if (zkClient.exists(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, true) == null)
@@ -1014,7 +1014,7 @@ public class ZkStateReader implements Closeable {
         }
       }
     } catch (KeeperException | InterruptedException e) {
-      LOG.error("Error reading cluster properties from zookeeper", SolrZkClient.checkInterrupted(e));
+      log.error("Error reading cluster properties from zookeeper", SolrZkClient.checkInterrupted(e));
     }
   }
 
@@ -1117,12 +1117,12 @@ public class ZkStateReader implements Closeable {
 
       if (!collectionWatches.containsKey(coll)) {
         // This collection is no longer interesting, stop watching.
-        LOG.debug("Uninteresting collection {}", coll);
+        log.debug("Uninteresting collection {}", coll);
         return;
       }
 
       Set<String> liveNodes = ZkStateReader.this.liveNodes;
-      LOG.info("A cluster state change: [{}] for collection [{}] has occurred - updating... (live nodes size: [{}])",
+      log.info("A cluster state change: [{}] for collection [{}] has occurred - updating... (live nodes size: [{}])",
               event, coll, liveNodes.size());
 
       refreshAndWatch();
@@ -1143,13 +1143,13 @@ public class ZkStateReader implements Closeable {
         }
 
       } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("Unwatched collection: [{}]", coll, e);
+        log.error("Unwatched collection: [{}]", coll, e);
         throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
-        LOG.error("Unwatched collection: [{}]", coll, e);
+        log.error("Unwatched collection: [{}]", coll, e);
       }
     }
   }
@@ -1164,7 +1164,7 @@ public class ZkStateReader implements Closeable {
         return;
       }
       int liveNodesSize = ZkStateReader.this.clusterState == null ? 0 : ZkStateReader.this.clusterState.getLiveNodes().size();
-      LOG.debug("A cluster state change: [{}], has occurred - updating... (live nodes size: [{}])", event, liveNodesSize);
+      log.debug("A cluster state change: [{}], has occurred - updating... (live nodes size: [{}])", event, liveNodesSize);
       refreshAndWatch();
     }
 
@@ -1176,14 +1176,14 @@ public class ZkStateReader implements Closeable {
         throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
                 "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
       } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("A ZK error has occurred", e);
+        log.error("A ZK error has occurred", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
-        LOG.warn("Interrupted", e);
+        log.warn("Interrupted", e);
       }
     }
   }
@@ -1205,11 +1205,11 @@ public class ZkStateReader implements Closeable {
 
       if (!collectionPropsWatches.containsKey(coll)) {
         // No one can be notified of the change, we can ignore it and "unset" the watch
-        LOG.debug("Ignoring property change for collection {}", coll);
+        log.debug("Ignoring property change for collection {}", coll);
         return;
       }
 
-      LOG.info("A collection property change: [{}] for collection [{}] has occurred - updating...",
+      log.info("A collection property change: [{}] for collection [{}] has occurred - updating...",
           event, coll);
 
       refreshAndWatch(true);
@@ -1236,13 +1236,13 @@ public class ZkStateReader implements Closeable {
           }
         }
       } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("Lost collection property watcher for {} due to ZK error", coll, e);
+        log.error("Lost collection property watcher for {} due to ZK error", coll, e);
         throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
-        LOG.error("Lost collection property watcher for {} due to the thread being interrupted", coll, e);
+        log.error("Lost collection property watcher for {} due to the thread being interrupted", coll, e);
       }
     }
   }
@@ -1256,7 +1256,7 @@ public class ZkStateReader implements Closeable {
       if (EventType.None.equals(event.getType())) {
         return;
       }
-      LOG.debug("A collections change: [{}], has occurred - updating...", event);
+      log.debug("A collections change: [{}], has occurred - updating...", event);
       refreshAndWatch();
       synchronized (getUpdateLock()) {
         constructState(Collections.emptySet());
@@ -1268,14 +1268,14 @@ public class ZkStateReader implements Closeable {
       try {
         refreshCollectionList(this);
       } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("A ZK error has occurred", e);
+        log.error("A ZK error has occurred", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
-        LOG.warn("Interrupted", e);
+        log.warn("Interrupted", e);
       }
     }
   }
@@ -1289,7 +1289,7 @@ public class ZkStateReader implements Closeable {
       if (EventType.None.equals(event.getType())) {
         return;
       }
-      LOG.debug("A live node change: [{}], has occurred - updating... (live nodes size: [{}])", event, liveNodes.size());
+      log.debug("A live node change: [{}], has occurred - updating... (live nodes size: [{}])", event, liveNodes.size());
       refreshAndWatch();
     }
 
@@ -1297,14 +1297,14 @@ public class ZkStateReader implements Closeable {
       try {
         refreshLiveNodes(this);
       } catch (KeeperException.SessionExpiredException | KeeperException.ConnectionLossException e) {
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("A ZK error has occurred", e);
+        log.error("A ZK error has occurred", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
-        LOG.warn("Interrupted", e);
+        log.warn("Interrupted", e);
       }
     }
   }
@@ -1520,7 +1520,7 @@ public class ZkStateReader implements Closeable {
   private boolean updateWatchedCollection(String coll, DocCollection newState) {
 
     if (newState == null) {
-      LOG.debug("Removing cached collection state for [{}]", coll);
+      log.debug("Removing cached collection state for [{}]", coll);
       watchedCollectionStates.remove(coll);
       return true;
     }
@@ -1534,7 +1534,7 @@ public class ZkStateReader implements Closeable {
       DocCollection oldState = watchedCollectionStates.get(coll);
       if (oldState == null) {
         if (watchedCollectionStates.putIfAbsent(coll, newState) == null) {
-          LOG.debug("Add data for [{}] ver [{}]", coll, newState.getZNodeVersion());
+          log.debug("Add data for [{}] ver [{}]", coll, newState.getZNodeVersion());
           updated = true;
           break;
         }
@@ -1546,7 +1546,7 @@ public class ZkStateReader implements Closeable {
           break;
         }
         if (watchedCollectionStates.replace(coll, oldState, newState)) {
-          LOG.debug("Updating data for [{}] from [{}] to [{}]", coll, oldState.getZNodeVersion(), newState.getZNodeVersion());
+          log.debug("Updating data for [{}] from [{}] to [{}]", coll, oldState.getZNodeVersion(), newState.getZNodeVersion());
           updated = true;
           break;
         }
@@ -1556,7 +1556,7 @@ public class ZkStateReader implements Closeable {
     // Resolve race with unregisterCore.
     if (!collectionWatches.containsKey(coll)) {
       watchedCollectionStates.remove(coll);
-      LOG.debug("Removing uninteresting collection [{}]", coll);
+      log.debug("Removing uninteresting collection [{}]", coll);
     }
 
     return updated;
@@ -1611,7 +1611,7 @@ public class ZkStateReader implements Closeable {
     }
     catch (RejectedExecutionException e) {
       if (closed == false) {
-        LOG.error("Couldn't run collection notifications for {}", collection, e);
+        log.error("Couldn't run collection notifications for {}", collection, e);
       }
     }
   }
@@ -1643,7 +1643,7 @@ public class ZkStateReader implements Closeable {
             removeCollectionStateWatcher(collection, watcher);
           }
         } catch (Exception exception) {
-          LOG.warn("Error on calling watcher", exception);
+          log.warn("Error on calling watcher", exception);
         }
       }
     }
@@ -1684,7 +1684,7 @@ public class ZkStateReader implements Closeable {
    */
   public class AliasesManager implements Watcher  { // the holder is a Zk watcher
     // note: as of this writing, this class if very generic. Is it useful to use for other ZK managed things?
-    private final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+    private final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
     private volatile Aliases aliases = Aliases.EMPTY;
 
@@ -1719,7 +1719,7 @@ public class ZkStateReader implements Closeable {
         Aliases modAliases = op.apply(curAliases);
         final byte[] modAliasesJson = modAliases.toJSON();
         if (curAliases == modAliases) {
-          LOG.debug("Current aliases has the desired modification; no further ZK interaction needed.");
+          log.debug("Current aliases has the desired modification; no further ZK interaction needed.");
           return;
         }
 
@@ -1729,8 +1729,8 @@ public class ZkStateReader implements Closeable {
             setIfNewer(Aliases.fromJSON(modAliasesJson, stat.getVersion()));
             return;
           } catch (KeeperException.BadVersionException e) {
-            LOG.debug(e.toString(), e);
-            LOG.warn("Couldn't save aliases due to race with another modification; will update and retry until timeout");
+            log.debug(e.toString(), e);
+            log.warn("Couldn't save aliases due to race with another modification; will update and retry until timeout");
             // considered a backoff here, but we really do want to compete strongly since the normal case is
             // that we will do one update and succeed. This is left as a hot loop for limited tries intentionally.
             // More failures than that here probably indicate a bug or a very strange high write frequency usage for
@@ -1758,7 +1758,7 @@ public class ZkStateReader implements Closeable {
      * @return true if an update was performed
      */
     public boolean update() throws KeeperException, InterruptedException {
-      LOG.debug("Checking ZK for most up to date Aliases {}", ALIASES);
+      log.debug("Checking ZK for most up to date Aliases {}", ALIASES);
       // Call sync() first to ensure the subsequent read (getData) is up to date.
       zkClient.getSolrZooKeeper().sync(ALIASES, null, null);
       Stat stat = new Stat();
@@ -1774,7 +1774,7 @@ public class ZkStateReader implements Closeable {
         return;
       }
       try {
-        LOG.debug("Aliases: updating");
+        log.debug("Aliases: updating");
 
         // re-register the watch
         Stat stat = new Stat();
@@ -1783,14 +1783,14 @@ public class ZkStateReader implements Closeable {
         setIfNewer(Aliases.fromJSON(data, stat.getVersion()));
       } catch (KeeperException.ConnectionLossException | KeeperException.SessionExpiredException e) {
         // note: aliases.json is required to be present
-        LOG.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
+        log.warn("ZooKeeper watch triggered, but Solr cannot talk to ZK: [{}]", e.getMessage());
       } catch (KeeperException e) {
-        LOG.error("A ZK error has occurred", e);
+        log.error("A ZK error has occurred", e);
         throw new ZooKeeperException(ErrorCode.SERVER_ERROR, "A ZK error has occurred", e);
       } catch (InterruptedException e) {
         // Restore the interrupted status
         Thread.currentThread().interrupt();
-        LOG.warn("Interrupted", e);
+        log.warn("Interrupted", e);
       }
     }
 
@@ -1804,12 +1804,12 @@ public class ZkStateReader implements Closeable {
       synchronized (this) {
         int cmp = Integer.compare(aliases.getZNodeVersion(), newAliases.getZNodeVersion());
         if (cmp < 0) {
-          LOG.debug("Aliases: cmp={}, new definition is: {}", cmp, newAliases);
+          log.debug("Aliases: cmp={}, new definition is: {}", cmp, newAliases);
           aliases = newAliases;
           this.notifyAll();
           return true;
         } else {
-          LOG.debug("Aliases: cmp={}, not overwriting ZK version.", cmp);
+          log.debug("Aliases: cmp={}, not overwriting ZK version.", cmp);
           assert cmp != 0 || Arrays.equals(aliases.toJSON(), newAliases.toJSON()) : aliases + " != " + newAliases;
           return false;
         }
@@ -1823,7 +1823,7 @@ public class ZkStateReader implements Closeable {
       collectionPropsNotifications.submit(new PropsNotification(collection, properties));
     } catch (RejectedExecutionException e) {
       if (!closed) {
-        LOG.error("Couldn't run collection properties notifications for {}", collection, e);
+        log.error("Couldn't run collection properties notifications for {}", collection, e);
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
index 3e9efa5..198a664 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/IOUtils.java
@@ -23,7 +23,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class IOUtils {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static void closeQuietly(Closeable closeable) {
     try {
@@ -31,7 +31,7 @@ public class IOUtils {
         closeable.close();
       }
     } catch (Exception e) {
-      LOG.error("Error while closing", e);
+      log.error("Error while closing", e);
     }
   }
 }


[15/15] lucene-solr:jira/http2: Merge with master

Posted by da...@apache.org.
Merge with master


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/56c44139
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/56c44139
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/56c44139

Branch: refs/heads/jira/http2
Commit: 56c44139c0590daeeaece2c0bc3a0e162f763bdd
Parents: 49d9b49 f26dd13
Author: Cao Manh Dat <da...@apache.org>
Authored: Sat Aug 25 10:09:11 2018 +0700
Committer: Cao Manh Dat <da...@apache.org>
Committed: Sat Aug 25 10:09:11 2018 +0700

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |   3 +
 .../lucene/codecs/lucene70/Lucene70Codec.java   | 133 ++++++
 .../apache/lucene/codecs/lucene70/package.html  |  25 ++
 .../services/org.apache.lucene.codecs.Codec     |   1 +
 .../benchmark/byTask/tasks/CreateIndexTask.java |   4 +-
 .../java/org/apache/lucene/codecs/Codec.java    |   2 +-
 .../lucene/codecs/lucene70/Lucene70Codec.java   | 175 --------
 .../lucene/codecs/lucene80/Lucene80Codec.java   | 177 ++++++++
 .../lucene/codecs/lucene80/package-info.java    | 409 +++++++++++++++++++
 .../org/apache/lucene/index/PendingDeletes.java |  13 +-
 .../apache/lucene/index/PendingSoftDeletes.java |   7 +-
 .../apache/lucene/index/ReadersAndUpdates.java  |   3 +
 .../services/org.apache.lucene.codecs.Codec     |   2 +-
 ...cene50StoredFieldsFormatHighCompression.java |   8 +-
 .../lucene70/TestLucene70NormsFormat.java       |   4 +-
 .../org/apache/lucene/index/Test2BPoints.java   |   2 +-
 .../lucene/index/TestPendingSoftDeletes.java    |   8 +-
 .../apache/lucene/index/TestPointValues.java    |   8 +-
 .../TestSoftDeletesRetentionMergePolicy.java    |  94 ++++-
 .../org/apache/lucene/search/TestBoolean2.java  |   8 +-
 .../apache/lucene/search/TestPointQueries.java  |   4 +-
 .../directory/DirectoryTaxonomyReader.java      |   6 +-
 .../document/TestFloatPointNearestNeighbor.java |   2 +-
 .../org/apache/lucene/search/TestNearest.java   |   2 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |   4 +-
 .../suggest/document/TestSuggestField.java      |   4 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java |   2 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java   |  30 +-
 .../java/org/apache/lucene/util/TestUtil.java   |   4 +-
 .../src/groovy/check-source-patterns.groovy     |   9 +-
 solr/CHANGES.txt                                |   9 +
 .../handler/dataimport/MailEntityProcessor.java |  74 ++--
 .../handler/dataimport/BinURLDataSource.java    |  10 +-
 .../handler/dataimport/DataImportHandler.java   |   6 +-
 .../solr/handler/dataimport/DataImporter.java   |  24 +-
 .../dataimport/DateFormatTransformer.java       |   4 +-
 .../solr/handler/dataimport/DocBuilder.java     |  34 +-
 .../dataimport/FieldReaderDataSource.java       |   6 +-
 .../dataimport/FieldStreamDataSource.java       |   4 +-
 .../solr/handler/dataimport/FileDataSource.java |   8 +-
 .../solr/handler/dataimport/JdbcDataSource.java |  20 +-
 .../solr/handler/dataimport/LogTransformer.java |  22 +-
 .../handler/dataimport/RegexTransformer.java    |   4 +-
 .../handler/dataimport/SolrEntityProcessor.java |   6 +-
 .../handler/dataimport/SqlEntityProcessor.java  |   8 +-
 .../handler/dataimport/TemplateTransformer.java |   4 +-
 .../solr/handler/dataimport/URLDataSource.java  |  10 +-
 .../dataimport/XPathEntityProcessor.java        |  24 +-
 .../handler/dataimport/XPathRecordReader.java   |   6 +-
 .../dataimport/config/DIHConfiguration.java     |   6 +-
 .../TestSolrEntityProcessorEndToEnd.java        |  20 +-
 .../client/solrj/embedded/JettySolrRunner.java  |   6 +-
 .../java/org/apache/solr/cloud/LockTree.java    |   6 +-
 .../apache/solr/cloud/OverseerTaskQueue.java    |  14 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java | 164 ++++----
 .../apache/solr/cloud/ReplicateFromLeader.java  |   6 +-
 .../org/apache/solr/cloud/SolrZkServer.java     |   4 +-
 .../apache/solr/cloud/ZkDistributedQueue.java   |   6 +-
 .../cloud/autoscaling/HttpTriggerListener.java  |   4 +-
 .../solr/cloud/autoscaling/LoggingListener.java |   4 +-
 .../solr/cloud/autoscaling/TriggerBase.java     |   8 +-
 .../cloud/autoscaling/TriggerEventQueue.java    |  16 +-
 .../apache/solr/core/HdfsDirectoryFactory.java  |  50 +--
 .../solr/core/IndexDeletionPolicyWrapper.java   |   4 +-
 .../apache/solr/core/SchemaCodecFactory.java    |   4 +-
 .../repository/BackupRepositoryFactory.java     |   6 +-
 .../org/apache/solr/handler/IndexFetcher.java   | 142 +++----
 .../apache/solr/handler/ReplicationHandler.java |  68 +--
 .../org/apache/solr/handler/SnapShooter.java    |  16 +-
 .../admin/AutoscalingHistoryHandler.java        |   4 +-
 .../solr/handler/component/QueryComponent.java  |   4 +-
 .../handler/component/SpellCheckComponent.java  |  24 +-
 .../handler/component/SuggestComponent.java     |  26 +-
 .../solr/handler/export/BoolFieldWriter.java    |  23 +-
 .../solr/handler/export/DateFieldWriter.java    |  22 +-
 .../solr/handler/export/DoubleFieldWriter.java  |  27 +-
 .../apache/solr/handler/export/DoubleValue.java |  20 +
 .../solr/handler/export/DoubleValueSortDoc.java |   9 +
 .../solr/handler/export/ExportWriter.java       |   2 +-
 .../apache/solr/handler/export/FieldWriter.java |   2 +-
 .../solr/handler/export/FloatFieldWriter.java   |  27 +-
 .../apache/solr/handler/export/FloatValue.java  |  20 +
 .../solr/handler/export/IntFieldWriter.java     |  22 +-
 .../apache/solr/handler/export/IntValue.java    |  25 +-
 .../solr/handler/export/LongFieldWriter.java    |  20 +-
 .../apache/solr/handler/export/LongValue.java   |  20 +
 .../solr/handler/export/MultiFieldWriter.java   |   6 +-
 .../solr/handler/export/QuadValueSortDoc.java   |  13 +
 .../solr/handler/export/SingleValueSortDoc.java |   7 +
 .../org/apache/solr/handler/export/SortDoc.java |  10 +
 .../apache/solr/handler/export/SortValue.java   |   8 +
 .../solr/handler/export/StringFieldWriter.java  |  24 +-
 .../apache/solr/handler/export/StringValue.java |  24 +-
 .../solr/handler/export/TripleValueSortDoc.java |  11 +
 .../metrics/reporters/ReporterClientCache.java  |   6 +-
 .../reporters/jmx/JmxMetricsReporter.java       |  58 +--
 .../reporters/solr/SolrShardReporter.java       |   2 +-
 .../org/apache/solr/request/SimpleFacets.java   |   4 +-
 .../solr/response/BinaryResponseWriter.java     |   4 +-
 .../solr/schema/JsonPreAnalyzedParser.java      |  12 +-
 .../apache/solr/schema/PreAnalyzedField.java    |  10 +-
 .../solr/search/SurroundQParserPlugin.java      |   2 +-
 .../search/stats/ExactSharedStatsCache.java     |   6 +-
 .../solr/search/stats/ExactStatsCache.java      |  28 +-
 .../apache/solr/search/stats/LRUStatsCache.java |  10 +-
 .../solr/search/stats/LocalStatsCache.java      |  18 +-
 .../org/apache/solr/search/stats/StatsUtil.java |  22 +-
 .../solr/servlet/CheckLoggingConfiguration.java |   4 +-
 .../solr/spelling/DirectSolrSpellChecker.java   |   6 +-
 .../solr/spelling/SpellCheckCollator.java       |  10 +-
 .../solr/spelling/suggest/SolrSuggester.java    |  34 +-
 .../apache/solr/spelling/suggest/Suggester.java |  26 +-
 .../suggest/jaspell/JaspellLookupFactory.java   |   4 +-
 .../solr/store/blockcache/BlockDirectory.java   |   8 +-
 .../apache/solr/store/hdfs/HdfsDirectory.java   |  16 +-
 .../org/apache/solr/util/stats/MetricUtils.java |   6 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |   8 +-
 .../solr/cloud/DistribCursorPagingTest.java     |   1 +
 .../solr/cloud/LIROnShardRestartTest.java       |   4 +-
 .../solr/cloud/LIRRollingUpdatesTest.java       |   4 +-
 .../solr/cloud/LeaderVoteWaitTimeoutTest.java   |   6 +-
 .../apache/solr/cloud/MoveReplicaHDFSTest.java  |   3 +-
 .../cloud/SharedFSAutoReplicaFailoverTest.java  |   2 +-
 .../apache/solr/cloud/TestCloudConsistency.java |   4 +-
 .../org/apache/solr/cloud/TestPullReplica.java  |  18 +-
 .../cloud/TestPullReplicaErrorHandling.java     |  12 +-
 .../TestSolrCloudWithDelegationTokens.java      |   1 +
 .../org/apache/solr/cloud/TestTlogReplica.java  |  12 +-
 .../apache/solr/cloud/TestWithCollection.java   |   2 +
 .../HdfsCollectionsAPIDistributedZkTest.java    |   3 +-
 .../collections/TestHdfsCloudBackupRestore.java |   3 +-
 .../HdfsAutoAddReplicasIntegrationTest.java     |   2 +-
 .../cloud/autoscaling/ScheduledTriggerTest.java |   2 +-
 .../sim/GenericDistributedQueue.java            |   6 +-
 .../cloud/autoscaling/sim/SimCloudManager.java  |  18 +-
 .../sim/SimClusterStateProvider.java            |  50 +--
 .../autoscaling/sim/SimDistribStateManager.java |   2 +-
 .../sim/SimDistributedQueueFactory.java         |   4 +-
 .../autoscaling/sim/SimNodeStateProvider.java   |  12 +-
 .../sim/TestClusterStateProvider.java           |   4 +-
 .../sim/TestDistribStateManager.java            |   4 +-
 .../solr/handler/TestReplicationHandler.java    |   2 +
 .../solr/handler/export/TestExportWriter.java   |  49 +++
 .../solr/schema/SchemaApiFailureTest.java       |   1 +
 .../search/CurrencyRangeFacetCloudTest.java     |   2 +-
 .../solr/search/facet/RangeFacetCloudTest.java  |   2 +-
 .../apache/solr/update/TestHdfsUpdateLog.java   |   2 +
 solr/solr-ref-guide/src/analytics.adoc          |   2 +-
 .../src/configuring-solrconfig-xml.adoc         |   4 +-
 .../detecting-languages-during-indexing.adoc    |   2 +-
 .../solr-ref-guide/src/filter-descriptions.adoc |   8 +-
 solr/solr-ref-guide/src/language-analysis.adoc  |  32 +-
 solr/solr-ref-guide/src/learning-to-rank.adoc   |   2 +-
 .../src/lib-directives-in-solrconfig.adoc       |  38 --
 .../src/resource-and-plugin-loading.adoc        |  86 ++++
 .../src/stream-decorator-reference.adoc         | 153 ++++---
 solr/solr-ref-guide/src/tokenizers.adoc         |   2 +-
 .../src/update-request-processors.adoc          |   2 +-
 .../cloud/autoscaling/AutoScalingConfig.java    |   4 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |   8 +-
 .../solr/client/solrj/impl/HttpClientUtil.java  |  12 +-
 .../solrj/impl/Krb5HttpClientBuilder.java       |  20 +-
 .../client/solrj/io/stream/CommitStream.java    |   4 +-
 .../client/solrj/io/stream/DaemonStream.java    |  14 +-
 .../client/solrj/io/stream/ExecutorStream.java  |   8 +-
 .../client/solrj/io/stream/UpdateStream.java    |   6 +-
 .../solr/common/cloud/ZkConfigManager.java      |   4 +-
 .../apache/solr/common/cloud/ZkStateReader.java | 140 +++----
 .../org/apache/solr/common/util/IOUtils.java    |   4 +-
 .../solrj/io/stream/StreamDecoratorTest.java    |   4 +-
 .../cloud/TestCloudCollectionsListeners.java    |   1 +
 171 files changed, 2267 insertions(+), 1224 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56c44139/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/56c44139/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
----------------------------------------------------------------------
diff --cc solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
index e48229d,afa2ef6..813c762
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
@@@ -64,10 -49,9 +64,10 @@@ import org.slf4j.LoggerFactory
   */
  public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
    
-   private static final String LOGIN_CONFIG_PROP = "java.security.auth.login.config";
+   public static final String LOGIN_CONFIG_PROP = "java.security.auth.login.config";
 +  private static final String SPNEGO_OID = "1.3.6.1.5.5.2";
-   private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-   
+   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 -  
++
    private static Configuration jaasConfig = new SolrJaasConfiguration();
  
    public Krb5HttpClientBuilder() {
@@@ -109,10 -93,11 +109,10 @@@
          // authentication mechanism can load the credentials from the JAAS configuration.
          if (useSubjectCredsVal == null) {
            System.setProperty(useSubjectCredsProp, "false");
 -        }
 -        else if (!useSubjectCredsVal.toLowerCase(Locale.ROOT).equals("false")) {
 +        } else if (!useSubjectCredsVal.toLowerCase(Locale.ROOT).equals("false")) {
            // Don't overwrite the prop value if it's already been written to something else,
            // but log because it is likely the Credentials won't be loaded correctly.
-           logger.warn("System Property: " + useSubjectCredsProp + " set to: " + useSubjectCredsVal
+           log.warn("System Property: " + useSubjectCredsProp + " set to: " + useSubjectCredsVal
                + " not false.  SPNego authentication may not be successful.");
          }
  
@@@ -152,72 -137,9 +152,72 @@@
            return credentialsProvider;
          });
          HttpClientUtil.addRequestInterceptor(bufferedEntityInterceptor);
 +
 +        //setup for http2
 +        builder.setHttp2Configurator(http2Client -> {
 +          HttpAuthenticationStore authenticationStore = new HttpAuthenticationStore();
 +          authenticationStore.addAuthentication(new Authentication() {
 +            @Override
 +            public boolean matches(String type, URI uri, String realm) {
 +              return "Negotiate".equals(type);
 +            }
 +
 +            @Override
 +            public Result authenticate(Request request, ContentResponse response, HeaderInfo headerInfo, Attributes context) {
 +              String challenge = headerInfo.getBase64();
 +              if (challenge == null) challenge = "";
 +              byte[] input = java.util.Base64.getDecoder().decode(challenge);
 +              byte[] token;
 +              String authServer = request.getHost();
 +              final GSSManager manager = GSSManager.getInstance();
 +              try {
 +                GSSName serverName = manager.createName("HTTP@" + authServer, GSSName.NT_HOSTBASED_SERVICE);
 +                final GSSContext gssContext = createGSSContext(manager, new Oid(SPNEGO_OID), serverName, null);
 +                if (input != null) {
 +                  token = gssContext.initSecContext(input, 0, input.length);
 +                } else {
 +                  token = gssContext.initSecContext(new byte[] {}, 0, 0);
 +                }
 +              } catch (GSSException e) {
 +                throw new IllegalArgumentException("Unable to init GSSContext", e);
 +              }
 +              return new Result() {
 +                AtomicBoolean sentToken = new AtomicBoolean(false);
 +                @Override
 +                public URI getURI() {
 +                  // Since Kerberos is connection-based authentication, subsequent requests won't need to resend the token in the header.
 +                  // By returning null, the ProtocolHandler won't try to apply this result to subsequent requests.
 +                  return null;
 +                }
 +
 +                @Override
 +                public void apply(Request request) {
 +                  if (sentToken.get()) return;
 +
 +                  final String tokenstr = java.util.Base64.getEncoder().encodeToString(token);
-                   if (logger.isDebugEnabled()) {
-                     logger.info("Sending response '" + tokenstr + "' back to the auth server");
++                  if (log.isDebugEnabled()) {
++                    log.info("Sending response '" + tokenstr + "' back to the auth server");
 +                  }
 +                  request.header(headerInfo.getHeader().asString(), "Negotiate "+tokenstr);
 +                }
 +              };
 +            }
 +
 +            private GSSContext createGSSContext(GSSManager manager, Oid oid, GSSName serverName, final GSSCredential gssCredential) throws GSSException {
 +              // Get the credentials from the JAAS configuration rather than here
 +              final GSSContext gssContext = manager.createContext(serverName.canonicalize(oid), oid, gssCredential,
 +                  GSSContext.DEFAULT_LIFETIME);
 +              gssContext.requestMutualAuth(true);
 +              return gssContext;
 +            }
 +          });
 +          http2Client.getHttpClient().setAuthenticationStore(authenticationStore);
 +          http2Client.getProtocolHandlers().put(new SolrWWWAuthenticationProtocolHandler(http2Client.getHttpClient()));
 +          http2Client.getProtocolHandlers().put(new SolrProxyAuthenticationProtocolHandler(http2Client.getHttpClient()));
 +        });
        }
      } else {
-       logger.warn("{} is configured without specifying system property '{}'",
+       log.warn("{} is configured without specifying system property '{}'",
            getClass().getName(), LOGIN_CONFIG_PROP);
      }
  


[14/15] lucene-solr:jira/http2: LUCENE-8458: Ensure init PendingSoftDeletes when carry-over deletes

Posted by da...@apache.org.
LUCENE-8458: Ensure init PendingSoftDeletes when carry-over deletes

Today, when carrying over hard-deletes after merging segments, we might
not adjust the soft-deletes count accordingly, because we do not always
ensure that the PendingSoftDeletes of the new segment is initialized.

This change fixes the initialization condition in PendingSoftDeletes and
makes sure it is initialized before accepting deletes.

Co-authored-by: Simon Willnauer <si...@apache.org>
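
For illustration, a condensed sketch of the guard this change introduces in
ReadersAndUpdates, taken from the hunk shown further below (simplified, not the
complete class):

  // Before accepting a hard delete, make sure the PendingSoftDeletes has been
  // initialized from a reader, so the soft-deletes count stays consistent when
  // deletes are carried over after a merge.
  public synchronized boolean delete(int docID) throws IOException {
    if (reader == null && pendingDeletes.mustInitOnDelete()) {
      getReader(IOContext.READ).decRef(); // pass a reader to initialize the pending deletes
    }
    return pendingDeletes.delete(docID);
  }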


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/f26dd13b
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/f26dd13b
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/f26dd13b

Branch: refs/heads/jira/http2
Commit: f26dd13b34e3d3a6921230cfe44ff34b2c319e7b
Parents: fc9aac1
Author: Nhat Nguyen <nh...@elastic.co>
Authored: Fri Aug 24 08:33:18 2018 -0400
Committer: Nhat Nguyen <nh...@elastic.co>
Committed: Fri Aug 24 08:33:18 2018 -0400

----------------------------------------------------------------------
 lucene/CHANGES.txt                              |  3 +
 .../org/apache/lucene/index/PendingDeletes.java | 13 ++-
 .../apache/lucene/index/PendingSoftDeletes.java |  7 +-
 .../apache/lucene/index/ReadersAndUpdates.java  |  3 +
 .../lucene/index/TestPendingSoftDeletes.java    |  8 +-
 .../TestSoftDeletesRetentionMergePolicy.java    | 94 +++++++++++++++++---
 6 files changed, 111 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/CHANGES.txt
----------------------------------------------------------------------
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 1550f1b..21c6d23 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -217,6 +217,9 @@ Bug Fixes:
 * LUCENE-8441: IndexWriter now checks doc value type for index sort fields
   and fails the document if they are not compatible. (Jim Ferenczi, Mike McCandless)
 
+* LUCENE-8458: Adjust initialization condition of PendingSoftDeletes and ensures
+  it is initialized before accepting deletes (Simon Willnauer, Nhat Nguyen)
+
 Changes in Runtime Behavior:
 
 * LUCENE-7976: TieredMergePolicy now respects maxSegmentSizeMB by default when executing

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/core/src/java/org/apache/lucene/index/PendingDeletes.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/PendingDeletes.java b/lucene/core/src/java/org/apache/lucene/index/PendingDeletes.java
index 4ab037c..157ae41 100644
--- a/lucene/core/src/java/org/apache/lucene/index/PendingDeletes.java
+++ b/lucene/core/src/java/org/apache/lucene/index/PendingDeletes.java
@@ -39,7 +39,7 @@ class PendingDeletes {
   // case getMutableBits needs to be called
   private FixedBitSet writeableLiveDocs;
   protected int pendingDeleteCount;
-  private boolean liveDocsInitialized;
+  boolean liveDocsInitialized;
 
   PendingDeletes(SegmentReader reader, SegmentCommitInfo info) {
     this(info, reader.getLiveDocs(), true);
@@ -53,7 +53,7 @@ class PendingDeletes {
     // For segments that were published we enforce a reader in the BufferedUpdatesStream.SegmentState ctor
   }
 
-  private PendingDeletes(SegmentCommitInfo info, Bits liveDocs, boolean liveDocsInitialized) {
+  PendingDeletes(SegmentCommitInfo info, Bits liveDocs, boolean liveDocsInitialized) {
     this.info = info;
     this.liveDocs = liveDocs;
     pendingDeleteCount = 0;
@@ -279,4 +279,13 @@ class PendingDeletes {
         " info.getDelCount()=" + info.getDelCount();
     return true;
   }
+
+  /**
+   * Returns {@code true} if we have to initialize this PendingDeletes before {@link #delete(int)};
+   * otherwise this PendingDeletes is ready to accept deletes. A PendingDeletes can be initialized
+   * by providing it a reader via {@link #onNewReader(CodecReader, SegmentCommitInfo)}.
+   */
+  boolean mustInitOnDelete() {
+    return false;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/core/src/java/org/apache/lucene/index/PendingSoftDeletes.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/PendingSoftDeletes.java b/lucene/core/src/java/org/apache/lucene/index/PendingSoftDeletes.java
index eb6e4ff..926295f 100644
--- a/lucene/core/src/java/org/apache/lucene/index/PendingSoftDeletes.java
+++ b/lucene/core/src/java/org/apache/lucene/index/PendingSoftDeletes.java
@@ -37,7 +37,7 @@ final class PendingSoftDeletes extends PendingDeletes {
   private final PendingDeletes hardDeletes;
 
   PendingSoftDeletes(String field, SegmentCommitInfo info)  {
-    super(info);
+    super(info, null, info.getDelCount(true) == 0);
     this.field = field;
     hardDeletes = new PendingDeletes(info);
   }
@@ -230,6 +230,11 @@ final class PendingSoftDeletes extends PendingDeletes {
     return hardDeletes.getLiveDocs();
   }
 
+  @Override
+  boolean mustInitOnDelete() {
+    return liveDocsInitialized == false;
+  }
+
   static int countSoftDeletes(DocIdSetIterator softDeletedDocs, Bits hardDeletes) throws IOException {
     int count = 0;
     if (softDeletedDocs != null) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
index b09338f..9afff9c 100644
--- a/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
+++ b/lucene/core/src/java/org/apache/lucene/index/ReadersAndUpdates.java
@@ -184,6 +184,9 @@ final class ReadersAndUpdates {
   }
 
   public synchronized boolean delete(int docID) throws IOException {
+    if (reader == null && pendingDeletes.mustInitOnDelete()) {
+      getReader(IOContext.READ).decRef(); // pass a reader to initialize the pending deletes
+    }
     return pendingDeletes.delete(docID);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java b/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
index 70f43a0..7d03c7e 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPendingSoftDeletes.java
@@ -65,7 +65,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
     writer.softUpdateDocument(new Term("id", "2"), doc,
         new NumericDocValuesField("_soft_deletes", 1));
     writer.commit();
-    DirectoryReader reader = writer.getReader();
+    DirectoryReader reader = DirectoryReader.open(dir);
     assertEquals(1, reader.leaves().size());
     SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
     SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();
@@ -105,7 +105,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
     writer.softUpdateDocument(new Term("id", "2"), doc,
         new NumericDocValuesField("_soft_deletes", 1));
     writer.commit();
-    DirectoryReader reader = writer.getReader();
+    DirectoryReader reader = DirectoryReader.open(dir);
     assertEquals(1, reader.leaves().size());
     SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
     SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();
@@ -222,7 +222,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
     writer.softUpdateDocument(new Term("id", "2"), doc,
         new NumericDocValuesField("_soft_deletes", 1));
     writer.commit();
-    DirectoryReader reader = writer.getReader();
+    DirectoryReader reader = DirectoryReader.open(dir);
     assertEquals(1, reader.leaves().size());
     SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
     SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();
@@ -270,7 +270,7 @@ public class TestPendingSoftDeletes extends TestPendingDeletes {
     writer.softUpdateDocument(new Term("id", "2"), doc,
         new NumericDocValuesField("_soft_deletes", 1));
     writer.commit();
-    DirectoryReader reader = writer.getReader();
+    DirectoryReader reader = DirectoryReader.open(dir);
     assertEquals(1, reader.leaves().size());
     SegmentReader segmentReader = (SegmentReader) reader.leaves().get(0).reader();
     SegmentCommitInfo segmentInfo = segmentReader.getSegmentInfo();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/f26dd13b/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java b/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
index 909ee9c..bc50fe5 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSoftDeletesRetentionMergePolicy.java
@@ -29,6 +29,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.function.Supplier;
 
+import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntPoint;
@@ -594,7 +595,7 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
     }
     while (true) {
       try (DirectoryReader reader = writer.getReader()) {
-        TopDocs topDocs = new IndexSearcher(new NoDeletesWrapper(reader)).search(new TermQuery(new Term("id", "1")), 1);
+        TopDocs topDocs = new IndexSearcher(new IncludeSoftDeletesWrapper(reader)).search(new TermQuery(new Term("id", "1")), 1);
         assertEquals(1, topDocs.totalHits.value);
         if (writer.tryDeleteDocument(reader, topDocs.scoreDocs[0].doc) > 0) {
           break;
@@ -630,11 +631,58 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
     IOUtils.close(sm, writer, dir);
   }
 
+  public void testMixedSoftDeletesAndHardDeletes() throws Exception {
+    Directory dir = newDirectory();
+    String softDeletesField = "soft-deletes";
+    IndexWriterConfig config = newIndexWriterConfig()
+        .setMaxBufferedDocs(2 + random().nextInt(50)).setRAMBufferSizeMB(IndexWriterConfig.DISABLE_AUTO_FLUSH)
+        .setSoftDeletesField(softDeletesField)
+        .setMergePolicy(new SoftDeletesRetentionMergePolicy(softDeletesField, MatchAllDocsQuery::new, newMergePolicy()));
+    IndexWriter writer = new IndexWriter(dir, config);
+    int numDocs = 10 + random().nextInt(100);
+    Set<String> liveDocs = new HashSet<>();
+    for (int i = 0; i < numDocs; i++) {
+      String id = Integer.toString(i);
+      Document doc = new Document();
+      doc.add(new StringField("id", id, Field.Store.YES));
+      writer.addDocument(doc);
+      liveDocs.add(id);
+    }
+    for (int i = 0; i < numDocs; i++) {
+      if (random().nextBoolean()) {
+        String id = Integer.toString(i);
+        if (random().nextBoolean() && liveDocs.contains(id)) {
+          doUpdate(new Term("id", id), writer, new NumericDocValuesField(softDeletesField, 1));
+        } else {
+          Document doc = new Document();
+          doc.add(new StringField("id", "v" + id, Field.Store.YES));
+          writer.softUpdateDocument(new Term("id", id), doc, new NumericDocValuesField(softDeletesField, 1));
+          liveDocs.add("v" + id);
+        }
+      }
+      if (random().nextBoolean() && liveDocs.isEmpty() == false) {
+        String delId = RandomPicks.randomFrom(random(), liveDocs);
+        if (random().nextBoolean()) {
+          doDelete(new Term("id", delId), writer);
+        } else {
+          writer.deleteDocuments(new Term("id", delId));
+        }
+        liveDocs.remove(delId);
+      }
+    }
+    try (DirectoryReader unwrapped = writer.getReader()) {
+      DirectoryReader reader = new IncludeSoftDeletesWrapper(unwrapped);
+      assertEquals(liveDocs.size(), reader.numDocs());
+    }
+    writer.commit();
+    IOUtils.close(writer, dir);
+  }
+
   static void doUpdate(Term doc, IndexWriter writer, Field... fields) throws IOException {
     long seqId = -1;
     do { // retry if we just committing a merge
       try (DirectoryReader reader = writer.getReader()) {
-        TopDocs topDocs = new IndexSearcher(new NoDeletesWrapper(reader)).search(new TermQuery(doc), 10);
+        TopDocs topDocs = new IndexSearcher(new IncludeSoftDeletesWrapper(reader)).search(new TermQuery(doc), 10);
         assertEquals(1, topDocs.totalHits.value);
         int theDoc = topDocs.scoreDocs[0].doc;
         seqId = writer.tryUpdateDocValue(reader, theDoc, fields);
@@ -642,20 +690,46 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
     } while (seqId == -1);
   }
 
-  private static final class NoDeletesSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper {
+  static void doDelete(Term doc, IndexWriter writer) throws IOException {
+    long seqId;
+    do { // retry if we just committing a merge
+      try (DirectoryReader reader = writer.getReader()) {
+        TopDocs topDocs = new IndexSearcher(new IncludeSoftDeletesWrapper(reader)).search(new TermQuery(doc), 10);
+        assertEquals(1, topDocs.totalHits.value);
+        int theDoc = topDocs.scoreDocs[0].doc;
+        seqId = writer.tryDeleteDocument(reader, theDoc);
+      }
+    } while (seqId == -1);
+  }
 
+  private static final class IncludeSoftDeletesSubReaderWrapper extends FilterDirectoryReader.SubReaderWrapper {
     @Override
     public LeafReader wrap(LeafReader reader) {
+      while (reader instanceof FilterLeafReader) {
+        reader = ((FilterLeafReader) reader).getDelegate();
+      }
+      Bits hardLiveDocs = ((SegmentReader) reader).getHardLiveDocs();
+      final int numDocs;
+      if (hardLiveDocs == null) {
+        numDocs = reader.maxDoc();
+      } else {
+        int bits = 0;
+        for (int i = 0; i < hardLiveDocs.length(); i++) {
+          if (hardLiveDocs.get(i)) {
+            bits++;
+          }
+        }
+        numDocs = bits;
+      }
       return new FilterLeafReader(reader) {
-
         @Override
         public int numDocs() {
-          return maxDoc();
+          return numDocs;
         }
 
         @Override
         public Bits getLiveDocs() {
-          return null;
+          return hardLiveDocs;
         }
 
         @Override
@@ -671,15 +745,15 @@ public class TestSoftDeletesRetentionMergePolicy extends LuceneTestCase {
     }
   }
 
-  private static final class NoDeletesWrapper extends FilterDirectoryReader {
+  private static final class IncludeSoftDeletesWrapper extends FilterDirectoryReader {
 
-    NoDeletesWrapper(DirectoryReader in) throws IOException {
-      super(in, new NoDeletesSubReaderWrapper());
+    IncludeSoftDeletesWrapper(DirectoryReader in) throws IOException {
+      super(in, new IncludeSoftDeletesSubReaderWrapper());
     }
 
     @Override
     protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException {
-      return new NoDeletesWrapper(in);
+      return new IncludeSoftDeletesWrapper(in);
     }
 
 


[13/15] lucene-solr:jira/http2: SOLR-12684: Document speed gotchas and partitionKeys usage for ParallelStream. Standardize search streams to specify the export handler in the examples for expressions that operate on all the tuples

Posted by da...@apache.org.
SOLR-12684: Document speed gotchas and partitionKeys usage for ParallelStream. Standardize search streams to specify the export handler in the examples for expressions that operate on all the tuples


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/fc9aac11
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/fc9aac11
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/fc9aac11

Branch: refs/heads/jira/http2
Commit: fc9aac11f76e9776b68d7a42ed3b58cbf964dc47
Parents: cee309a
Author: Varun Thacker <va...@apache.org>
Authored: Fri Aug 24 01:20:06 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Fri Aug 24 01:20:06 2018 -0700

----------------------------------------------------------------------
 .../src/stream-decorator-reference.adoc         | 153 +++++++++++--------
 1 file changed, 93 insertions(+), 60 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/fc9aac11/solr/solr-ref-guide/src/stream-decorator-reference.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/stream-decorator-reference.adoc b/solr/solr-ref-guide/src/stream-decorator-reference.adoc
index 61608c8..b397192 100644
--- a/solr/solr-ref-guide/src/stream-decorator-reference.adoc
+++ b/solr/solr-ref-guide/src/stream-decorator-reference.adoc
@@ -83,7 +83,7 @@ The following examples show different outputs for this source tuple
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB
 )
 
@@ -104,7 +104,7 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   sequence(3,4,5) as fieldE
 )
 
@@ -133,9 +133,9 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB,
-  productSort="fieldB DESC"
+  productSort="fieldB desc"
 )
 
 {
@@ -155,9 +155,9 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   sequence(3,4,5) as fieldE,
-  productSort='newFieldE DESC'
+  productSort="newFieldE desc"
 )
 
 {
@@ -185,9 +185,9 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB as newFieldB,
-  productSort="fieldB DESC"
+  productSort="fieldB desc"
 )
 
 {
@@ -209,7 +209,7 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB,
   fieldC
 )
@@ -251,10 +251,10 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, qt="/export", q="*:*", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB,
   fieldC,
-  productSort="fieldC ASC"
+  productSort="fieldC asc"
 )
 
 {
@@ -294,10 +294,10 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   fieldB,
   fieldC,
-  productSort="fieldC ASC, fieldB DESC"
+  productSort="fieldC asc, fieldB desc"
 )
 
 {
@@ -337,7 +337,7 @@ cartesianProduct(
 [source,text]
 ----
 cartesianProduct(
-  search(collection1, q='*:*', fl='fieldA, fieldB, fieldC', sort='fieldA ASC'),
+  search(collection1, q="*:*", qt="/export", fl="fieldA, fieldB, fieldC", sort="fieldA asc"),
   sequence(3,4,5) as fieldE,
   fieldB
 )
@@ -407,6 +407,7 @@ classify(model(modelCollection,
              cacheMillis=5000),
          search(contentCollection,
              q="id:(a b c)",
+             qt="/export",
              fl="text_t, id",
              sort="id asc"),
              field="text_t")
@@ -437,7 +438,7 @@ commit(
     update(
         destinationCollection,
         batchSize=5,
-        search(collection1, q=*:*, fl="id,a_s,a_i,a_f,s_multi,i_multi", sort="a_f asc, a_i asc")
+        search(collection1, q="*:*", qt="/export", fl="id,a_s,a_i,a_f,s_multi,i_multi", sort="a_f asc, a_i asc")
     )
 )
 ----
@@ -457,14 +458,14 @@ The `complement` function wraps two streams (A and B) and emits tuples from A wh
 [source,text]
 ----
 complement(
-  search(collection1, q=a_s:(setA || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
-  search(collection1, q=a_s:(setB || setAB), fl="id,a_s,a_i", sort="a_i asc"),
+  search(collection1, q="a_s:(setA || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setB || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc"),
   on="a_i"
 )
 
 complement(
-  search(collection1, q=a_s:(setA || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
-  search(collection1, q=a_s:(setB || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setA || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setB || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
   on="a_i,a_s"
 )
 ----
@@ -660,7 +661,7 @@ The `fetch` function iterates a stream and fetches additional fields and adds th
 [source,text]
 ----
 fetch(addresses,
-      search(people, q="*:*", fl="username, firstName, lastName", sort="username asc"),
+      search(people, q="*:*", qt="/export", fl="username, firstName, lastName", sort="username asc"),
       fl="streetAddress, city, state, country, zip",
       on="username=userId")
 ----
@@ -685,7 +686,8 @@ The comparison evaluators compare the value in a specific field with a value, wh
 having(rollup(over=a_s,
               sum(a_i),
               search(collection1,
-                     q=*:*,
+                     q="*:*",
+                     qt="/export",
                      fl="id,a_s,a_i,a_f",
                      sort="a_s asc")),
        and(gt(sum(a_i), 100), lt(sum(a_i), 110)))
@@ -711,21 +713,21 @@ You can wrap the incoming streams with a `select` function to be specific about
 [source,text]
 ----
 leftOuterJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  search(pets, q=type:cat, fl="personId,petName", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  search(pets, q="type:cat", qt="/export", fl="personId,petName", sort="personId asc"),
   on="personId"
 )
 
 leftOuterJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  search(pets, q=type:cat, fl="ownerId,petName", sort="ownerId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  search(pets, q="type:cat", qt="/export", fl="ownerId,petName", sort="ownerId asc"),
   on="personId=ownerId"
 )
 
 leftOuterJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
   select(
-    search(pets, q=type:cat, fl="ownerId,name", sort="ownerId asc"),
+    search(pets, q="type:cat", qt="/export", fl="ownerId,name", sort="ownerId asc"),
     ownerId,
     name as petName
   ),
@@ -752,21 +754,21 @@ The hashJoin function can be used when the tuples of Left and Right cannot be pu
 [source,text]
 ----
 hashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  hashed=search(pets, q=type:cat, fl="personId,petName", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  hashed=search(pets, q="type:cat", qt="/export", fl="personId,petName", sort="personId asc"),
   on="personId"
 )
 
 hashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  hashed=search(pets, q=type:cat, fl="ownerId,petName", sort="ownerId asc"),
+  search(people, q="*:*", fl="personId,name", sort="personId asc"),
+  hashed=search(pets, q="type:cat", qt="/export", fl="ownerId,petName", sort="ownerId asc"),
   on="personId=ownerId"
 )
 
 hashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
   hashed=select(
-    search(pets, q=type:cat, fl="ownerId,name", sort="ownerId asc"),
+    search(pets, q="type:cat", qt="/export", fl="ownerId,name", sort="ownerId asc"),
     ownerId,
     name as petName
   ),
@@ -789,21 +791,21 @@ Wraps two streams, Left and Right. For every tuple in Left which exists in Right
 [source,text]
 ----
 innerJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  search(pets, q=type:cat, fl="personId,petName", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  search(pets, q="type:cat", qt="/export", fl="personId,petName", sort="personId asc"),
   on="personId"
 )
 
 innerJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  search(pets, q=type:cat, fl="ownerId,petName", sort="ownerId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  search(pets, q="type:cat", qt="/export", fl="ownerId,petName", sort="ownerId asc"),
   on="personId=ownerId"
 )
 
 innerJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
   select(
-    search(pets, q=type:cat, fl="ownerId,name", sort="ownerId asc"),
+    search(pets, q="type:cat", qt="/export", fl="ownerId,name", sort="ownerId asc"),
     ownerId,
     name as petName
   ),
@@ -826,14 +828,14 @@ The `intersect` function wraps two streams, A and B, and emits tuples from A whi
 [source,text]
 ----
 intersect(
-  search(collection1, q=a_s:(setA || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
-  search(collection1, q=a_s:(setB || setAB), fl="id,a_s,a_i", sort="a_i asc"),
+  search(collection1, q="a_s:(setA || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setB || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc"),
   on="a_i"
 )
 
 intersect(
-  search(collection1, q=a_s:(setA || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
-  search(collection1, q=a_s:(setB || setAB), fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setA || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
+  search(collection1, q="a_s:(setB || setAB)", qt="/export", fl="id,a_s,a_i", sort="a_i asc, a_s asc"),
   on="a_i,a_s"
 )
 ----
@@ -857,10 +859,12 @@ The `merge` function merges two or more streaming expressions and maintains the
 merge(
       search(collection1,
              q="id:(0 3 4)",
+             qt="/export",
              fl="id,a_s,a_i,a_f",
              sort="a_f asc"),
       search(collection1,
              q="id:(1)",
+             qt="/export",
              fl="id,a_s,a_i,a_f",
              sort="a_f asc"),
       on="a_f asc")
@@ -873,18 +877,22 @@ merge(
 merge(
       search(collection1,
              q="id:(0 3 4)",
+             qt="/export",
              fl="id,fieldA,fieldB,fieldC",
              sort="fieldA asc, fieldB desc"),
       search(collection1,
              q="id:(1)",
+             qt="/export",
              fl="id,fieldA",
              sort="fieldA asc"),
       search(collection2,
              q="id:(10 11 13)",
+             qt="/export",
              fl="id,fieldA,fieldC",
              sort="fieldA asc"),
       search(collection3,
              q="id:(987)",
+             qt="/export",
              fl="id,fieldA,fieldC",
              sort="fieldA asc"),
       on="fieldA asc")
@@ -909,7 +917,7 @@ The null expression can be wrapped by the parallel function and sent to worker n
 [source,text]
 ----
  parallel(workerCollection,
-          null(search(collection1, q=*:*, fl="id,a_s,a_i,a_f", sort="a_s desc", qt="/export", partitionKeys="a_s")),
+          null(search(collection1, q="*:*", fl="id,a_s,a_i,a_f", sort="a_s desc", qt="/export", partitionKeys="a_s")),
           workers="20",
           zkHost="localhost:9983",
           sort="a_s desc")
@@ -936,21 +944,21 @@ The outerHashJoin stream can be used when the tuples of Left and Right cannot be
 [source,text]
 ----
 outerHashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  hashed=search(pets, q=type:cat, fl="personId,petName", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  hashed=search(pets, q="type:cat", qt="/export", fl="personId,petName", sort="personId asc"),
   on="personId"
 )
 
 outerHashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
-  hashed=search(pets, q=type:cat, fl="ownerId,petName", sort="ownerId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
+  hashed=search(pets, q="type:cat", qt="/export", fl="ownerId,petName", sort="ownerId asc"),
   on="personId=ownerId"
 )
 
 outerHashJoin(
-  search(people, q=*:*, fl="personId,name", sort="personId asc"),
+  search(people, q="*:*", qt="/export", fl="personId,name", sort="personId asc"),
   hashed=select(
-    search(pets, q=type:cat, fl="ownerId,name", sort="ownerId asc"),
+    search(pets, q="type:cat", qt="/export", fl="ownerId,name", sort="ownerId asc"),
     ownerId,
     name as petName
   ),
@@ -964,7 +972,11 @@ The `parallel` function wraps a streaming expression and sends it to N worker no
 
 The parallel function requires that the `partitionKeys` parameter be provided to the underlying searches. The `partitionKeys` parameter will partition the search results (tuples) across the worker nodes. Tuples with the same values in the partitionKeys field will be shuffled to the same worker nodes.
 
-The parallel function maintains the sort order of the tuples returned by the worker nodes, so the sort criteria of the parallel function must match up with the sort order of the tuples returned by the workers.
+The parallel function maintains the sort order of the tuples returned by the worker nodes, so the sort criteria of the parallel function must incorporate the sort order of the tuples returned by the workers.
+
+For example, if you sort on year, month, and day, you could partition on year only, as long as there are enough different years to spread the tuples across the worker nodes.
+Solr allows sorting on more than 4 fields, but for performance reasons you cannot specify more than 4 partitionKeys. It is also overkill to specify many partitionKeys when one or two keys would be enough to spread the tuples.
+The parallel stream was designed for cases where the underlying search stream emits a large number of tuples from the collection. If the search stream only emits a small subset of the data from the collection, using parallel could potentially be slower.
 
 .Worker Collections
 [TIP]
@@ -985,16 +997,36 @@ The worker nodes can be from the same collection as the data, or they can be a d
 [source,text]
 ----
  parallel(workerCollection,
-          reduce(search(collection1, q=*:*, fl="id,a_s,a_i,a_f", sort="a_s desc", partitionKeys="a_s"),
-                 by="a_s",
-                 group(sort="a_f desc", n="4")),
+          rollup(search(collection1, q="*:*", fl="id,year_i,month_i,day_i", qt="/export", sort="year_i desc,month_i desc,day_i asc", partitionKeys="year_i"),
+                 over="year_i", count(*)),
           workers="20",
           zkHost="localhost:9983",
-          sort="a_s desc")
+          sort="year_i desc")
 ----
 
 The expression above shows a `parallel` function wrapping a `rollup` function. This will cause the `rollup` function to be run in parallel across 20 worker nodes.
 
+.Warmup
+[TIP]
+====
+The parallel stream uses the hash query parser to split the data amongst the workers. It executes on all the documents, and the resulting bitset is cached in the filterCache.
+For a parallel stream with the same number of workers and partitionKeys, the first query will be slower than subsequent queries.
+A trick to avoid paying the penalty for the first slow query is to use a warmup query for every new searcher.
+The following is a solrconfig.xml snippet for 2 workers and "year_i" as the partitionKeys.
+
+
+[source,text]
+----
+<listener event="newSearcher" class="solr.QuerySenderListener">
+<arr name="queries">
+    <lst><str name="q">*:*</str><str name="fq">{!hash workers=2 worker=0}</str><str name="partitionKeys">year_i</str></lst>
+    <lst><str name="q">*:*</str><str name="fq">{!hash workers=2 worker=1}</str><str name="partitionKeys">year_i</str></lst>
+</arr>
+</listener>
+----
+
+====
+
 == priority
 
 The `priority` function is a simple priority scheduler for the <<executor>> function. The `executor` function doesn't directly have a concept of task prioritization; instead it simply executes tasks in the order that they are read from its underlying stream. The `priority` function provides the ability to schedule a higher priority task ahead of lower priority tasks that were submitted earlier.
@@ -1043,7 +1075,7 @@ The reduce function relies on the sort order of the underlying stream. According
 
 [source,text]
 ----
-reduce(search(collection1, q=*:*, fl="id,a_s,a_i,a_f", sort="a_s asc, a_f asc"),
+reduce(search(collection1, q="*:*", qt="/export", fl="id,a_s,a_i,a_f", sort="a_s asc, a_f asc"),
        by="a_s",
        group(sort="a_f desc", n="4")
 )
@@ -1066,7 +1098,7 @@ The rollup function also needs to process entire result sets in order to perform
 [source,text]
 ----
 rollup(
-   search(collection1, q=*:*, fl="a_s,a_i,a_f", qt="/export", sort="a_s asc"),
+   search(collection1, q="*:*", qt="/export", fl="a_s,a_i,a_f", qt="/export", sort="a_s asc"),
    over="a_s",
    sum(a_i),
    sum(a_f),
@@ -1104,7 +1136,7 @@ The `select` function wraps a streaming expression and outputs tuples containing
 ----
 // output tuples with fields teamName, wins, losses, and winPercentages where a null value for wins or losses is translated to the value of 0
 select(
-  search(collection1, fl="id,teamName_s,wins,losses", q="*:*", sort="id asc"),
+  search(collection1, fl="id,teamName_s,wins,losses", q="*:*", qt="/export", sort="id asc"),
   teamName_s as teamName,
   wins,
   losses,
@@ -1131,8 +1163,8 @@ The expression below finds dog owners and orders the results by owner and pet na
 ----
 sort(
   innerJoin(
-    search(people, q=*:*, fl="id,name", sort="id asc"),
-    search(pets, q=type:dog, fl="owner,petName", sort="owner asc"),
+    search(people, q="*:*", qt="/export", fl="id,name", sort="id asc"),
+    search(pets, q="type:dog", qt="/export", fl="owner,petName", sort="owner asc"),
     on="id=owner"
   ),
   by="name asc, petName asc"
@@ -1206,6 +1238,7 @@ The `update` function wraps another functions and sends the tuples to a SolrClou
         batchSize=500,
         search(collection1,
                q=*:*,
+               qt="/export",
                fl="id,a_s,a_i,a_f,s_multi,i_multi",
                sort="a_f asc, a_i asc"))
 


[11/15] lucene-solr:jira/http2: SOLR-11598: Fix bug while setting and resetting string doc-values while exporting documents

Posted by da...@apache.org.
SOLR-11598: Fix bug while setting and resetting string doc-values while exporting documents


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/9e78be40
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/9e78be40
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/9e78be40

Branch: refs/heads/jira/http2
Commit: 9e78be40c338005b75609a3b123778aea822bcf0
Parents: aa10cb7
Author: Varun Thacker <va...@apache.org>
Authored: Thu Aug 23 23:57:48 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Thu Aug 23 23:57:48 2018 -0700

----------------------------------------------------------------------
 .../apache/solr/handler/export/StringValue.java |  4 ++--
 .../solr/handler/export/TestExportWriter.java   | 23 ++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e78be40/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
index b7f020b..5df4eeb 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
@@ -96,10 +96,10 @@ class StringValue implements SortValue {
   }
 
   public void setNextReader(LeafReaderContext context) throws IOException {
-    if (globalDocValues instanceof MultiDocValues.MultiSortedDocValues) {
+    if (ordinalMap != null) {
       toGlobal = ordinalMap.getGlobalOrds(context.ord);
-      docValues = DocValues.getSorted(context.reader(), field);
     }
+    docValues = DocValues.getSorted(context.reader(), field);
     lastDocID = 0;
   }
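
For context, the essence of the fix above is that the per-segment doc values must be re-acquired for every leaf, while the global-ordinal mapping is only looked up when an OrdinalMap is present. The sketch below restates that pattern in isolation; it is not part of the patch, the class name is invented for illustration, and it assumes the Lucene OrdinalMap/LongValues/SortedDocValues types referenced by the surrounding StringValue class.

import java.io.IOException;

import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.OrdinalMap;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.LongValues;

/** Minimal sketch of the corrected per-leaf setup, not the full StringValue class. */
class LeafSortState {
  private final String field;
  private final OrdinalMap ordinalMap; // may be null, e.g., for a single-segment reader
  private SortedDocValues docValues;
  private LongValues toGlobal = LongValues.IDENTITY;
  private int lastDocID;

  LeafSortState(String field, OrdinalMap ordinalMap) {
    this.field = field;
    this.ordinalMap = ordinalMap;
  }

  void setNextReader(LeafReaderContext context) throws IOException {
    if (ordinalMap != null) {
      // map segment-local ordinals to global ordinals only when a map exists
      toGlobal = ordinalMap.getGlobalOrds(context.ord);
    }
    // the bug: this lookup used to happen only inside the branch above, so it was
    // skipped whenever the top-level values were not a MultiSortedDocValues
    // instance, leaving docValues pointing at the previous segment
    docValues = DocValues.getSorted(context.reader(), field);
    lastDocID = 0;
  }
}
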
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/9e78be40/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
index 6bb1bdb..206132e 100644
--- a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
+++ b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
@@ -84,6 +84,29 @@ public class TestExportWriter extends SolrTestCaseJ4 {
 
   }
 
+  @Test
+  public void testSortingOnFieldWithNoValues() throws Exception {
+    assertU(delQ("*:*"));
+    assertU(commit());
+
+    assertU(adoc("id","1"));
+    assertU(commit());
+
+    // 10 fields
+    List<String> fieldNames = new ArrayList<>(Arrays.asList("floatdv", "intdv", "stringdv", "longdv", "doubledv",
+        "datedv", "booleandv", "field1_s_dv", "field2_i_p", "field3_l_p"));
+    for (String sortField : fieldNames) {
+      String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id," + sortField, "sort", sortField + " desc"));
+      assertJsonEquals(resp, "{\n" +
+          "  \"responseHeader\":{\"status\":0},\n" +
+          "  \"response\":{\n" +
+          "    \"numFound\":1,\n" +
+          "    \"docs\":[{\n" +
+          "        \"id\":\"1\"}]}}");
+    }
+
+  }
+
   public static void createIndex() {
     assertU(adoc("id","1",
                  "floatdv","2.1",


[08/15] lucene-solr:jira/http2: SOLR-12590: Improve Solr resource loader coverage in the ref guide

Posted by da...@apache.org.
SOLR-12590: Improve Solr resource loader coverage in the ref guide


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/95cb7aa4
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/95cb7aa4
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/95cb7aa4

Branch: refs/heads/jira/http2
Commit: 95cb7aa491f5659084852ec29f52cc90cd7ea35c
Parents: dfd2801
Author: Steve Rowe <sa...@apache.org>
Authored: Thu Aug 23 14:36:05 2018 -0400
Committer: Steve Rowe <sa...@apache.org>
Committed: Thu Aug 23 14:36:05 2018 -0400

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  3 +
 solr/solr-ref-guide/src/analytics.adoc          |  2 +-
 .../src/configuring-solrconfig-xml.adoc         |  4 +-
 .../detecting-languages-during-indexing.adoc    |  2 +-
 .../solr-ref-guide/src/filter-descriptions.adoc |  8 +-
 solr/solr-ref-guide/src/language-analysis.adoc  | 32 ++++----
 solr/solr-ref-guide/src/learning-to-rank.adoc   |  2 +-
 .../src/lib-directives-in-solrconfig.adoc       | 38 ---------
 .../src/resource-and-plugin-loading.adoc        | 86 ++++++++++++++++++++
 solr/solr-ref-guide/src/tokenizers.adoc         |  2 +-
 .../src/update-request-processors.adoc          |  2 +-
 11 files changed, 116 insertions(+), 65 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 48ed840..9157bb3 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -339,6 +339,9 @@ Other Changes
 
 * SOLR-12690: Regularize LoggerFactory declarations (Erick Erickson)
 
+* SOLR-12590: Improve Solr resource loader coverage in the ref guide.
+  (Steve Rowe, Cassandra Targett, Christine Poerschke)
+
 ==================  7.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/analytics.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/analytics.adoc b/solr/solr-ref-guide/src/analytics.adoc
index fe9b110..d7407d1 100644
--- a/solr/solr-ref-guide/src/analytics.adoc
+++ b/solr/solr-ref-guide/src/analytics.adoc
@@ -33,7 +33,7 @@ Since the Analytics framework is a _search component_, it must be declared as su
 For distributed analytics requests over cloud collections, the component uses the `AnalyticsHandler` strictly for inter-shard communication.
 The Analytics Handler should not be used by users to submit analytics requests.
 
-To configure Solr to use the Analytics Component, the first step is to add a `lib` directive so Solr loads the Analytic Component classes (for more about the `lib` directive, see <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig, Lib Directives in SolrConfig>>). In the section of `solrconfig.xml` where the default `lib` directive are, add a line:
+To configure Solr to use the Analytics Component, the first step is to add a `<lib/>` directive so Solr loads the Analytic Component classes (for more about the `<lib/>` directive, see <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). In the section of `solrconfig.xml` where the default `<lib/>` directives are, add a line:
 
 [source,xml]
 <lib dir="${solr.install.dir:../../../..}/dist/" regex="solr-analytics-\d.*\.jar" />

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/configuring-solrconfig-xml.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/configuring-solrconfig-xml.adoc b/solr/solr-ref-guide/src/configuring-solrconfig-xml.adoc
index 83febaf..d2570fa 100644
--- a/solr/solr-ref-guide/src/configuring-solrconfig-xml.adoc
+++ b/solr/solr-ref-guide/src/configuring-solrconfig-xml.adoc
@@ -1,5 +1,5 @@
 = Configuring solrconfig.xml
-:page-children: datadir-and-directoryfactory-in-solrconfig, lib-directives-in-solrconfig, schema-factory-definition-in-solrconfig, indexconfig-in-solrconfig, requesthandlers-and-searchcomponents-in-solrconfig, initparams-in-solrconfig, updatehandlers-in-solrconfig, query-settings-in-solrconfig, requestdispatcher-in-solrconfig, update-request-processors, codec-factory
+:page-children: datadir-and-directoryfactory-in-solrconfig, resource-and-plugin-loading, schema-factory-definition-in-solrconfig, indexconfig-in-solrconfig, requesthandlers-and-searchcomponents-in-solrconfig, initparams-in-solrconfig, updatehandlers-in-solrconfig, query-settings-in-solrconfig, requestdispatcher-in-solrconfig, update-request-processors, codec-factory
 // Licensed to the Apache Software Foundation (ASF) under one
 // or more contributor license agreements.  See the NOTICE file
 // distributed with this work for additional information
@@ -38,7 +38,7 @@ The `solrconfig.xml` file is located in the `conf/` directory for each collectio
 We've covered the options in the following sections:
 
 * <<datadir-and-directoryfactory-in-solrconfig.adoc#datadir-and-directoryfactory-in-solrconfig,DataDir and DirectoryFactory in SolrConfig>>
-* <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>
+* <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>
 * <<schema-factory-definition-in-solrconfig.adoc#schema-factory-definition-in-solrconfig,Schema Factory Definition in SolrConfig>>
 * <<indexconfig-in-solrconfig.adoc#indexconfig-in-solrconfig,IndexConfig in SolrConfig>>
 * <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#requesthandlers-and-searchcomponents-in-solrconfig,RequestHandlers and SearchComponents in SolrConfig>>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/detecting-languages-during-indexing.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/detecting-languages-during-indexing.adoc b/solr/solr-ref-guide/src/detecting-languages-during-indexing.adoc
index 8b0556b..8d446a2 100644
--- a/solr/solr-ref-guide/src/detecting-languages-during-indexing.adoc
+++ b/solr/solr-ref-guide/src/detecting-languages-during-indexing.adoc
@@ -80,7 +80,7 @@ Here is an example of a minimal OpenNLP `langid` configuration in `solrconfig.xm
 ==== OpenNLP-specific Parameters
 
 `langid.model`::
-An OpenNLP language detection model. The OpenNLP project provides a pre-trained 103 language model on the http://opennlp.apache.org/models.html[OpenNLP site's model dowload page]. Model training instructions are provided on the http://opennlp.apache.org/docs/{ivy-opennlp-version}/manual/opennlp.html#tools.langdetect[OpenNLP website]. This parameter is required.
+An OpenNLP language detection model. The OpenNLP project provides a pre-trained 103 language model on the http://opennlp.apache.org/models.html[OpenNLP site's model download page]. Model training instructions are provided on the http://opennlp.apache.org/docs/{ivy-opennlp-version}/manual/opennlp.html#tools.langdetect[OpenNLP website]. This parameter is required.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for information on where to put the model.
 
 ==== OpenNLP Language Codes
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/filter-descriptions.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/filter-descriptions.adoc b/solr/solr-ref-guide/src/filter-descriptions.adoc
index f517901..7fabb75 100644
--- a/solr/solr-ref-guide/src/filter-descriptions.adoc
+++ b/solr/solr-ref-guide/src/filter-descriptions.adoc
@@ -471,7 +471,7 @@ Note that for this filter to work properly, the upstream tokenizer must not remo
 
 This filter is a custom Unicode normalization form that applies the foldings specified in http://www.unicode.org/reports/tr30/tr30-4.html[Unicode TR #30: Character Foldings] in addition to the `NFKC_Casefold` normalization form as described in <<ICU Normalizer 2 Filter>>. This filter is a better substitute for the combined behavior of the <<ASCII Folding Filter>>, <<Lower Case Filter>>, and <<ICU Normalizer 2 Filter>>.
 
-To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`. For more information about adding jars, see the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in Solrconfig>>.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 *Factory class:* `solr.ICUFoldingFilterFactory`
 
@@ -543,7 +543,7 @@ This filter factory normalizes text according to one of five Unicode Normalizati
 
 For detailed information about these normalization forms, see http://unicode.org/reports/tr15/[Unicode Normalization Forms].
 
-To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 == ICU Transform Filter
 
@@ -567,7 +567,7 @@ This filter applies http://userguide.icu-project.org/transforms/general[ICU Tran
 
 For detailed information about ICU Transforms, see http://userguide.icu-project.org/transforms/general.
 
-To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
+To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 == Keep Word Filter
 
@@ -1501,7 +1501,7 @@ NOTE: Although this filter produces correct token graphs, it cannot consume an i
 
 *Arguments:*
 
-`synonyms`:: (required) The path of a file that contains a list of synonyms, one per line. In the (default) `solr` format - see the `format` argument below for alternatives - blank lines and lines that begin with "`#`" are ignored. This may be a comma-separated list of absolute paths, or paths relative to the Solr config directory.
+`synonyms`:: (required) The path of a file that contains a list of synonyms, one per line. In the (default) `solr` format - see the `format` argument below for alternatives - blank lines and lines that begin with "`#`" are ignored. This may be a comma-separated list of paths.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 +
 There are two ways to specify synonym mappings:
 +

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/language-analysis.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/language-analysis.adoc b/solr/solr-ref-guide/src/language-analysis.adoc
index cd893f9..2749ce1 100644
--- a/solr/solr-ref-guide/src/language-analysis.adoc
+++ b/solr/solr-ref-guide/src/language-analysis.adoc
@@ -94,7 +94,7 @@ Compound words are most commonly found in Germanic languages.
 
 *Arguments:*
 
-`dictionary`:: (required) The path of a file that contains a list of simple words, one per line. Blank lines and lines that begin with "#" are ignored. This path may be an absolute path, or path relative to the Solr config directory.
+`dictionary`:: (required) The path of a file that contains a list of simple words, one per line. Blank lines and lines that begin with "#" are ignored.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
 `minWordSize`:: (integer, default 5) Any token shorter than this is not decompounded.
 
@@ -130,7 +130,7 @@ Unicode Collation in Solr is fast, because all the work is done at index time.
 
 Rather than specifying an analyzer within `<fieldtype ... class="solr.TextField">`, the `solr.CollationField` and `solr.ICUCollationField` field type classes provide this functionality. `solr.ICUCollationField`, which is backed by http://site.icu-project.org[the ICU4J library], provides more flexible configuration, has more locales, is significantly faster, and requires less memory and less index space, since its keys are smaller than those produced by the JDK implementation that backs `solr.CollationField`.
 
-`solr.ICUCollationField` is included in the Solr `analysis-extras` contrib - see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `SOLR_HOME/lib` in order to use it.
+To use `solr.ICUCollationField`, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>).  See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 `solr.ICUCollationField` and `solr.CollationField` fields can be created in two ways:
 
@@ -361,7 +361,7 @@ The `lucene/analysis/opennlp` module provides OpenNLP integration via several an
 
 NOTE: The <<OpenNLP Tokenizer>> must be used with all other OpenNLP analysis components, for two reasons: first, the OpenNLP Tokenizer detects and marks the sentence boundaries required by all the OpenNLP filters; and second, since the pre-trained OpenNLP models used by these filters were trained using the corresponding language-specific sentence-detection/tokenization models, the same tokenization, using the same models, must be used at runtime for optimal performance.
 
-See `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add to your `SOLR_HOME/lib`.
+To use the OpenNLP components, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 === OpenNLP Tokenizer
 
@@ -371,9 +371,9 @@ The OpenNLP Tokenizer takes two language-specific binary model files as paramete
 
 *Arguments:*
 
-`sentenceModel`:: (required) The path of a language-specific OpenNLP sentence detection model file. This path may be an absolute path, or path relative to the Solr config directory.
+`sentenceModel`:: (required) The path of a language-specific OpenNLP sentence detection model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
-`tokenizerModel`:: (required) The path of a language-specific OpenNLP tokenization model file. This path may be an absolute path, or path relative to the Solr config directory.
+`tokenizerModel`:: (required) The path of a language-specific OpenNLP tokenization model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
 *Example:*
 
@@ -396,7 +396,7 @@ NOTE: Lucene currently does not index token types, so if you want to keep this i
 
 *Arguments:*
 
-`posTaggerModel`:: (required) The path of a language-specific OpenNLP POS tagger model file. This path may be an absolute path, or path relative to the Solr config directory.
+`posTaggerModel`:: (required) The path of a language-specific OpenNLP POS tagger model file. See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
 *Examples:*
 
@@ -469,7 +469,7 @@ NOTE: Lucene currently does not index token types, so if you want to keep this i
 
 *Arguments:*
 
-`chunkerModel`:: (required) The path of a language-specific OpenNLP phrase chunker model file. This path may be an absolute path, or path relative to the Solr config directory.
+`chunkerModel`:: (required) The path of a language-specific OpenNLP phrase chunker model file.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
 *Examples*:
 
@@ -511,9 +511,9 @@ This filter replaces the text of each token with its lemma. Both a dictionary-ba
 
 Either `dictionary` or `lemmatizerModel` must be provided, and both may be provided - see the examples below:
 
-`dictionary`:: (optional) The path of a lemmatization dictionary file. This path may be an absolute path, or path relative to the Solr config directory. The dictionary file must be encoded as UTF-8, with one entry per line, in the form `word[tab]lemma[tab]part-of-speech`, e.g., `wrote[tab]write[tab]VBD`.
+`dictionary`:: (optional) The path of a lemmatization dictionary file.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information. The dictionary file must be encoded as UTF-8, with one entry per line, in the form `word[tab]lemma[tab]part-of-speech`, e.g., `wrote[tab]write[tab]VBD`.
 
-`lemmatizerModel`:: (optional) The path of a language-specific OpenNLP lemmatizer model file. This path may be an absolute path, or path relative to the Solr config directory.
+`lemmatizerModel`:: (optional) The path of a language-specific OpenNLP lemmatizer model file.  See <<resource-and-plugin-loading.adoc#resource-and-plugin-loading,Resource and Plugin Loading>> for more information.
 
 *Examples:*
 
@@ -698,7 +698,7 @@ Solr can stem Catalan using the Snowball Porter Stemmer with an argument of `lan
 
 === Traditional Chinese
 
-The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is suitable for Traditional Chinese text.  It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words.  To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add to your `SOLR_HOME/lib`.
+The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is suitable for Traditional Chinese text.  It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words.  To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
 
 <<tokenizers.adoc#standard-tokenizer,Standard Tokenizer>> can also be used to tokenize Traditional Chinese text.  Following the Word Break rules from the Unicode Text Segmentation algorithm, it produces one token per Chinese character.  When combined with <<CJK Bigram Filter>>, overlapping bigrams of Chinese characters are formed.
 
@@ -751,9 +751,9 @@ See the example under <<Traditional Chinese>>.
 
 === Simplified Chinese
 
-For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the <<HMM Chinese Tokenizer>>. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add to your `SOLR_HOME/lib`.
+For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the <<HMM Chinese Tokenizer>>. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
 
-The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is also suitable for Simplified Chinese text.  It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words.  To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add to your `SOLR_HOME/lib`.
+The default configuration of the <<tokenizers.adoc#icu-tokenizer,ICU Tokenizer>> is also suitable for Simplified Chinese text.  It follows the Word Break rules from the Unicode Text Segmentation algorithm for non-Chinese text, and uses a dictionary to segment Chinese words.  To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
 
 Also useful for Chinese analysis:
 
@@ -786,7 +786,7 @@ Also useful for Chinese analysis:
 
 === HMM Chinese Tokenizer
 
-For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the `solr.HMMChineseTokenizerFactory` in the `analysis-extras` contrib module. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
+For Simplified Chinese, Solr provides support for Chinese sentence and word segmentation with the `solr.HMMChineseTokenizerFactory` in the `analysis-extras` contrib module. This component includes a large dictionary and segments Chinese text into words with the Hidden Markov Model. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>).  See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 *Factory class:* `solr.HMMChineseTokenizerFactory`
 
@@ -1278,7 +1278,7 @@ Example:
 [[hebrew-lao-myanmar-khmer]]
 === Hebrew, Lao, Myanmar, Khmer
 
-Lucene provides support, in addition to UAX#29 word break rules, for Hebrew's use of the double and single quote characters, and for segmenting Lao, Myanmar, and Khmer into syllables with the `solr.ICUTokenizerFactory` in the `analysis-extras` contrib module. To use this tokenizer, see `solr/contrib/analysis-extras/README.txt for` instructions on which jars you need to add to your `solr_home/lib`.
+Lucene provides support, in addition to UAX#29 word break rules, for Hebrew's use of the double and single quote characters, and for segmenting Lao, Myanmar, and Khmer into syllables with the `solr.ICUTokenizerFactory` in the `analysis-extras` contrib module. To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>).  See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 See <<tokenizers.adoc#icu-tokenizer,the ICUTokenizer>> for more information.
 
@@ -1423,7 +1423,7 @@ Solr includes support for normalizing Persian, and Lucene includes an example st
 
 === Polish
 
-Solr provides support for Polish stemming with the `solr.StempelPolishStemFilterFactory`, and `solr.MorphologikFilterFactory` for lemmatization, in the `contrib/analysis-extras` module. The `solr.StempelPolishStemFilterFactory` component includes an algorithmic stemmer with tables for Polish. To use either of these filters, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
+Solr provides support for Polish stemming with the `solr.StempelPolishStemFilterFactory`, and `solr.MorphologikFilterFactory` for lemmatization, in the `contrib/analysis-extras` module. The `solr.StempelPolishStemFilterFactory` component includes an algorithmic stemmer with tables for Polish. To use either of these filters, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 *Factory class:* `solr.StempelPolishStemFilterFactory` and `solr.MorfologikFilterFactory`
 
@@ -1750,7 +1750,7 @@ Solr includes support for stemming Turkish with the `solr.SnowballPorterFilterFa
 
 === Ukrainian
 
-Solr provides support for Ukrainian lemmatization with the `solr.MorphologikFilterFactory`, in the `contrib/analysis-extras` module. To use this filter, see `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add to your `solr_home/lib`.
+Solr provides support for Ukrainian lemmatization with the `solr.MorphologikFilterFactory`, in the `contrib/analysis-extras` module. To use this filter, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See `solr/contrib/analysis-extras/README.txt` for instructions on which jars you need to add.
 
 Lucene also includes an example Ukrainian stopword list, in the `lucene-analyzers-morfologik` jar.
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/learning-to-rank.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/learning-to-rank.adoc b/solr/solr-ref-guide/src/learning-to-rank.adoc
index 8fe3c33..12fab32 100644
--- a/solr/solr-ref-guide/src/learning-to-rank.adoc
+++ b/solr/solr-ref-guide/src/learning-to-rank.adoc
@@ -533,7 +533,7 @@ Assuming that you consider to use a large model placed at `/path/to/models/myMod
 }
 ----
 
-First, add the directory to Solr's resource paths by <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives>>:
+First, add the directory to Solr's resource paths with a <<resource-and-plugin-loading.adoc#lib-directives-in-solrconfig,`<lib/>` directive>>:
 
 [source,xml]
 ----

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/lib-directives-in-solrconfig.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/lib-directives-in-solrconfig.adoc b/solr/solr-ref-guide/src/lib-directives-in-solrconfig.adoc
deleted file mode 100644
index dc3b319..0000000
--- a/solr/solr-ref-guide/src/lib-directives-in-solrconfig.adoc
+++ /dev/null
@@ -1,38 +0,0 @@
-= Lib Directives in SolrConfig
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.  See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership.  The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License.  You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied.  See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-Solr allows loading plugins by defining `<lib/>` directives in `solrconfig.xml`.
-
-The plugins are loaded in the order they appear in `solrconfig.xml`. If there are dependencies, list the lowest level dependency jar first.
-
-Regular expressions can be used to provide control loading jars with dependencies on other jars in the same directory. All directories are resolved as relative to the Solr `instanceDir`.
-
-[source,xml]
-----
-<lib dir="../../../contrib/extraction/lib" regex=".*\.jar" />
-<lib dir="../../../dist/" regex="solr-cell-\d.*\.jar" />
-
-<lib dir="../../../contrib/clustering/lib/" regex=".*\.jar" />
-<lib dir="../../../dist/" regex="solr-clustering-\d.*\.jar" />
-
-<lib dir="../../../contrib/langid/lib/" regex=".*\.jar" />
-<lib dir="../../../dist/" regex="solr-langid-\d.*\.jar" />
-
-<lib dir="../../../contrib/velocity/lib" regex=".*\.jar" />
-<lib dir="../../../dist/" regex="solr-velocity-\d.*\.jar" />
-----

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc b/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc
new file mode 100644
index 0000000..60cd60f
--- /dev/null
+++ b/solr/solr-ref-guide/src/resource-and-plugin-loading.adoc
@@ -0,0 +1,86 @@
+= Resource and Plugin Loading
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+Solr components can be configured using *resources*: data stored in external files that may be referred to in a location-independent fashion. Examples include files needed by schema components, such as a stopword list for <<filter-descriptions.adoc#stop-filter,Stop Filter>>, and machine-learned models for <<learning-to-rank.adoc#learning-to-rank,Learning to Rank>>.
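+
+For example, a minimal sketch of a schema component referring to such a resource by a configset-relative name (the field type and file names here are illustrative, not values defined elsewhere in this guide):
+
+[source,xml]
+----
+<fieldType name="text_stop_example" class="solr.TextField">
+  <analyzer>
+    <tokenizer class="solr.StandardTokenizerFactory"/>
+    <!-- "lang/stopwords_en.txt" is a resource name; the resource loader finds the
+         actual file in one of the locations described on this page -->
+    <filter class="solr.StopFilterFactory" words="lang/stopwords_en.txt" ignoreCase="true"/>
+  </analyzer>
+</fieldType>
+----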
+  
+Solr *plugins*, which can be configured in `solrconfig.xml`, are Java classes that are normally packaged in `.jar` files along with supporting classes and data. Solr ships with a number of built-in plugins, and can also be configured to use custom plugins.  Example plugins are the <<uploading-structured-data-store-data-with-the-data-import-handler.adoc#uploading-structured-data-store-data-with-the-data-import-handler,Data Import Handler>> and custom search components.
+
+Resources and plugins may be stored:
+
+* in ZooKeeper under a collection's configset node (SolrCloud only);
+* on a filesystem accessible to Solr nodes; or
+* in Solr's <<blob-store-api.adoc#blob-store-api,Blob Store>> (SolrCloud only).
+  
+NOTE: Schema components may not be stored as plugins in the Blob Store, and cannot access resources stored in the Blob Store.  
+
+== Resource and Plugin Loading Sequence 
+
+Under SolrCloud, resources and plugins to be loaded are first looked up in ZooKeeper under the collection's configset znode.  If the resource or plugin is not found there, Solr will fall back to loading <<Resources and Plugins on the Filesystem,from the filesystem>>.
+
+Note that by default, Solr will not attempt to load resources and plugins from the Blob Store.  To enable this, see the section <<blob-store-api.adoc#use-a-blob-in-a-handler-or-component,Use a Blob in a Handler or Component>>.  When loading from the Blob Store is enabled for a component, lookups occur only in the Blob Store, and never in ZooKeeper or on the filesystem.  
+
+== Resources and Plugins in ConfigSets on ZooKeeper
+
+Resources and plugins may be uploaded to ZooKeeper as part of a configset, either via the <<configsets-api.adoc#configsets-api,Configsets API>> or <<solr-control-script-reference.adoc#upload-a-configuration-set,`bin/solr zk upload`>>.
+
+To upload a plugin or resource to a configset already stored on ZooKeeper, you can use <<solr-control-script-reference.adoc#copy-between-local-files-and-zookeeper-znodes,`bin/solr zk cp`>>.   
+
+CAUTION: By default, ZooKeeper's file size limit is 1MB. If your files are larger than this, you'll need to either <<setting-up-an-external-zookeeper-ensemble.adoc#increasing-the-file-size-limit,increase the ZooKeeper file size limit>> or store them instead <<Resources and Plugins on the Filesystem,on the filesystem>>.
+      
+== Resources and Plugins on the Filesystem 
+
+Under standalone Solr, when looking up a plugin or resource to be loaded, Solr's resource loader will first look under the `<instanceDir>/conf/` directory.  If the plugin or resource is not found, the configured plugin and resource file paths are searched - see the section <<Lib Directives in SolrConfig>> below.
+
+On core load, Solr's resource loader constructs a list of paths (subdirectories and jars), first under <<solr_home-lib,`solr_home/lib`>>, and then under directories pointed to by <<Lib Directives in SolrConfig,`<lib/>` directives in SolrConfig>>.
+  
+When looking up a resource or plugin to be loaded, the paths on the list are searched in the order they were added.
+
+NOTE: Under SolrCloud, each node hosting a collection replica will need its own copy of plugins and resources to be loaded.
+
+To get Solr's resource loader to find resources either under subdirectories or in jar files that were created after Solr's resource path list was constructed, reload the collection (SolrCloud) or the core (standalone Solr).  Restarting all affected Solr nodes also works.
+
+WARNING: Resource files *will not be loaded* if they are located directly under either `solr_home/lib` or a directory given by the `dir` attribute on a `<lib/>` directive in SolrConfig.  Resources are only searched for under subdirectories or in jar files found in those locations.
+
+=== solr_home/lib
+
+Each Solr node can have a directory named `lib/` under the <<taking-solr-to-production.adoc#solr-home-directory,Solr home directory>>.  In order to use this directory to host resources or plugins, it must first be manually created. 
+
+=== Lib Directives in SolrConfig
+
+Plugin and resource file paths are configurable via `<lib/>` directives in `solrconfig.xml`.
+
+Loading occurs in the order `<lib/>` directives appear in `solrconfig.xml`. If there are dependencies, list the lowest level dependency jar first.
+
+A regular expression supplied in the `<lib/>` element's `regex` attribute value can be used to restrict which subdirectories and/or jar files are added to the Solr resource loader's list of search locations.  If no regular expression is given, all direct subdirectory and jar children are included in the resource path list.  All directories are resolved as relative to the Solr core's `instanceDir`.
+
+From an example SolrConfig: 
+
+[source,xml]
+----
+<lib dir="../../../contrib/extraction/lib" regex=".*\.jar" />
+<lib dir="../../../dist/" regex="solr-cell-\d.*\.jar" />
+
+<lib dir="../../../contrib/clustering/lib/" regex=".*\.jar" />
+<lib dir="../../../dist/" regex="solr-clustering-\d.*\.jar" />
+
+<lib dir="../../../contrib/langid/lib/" regex=".*\.jar" />
+<lib dir="../../../dist/" regex="solr-langid-\d.*\.jar" />
+
+<lib dir="../../../contrib/velocity/lib" regex=".*\.jar" />
+<lib dir="../../../dist/" regex="solr-velocity-\d.*\.jar" />
+----
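+
+When only one specific jar needs to be added, the stock `solrconfig.xml` also shows a single-file form of the directive that uses a `path` attribute instead of `dir` and `regex`; a minimal sketch (the jar name is a placeholder):
+
+[source,xml]
+----
+<!-- add one specific jar rather than a directory of jars -->
+<lib path="../a-jar-file.jar" />
+----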

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/tokenizers.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/tokenizers.adoc b/solr/solr-ref-guide/src/tokenizers.adoc
index 82e730d..db32c78 100644
--- a/solr/solr-ref-guide/src/tokenizers.adoc
+++ b/solr/solr-ref-guide/src/tokenizers.adoc
@@ -288,7 +288,7 @@ The default configuration for `solr.ICUTokenizerFactory` provides UAX#29 word br
 [IMPORTANT]
 ====
 
-To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<lib-directives-in-solrconfig.adoc#lib-directives-in-solrconfig,Lib Directives in SolrConfig>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add to your `SOLR_HOME/lib`.
+To use this tokenizer, you must add additional .jars to Solr's classpath (as described in the section <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,Resources and Plugins on the Filesystem>>). See the `solr/contrib/analysis-extras/README.txt` for information on which jars you need to add.
 
 ====
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/95cb7aa4/solr/solr-ref-guide/src/update-request-processors.adoc
----------------------------------------------------------------------
diff --git a/solr/solr-ref-guide/src/update-request-processors.adoc b/solr/solr-ref-guide/src/update-request-processors.adoc
index 267ffbd..27b999b 100644
--- a/solr/solr-ref-guide/src/update-request-processors.adoc
+++ b/solr/solr-ref-guide/src/update-request-processors.adoc
@@ -353,7 +353,7 @@ The {solr-javadocs}/solr-langid/index.html[`langid`] contrib provides::
 
 The {solr-javadocs}/solr-analysis-extras/index.html[`analysis-extras`] contrib provides::
 
-{solr-javadocs}/solr-analysis-extras/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.html[OpenNLPExtractNamedEntitiesUpdateProcessorFactory]::: Update document(s) to be indexed with named entities extracted using an OpenNLP NER model.  Note that in order to use model files larger than 1MB on SolrCloud, <<setting-up-an-external-zookeeper-ensemble#increasing-the-file-size-limit,ZooKeeper server and client configuration is required>>.  
+{solr-javadocs}/solr-analysis-extras/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.html[OpenNLPExtractNamedEntitiesUpdateProcessorFactory]::: Update document(s) to be indexed with named entities extracted using an OpenNLP NER model.  Note that in order to use model files larger than 1MB on SolrCloud, you must either <<setting-up-an-external-zookeeper-ensemble.adoc#increasing-the-file-size-limit,configure both ZooKeeper server and clients>> or <<resource-and-plugin-loading.adoc#resources-and-plugins-on-the-filesystem,store the model files on the filesystem>> on each node hosting a collection replica.
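+
+A minimal sketch of an update chain using this factory (the model file, analyzer field type, and field names below are placeholders; see the factory's javadocs for its full parameter list):
+
+[source,xml]
+----
+<updateRequestProcessorChain name="extract-named-entities">
+  <processor class="solr.OpenNLPExtractNamedEntitiesUpdateProcessorFactory">
+    <str name="modelFile">en-test-ner-person.bin</str> <!-- resolved as a resource -->
+    <str name="analyzerFieldType">text_opennlp</str>
+    <str name="source">text</str>
+    <str name="dest">people_extracted</str>
+  </processor>
+  <processor class="solr.LogUpdateProcessorFactory"/>
+  <processor class="solr.RunUpdateProcessorFactory"/>
+</updateRequestProcessorChain>
+----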
 
 === Update Processor Factories You Should _Not_ Modify or Remove
 


[02/15] lucene-solr:jira/http2: SOLR-12690: Regularize LoggerFactory declarations

Posted by da...@apache.org.
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java b/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java
index 0121279..f9f1f65 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/BlockDirectory.java
@@ -38,7 +38,7 @@ import org.slf4j.LoggerFactory;
  * @lucene.experimental
  */
 public class BlockDirectory extends FilterDirectory implements ShutdownAwareDirectory {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final long BLOCK_SHIFT = Integer.getInteger("solr.hdfs.blockcache.blockshift", 13);
 
@@ -118,11 +118,11 @@ public class BlockDirectory extends FilterDirectory implements ShutdownAwareDire
     }
     this.blockCacheReadEnabled = blockCacheReadEnabled;
     if (!blockCacheReadEnabled) {
-      LOG.info("Block cache on read is disabled");
+      log.info("Block cache on read is disabled");
     }
     this.blockCacheWriteEnabled = blockCacheWriteEnabled;
     if (!blockCacheWriteEnabled) {
-      LOG.info("Block cache on write is disabled");
+      log.info("Block cache on write is disabled");
     }
   }
   
@@ -238,7 +238,7 @@ public class BlockDirectory extends FilterDirectory implements ShutdownAwareDire
   
   @Override
   public void closeOnShutdown() throws IOException {
-    LOG.info("BlockDirectory closing on shutdown");
+    log.info("BlockDirectory closing on shutdown");
     // we are shutting down, no need to clean up cache
     super.close();
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
index 72d48ae..a186715 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsDirectory.java
@@ -41,7 +41,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class HdfsDirectory extends BaseDirectory {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   public static final int DEFAULT_BUFFER_SIZE = 4096;
   
   private static final String LF_EXT = ".lf";
@@ -69,7 +69,7 @@ public class HdfsDirectory extends BaseDirectory {
     if (fileSystem instanceof DistributedFileSystem) {
       // Make sure dfs is not in safe mode
       while (((DistributedFileSystem) fileSystem).setSafeMode(SafeModeAction.SAFEMODE_GET, true)) {
-        LOG.warn("The NameNode is in SafeMode - Solr will wait 5 seconds and try again.");
+        log.warn("The NameNode is in SafeMode - Solr will wait 5 seconds and try again.");
         try {
           Thread.sleep(5000);
         } catch (InterruptedException e) {
@@ -94,7 +94,7 @@ public class HdfsDirectory extends BaseDirectory {
   
   @Override
   public void close() throws IOException {
-    LOG.info("Closing hdfs directory {}", hdfsDirPath);
+    log.info("Closing hdfs directory {}", hdfsDirPath);
     fileSystem.close();
     isOpen = false;
   }
@@ -143,7 +143,7 @@ public class HdfsDirectory extends BaseDirectory {
   @Override
   public void deleteFile(String name) throws IOException {
     Path path = new Path(hdfsDirPath, name);
-    LOG.debug("Deleting {}", path);
+    log.debug("Deleting {}", path);
     getFileSystem().delete(path, false);
   }
   
@@ -197,7 +197,7 @@ public class HdfsDirectory extends BaseDirectory {
   }
   
   public static class HdfsIndexInput extends CustomBufferedIndexInput {
-    private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
     
     private final Path path;
     private final FSDataInputStream inputStream;
@@ -208,7 +208,7 @@ public class HdfsDirectory extends BaseDirectory {
         int bufferSize) throws IOException {
       super(name, bufferSize);
       this.path = path;
-      LOG.debug("Opening normal index input on {}", path);
+      log.debug("Opening normal index input on {}", path);
       FileStatus fileStatus = fileSystem.getFileStatus(path);
       length = fileStatus.getLen();
       inputStream = fileSystem.open(path, bufferSize);
@@ -227,7 +227,7 @@ public class HdfsDirectory extends BaseDirectory {
     
     @Override
     protected void closeInternal() throws IOException {
-      LOG.debug("Closing normal index input on {}", path);
+      log.debug("Closing normal index input on {}", path);
       if (!clone) {
         inputStream.close();
       }
@@ -248,7 +248,7 @@ public class HdfsDirectory extends BaseDirectory {
   
   @Override
   public void sync(Collection<String> names) throws IOException {
-    LOG.debug("Sync called on {}", Arrays.toString(names.toArray()));
+    log.debug("Sync called on {}", Arrays.toString(names.toArray()));
   }
   
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index 29e2db2..687c505 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
  * Metrics specific utility functions.
  */
 public class MetricUtils {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final String METRIC_NAME = "metric";
   public static final String VALUE = "value";
@@ -274,7 +274,7 @@ public class MetricUtils {
         convertGauge(n, gauge, propertyFilter, simple, compact, separator, consumer);
       } catch (InternalError ie) {
         if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
-          LOG.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
+          log.warn("Error converting gauge '" + n + "', possible JDK bug: SOLR-10362", ie);
           consumer.accept(n, null);
         } else {
           throw ie;
@@ -577,7 +577,7 @@ public class MetricUtils {
       try {
         beanInfo = Introspector.getBeanInfo(intf, intf.getSuperclass(), Introspector.IGNORE_ALL_BEANINFO);
       } catch (IntrospectionException e) {
-        LOG.warn("Unable to fetch properties of MXBean " + obj.getClass().getName());
+        log.warn("Unable to fetch properties of MXBean " + obj.getClass().getName());
         return;
       }
       for (final PropertyDescriptor desc : beanInfo.getPropertyDescriptors()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
index 5346265..a82dd7c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DeleteReplicaTest.java
@@ -57,7 +57,7 @@ import static org.apache.solr.common.cloud.Replica.State.DOWN;
 
 public class DeleteReplicaTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @BeforeClass
   public static void setupCluster() throws Exception {
@@ -253,7 +253,7 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
         if (times.incrementAndGet() > 1) {
           return false;
         }
-        LOG.info("Running delete core {}",cd);
+        log.info("Running delete core {}",cd);
 
         try {
           ZkNodeProps m = new ZkNodeProps(
@@ -371,7 +371,7 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
           try {
             cluster.getSolrClient().add(collectionName, new SolrInputDocument("id", String.valueOf(doc++)));
           } catch (Exception e) {
-            LOG.error("Failed on adding document to {}", collectionName, e);
+            log.error("Failed on adding document to {}", collectionName, e);
           }
         }
       });
@@ -389,7 +389,7 @@ public class DeleteReplicaTest extends SolrCloudTestCase {
     try {
       cluster.getSolrClient().waitForState(collectionName, 20, TimeUnit.SECONDS, (liveNodes, collectionState) -> collectionState.getReplicas().size() == 1);
     } catch (TimeoutException e) {
-      LOG.info("Timeout wait for state {}", getCollectionState(collectionName));
+      log.info("Timeout wait for state {}", getCollectionState(collectionName));
       throw e;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/LIROnShardRestartTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LIROnShardRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/LIROnShardRestartTest.java
index 31947be..54742a9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LIROnShardRestartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LIROnShardRestartTest.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
 @Deprecated
 public class LIROnShardRestartTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @BeforeClass
   public static void setupCluster() throws Exception {
@@ -166,7 +166,7 @@ public class LIROnShardRestartTest extends SolrCloudTestCase {
     } catch (Throwable th) {
       String electionPath = "/collections/allReplicasInLIR/leader_elect/shard1/election/";
       List<String> children = zkClient().getChildren(electionPath, null, true);
-      LOG.info("Election queue {}", children);
+      log.info("Election queue {}", children);
       throw th;
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/LIRRollingUpdatesTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LIRRollingUpdatesTest.java b/solr/core/src/test/org/apache/solr/cloud/LIRRollingUpdatesTest.java
index 3ccd32e..336cef3 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LIRRollingUpdatesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LIRRollingUpdatesTest.java
@@ -60,7 +60,7 @@ import org.slf4j.LoggerFactory;
 
 public class LIRRollingUpdatesTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static Map<URI, SocketProxy> proxies;
   private static Map<URI, JettySolrRunner> jettys;
@@ -79,7 +79,7 @@ public class LIRRollingUpdatesTest extends SolrCloudTestCase {
       cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
       cluster.startJettySolrRunner(jetty);
       proxy.open(jetty.getBaseUrl().toURI());
-      LOG.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
       proxies.put(proxy.getUrl(), proxy);
       jettys.put(proxy.getUrl(), jetty);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
index a4a7bc0..01ad51c 100644
--- a/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/LeaderVoteWaitTimeoutTest.java
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
 
 public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static final int NODE_COUNT = 4;
 
   private static Map<JettySolrRunner, SocketProxy> proxies;
@@ -70,7 +70,7 @@ public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase {
       cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
       cluster.startJettySolrRunner(jetty);
       proxy.open(jetty.getBaseUrl().toURI());
-      LOG.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
       proxies.put(jetty, proxy);
       jettys.put(proxy.getUrl(), jetty);
     }
@@ -202,7 +202,7 @@ public class LeaderVoteWaitTimeoutTest extends SolrCloudTestCase {
     } catch (Exception e) {
       List<String> children = zkClient().getChildren("/collections/"+collectionName+"/leader_elect/shard1/election",
           null, true);
-      LOG.info("{} election nodes:{}", collectionName, children);
+      log.info("{} election nodes:{}", collectionName, children);
       throw e;
     }
     cluster.getJettySolrRunner(0).start();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
index aac1b9c..6eea5b8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudConsistency.java
@@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory;
 
 public class TestCloudConsistency extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static Map<JettySolrRunner, SocketProxy> proxies;
   private static Map<URI, JettySolrRunner> jettys;
@@ -70,7 +70,7 @@ public class TestCloudConsistency extends SolrCloudTestCase {
       cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
       cluster.startJettySolrRunner(jetty);
       proxy.open(jetty.getBaseUrl().toURI());
-      LOG.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
       proxies.put(jetty, proxy);
       jettys.put(proxy.getUrl(), jetty);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
index 767fb1b..15625db 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplica.java
@@ -69,7 +69,7 @@ import com.carrotsearch.randomizedtesting.annotations.Repeat;
 @Slow
 public class TestPullReplica extends SolrCloudTestCase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private String collectionName = null;
   private final static int REPLICATION_TIMEOUT_SECS = 10;
@@ -85,7 +85,7 @@ public class TestPullReplica extends SolrCloudTestCase {
         .addConfig("conf", configset("cloud-minimal"))
         .configure();
     Boolean useLegacyCloud = rarely();
-    LOG.info("Using legacyCloud?: {}", useLegacyCloud);
+    log.info("Using legacyCloud?: {}", useLegacyCloud);
     CollectionAdminRequest.ClusterProp clusterPropRequest = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud));
     CollectionAdminResponse response = clusterPropRequest.process(cluster.getSolrClient());
     assertEquals(0, response.getStatus());
@@ -107,14 +107,14 @@ public class TestPullReplica extends SolrCloudTestCase {
   public void tearDown() throws Exception {
     for (JettySolrRunner jetty:cluster.getJettySolrRunners()) {
       if (!jetty.isRunning()) {
-        LOG.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort());
+        log.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort());
         ChaosMonkey.start(jetty);
       }
     }
     if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) {
-      LOG.info("tearDown deleting collection");
+      log.info("tearDown deleting collection");
       CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
-      LOG.info("Collection deleted");
+      log.info("Collection deleted");
       waitForDeletion(collectionName);
     }
     super.tearDown();
@@ -321,18 +321,18 @@ public class TestPullReplica extends SolrCloudTestCase {
     List<Replica.State> statesSeen = new ArrayList<>(3);
     cluster.getSolrClient().registerCollectionStateWatcher(collectionName, (liveNodes, collectionState) -> {
       Replica r = collectionState.getSlice("shard1").getReplica("core_node2");
-      LOG.info("CollectionStateWatcher state change: {}", r);
+      log.info("CollectionStateWatcher state change: {}", r);
       if (r == null) {
         return false;
       }
       statesSeen.add(r.getState());
-      LOG.info("CollectionStateWatcher saw state: {}", r.getState());
+      log.info("CollectionStateWatcher saw state: {}", r.getState());
       return r.getState() == Replica.State.ACTIVE;
     });
     CollectionAdminRequest.addReplicaToShard(collectionName, "shard1", Replica.Type.PULL).process(cluster.getSolrClient());
     waitForState("Replica not added", collectionName, activeReplicaCount(1, 0, 1));
     zkClient().printLayoutToStdOut();
-    LOG.info("Saw states: " + Arrays.toString(statesSeen.toArray()));
+    log.info("Saw states: " + Arrays.toString(statesSeen.toArray()));
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), 3, statesSeen.size());
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.DOWN, statesSeen.get(0));
     assertEquals("Expecting DOWN->RECOVERING->ACTIVE but saw: " + Arrays.toString(statesSeen.toArray()), Replica.State.RECOVERING, statesSeen.get(0));
@@ -557,7 +557,7 @@ public class TestPullReplica extends SolrCloudTestCase {
   private void waitForDeletion(String collection) throws InterruptedException, KeeperException {
     TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     while (cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection(collection)) {
-      LOG.info("Collection not yet deleted");
+      log.info("Collection not yet deleted");
       try {
         Thread.sleep(100);
         if (t.hasTimedOut()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
index eb238f6..2c57d33 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
@@ -58,7 +58,7 @@ public class TestPullReplicaErrorHandling extends SolrCloudTestCase {
   
   private final static int REPLICATION_TIMEOUT_SECS = 10;
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static Map<URI, SocketProxy> proxies;
   private static Map<URI, JettySolrRunner> jettys;
 
@@ -83,7 +83,7 @@ public class TestPullReplicaErrorHandling extends SolrCloudTestCase {
       cluster.stopJettySolrRunner(jetty);//TODO: Can we avoid this restart
       cluster.startJettySolrRunner(jetty);
       proxy.open(jetty.getBaseUrl().toURI());
-      LOG.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
+      log.info("Adding proxy for URL: " + jetty.getBaseUrl() + ". Proxy: " + proxy.getUrl());
       proxies.put(proxy.getUrl(), proxy);
       jettys.put(proxy.getUrl(), jetty);
     }
@@ -124,9 +124,9 @@ public class TestPullReplicaErrorHandling extends SolrCloudTestCase {
   @Override
   public void tearDown() throws Exception {
     if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) {
-      LOG.info("tearDown deleting collection");
+      log.info("tearDown deleting collection");
       CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
-      LOG.info("Collection deleted");
+      log.info("Collection deleted");
       waitForDeletion(collectionName);
     }
     collectionName = null;
@@ -198,7 +198,7 @@ public void testCantConnectToPullReplica() throws Exception {
       }
       assertNumDocs(10, cluster.getSolrClient());
     } finally {
-      LOG.info("Opening leader node");
+      log.info("Opening leader node");
       proxy.reopen();
     }
 //     Back to normal
@@ -304,7 +304,7 @@ public void testCantConnectToPullReplica() throws Exception {
   private void waitForDeletion(String collection) throws InterruptedException, KeeperException {
     TimeOut t = new TimeOut(10, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     while (cluster.getSolrClient().getZkStateReader().getClusterState().hasCollection(collection)) {
-      LOG.info("Collection not yet deleted");
+      log.info("Collection not yet deleted");
       try {
         Thread.sleep(100);
         if (t.hasTimedOut()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
index d23fc8d..6888d88 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestTlogReplica.java
@@ -77,7 +77,7 @@ import org.slf4j.LoggerFactory;
 @Slow
 public class TestTlogReplica extends SolrCloudTestCase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private String collectionName = null;
   private final static int REPLICATION_TIMEOUT_SECS = 10;
@@ -93,7 +93,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
         .addConfig("conf", configset("cloud-minimal-inplace-updates"))
         .configure();
     Boolean useLegacyCloud = rarely();
-    LOG.info("Using legacyCloud?: {}", useLegacyCloud);
+    log.info("Using legacyCloud?: {}", useLegacyCloud);
     CollectionAdminRequest.ClusterProp clusterPropRequest = CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, String.valueOf(useLegacyCloud));
     CollectionAdminResponse response = clusterPropRequest.process(cluster.getSolrClient());
     assertEquals(0, response.getStatus());
@@ -115,12 +115,12 @@ public class TestTlogReplica extends SolrCloudTestCase {
   public void tearDown() throws Exception {
     for (JettySolrRunner jetty:cluster.getJettySolrRunners()) {
       if (!jetty.isRunning()) {
-        LOG.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort());
+        log.warn("Jetty {} not running, probably some bad test. Starting it", jetty.getLocalPort());
         ChaosMonkey.start(jetty);
       }
     }
     if (cluster.getSolrClient().getZkStateReader().getClusterState().getCollectionOrNull(collectionName) != null) {
-      LOG.info("tearDown deleting collection");
+      log.info("tearDown deleting collection");
       CollectionAdminRequest.deleteCollection(collectionName).process(cluster.getSolrClient());
       waitForDeletion(collectionName);
     }
@@ -561,7 +561,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
       if ((Integer)((NamedList<Object>)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) {
         break;
       }
-      LOG.info("Min RF not achieved yet. retrying");
+      log.info("Min RF not achieved yet. retrying");
     }
     checkRTG(3,7, cluster.getJettySolrRunners());
     DirectUpdateHandler2.commitOnClose = false;
@@ -603,7 +603,7 @@ public class TestTlogReplica extends SolrCloudTestCase {
       if ((Integer)((NamedList<Object>)response.get("responseHeader")).get(UpdateRequest.REPFACT) >= 2) {
         break;
       }
-      LOG.info("Min RF not achieved yet. retrying");
+      log.info("Min RF not achieved yet. retrying");
     }
     new UpdateRequest()
         .add(sdoc("id", "9"))

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
index 76bea430..727ff64 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/GenericDistributedQueue.java
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
  * Implementation based on {@link org.apache.solr.cloud.ZkDistributedQueue}
  */
 public class GenericDistributedQueue implements DistributedQueue {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   static final String PREFIX = "qn-";
 
@@ -252,7 +252,7 @@ public class GenericDistributedQueue implements DistributedQueue {
             try {
               stateManager.removeData(ops.get(j).getPath(), -1);
             } catch (NoSuchElementException e2) {
-              LOG.debug("Can not remove node which is not exist : " + ops.get(j).getPath());
+              log.debug("Can not remove node which is not exist : " + ops.get(j).getPath());
             }
           }
         }
@@ -419,7 +419,7 @@ public class GenericDistributedQueue implements DistributedQueue {
         for (String childName : childNames) {
           // Check format
           if (!childName.regionMatches(0, PREFIX, 0, PREFIX.length())) {
-            LOG.debug("Found child node with improper name: " + childName);
+            log.debug("Found child node with improper name: " + childName);
             continue;
           }
           orderedChildren.add(childName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
index 63dd5bf..1f0b6cf 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimCloudManager.java
@@ -105,7 +105,7 @@ import static org.apache.solr.cloud.api.collections.OverseerCollectionMessageHan
  * Simulated {@link SolrCloudManager}.
  */
 public class SimCloudManager implements SolrCloudManager {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final SimDistribStateManager stateManager;
   private final SimClusterStateProvider clusterStateProvider;
@@ -395,7 +395,7 @@ public class SimCloudManager implements SolrCloudManager {
     String nodeId = (String)values.get(ImplicitSnitch.NODE);
     nodeStateProvider.simSetNodeValues(nodeId, values);
     clusterStateProvider.simAddNode(nodeId);
-    LOG.trace("-- added node " + nodeId);
+    log.trace("-- added node " + nodeId);
     // initialize history handler if this is the first node
     if (metricsHistoryHandler == null && liveNodesSet.size() == 1) {
       metricsHandler = new MetricsHandler(metricManager);
@@ -428,7 +428,7 @@ public class SimCloudManager implements SolrCloudManager {
         metricsHandler = null;
       }
     }
-    LOG.trace("-- removed node " + nodeId);
+    log.trace("-- removed node " + nodeId);
   }
 
   /**
@@ -517,7 +517,7 @@ public class SimCloudManager implements SolrCloudManager {
    * @param killNodeId optional nodeId to kill. If null then don't kill any node, just restart the thread
    */
   public void simRestartOverseer(String killNodeId) throws Exception {
-    LOG.info("=== Restarting OverseerTriggerThread and clearing object cache...");
+    log.info("=== Restarting OverseerTriggerThread and clearing object cache...");
     triggerThread.interrupt();
     IOUtils.closeQuietly(triggerThread);
     if (killNodeId != null) {
@@ -648,7 +648,7 @@ public class SimCloudManager implements SolrCloudManager {
     // pay the penalty for remote request, at least 5 ms
     timeSource.sleep(5);
 
-    LOG.trace("--- got SolrRequest: " + req.getMethod() + " " + req.getPath() +
+    log.trace("--- got SolrRequest: " + req.getMethod() + " " + req.getPath() +
         (req.getParams() != null ? " " + req.getParams().toQueryString() : ""));
     if (req.getPath() != null) {
       if (req.getPath().startsWith("/admin/autoscaling") ||
@@ -674,7 +674,7 @@ public class SimCloudManager implements SolrCloudManager {
           ByteArrayOutputStream baos = new ByteArrayOutputStream();
           cw.write(baos);
           String payload = baos.toString("UTF-8");
-          LOG.trace("-- payload: {}", payload);
+          log.trace("-- payload: {}", payload);
           queryRequest.setContentStreams(Collections.singletonList(new ContentStreamBase.StringStream(payload)));
         }
         queryRequest.getContext().put("httpMethod", req.getMethod().toString());
@@ -698,12 +698,12 @@ public class SimCloudManager implements SolrCloudManager {
           }
         }
         if (queryResponse.getException() != null) {
-          LOG.debug("-- exception handling request", queryResponse.getException());
+          log.debug("-- exception handling request", queryResponse.getException());
           throw new IOException(queryResponse.getException());
         }
         SolrResponse rsp = new SolrResponseBase();
         rsp.setResponse(queryResponse.getValues());
-        LOG.trace("-- response: {}", rsp);
+        log.trace("-- response: {}", rsp);
         return rsp;
       }
     }
@@ -736,7 +736,7 @@ public class SimCloudManager implements SolrCloudManager {
       if (action == null) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown action: " + a);
       }
-      LOG.trace("Invoking Collection Action :{} with params {}", action.toLower(), req.getParams().toQueryString());
+      log.trace("Invoking Collection Action :{} with params {}", action.toLower(), req.getParams().toQueryString());
       NamedList results = new NamedList();
       rsp.setResponse(results);
       incrementCount(action.name());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
index 741a868..17b56d7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimClusterStateProvider.java
@@ -113,7 +113,7 @@ import static org.apache.solr.common.params.CommonParams.NAME;
  *   </ul>
  */
 public class SimClusterStateProvider implements ClusterStateProvider {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final long DEFAULT_DOC_SIZE_BYTES = 500;
 
@@ -333,7 +333,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     }
     // pick first
     overseerLeader = liveNodes.iterator().next();
-    LOG.debug("--- new Overseer leader: " + overseerLeader);
+    log.debug("--- new Overseer leader: " + overseerLeader);
     // record it in ZK
     Map<String, Object> id = new HashMap<>();
     id.put("id", cloudManager.getTimeSource().getTimeNs() +
@@ -341,7 +341,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     try {
       cloudManager.getDistribStateManager().makePath(path, Utils.toJSON(id), CreateMode.EPHEMERAL, false);
     } catch (Exception e) {
-      LOG.warn("Exception saving overseer leader id", e);
+      log.warn("Exception saving overseer leader id", e);
     }
   }
 
@@ -513,7 +513,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           "", true, "INDEX.sizeInBytes");
       // at this point nuke our cached DocCollection state
       collectionsStatesRef.set(null);
-      LOG.trace("-- simAddReplica {}", replicaInfo);
+      log.trace("-- simAddReplica {}", replicaInfo);
       if (runLeaderElection) {
         simRunLeaderElection(Collections.singleton(replicaInfo.getCollection()), true);
       }
@@ -552,7 +552,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
             }
             cloudManager.getSimNodeStateProvider().simSetNodeValue(nodeId, ImplicitSnitch.DISK, disk + 1);
           }
-          LOG.trace("-- simRemoveReplica {}", ri);
+          log.trace("-- simRemoveReplica {}", ri);
           simRunLeaderElection(Collections.singleton(ri.getCollection()), true);
           return;
         }
@@ -612,14 +612,14 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       }
       dc.getSlices().forEach(s -> {
         if (s.getLeader() != null) {
-          LOG.debug("-- already has leader {} / {}", dc.getName(), s.getName());
+          log.debug("-- already has leader {} / {}", dc.getName(), s.getName());
           return;
         }
         if (s.getReplicas().isEmpty()) {
-          LOG.debug("-- no replicas in {} / {}", dc.getName(), s.getName());
+          log.debug("-- no replicas in {} / {}", dc.getName(), s.getName());
           return;
         }
-        LOG.debug("-- submit leader election for {} / {}", dc.getName(), s.getName());
+        log.debug("-- submit leader election for {} / {}", dc.getName(), s.getName());
         cloudManager.submit(() -> {
           simRunLeaderElection(dc.getName(), s, saveClusterState);
           return true;
@@ -632,9 +632,9 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     AtomicBoolean stateChanged = new AtomicBoolean(Boolean.FALSE);
     Replica leader = s.getLeader();
     if (leader == null || !liveNodes.contains(leader.getNodeName())) {
-      LOG.debug("Running leader election for {} / {}", collection, s.getName());
+      log.debug("Running leader election for {} / {}", collection, s.getName());
       if (s.getReplicas().isEmpty()) { // no replicas - punt
-        LOG.debug("-- no replicas in {} / {}", collection, s.getName());
+        log.debug("-- no replicas in {} / {}", collection, s.getName());
         return;
       }
       ActionThrottle lt = getThrottle(collection, s.getName());
@@ -651,14 +651,14 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           synchronized (ri) {
             if (r.isActive(liveNodes.get())) {
               if (ri.getVariables().get(ZkStateReader.LEADER_PROP) != null) {
-                LOG.trace("-- found existing leader {} / {}: {}, {}", collection, s.getName(), ri, r);
+                log.trace("-- found existing leader {} / {}: {}, {}", collection, s.getName(), ri, r);
                 alreadyHasLeader.set(true);
                 return;
               } else {
                 active.add(ri);
               }
             } else { // if it's on a node that is not live mark it down
-              LOG.trace("-- replica not active on live nodes: {}, {}", liveNodes.get(), r);
+              log.trace("-- replica not active on live nodes: {}, {}", liveNodes.get(), r);
               if (!liveNodes.contains(r.getNodeName())) {
                 ri.getVariables().put(ZkStateReader.STATE_PROP, Replica.State.DOWN.toString());
                 ri.getVariables().remove(ZkStateReader.LEADER_PROP);
@@ -668,12 +668,12 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           }
         });
         if (alreadyHasLeader.get()) {
-          LOG.debug("-- already has leader {} / {}: {}", collection, s.getName(), s);
+          log.debug("-- already has leader {} / {}: {}", collection, s.getName(), s);
           return;
         }
         if (active.isEmpty()) {
-          LOG.warn("Can't find any active replicas for {} / {}: {}", collection, s.getName(), s);
-          LOG.debug("-- liveNodes: {}", liveNodes.get());
+          log.warn("Can't find any active replicas for {} / {}: {}", collection, s.getName(), s);
+          log.debug("-- liveNodes: {}", liveNodes.get());
           return;
         }
         // pick first active one
@@ -685,7 +685,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           }
         }
         if (ri == null) {
-          LOG.warn("-- can't find any suitable replica type for {} / {}: {}", collection, s.getName(), s);
+          log.warn("-- can't find any suitable replica type for {} / {}: {}", collection, s.getName(), s);
           return;
         }
         // now mark the leader election throttle
@@ -695,10 +695,10 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           ri.getVariables().put(ZkStateReader.LEADER_PROP, "true");
         }
         stateChanged.set(true);
-        LOG.debug("-- elected new leader for " + collection + " / " + s.getName() + ": " + ri.getName());
+        log.debug("-- elected new leader for " + collection + " / " + s.getName() + ": " + ri.getName());
       }
     } else {
-      LOG.debug("-- already has leader for {} / {}", collection, s.getName());
+      log.debug("-- already has leader for {} / {}", collection, s.getName());
     }
     if (stateChanged.get() || saveState) {
       collectionsStatesRef.set(null);
@@ -751,7 +751,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
 
     ZkWriteCommand cmd = new ClusterStateMutator(cloudManager).createCollection(clusterState, props);
     if (cmd.noop) {
-      LOG.warn("Collection {} already exists. exit", collectionName);
+      log.warn("Collection {} already exists. exit", collectionName);
       results.add("success", "no-op");
       return;
     }
@@ -906,7 +906,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
       saveClusterState.set(true);
       results.add("success", "");
     } catch (Exception e) {
-      LOG.warn("Exception", e);
+      log.warn("Exception", e);
     } finally {
       lock.unlock();
     }
@@ -973,7 +973,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
     String newSolrCoreName = Assign.buildSolrCoreName(stateManager, coll, slice.getName(), replica.getType());
     String coreNodeName = Assign.assignCoreNodeName(stateManager, coll);
     ReplicaInfo newReplica = new ReplicaInfo(coreNodeName, newSolrCoreName, collection, slice.getName(), replica.getType(), targetNode, null);
-    LOG.debug("-- new replica: " + newReplica);
+    log.debug("-- new replica: " + newReplica);
     // xxx should run leader election here already?
     simAddReplica(targetNode, newReplica, false);
     // this will trigger leader election
@@ -1276,14 +1276,14 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           // NOTE: we don't use getProperty because it uses PROPERTY_PROP_PREFIX
           Replica leader = s.getLeader();
           if (leader == null) {
-            LOG.debug("-- no leader in " + s);
+            log.debug("-- no leader in " + s);
             continue;
           }
           cloudManager.getMetricManager().registry(createRegistryName(collection, s.getName(), leader)).counter("UPDATE./update.requests").inc();
           ReplicaInfo ri = getReplicaInfo(leader);
           Number numDocs = (Number)ri.getVariable("SEARCHER.searcher.numDocs");
           if (numDocs == null || numDocs.intValue() <= 0) {
-            LOG.debug("-- attempting to delete nonexistent doc " + id + " from " + s.getLeader());
+            log.debug("-- attempting to delete nonexistent doc " + id + " from " + s.getLeader());
             continue;
           }
           modified = true;
@@ -1314,7 +1314,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           for (Slice s : coll.getSlices()) {
             Replica leader = s.getLeader();
             if (leader == null) {
-              LOG.debug("-- no leader in " + s);
+              log.debug("-- no leader in " + s);
               continue;
             }
 
@@ -1348,7 +1348,7 @@ public class SimClusterStateProvider implements ClusterStateProvider {
           Slice s = router.getTargetSlice(id, null, null, req.getParams(), coll);
           Replica leader = s.getLeader();
           if (leader == null) {
-            LOG.debug("-- no leader in " + s);
+            log.debug("-- no leader in " + s);
             continue;
           }
           cloudManager.getMetricManager().registry(createRegistryName(collection, s.getName(), leader)).counter("UPDATE./update.requests").inc();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
index 7a8dbbb..1e99ff2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistribStateManager.java
@@ -68,7 +68,7 @@ import org.slf4j.LoggerFactory;
  * invoked.
  */
 public class SimDistribStateManager implements DistribStateManager {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final class Node {
     ReentrantLock dataLock = new ReentrantLock();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
index e9616f0..7168ce9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimDistributedQueueFactory.java
@@ -51,7 +51,7 @@ import org.slf4j.LoggerFactory;
  * exposed anywhere.
  */
 public class SimDistributedQueueFactory implements DistributedQueueFactory {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   Map<String, SimDistributedQueue> queues = new ConcurrentHashMap<>();
 
@@ -190,7 +190,7 @@ public class SimDistributedQueueFactory implements DistributedQueueFactory {
       try {
         queue.offer(new Pair(String.format(Locale.ROOT, "qn-%010d", seq), data));
         seq++;
-        LOG.trace("=== offer " + System.nanoTime());
+        log.trace("=== offer " + System.nanoTime());
         changed.signalAll();
       } finally {
         updateLock.unlock();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
index 7a346ea..9673fa7 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/SimNodeStateProvider.java
@@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory;
  * to setup core-level metrics use {@link SimClusterStateProvider#simSetCollectionValue(String, String, Object, boolean, boolean)}.
  */
 public class SimNodeStateProvider implements NodeStateProvider {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String, Map<String, Object>> nodeValues = new ConcurrentHashMap<>();
   private final SimClusterStateProvider clusterStateProvider;
@@ -164,7 +164,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
    * @param node node id
    */
   public void simRemoveNodeValues(String node) throws InterruptedException {
-    LOG.debug("--removing value for " + node);
+    log.debug("--removing value for " + node);
     lock.lockInterruptibly();
     try {
       Map<String, Object> values = nodeValues.remove(node);
@@ -187,7 +187,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
     try {
       AtomicBoolean updateRoles = new AtomicBoolean(false);
       myNodes.forEach(n -> {
-        LOG.debug("- removing dead node values: " + n);
+        log.debug("- removing dead node values: " + n);
         Map<String, Object> vals = nodeValues.remove(n);
         if (vals.containsKey("nodeRole")) {
           updateRoles.set(true);
@@ -253,7 +253,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
     for (String tag : tags) {
       String[] parts = tag.split(":");
       if (parts.length < 3 || !parts[0].equals("metrics")) {
-        LOG.warn("Invalid metrics: tag: " + tag);
+        log.warn("Invalid metrics: tag: " + tag);
         continue;
       }
       if (!parts[1].startsWith("solr.core.")) {
@@ -263,7 +263,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
       Matcher m = REGISTRY_PATTERN.matcher(parts[1]);
 
       if (!m.matches()) {
-        LOG.warn("Invalid registry name: " + parts[1]);
+        log.warn("Invalid registry name: " + parts[1]);
         continue;
       }
       String collection = m.group(1);
@@ -291,7 +291,7 @@ public class SimNodeStateProvider implements NodeStateProvider {
 
   @Override
   public Map<String, Object> getNodeValues(String node, Collection<String> tags) {
-    LOG.trace("-- requested values for " + node + ": " + tags);
+    log.trace("-- requested values for " + node + ": " + tags);
     if (!liveNodesSet.contains(node)) {
       throw new RuntimeException("non-live node " + node);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
index e395985..2cdc456 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestClusterStateProvider.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
  * This test compares the cluster state of a real cluster and a simulated one.
  */
 public class TestClusterStateProvider extends SolrCloudTestCase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static int NODE_COUNT = 3;
   private static boolean simulated;
@@ -72,7 +72,7 @@ public class TestClusterStateProvider extends SolrCloudTestCase {
   @BeforeClass
   public static void setupCluster() throws Exception {
     simulated = random().nextBoolean();
-    LOG.info("####### Using simulated components? " + simulated);
+    log.info("####### Using simulated components? " + simulated);
 
     configureCluster(NODE_COUNT)
         .addConfig("conf", configset("cloud-minimal"))

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestDistribStateManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestDistribStateManager.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestDistribStateManager.java
index 14f7416..74d9bb1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestDistribStateManager.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestDistribStateManager.java
@@ -47,7 +47,7 @@ import org.slf4j.LoggerFactory;
  * This test compares a ZK-based {@link DistribStateManager} to the simulated one.
  */
 public class TestDistribStateManager extends SolrTestCaseJ4 {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private DistribStateManager stateManager;
   private ZkTestServer zkTestServer;
@@ -80,7 +80,7 @@ public class TestDistribStateManager extends SolrTestCaseJ4 {
       solrZkClient = new SolrZkClient(zkTestServer.getZkHost(), 30000);
       stateManager = new ZkDistribStateManager(solrZkClient);
     }
-    LOG.info("Using " + stateManager.getClass().getName());
+    log.info("Using " + stateManager.getClass().getName());
   }
 
   private DistribStateManager createDistribStateManager() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java
index 652f3f0..362940a 100644
--- a/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/search/CurrencyRangeFacetCloudTest.java
@@ -42,7 +42,7 @@ import org.junit.Test;
 
 public class CurrencyRangeFacetCloudTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String COLLECTION = MethodHandles.lookup().lookupClass().getName();
   private static final String CONF = COLLECTION + "_configSet";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
index 05c25cf..846977c 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
@@ -53,7 +53,7 @@ import org.junit.BeforeClass;
  */
 public class RangeFacetCloudTest extends SolrCloudTestCase {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String COLLECTION = MethodHandles.lookup().lookupClass().getName();
   private static final String CONF = COLLECTION + "_configSet";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
index fa0505e..ccd02eb 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/AutoScalingConfig.java
@@ -43,7 +43,7 @@ import static java.util.stream.Collectors.toList;
  * lazily.
  */
 public class AutoScalingConfig implements MapWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String, Object> jsonMap;
   private final boolean empty;
@@ -81,7 +81,7 @@ public class AutoScalingConfig implements MapWriter {
           TriggerEventProcessorStage stage = TriggerEventProcessorStage.valueOf(String.valueOf(stageName).toUpperCase(Locale.ROOT));
           stages.add(stage);
         } catch (Exception e) {
-          LOG.warn("Invalid stage name '" + name + "' in listener config, skipping: " + properties);
+          log.warn("Invalid stage name '" + name + "' in listener config, skipping: " + properties);
         }
       }
       listenerClass = (String)this.properties.get(AutoScalingParams.CLASS);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
index 525728e..210e324 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Policy.java
@@ -69,7 +69,7 @@ import static org.apache.solr.client.solrj.cloud.autoscaling.Variable.Type.WITH_
  *
  */
 public class Policy implements MapWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final String POLICY = "policy";
   public static final String EACH = "#EACH";
@@ -281,7 +281,7 @@ public class Policy implements MapWriter {
             return p.compare(r1, r2, false);
           });
         } catch (Exception e) {
-          LOG.error("Exception! prefs = {}, recent r1 = {}, r2 = {}, matrix = {}",
+          log.error("Exception! prefs = {}, recent r1 = {}, r2 = {}, matrix = {}",
               clusterPreferences,
               lastComparison[0],
               lastComparison[1],
@@ -498,9 +498,9 @@ public class Policy implements MapWriter {
       this.nodeStateProvider = cloudManager.getNodeStateProvider();
       try {
         state = cloudManager.getClusterStateProvider().getClusterState();
-        LOG.trace("-- session created with cluster state: {}", state);
+        log.trace("-- session created with cluster state: {}", state);
       } catch (Exception e) {
-        LOG.trace("-- session created, can't obtain cluster state", e);
+        log.trace("-- session created, can't obtain cluster state", e);
       }
       this.znodeVersion = state != null ? state.getZNodeVersion() : -1;
       this.nodes = new ArrayList<>(cloudManager.getClusterStateProvider().getLiveNodes());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
index e08f85f..d538b18 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClientUtil.java
@@ -73,7 +73,7 @@ import org.slf4j.LoggerFactory;
  */
 public class HttpClientUtil {
   
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final int DEFAULT_CONNECT_TIMEOUT = 60000;
   public static final int DEFAULT_SO_TIMEOUT = 600000;
@@ -147,7 +147,7 @@ public class HttpClientUtil {
     // Configure the HttpClientBuilder if user has specified the factory type.
     String factoryClassName = System.getProperty(SYS_PROP_HTTP_CLIENT_BUILDER_FACTORY);
     if (factoryClassName != null) {
-      logger.debug ("Using " + factoryClassName);
+      log.debug ("Using " + factoryClassName);
       try {
         HttpClientBuilderFactory factory = (HttpClientBuilderFactory)Class.forName(factoryClassName).newInstance();
         httpClientBuilder = factory.getHttpClientBuilder(Optional.of(SolrHttpClientBuilder.create()));
@@ -176,7 +176,7 @@ public class HttpClientUtil {
           try {
             interceptor.process(request, context);
           } catch (Exception e) {
-            logger.error("", e);
+            log.error("", e);
           }
         }
       });
@@ -234,7 +234,7 @@ public class HttpClientUtil {
       } else {
         sslConnectionSocketFactory = new SSLConnectionSocketFactory(SSLContexts.createSystemDefault(),
                                                                     NoopHostnameVerifier.INSTANCE);
-        logger.debug(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME + "is false, hostname checks disabled.");
+        log.debug(HttpClientUtil.SYS_PROP_CHECK_PEER_NAME + "is false, hostname checks disabled.");
       }
       builder.register("https", sslConnectionSocketFactory);
 
@@ -268,8 +268,8 @@ public class HttpClientUtil {
 
   public static CloseableHttpClient createClient(final SolrParams params, PoolingHttpClientConnectionManager cm, boolean sharedConnectionManager, HttpRequestExecutor httpRequestExecutor)  {
     final ModifiableSolrParams config = new ModifiableSolrParams(params);
-    if (logger.isDebugEnabled()) {
-      logger.debug("Creating new http client, config:" + config);
+    if (log.isDebugEnabled()) {
+      log.debug("Creating new http client, config:" + config);
     }
 
     cm.setMaxTotal(params.getInt(HttpClientUtil.PROP_MAX_CONNECTIONS, 10000));
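Both idioms for keeping debug logging cheap appear in this patch: the explicit isDebugEnabled() guard shown in the createClient() hunk above, and SLF4J's parameterized form used elsewhere (e.g. in UpdateStream). A minimal side-by-side sketch, reusing the config variable from the hunk above purely for illustration:

    // Guarded concatenation, as in createClient(...):
    if (log.isDebugEnabled()) {
      log.debug("Creating new http client, config:" + config);
    }

    // Parameterized form; SLF4J only formats the message when debug logging is enabled:
    log.debug("Creating new http client, config:{}", config);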

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
index 1bcf96b..afa2ef6 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/Krb5HttpClientBuilder.java
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
 public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
   
   public static final String LOGIN_CONFIG_PROP = "java.security.auth.login.config";
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static Configuration jaasConfig = new SolrJaasConfiguration();
 
@@ -85,7 +85,7 @@ public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
       String configValue = System.getProperty(LOGIN_CONFIG_PROP);
 
       if (configValue != null) {
-        logger.info("Setting up SPNego auth with config: " + configValue);
+        log.info("Setting up SPNego auth with config: " + configValue);
         final String useSubjectCredsProp = "javax.security.auth.useSubjectCredsOnly";
         String useSubjectCredsVal = System.getProperty(useSubjectCredsProp);
 
@@ -97,7 +97,7 @@ public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
         else if (!useSubjectCredsVal.toLowerCase(Locale.ROOT).equals("false")) {
           // Don't overwrite the prop value if it's already been written to something else,
           // but log because it is likely the Credentials won't be loaded correctly.
-          logger.warn("System Property: " + useSubjectCredsProp + " set to: " + useSubjectCredsVal
+          log.warn("System Property: " + useSubjectCredsProp + " set to: " + useSubjectCredsVal
               + " not false.  SPNego authentication may not be successful.");
         }
 
@@ -139,7 +139,7 @@ public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
         HttpClientUtil.addRequestInterceptor(bufferedEntityInterceptor);
       }
     } else {
-      logger.warn("{} is configured without specifying system property '{}'",
+      log.warn("{} is configured without specifying system property '{}'",
           getClass().getName(), LOGIN_CONFIG_PROP);
     }
 
@@ -176,11 +176,11 @@ public class Krb5HttpClientBuilder implements HttpClientBuilderFactory {
     public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
       if (baseConfig == null) return null;
 
-      logger.debug("Login prop: "+System.getProperty(LOGIN_CONFIG_PROP));
+      log.debug("Login prop: "+System.getProperty(LOGIN_CONFIG_PROP));
 
       String clientAppName = System.getProperty("solr.kerberos.jaas.appname", "Client");
       if (initiateAppNames.contains(appName)) {
-        logger.debug("Using AppConfigurationEntry for appName '"+clientAppName+"' instead of: " + appName);
+        log.debug("Using AppConfigurationEntry for appName '"+clientAppName+"' instead of: " + appName);
         return baseConfig.getAppConfigurationEntry(clientAppName);
       }
       return baseConfig.getAppConfigurationEntry(appName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java
index dc229bf..b29ea09 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/CommitStream.java
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
  */
 public class CommitStream extends TupleStream implements Expressible {
   private static final long serialVersionUID = 1L;
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   // Part of expression / passed in
   private String collection;
@@ -252,7 +252,7 @@ public class CommitStream extends TupleStream implements Expressible {
     try {
       clientCache.getCloudSolrClient(zkHost).commit(collection, waitFlush, waitSearcher, softCommit);
     } catch (SolrServerException | IOException e) {
-      LOG.warn(String.format(Locale.ROOT, "Unable to commit documents to collection '%s' due to unexpected error.", collection), e);
+      log.warn(String.format(Locale.ROOT, "Unable to commit documents to collection '%s' due to unexpected error.", collection), e);
       String className = e.getClass().getName();
       String message = e.getMessage();
       throw new IOException(String.format(Locale.ROOT,"Unexpected error when committing documents to collection %s- %s:%s", collection, className, message));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
index a4f528b..9d02ec2 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/DaemonStream.java
@@ -60,7 +60,7 @@ public class DaemonStream extends TupleStream implements Expressible {
   private Map<String, DaemonStream> daemons;
   private boolean terminate;
   private boolean closed = false;
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public DaemonStream(StreamExpression expression, StreamFactory factory) throws IOException{
 
@@ -329,14 +329,14 @@ public class DaemonStream extends TupleStream implements Expressible {
             }
           } catch (IOException e) {
             exception = e;
-            logger.error("Error in DaemonStream:" + id, e);
+            log.error("Error in DaemonStream:" + id, e);
             ++errors;
             if (errors > 100) {
-              logger.error("Too many consectutive errors. Stopping DaemonStream:" + id);
+              log.error("Too many consectutive errors. Stopping DaemonStream:" + id);
               break OUTER;
             }
           } catch (Throwable t) {
-            logger.error("Fatal Error in DaemonStream:" + id, t);
+            log.error("Fatal Error in DaemonStream:" + id, t);
             //For anything other then IOException break out of the loop and shutdown the thread.
             break OUTER;
           } finally {
@@ -345,7 +345,7 @@ public class DaemonStream extends TupleStream implements Expressible {
             } catch (IOException e1) {
               if (exception == null) {
                 exception = e1;
-                logger.error("Error in DaemonStream:" + id, e1);
+                log.error("Error in DaemonStream:" + id, e1);
                 break OUTER;
               }
             }
@@ -357,7 +357,7 @@ public class DaemonStream extends TupleStream implements Expressible {
           try {
             Thread.sleep(sleepMillis);
           } catch (InterruptedException e) {
-            logger.error("Error in DaemonStream:" + id, e);
+            log.error("Error in DaemonStream:" + id, e);
             break OUTER;
           }
         }
@@ -370,7 +370,7 @@ public class DaemonStream extends TupleStream implements Expressible {
         try {
           queue.put(tuple);
         } catch (InterruptedException e) {
-          logger.error("Error in DaemonStream:"+id, e);
+          log.error("Error in DaemonStream:"+id, e);
         }
       }
       setStopTime(new Date().getTime());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
index 2a1b491..bee208c 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ExecutorStream.java
@@ -55,7 +55,7 @@ import static org.apache.solr.common.params.CommonParams.ID;
 
 public class ExecutorStream extends TupleStream implements Expressible {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private TupleStream stream;
 
@@ -148,7 +148,7 @@ public class ExecutorStream extends TupleStream implements Expressible {
     try {
       executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
     } catch(InterruptedException e) {
-      logger.error("Interrupted while waiting for termination", e);
+      log.error("Interrupted while waiting for termination", e);
     }
   }
 
@@ -214,12 +214,12 @@ public class ExecutorStream extends TupleStream implements Expressible {
           }
         }
       } catch (Exception e) {
-        logger.error("Executor Error: id="+id+" expr_s="+expr, e);
+        log.error("Executor Error: id="+id+" expr_s="+expr, e);
       } finally {
         try {
           stream.close();
         } catch (Exception e1) {
-          logger.error("Executor Error", e1);
+          log.error("Executor Error", e1);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
index a08f838..c00de10 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/UpdateStream.java
@@ -50,7 +50,7 @@ import static org.apache.solr.common.params.CommonParams.VERSION_FIELD;
  * @since 6.0.0
  */
 public class UpdateStream extends TupleStream implements Expressible {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static String BATCH_INDEXED_FIELD_NAME = "batchIndexed"; // field name in summary tuple for #docs updated in batch
   private String collection;
@@ -281,7 +281,7 @@ public class UpdateStream extends TupleStream implements Expressible {
         }
       }
     }
-    LOG.debug("Tuple [{}] was converted into SolrInputDocument [{}].", tuple, doc);
+    log.debug("Tuple [{}] was converted into SolrInputDocument [{}].", tuple, doc);
     
     return doc;
   }
@@ -300,7 +300,7 @@ public class UpdateStream extends TupleStream implements Expressible {
     try {
       cloudSolrClient.add(collection, documentBatch);
     } catch (SolrServerException | IOException e) {
-      LOG.warn("Unable to add documents to collection due to unexpected error.", e);
+      log.warn("Unable to add documents to collection due to unexpected error.", e);
       String className = e.getClass().getName();
       String message = e.getMessage();
       throw new IOException(String.format(Locale.ROOT,"Unexpected error when adding documents to collection %s- %s:%s", collection, className, message));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
index 015793c..bf4a1ed 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
@@ -36,7 +36,7 @@ import java.util.regex.Pattern;
  */
 public class ZkConfigManager {
 
-  private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   /** ZkNode where named configs are stored */
   public static final String CONFIGS_ZKNODE = "/configs";
@@ -139,7 +139,7 @@ public class ZkConfigManager {
         List<String> children = zkClient.getChildren(fromZkPath + "/" + file, null, true);
         if (children.size() == 0) {
           final String toZkFilePath = toZkPath + "/" + file;
-          logger.info("Copying zk node {} to {}",
+          log.info("Copying zk node {} to {}",
               fromZkPath + "/" + file, toZkFilePath);
           byte[] data = zkClient.getData(fromZkPath + "/" + file, null, null, true);
           zkClient.makePath(toZkFilePath, data, true);


[05/15] lucene-solr:jira/http2: SOLR-12690: Regularize LoggerFactory declarations

Posted by da...@apache.org.
SOLR-12690: Regularize LoggerFactory declarations


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/8cde1277
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/8cde1277
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/8cde1277

Branch: refs/heads/jira/http2
Commit: 8cde1277ec7151bd6ab62950ac93cbdd6ff04d9f
Parents: 5eab1c3
Author: Erick Erickson <Er...@gmail.com>
Authored: Wed Aug 22 15:26:37 2018 -0700
Committer: Erick Erickson <Er...@gmail.com>
Committed: Wed Aug 22 15:26:37 2018 -0700

----------------------------------------------------------------------
 .../directory/DirectoryTaxonomyReader.java      |   6 +-
 .../src/groovy/check-source-patterns.groovy     |   9 +-
 solr/CHANGES.txt                                |   2 +
 .../handler/dataimport/MailEntityProcessor.java |  74 ++++-----
 .../handler/dataimport/BinURLDataSource.java    |  10 +-
 .../handler/dataimport/DataImportHandler.java   |   6 +-
 .../solr/handler/dataimport/DataImporter.java   |  24 +--
 .../dataimport/DateFormatTransformer.java       |   4 +-
 .../solr/handler/dataimport/DocBuilder.java     |  34 ++--
 .../dataimport/FieldReaderDataSource.java       |   6 +-
 .../dataimport/FieldStreamDataSource.java       |   4 +-
 .../solr/handler/dataimport/FileDataSource.java |   8 +-
 .../solr/handler/dataimport/JdbcDataSource.java |  20 +--
 .../solr/handler/dataimport/LogTransformer.java |  22 +--
 .../handler/dataimport/RegexTransformer.java    |   4 +-
 .../handler/dataimport/SolrEntityProcessor.java |   6 +-
 .../handler/dataimport/SqlEntityProcessor.java  |   8 +-
 .../handler/dataimport/TemplateTransformer.java |   4 +-
 .../solr/handler/dataimport/URLDataSource.java  |  10 +-
 .../dataimport/XPathEntityProcessor.java        |  24 +--
 .../handler/dataimport/XPathRecordReader.java   |   6 +-
 .../dataimport/config/DIHConfiguration.java     |   6 +-
 .../TestSolrEntityProcessorEndToEnd.java        |  18 +-
 .../client/solrj/embedded/JettySolrRunner.java  |   6 +-
 .../java/org/apache/solr/cloud/LockTree.java    |   6 +-
 .../apache/solr/cloud/OverseerTaskQueue.java    |  14 +-
 .../org/apache/solr/cloud/RecoveryStrategy.java | 164 +++++++++----------
 .../apache/solr/cloud/ReplicateFromLeader.java  |   6 +-
 .../org/apache/solr/cloud/SolrZkServer.java     |   4 +-
 .../apache/solr/cloud/ZkDistributedQueue.java   |   6 +-
 .../cloud/autoscaling/HttpTriggerListener.java  |   4 +-
 .../solr/cloud/autoscaling/LoggingListener.java |   4 +-
 .../solr/cloud/autoscaling/TriggerBase.java     |   8 +-
 .../cloud/autoscaling/TriggerEventQueue.java    |  16 +-
 .../apache/solr/core/HdfsDirectoryFactory.java  |  50 +++---
 .../solr/core/IndexDeletionPolicyWrapper.java   |   4 +-
 .../repository/BackupRepositoryFactory.java     |   6 +-
 .../org/apache/solr/handler/IndexFetcher.java   | 142 ++++++++--------
 .../apache/solr/handler/ReplicationHandler.java |  68 ++++----
 .../org/apache/solr/handler/SnapShooter.java    |  16 +-
 .../admin/AutoscalingHistoryHandler.java        |   4 +-
 .../solr/handler/component/QueryComponent.java  |   4 +-
 .../handler/component/SpellCheckComponent.java  |  24 +--
 .../handler/component/SuggestComponent.java     |  26 +--
 .../metrics/reporters/ReporterClientCache.java  |   6 +-
 .../reporters/jmx/JmxMetricsReporter.java       |  58 +++----
 .../reporters/solr/SolrShardReporter.java       |   2 +-
 .../org/apache/solr/request/SimpleFacets.java   |   4 +-
 .../solr/response/BinaryResponseWriter.java     |   4 +-
 .../solr/schema/JsonPreAnalyzedParser.java      |  12 +-
 .../apache/solr/schema/PreAnalyzedField.java    |  10 +-
 .../solr/search/SurroundQParserPlugin.java      |   2 +-
 .../search/stats/ExactSharedStatsCache.java     |   6 +-
 .../solr/search/stats/ExactStatsCache.java      |  28 ++--
 .../apache/solr/search/stats/LRUStatsCache.java |  10 +-
 .../solr/search/stats/LocalStatsCache.java      |  18 +-
 .../org/apache/solr/search/stats/StatsUtil.java |  22 +--
 .../solr/servlet/CheckLoggingConfiguration.java |   4 +-
 .../solr/spelling/DirectSolrSpellChecker.java   |   6 +-
 .../solr/spelling/SpellCheckCollator.java       |  10 +-
 .../solr/spelling/suggest/SolrSuggester.java    |  34 ++--
 .../apache/solr/spelling/suggest/Suggester.java |  26 +--
 .../suggest/jaspell/JaspellLookupFactory.java   |   4 +-
 .../solr/store/blockcache/BlockDirectory.java   |   8 +-
 .../apache/solr/store/hdfs/HdfsDirectory.java   |  16 +-
 .../org/apache/solr/util/stats/MetricUtils.java |   6 +-
 .../apache/solr/cloud/DeleteReplicaTest.java    |   8 +-
 .../solr/cloud/LIROnShardRestartTest.java       |   4 +-
 .../solr/cloud/LIRRollingUpdatesTest.java       |   4 +-
 .../solr/cloud/LeaderVoteWaitTimeoutTest.java   |   6 +-
 .../apache/solr/cloud/TestCloudConsistency.java |   4 +-
 .../org/apache/solr/cloud/TestPullReplica.java  |  18 +-
 .../cloud/TestPullReplicaErrorHandling.java     |  12 +-
 .../org/apache/solr/cloud/TestTlogReplica.java  |  12 +-
 .../sim/GenericDistributedQueue.java            |   6 +-
 .../cloud/autoscaling/sim/SimCloudManager.java  |  18 +-
 .../sim/SimClusterStateProvider.java            |  50 +++---
 .../autoscaling/sim/SimDistribStateManager.java |   2 +-
 .../sim/SimDistributedQueueFactory.java         |   4 +-
 .../autoscaling/sim/SimNodeStateProvider.java   |  12 +-
 .../sim/TestClusterStateProvider.java           |   4 +-
 .../sim/TestDistribStateManager.java            |   4 +-
 .../search/CurrencyRangeFacetCloudTest.java     |   2 +-
 .../solr/search/facet/RangeFacetCloudTest.java  |   2 +-
 .../cloud/autoscaling/AutoScalingConfig.java    |   4 +-
 .../client/solrj/cloud/autoscaling/Policy.java  |   8 +-
 .../solr/client/solrj/impl/HttpClientUtil.java  |  12 +-
 .../solrj/impl/Krb5HttpClientBuilder.java       |  12 +-
 .../client/solrj/io/stream/CommitStream.java    |   4 +-
 .../client/solrj/io/stream/DaemonStream.java    |  14 +-
 .../client/solrj/io/stream/ExecutorStream.java  |   8 +-
 .../client/solrj/io/stream/UpdateStream.java    |   6 +-
 .../solr/common/cloud/ZkConfigManager.java      |   4 +-
 .../apache/solr/common/cloud/ZkStateReader.java | 140 ++++++++--------
 .../org/apache/solr/common/util/IOUtils.java    |   4 +-
 95 files changed, 791 insertions(+), 790 deletions(-)
----------------------------------------------------------------------
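
The SLF4J declarations touched by this commit all converge on a single form: a logger that is private, static, final, named log, and initialized via MethodHandles so the line can be copied between classes unchanged. A minimal sketch of the regularized declaration (the package and class names below are illustrative, not taken from the patch):

    package org.apache.solr.example;                  // hypothetical package for illustration

    import java.lang.invoke.MethodHandles;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class ExampleComponent {                   // hypothetical class for illustration
      // Regularized form: the field is always named "log" and the owning class is resolved
      // via MethodHandles.lookup().lookupClass(), so no hard-coded class name can go stale.
      private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      public void doWork() {
        log.debug("doing work");
      }
    }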


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
----------------------------------------------------------------------
diff --git a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
index cde56e1..2e4270b 100644
--- a/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
+++ b/lucene/facet/src/java/org/apache/lucene/facet/taxonomy/directory/DirectoryTaxonomyReader.java
@@ -60,7 +60,7 @@ import org.apache.lucene.util.RamUsageEstimator;
  */
 public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountable {
 
-  private static final Logger logger = Logger.getLogger(DirectoryTaxonomyReader.class.getName());
+  private static final Logger log = Logger.getLogger(DirectoryTaxonomyReader.class.getName());
 
   private static final int DEFAULT_CACHE_VALUE = 4000;
 
@@ -421,8 +421,8 @@ public class DirectoryTaxonomyReader extends TaxonomyReader implements Accountab
         }
         sb.append(i +": "+category.toString()+"\n");
       } catch (IOException e) {
-        if (logger.isLoggable(Level.FINEST)) {
-          logger.log(Level.FINEST, e.getMessage(), e);
+        if (log.isLoggable(Level.FINEST)) {
+          log.log(Level.FINEST, e.getMessage(), e);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/lucene/tools/src/groovy/check-source-patterns.groovy
----------------------------------------------------------------------
diff --git a/lucene/tools/src/groovy/check-source-patterns.groovy b/lucene/tools/src/groovy/check-source-patterns.groovy
index d7af361..7fa4aa4 100644
--- a/lucene/tools/src/groovy/check-source-patterns.groovy
+++ b/lucene/tools/src/groovy/check-source-patterns.groovy
@@ -66,7 +66,7 @@ def lineSplitter = ~$/[\r\n]+/$;
 def singleLineSplitter = ~$/\n\r?/$;
 def licenseMatcher = Defaults.createDefaultMatcher();
 def validLoggerPattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+\p{javaJavaIdentifierStart}+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
-def validLoggerNamePattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+(log|LOG)+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
+def validLoggerNamePattern = ~$/(?s)\b(private\s|static\s|final\s){3}+\s*Logger\s+log+\s+=\s+\QLoggerFactory.getLogger(MethodHandles.lookup().lookupClass());\E/$;
 def packagePattern = ~$/(?m)^\s*package\s+org\.apache.*;/$;
 def xmlTagPattern = ~$/(?m)\s*<[a-zA-Z].*/$;
 def sourceHeaderPattern = ~$/\[source\b.*/$;
@@ -170,11 +170,8 @@ ant.fileScanner{
       if (!validLoggerPattern.matcher(text).find()) {
         reportViolation(f, 'invalid logging pattern [not private static final, uses static class name]');
       }
-      if (f.toString().contains('solr/contrib') && !validLoggerNamePattern.matcher(text).find()) {
-        reportViolation(f, 'invalid logger name [not log or LOG]');
-      }
-      if (f.toString().contains('solr/core') && !validLoggerNamePattern.matcher(text).find()) {
-        reportViolation(f, 'invalid logger name [not log or LOG]');
+      if (!validLoggerNamePattern.matcher(text).find()) {
+        reportViolation(f, 'invalid logger name [log, uses static class name, not specialized logger]')
       }
     }
     checkLicenseHeaderPrecedes(f, 'package', packagePattern, javaCommentPattern, text, ratDocument);
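
With the two path-specific checks collapsed into one, the logger-name validation is no longer limited to solr/contrib and solr/core, and only the lowercase name passes. A quick sketch of what the tightened validLoggerNamePattern accepts and rejects (hypothetical declarations, shown only to illustrate the check):

    // Passes the updated check:
    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    // Reported as "invalid logger name [log, uses static class name, not specialized logger]":
    private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());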

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 1303062..a902422 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -333,6 +333,8 @@ Other Changes
 
 * SOLR-12625: Combine SolrDocumentFetcher and RetrieveFieldsOptimizer (Erick Erickson)
 
+* SOLR-12690: Regularize LoggerFactory declarations (Erick Erickson)
+
 ==================  7.4.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
index f7ad22f..54f5984 100644
--- a/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
+++ b/solr/contrib/dataimporthandler-extras/src/java/org/apache/solr/handler/dataimport/MailEntityProcessor.java
@@ -58,7 +58,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ROOT);
   private static final SimpleDateFormat afterFmt = 
       new SimpleDateFormat("yyyy/MM/dd", Locale.ROOT);
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static interface CustomFilter {
     public SearchTerm getCustomSearch(Folder folder);
@@ -112,7 +112,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
     String varName = ConfigNameConstants.IMPORTER_NS_SHORT + "." + cname + "."
         + DocBuilder.LAST_INDEX_TIME;
     Object varValue = context.getVariableResolver().resolve(varName);
-    LOG.info(varName+"="+varValue);    
+    log.info(varName+"="+varValue);
     
     if (varValue != null && !"".equals(varValue) && 
         !"".equals(getStringFromContext("fetchMailsSince", ""))) {
@@ -123,21 +123,21 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         tmp = sinceDateParser.parse((String)varValue);
         if (tmp.getTime() == 0) {
-          LOG.info("Ignoring initial value "+varValue+" for "+varName+
+          log.info("Ignoring initial value "+varValue+" for "+varName+
               " in favor of fetchMailsSince config parameter");
           tmp = null; // don't use this value
         }
       } catch (ParseException e) {
         // probably ok to ignore this since we have other options below
         // as we're just trying to figure out if the date is 0
-        LOG.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e);
+        log.warn("Failed to parse "+varValue+" from "+varName+" due to: "+e);
       }    
       
       if (tmp == null) {
         // favor fetchMailsSince in this case because the value from
         // dataimport.properties is the default/init value
         varValue = getStringFromContext("fetchMailsSince", "");
-        LOG.info("fetchMailsSince="+varValue);            
+        log.info("fetchMailsSince="+varValue);
       }
     }
     
@@ -145,7 +145,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       varName = ConfigNameConstants.IMPORTER_NS_SHORT + "."
           + DocBuilder.LAST_INDEX_TIME;
       varValue = context.getVariableResolver().resolve(varName);
-      LOG.info(varName+"="+varValue);
+      log.info(varName+"="+varValue);
     }
       
     if (varValue != null && varValue instanceof String) {
@@ -157,13 +157,13 @@ public class MailEntityProcessor extends EntityProcessorBase {
     if (lastIndexTime == null) 
       lastIndexTime = getStringFromContext("fetchMailsSince", "");
 
-    LOG.info("Using lastIndexTime "+lastIndexTime+" for mail import");
+    log.info("Using lastIndexTime "+lastIndexTime+" for mail import");
     
     this.fetchMailsSince = null;
     if (lastIndexTime != null && lastIndexTime.length() > 0) {
       try {
         fetchMailsSince = sinceDateParser.parse(lastIndexTime);
-        LOG.info("Parsed fetchMailsSince=" + lastIndexTime);
+        log.info("Parsed fetchMailsSince=" + lastIndexTime);
       } catch (ParseException e) {
         throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
             "Invalid value for fetchMailSince: " + lastIndexTime, e);
@@ -247,7 +247,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       addPartToDocument(mail, row, true);
       return row;
     } catch (Exception e) {
-      LOG.error("Failed to convert message [" + mail.toString()
+      log.error("Failed to convert message [" + mail.toString()
           + "] to document due to: " + e, e);
       return null;
     }
@@ -269,7 +269,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
         for (int i = 0; i < count; i++)
           addPartToDocument(mp.getBodyPart(i), row, false);
       } else {
-        LOG.warn("Multipart content is a not an instance of Multipart! Content is: "
+        log.warn("Multipart content is a not an instance of Multipart! Content is: "
             + (content != null ? content.getClass().getName() : "null")
             + ". Typically, this is due to the Java Activation JAR being loaded by the wrong classloader.");
       }
@@ -374,7 +374,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
       
       if (("imap".equals(protocol) || "imaps".equals(protocol))
           && "imap.gmail.com".equals(host)) {
-        LOG.info("Consider using 'gimaps' protocol instead of '" + protocol
+        log.info("Consider using 'gimaps' protocol instead of '" + protocol
             + "' for enabling GMail specific extensions for " + host);
       }
       
@@ -399,14 +399,14 @@ public class MailEntityProcessor extends EntityProcessorBase {
       } else {
         mailbox.connect(host, user, password);
       }
-      LOG.info("Connected to " + user + "'s mailbox on " + host);
+      log.info("Connected to " + user + "'s mailbox on " + host);
       
       return true;
     } catch (MessagingException e) {      
       String errMsg = String.format(Locale.ENGLISH,
           "Failed to connect to %s server %s as user %s due to: %s", protocol,
           host, user, e.toString());
-      LOG.error(errMsg, e);
+      log.error(errMsg, e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
           errMsg, e);
     }
@@ -431,7 +431,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
   }
   
   private void logConfig() {
-    if (!LOG.isInfoEnabled()) return;
+    if (!log.isInfoEnabled()) return;
     
     String lineSep = System.getProperty("line.separator"); 
     
@@ -474,7 +474,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
         .append(lineSep);
     config.append("includeSharedFolders : ").append(includeSharedFolders)
         .append(lineSep);
-    LOG.info(config.toString());
+    log.info(config.toString());
   }
   
   class FolderIterator implements Iterator<Folder> {
@@ -515,22 +515,22 @@ public class MailEntityProcessor extends EntityProcessorBase {
               hasMessages = (next.getType() & Folder.HOLDS_MESSAGES) != 0;
               next.open(Folder.READ_ONLY);
               lastFolder = next;
-              LOG.info("Opened folder : " + fullName);
+              log.info("Opened folder : " + fullName);
             }
             if (recurse && ((next.getType() & Folder.HOLDS_FOLDERS) != 0)) {
               Folder[] children = next.list();
-              LOG.info("Added its children to list  : ");
+              log.info("Added its children to list  : ");
               for (int i = children.length - 1; i >= 0; i--) {
                 folders.add(0, children[i]);
-                LOG.info("child name : " + children[i].getFullName());
+                log.info("child name : " + children[i].getFullName());
               }
-              if (children.length == 0) LOG.info("NO children : ");
+              if (children.length == 0) log.info("NO children : ");
             }
           }
         } while (!hasMessages);
         return next;
       } catch (Exception e) {
-        LOG.warn("Failed to read folders due to: "+e);
+        log.warn("Failed to read folders due to: "+e);
         // throw new
         // DataImportHandlerException(DataImportHandlerException.SEVERE,
         // "Folder open failed", e);
@@ -568,12 +568,12 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         Folder[] ufldrs = mailbox.getUserNamespaces(null);
         if (ufldrs != null) {
-          LOG.info("Found " + ufldrs.length + " user namespace folders");
+          log.info("Found " + ufldrs.length + " user namespace folders");
           for (Folder ufldr : ufldrs)
             folders.add(ufldr);
         }
       } catch (MessagingException me) {
-        LOG.warn("Messaging exception retrieving user namespaces: "
+        log.warn("Messaging exception retrieving user namespaces: "
             + me.getMessage());
       }
     }
@@ -582,12 +582,12 @@ public class MailEntityProcessor extends EntityProcessorBase {
       try {
         Folder[] sfldrs = mailbox.getSharedNamespaces();
         if (sfldrs != null) {
-          LOG.info("Found " + sfldrs.length + " shared namespace folders");
+          log.info("Found " + sfldrs.length + " shared namespace folders");
           for (Folder sfldr : sfldrs)
             folders.add(sfldr);
         }
       } catch (MessagingException me) {
-        LOG.warn("Messaging exception retrieving shared namespaces: "
+        log.warn("Messaging exception retrieving shared namespaces: "
             + me.getMessage());
       }
     }
@@ -620,14 +620,14 @@ public class MailEntityProcessor extends EntityProcessorBase {
         this.batchSize = batchSize;
         SearchTerm st = getSearchTerm();
         
-        LOG.info("SearchTerm=" + st);
+        log.info("SearchTerm=" + st);
         
         if (st != null || folder instanceof GmailFolder) {
           doBatching = false;
           // Searching can still take a while even though we're only pulling
           // envelopes; unless you're using gmail server-side filter, which is
           // fast
-          LOG.info("Searching folder " + folder.getName() + " for messages");
+          log.info("Searching folder " + folder.getName() + " for messages");
           final RTimer searchTimer = new RTimer();
 
           // If using GMail, speed up the envelope processing by doing a
@@ -642,11 +642,11 @@ public class MailEntityProcessor extends EntityProcessorBase {
                     
           if (folder instanceof GmailFolder && fetchMailsSince != null) {
             String afterCrit = "after:" + afterFmt.format(fetchMailsSince);
-            LOG.info("Added server-side gmail filter: " + afterCrit);
+            log.info("Added server-side gmail filter: " + afterCrit);
             Message[] afterMessages = folder.search(new GmailRawSearchTerm(
                 afterCrit));
             
-            LOG.info("GMail server-side filter found " + afterMessages.length
+            log.info("GMail server-side filter found " + afterMessages.length
                 + " messages received " + afterCrit + " in folder " + folder.getName());
             
             // now pass in the server-side filtered messages to the local filter
@@ -657,11 +657,11 @@ public class MailEntityProcessor extends EntityProcessorBase {
           totalInFolder = messagesInCurBatch.length;
           folder.fetch(messagesInCurBatch, fp);
           current = 0;
-          LOG.info("Total messages : " + totalInFolder);
-          LOG.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime());
+          log.info("Total messages : " + totalInFolder);
+          log.info("Search criteria applied. Batching disabled. Took {} (ms)", searchTimer.getTime());
         } else {
           totalInFolder = folder.getMessageCount();
-          LOG.info("Total messages : " + totalInFolder);
+          log.info("Total messages : " + totalInFolder);
           getNextBatch(batchSize, folder);
         }
       } catch (MessagingException e) {
@@ -685,8 +685,8 @@ public class MailEntityProcessor extends EntityProcessorBase {
       folder.fetch(messagesInCurBatch, fp);
       current = 0;
       currentBatch++;
-      LOG.info("Current Batch  : " + currentBatch);
-      LOG.info("Messages in this batch  : " + messagesInCurBatch.length);
+      log.info("Current Batch  : " + currentBatch);
+      log.info("Messages in this batch  : " + messagesInCurBatch.length);
     }
     
     public boolean hasNext() {
@@ -741,7 +741,7 @@ public class MailEntityProcessor extends EntityProcessorBase {
     
     @SuppressWarnings("serial")
     public SearchTerm getCustomSearch(final Folder folder) {
-      LOG.info("Building mail filter for messages in " + folder.getName()
+      log.info("Building mail filter for messages in " + folder.getName()
           + " that occur after " + sinceDateParser.format(since));
       return new DateTerm(ComparisonTerm.GE, since) {
         private int matched = 0;
@@ -761,15 +761,15 @@ public class MailEntityProcessor extends EntityProcessorBase {
             } else {
               String msgDateStr = (msgDate != null) ? sinceDateParser.format(msgDate) : "null";
               String sinceDateStr = (since != null) ? sinceDateParser.format(since) : "null";
-              LOG.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr
+              log.debug("Message " + msg.getSubject() + " was received at [" + msgDateStr
                   + "], since filter is [" + sinceDateStr + "]");
             }
           } catch (MessagingException e) {
-            LOG.warn("Failed to process message due to: "+e, e);
+            log.warn("Failed to process message due to: "+e, e);
           }
           
           if (seen % 100 == 0) {
-            LOG.info("Matched " + matched + " of " + seen + " messages since: "
+            log.info("Matched " + matched + " of " + seen + " messages since: "
                 + sinceDateParser.format(since));
           }
           

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
index 2b3a2c1..c1b4808 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/BinURLDataSource.java
@@ -36,7 +36,7 @@ import java.util.Properties;
  * @since solr 3.1
  */
 public class BinURLDataSource extends DataSource<InputStream>{
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String baseUrl;
   private int connectionTimeout = CONNECTION_TIMEOUT;
@@ -61,14 +61,14 @@ public class BinURLDataSource extends DataSource<InputStream>{
       try {
         connectionTimeout = Integer.parseInt(cTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid connection timeout: " + cTimeout);
+        log.warn("Invalid connection timeout: " + cTimeout);
       }
     }
     if (rTimeout != null) {
       try {
         readTimeout = Integer.parseInt(rTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid read timeout: " + rTimeout);
+        log.warn("Invalid read timeout: " + rTimeout);
       }
     }
   }
@@ -79,13 +79,13 @@ public class BinURLDataSource extends DataSource<InputStream>{
     try {
       if (URIMETHOD.matcher(query).find()) url = new URL(query);
       else url = new URL(baseUrl + query);
-      LOG.debug("Accessing URL: " + url.toString());
+      log.debug("Accessing URL: " + url.toString());
       URLConnection conn = url.openConnection();
       conn.setConnectTimeout(connectionTimeout);
       conn.setReadTimeout(readTimeout);
       return conn.getInputStream();
     } catch (Exception e) {
-      LOG.error("Exception thrown while getting data", e);
+      log.error("Exception thrown while getting data", e);
       wrapAndThrow (SEVERE, e, "Exception in invoking url " + url);
       return null;//unreachable
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
index c9e997c..71ee442 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImportHandler.java
@@ -68,7 +68,7 @@ import static org.apache.solr.handler.dataimport.DataImporter.IMPORT_CMD;
 public class DataImportHandler extends RequestHandlerBase implements
         SolrCoreAware {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private DataImporter importer;
 
@@ -107,7 +107,7 @@ public class DataImportHandler extends RequestHandlerBase implements
       debugEnabled = StrUtils.parseBool((String)initArgs.get(ENABLE_DEBUG), true);
       importer = new DataImporter(core, myName);         
     } catch (Exception e) {
-      LOG.error( DataImporter.MSG.LOAD_EXP, e);
+      log.error( DataImporter.MSG.LOAD_EXP, e);
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, DataImporter.MSG.LOAD_EXP, e);
     }
   }
@@ -257,7 +257,7 @@ public class DataImportHandler extends RequestHandlerBase implements
           try {
             return super.upload(document);
           } catch (RuntimeException e) {
-            LOG.error("Exception while adding: " + document, e);
+            log.error("Exception while adding: " + document, e);
             return false;
           }
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
index 4825fd1..d610d66 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DataImporter.java
@@ -74,8 +74,8 @@ public class DataImporter {
     IDLE, RUNNING_FULL_DUMP, RUNNING_DELTA_DUMP, JOB_FAILED
   }
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(log);
 
   private Status status = Status.IDLE;
   private DIHConfiguration config;
@@ -125,7 +125,7 @@ public class DataImporter {
         } else if(dataconfigFile!=null) {
           is = new InputSource(core.getResourceLoader().openResource(dataconfigFile));
           is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(dataconfigFile));
-          LOG.info("Loading DIH Configuration: " + dataconfigFile);
+          log.info("Loading DIH Configuration: " + dataconfigFile);
         }
         if(is!=null) {          
           config = loadDataConfig(is);
@@ -143,12 +143,12 @@ public class DataImporter {
             if (name.equals("datasource")) {
               success = true;
               NamedList dsConfig = (NamedList) defaultParams.getVal(position);
-              LOG.info("Getting configuration for Global Datasource...");              
+              log.info("Getting configuration for Global Datasource...");
               Map<String,String> props = new HashMap<>();
               for (int i = 0; i < dsConfig.size(); i++) {
                 props.put(dsConfig.getName(i), dsConfig.getVal(i).toString());
               }
-              LOG.info("Adding properties to datasource: " + props);
+              log.info("Adding properties to datasource: " + props);
               dsProps.put((String) dsConfig.get("name"), props);
             }
             position++;
@@ -201,7 +201,7 @@ public class DataImporter {
           dbf.setXIncludeAware(true);
           dbf.setNamespaceAware(true);
         } catch( UnsupportedOperationException e ) {
-          LOG.warn( "XML parser doesn't support XInclude option" );
+          log.warn( "XML parser doesn't support XInclude option" );
         }
       }
       
@@ -224,7 +224,7 @@ public class DataImporter {
       }
 
       dihcfg = readFromXml(document);
-      LOG.info("Data Configuration loaded successfully");
+      log.info("Data Configuration loaded successfully");
     } catch (Exception e) {
       throw new DataImportHandlerException(SEVERE,
               "Data Config problem: " + e.getMessage(), e);
@@ -414,7 +414,7 @@ public class DataImporter {
   }
 
   public void doFullImport(DIHWriter writer, RequestInfo requestParams) {
-    LOG.info("Starting Full Import");
+    log.info("Starting Full Import");
     setStatus(Status.RUNNING_FULL_DUMP);
     try {
       DIHProperties dihPropWriter = createPropertyWriter();
@@ -425,7 +425,7 @@ public class DataImporter {
       if (!requestParams.isDebug())
         cumulativeStatistics.add(docBuilder.importStatistics);
     } catch (Exception e) {
-      SolrException.log(LOG, "Full Import failed", e);
+      SolrException.log(log, "Full Import failed", e);
       docBuilder.handleError("Full Import failed", e);
     } finally {
       setStatus(Status.IDLE);
@@ -442,7 +442,7 @@ public class DataImporter {
   }
 
   public void doDeltaImport(DIHWriter writer, RequestInfo requestParams) {
-    LOG.info("Starting Delta Import");
+    log.info("Starting Delta Import");
     setStatus(Status.RUNNING_DELTA_DUMP);
     try {
       DIHProperties dihPropWriter = createPropertyWriter();
@@ -453,7 +453,7 @@ public class DataImporter {
       if (!requestParams.isDebug())
         cumulativeStatistics.add(docBuilder.importStatistics);
     } catch (Exception e) {
-      LOG.error("Delta Import Failed", e);
+      log.error("Delta Import Failed", e);
       docBuilder.handleError("Delta Import Failed", e);
     } finally {
       setStatus(Status.IDLE);
@@ -475,7 +475,7 @@ public class DataImporter {
       return;
     }
     if (!importLock.tryLock()){
-      LOG.warn("Import command failed . another import is running");      
+      log.warn("Import command failed . another import is running");
       return;
     }
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
index f48cbea..6da9cc1 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DateFormatTransformer.java
@@ -39,7 +39,7 @@ import org.slf4j.LoggerFactory;
  */
 public class DateFormatTransformer extends Transformer {
   private Map<String, SimpleDateFormat> fmtCache = new HashMap<>();
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   @SuppressWarnings("unchecked")
@@ -80,7 +80,7 @@ public class DateFormatTransformer extends Transformer {
           }
         }
       } catch (ParseException e) {
-        LOG.warn("Could not parse a Date field ", e);
+        log.warn("Could not parse a Date field ", e);
       }
     }
     return aRow;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
index f6a62aa..164cf70 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/DocBuilder.java
@@ -50,7 +50,7 @@ import java.util.concurrent.atomic.AtomicLong;
  */
 public class DocBuilder {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static final AtomicBoolean WARNED_ABOUT_INDEX_TIME_BOOSTS = new AtomicBoolean();
 
   private static final Date EPOCH = new Date(0);
@@ -265,7 +265,7 @@ public class DocBuilder {
         statusMessages.put(DataImporter.MSG.TOTAL_FAILED_DOCS, ""+ importStatistics.failedDocCount.get());
 
       statusMessages.put("Time taken", getTimeElapsedSince(startTime.get()));
-      LOG.info("Time taken = " + getTimeElapsedSince(startTime.get()));
+      log.info("Time taken = " + getTimeElapsedSince(startTime.get()));
     } catch(Exception e)
     {
       throw new RuntimeException(e);
@@ -294,7 +294,7 @@ public class DocBuilder {
 
   @SuppressWarnings("unchecked")
   private void finish(Map<String,Object> lastIndexTimeProps) {
-    LOG.info("Import completed successfully");
+    log.info("Import completed successfully");
     statusMessages.put("", "Indexing completed. Added/Updated: "
             + importStatistics.docCount + " documents. Deleted "
             + importStatistics.deletedDocCount + " documents.");
@@ -307,7 +307,7 @@ public class DocBuilder {
     try {
       propWriter.persist(lastIndexTimeProps);
     } catch (Exception e) {
-      LOG.error("Could not write property file", e);
+      log.error("Could not write property file", e);
       statusMessages.put("error", "Could not write property file. Delta imports will not work. " +
           "Make sure your conf directory is writable");
     }
@@ -340,7 +340,7 @@ public class DocBuilder {
     }
 
     addStatusMessage("Identifying Delta");
-    LOG.info("Starting delta collection.");
+    log.info("Starting delta collection.");
     Set<Map<String, Object>> deletedKeys = new HashSet<>();
     Set<Map<String, Object>> allPks = collectDelta(currentEntityProcessorWrapper, resolver, deletedKeys);
     if (stop.get())
@@ -369,12 +369,12 @@ public class DocBuilder {
     }
 
     if (!stop.get()) {
-      LOG.info("Delta Import completed successfully");
+      log.info("Delta Import completed successfully");
     }
   }
 
   private void deleteAll(Set<Map<String, Object>> deletedKeys) {
-    LOG.info("Deleting stale documents ");
+    log.info("Deleting stale documents ");
     Iterator<Map<String, Object>> iter = deletedKeys.iterator();
     while (iter.hasNext()) {
       Map<String, Object> map = iter.next();
@@ -385,7 +385,7 @@ public class DocBuilder {
         key = map.get(keyName);
       }
       if(key == null) {
-        LOG.warn("no key was available for deleted pk query. keyName = " + keyName);
+        log.warn("no key was available for deleted pk query. keyName = " + keyName);
         continue;
       }
       writer.deleteDoc(key);
@@ -483,7 +483,7 @@ public class DocBuilder {
             if (seenDocCount <= reqParams.getStart())
               continue;
             if (seenDocCount > reqParams.getStart() + reqParams.getRows()) {
-              LOG.info("Indexing stopped at docCount = " + importStatistics.docCount);
+              log.info("Indexing stopped at docCount = " + importStatistics.docCount);
               break;
             }
           }
@@ -548,7 +548,7 @@ public class DocBuilder {
               importStatistics.skipDocCount.getAndIncrement();
               doc = null;
             } else {
-              SolrException.log(LOG, "Exception while processing: "
+              SolrException.log(log, "Exception while processing: "
                       + epw.getEntity().getName() + " document : " + doc, e);
             }
             if (e.getErrCode() == DataImportHandlerException.SEVERE)
@@ -620,9 +620,9 @@ public class DocBuilder {
     if (value != null) {
       String message = "Ignoring document boost: " + value + " as index-time boosts are not supported anymore";
       if (WARNED_ABOUT_INDEX_TIME_BOOSTS.compareAndSet(false, true)) {
-        LOG.warn(message);
+        log.warn(message);
       } else {
-        LOG.debug(message);
+        log.debug(message);
       }
     }
 
@@ -759,7 +759,7 @@ public class DocBuilder {
                   "deltaQuery has no column to resolve to declared primary key pk='%s'",
                   pk));
     }
-    LOG.info(String.format(Locale.ROOT,
+    log.info(String.format(Locale.ROOT,
         "Resolving deltaQuery column '%s' to match entity's declared pk '%s'",
         resolvedPk, pk));
     return resolvedPk;
@@ -796,7 +796,7 @@ public class DocBuilder {
     
     // identifying the modified rows for this entity
     Map<String, Map<String, Object>> deltaSet = new HashMap<>();
-    LOG.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName());
+    log.info("Running ModifiedRowKey() for Entity: " + epw.getEntity().getName());
     //get the modified rows in this entity
     String pk = epw.getEntity().getPk();
     while (true) {
@@ -844,8 +844,8 @@ public class DocBuilder {
         return new HashSet();
     }
 
-    LOG.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size());
-    LOG.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size());
+    log.info("Completed ModifiedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deltaSet.size());
+    log.info("Completed DeletedRowKey for Entity: " + epw.getEntity().getName() + " rows obtained : " + deletedSet.size());
 
     myModifiedPks.addAll(deltaSet.values());
     Set<Map<String, Object>> parentKeyList = new HashSet<>();
@@ -870,7 +870,7 @@ public class DocBuilder {
           return new HashSet();
       }
     }
-    LOG.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName());
+    log.info("Completed parentDeltaQuery for Entity: " + epw.getEntity().getName());
     if (epw.getEntity().isDocRoot())
       deletedRows.addAll(deletedSet);
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
index edc8589..571c280 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldReaderDataSource.java
@@ -48,7 +48,7 @@ import java.util.Properties;
  * @since 1.4
  */
 public class FieldReaderDataSource extends DataSource<Reader> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   protected VariableResolver vr;
   protected String dataField;
   private String encoding;
@@ -77,7 +77,7 @@ public class FieldReaderDataSource extends DataSource<Reader> {
         // so let us just check it
         return readCharStream(clob);
       } catch (Exception e) {
-        LOG.info("Unable to get data from CLOB");
+        log.info("Unable to get data from CLOB");
         return null;
 
       }
@@ -87,7 +87,7 @@ public class FieldReaderDataSource extends DataSource<Reader> {
       try {
         return getReader(blob);
       } catch (Exception e) {
-        LOG.info("Unable to get data from BLOB");
+        log.info("Unable to get data from BLOB");
         return null;
 
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
index 42ba5a6..ba7ca5d 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FieldStreamDataSource.java
@@ -45,7 +45,7 @@ import org.slf4j.LoggerFactory;
  * @since 3.1
  */
 public class FieldStreamDataSource extends DataSource<InputStream> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   protected VariableResolver vr;
   protected String dataField;
   private EntityProcessorWrapper wrapper;
@@ -67,7 +67,7 @@ public class FieldStreamDataSource extends DataSource<InputStream> {
       try {
         return blob.getBinaryStream();
       } catch (SQLException sqle) {
-        LOG.info("Unable to get data from BLOB");
+        log.info("Unable to get data from BLOB");
         return null;
       }
     } else if (o instanceof byte[]) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
index 2db5804..920472e 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/FileDataSource.java
@@ -58,7 +58,7 @@ public class FileDataSource extends DataSource<Reader> {
    */
   protected String encoding = null;
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public void init(Context context, Properties initProps) {
@@ -102,13 +102,13 @@ public class FileDataSource extends DataSource<Reader> {
         File basePathFile;
         if (basePath == null) {
           basePathFile = new File(".").getAbsoluteFile(); 
-          LOG.warn("FileDataSource.basePath is empty. " +
+          log.warn("FileDataSource.basePath is empty. " +
               "Resolving to: " + basePathFile.getAbsolutePath());
         } else {
           basePathFile = new File(basePath);
           if (!basePathFile.isAbsolute()) {
             basePathFile = basePathFile.getAbsoluteFile();
-            LOG.warn("FileDataSource.basePath is not absolute. Resolving to: "
+            log.warn("FileDataSource.basePath is not absolute. Resolving to: "
                 + basePathFile.getAbsolutePath());
           }
         }
@@ -117,7 +117,7 @@ public class FileDataSource extends DataSource<Reader> {
       }
 
       if (file.isFile() && file.canRead()) {
-        LOG.debug("Accessing File: " + file.getAbsolutePath());
+        log.debug("Accessing File: " + file.getAbsolutePath());
         return file;
       } else {
         throw new FileNotFoundException("Could not find file: " + query + 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
index b17650a..a8eed55 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/JdbcDataSource.java
@@ -51,7 +51,7 @@ import java.util.concurrent.TimeUnit;
  */
 public class JdbcDataSource extends
         DataSource<Iterator<Map<String, Object>>> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected Callable<Connection> factory;
 
@@ -87,7 +87,7 @@ public class JdbcDataSource extends
         if (batchSize == -1)
           batchSize = Integer.MIN_VALUE;
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid batch size: " + bsz);
+        log.warn("Invalid batch size: " + bsz);
       }
     }
 
@@ -172,7 +172,7 @@ public class JdbcDataSource extends
     return factory = new Callable<Connection>() {
       @Override
       public Connection call() throws Exception {
-        LOG.info("Creating a connection for entity "
+        log.info("Creating a connection for entity "
                 + context.getEntityAttribute(DataImporter.NAME) + " with URL: "
                 + url);
         long start = System.nanoTime();
@@ -199,13 +199,13 @@ public class JdbcDataSource extends
             try {
               c.close();
             } catch (SQLException e2) {
-              LOG.warn("Exception closing connection during cleanup", e2);
+              log.warn("Exception closing connection during cleanup", e2);
             }
 
             throw new DataImportHandlerException(SEVERE, "Exception initializing SQL connection", e);
           }
         }
-        LOG.info("Time taken for getConnection(): "
+        log.info("Time taken for getConnection(): "
             + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
         return c;
       }
@@ -289,7 +289,7 @@ public class JdbcDataSource extends
   }
 
   private void logError(String msg, Exception e) {
-    LOG.warn(msg, e);
+    log.warn(msg, e);
   }
 
   protected List<String> readFieldNames(ResultSetMetaData metaData)
@@ -316,10 +316,10 @@ public class JdbcDataSource extends
       try {
         Connection c = getConnection();
         stmt = createStatement(c, batchSize, maxRows);
-        LOG.debug("Executing SQL: " + query);
+        log.debug("Executing SQL: " + query);
         long start = System.nanoTime();
         resultSet = executeStatement(stmt, query);
-        LOG.trace("Time taken for sql :"
+        log.trace("Time taken for sql :"
                 + TimeUnit.MILLISECONDS.convert(System.nanoTime() - start, TimeUnit.NANOSECONDS));
         setColNames(resultSet);
       } catch (Exception e) {
@@ -541,7 +541,7 @@ public class JdbcDataSource extends
   protected void finalize() throws Throwable {
     try {
       if(!isClosed){
-        LOG.error("JdbcDataSource was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
+        log.error("JdbcDataSource was not closed prior to finalize(), indicates a bug -- POSSIBLE RESOURCE LEAK!!!");
         close();
       }
     } finally {
@@ -575,7 +575,7 @@ public class JdbcDataSource extends
         conn.close();
       }
     } catch (Exception e) {
-      LOG.error("Ignoring Error when closing connection", e);
+      log.error("Ignoring Error when closing connection", e);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
index 17687e1..66c525e 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/LogTransformer.java
@@ -33,7 +33,7 @@ import java.util.Map;
  * @since solr 1.4
  */
 public class LogTransformer extends Transformer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public Object transformRow(Map<String, Object> row, Context ctx) {
@@ -43,20 +43,20 @@ public class LogTransformer extends Transformer {
     if (expr == null || level == null) return row;
 
     if ("info".equals(level)) {
-      if (LOG.isInfoEnabled())
-        LOG.info(ctx.replaceTokens(expr));
+      if (log.isInfoEnabled())
+        log.info(ctx.replaceTokens(expr));
     } else if ("trace".equals(level)) {
-      if (LOG.isTraceEnabled())
-        LOG.trace(ctx.replaceTokens(expr));
+      if (log.isTraceEnabled())
+        log.trace(ctx.replaceTokens(expr));
     } else if ("warn".equals(level)) {
-      if (LOG.isWarnEnabled())
-        LOG.warn(ctx.replaceTokens(expr));
+      if (log.isWarnEnabled())
+        log.warn(ctx.replaceTokens(expr));
     } else if ("error".equals(level)) {
-      if (LOG.isErrorEnabled())
-        LOG.error(ctx.replaceTokens(expr));
+      if (log.isErrorEnabled())
+        log.error(ctx.replaceTokens(expr));
     } else if ("debug".equals(level)) {
-      if (LOG.isDebugEnabled())
-        LOG.debug(ctx.replaceTokens(expr));
+      if (log.isDebugEnabled())
+        log.debug(ctx.replaceTokens(expr));
     }
 
     return row;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
index 8e0d249..7a919de 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/RegexTransformer.java
@@ -40,7 +40,7 @@ import java.util.regex.Pattern;
  * @see Pattern
  */
 public class RegexTransformer extends Transformer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   @SuppressWarnings("unchecked")
@@ -166,7 +166,7 @@ public class RegexTransformer extends Transformer {
               }
             }
           } catch (Exception e) {
-            LOG.warn("Parsing failed for field : " + columnName, e);
+            log.warn("Parsing failed for field : " + columnName, e);
           }
         }
         return l == null ? map: l;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
index 5bbb57b..7732673 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SolrEntityProcessor.java
@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
  */
 public class SolrEntityProcessor extends EntityProcessorBase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String SOLR_SERVER = "url";
   public static final String QUERY = "query";
@@ -118,13 +118,13 @@ public class SolrEntityProcessor extends EntityProcessorBase {
             .withHttpClient(client)
             .withResponseParser(new XMLResponseParser())
             .build();
-        LOG.info("using XMLResponseParser");
+        log.info("using XMLResponseParser");
       } else {
         // TODO: it doesn't matter for this impl when passing a client currently, but we should close this!
         solrClient = new Builder(url.toExternalForm())
             .withHttpClient(client)
             .build();
-        LOG.info("using BinaryResponseParser");
+        log.info("using BinaryResponseParser");
       }
     } catch (MalformedURLException e) {
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
index d30c670..19c6d0f 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/SqlEntityProcessor.java
@@ -42,7 +42,7 @@ import java.util.regex.Pattern;
  * @since solr 1.3
  */
 public class SqlEntityProcessor extends EntityProcessorBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected DataSource<Iterator<Map<String, Object>>> dataSource;
 
@@ -61,7 +61,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
     } catch (DataImportHandlerException e) {
       throw e;
     } catch (Exception e) {
-      LOG.error( "The query failed '" + q + "'", e);
+      log.error( "The query failed '" + q + "'", e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE, e);
     }
   }
@@ -103,7 +103,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
       String parentDeltaQuery = context.getEntityAttribute(PARENT_DELTA_QUERY);
       if (parentDeltaQuery == null)
         return null;
-      LOG.info("Running parentDeltaQuery for Entity: "
+      log.info("Running parentDeltaQuery for Entity: "
               + context.getEntityAttribute("name"));
       initQuery(context.replaceTokens(parentDeltaQuery));
     }
@@ -119,7 +119,7 @@ public class SqlEntityProcessor extends EntityProcessorBase {
       String deltaImportQuery = context.getEntityAttribute(DELTA_IMPORT_QUERY);
       if(deltaImportQuery != null) return deltaImportQuery;
     }
-    LOG.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName);
+    log.warn("'deltaImportQuery' attribute is not specified for entity : "+ entityName);
     return getDeltaImportQuery(queryString);
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
index a5faa7e..f655edd 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/TemplateTransformer.java
@@ -48,7 +48,7 @@ import org.slf4j.LoggerFactory;
  */
 public class TemplateTransformer extends Transformer {
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private Map<String ,List<String>> templateVsVars = new HashMap<>();
 
   @Override
@@ -76,7 +76,7 @@ public class TemplateTransformer extends Transformer {
       }
       for (String v : variables) {
         if (resolver.resolve(v) == null) {
-          LOG.warn("Unable to resolve variable: " + v
+          log.warn("Unable to resolve variable: " + v
                   + " while parsing expression: " + expr);
           resolvable = false;
         }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
index 9271679..145ffc4 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/URLDataSource.java
@@ -41,7 +41,7 @@ import java.util.regex.Pattern;
  * @since solr 1.4
  */
 public class URLDataSource extends DataSource<Reader> {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String baseUrl;
 
@@ -72,14 +72,14 @@ public class URLDataSource extends DataSource<Reader> {
       try {
         connectionTimeout = Integer.parseInt(cTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid connection timeout: " + cTimeout);
+        log.warn("Invalid connection timeout: " + cTimeout);
       }
     }
     if (rTimeout != null) {
       try {
         readTimeout = Integer.parseInt(rTimeout);
       } catch (NumberFormatException e) {
-        LOG.warn("Invalid read timeout: " + rTimeout);
+        log.warn("Invalid read timeout: " + rTimeout);
       }
     }
   }
@@ -91,7 +91,7 @@ public class URLDataSource extends DataSource<Reader> {
       if (URIMETHOD.matcher(query).find()) url = new URL(query);
       else url = new URL(baseUrl + query);
 
-      LOG.debug("Accessing URL: " + url.toString());
+      log.debug("Accessing URL: " + url.toString());
 
       URLConnection conn = url.openConnection();
       conn.setConnectTimeout(connectionTimeout);
@@ -112,7 +112,7 @@ public class URLDataSource extends DataSource<Reader> {
       DataImporter.QUERY_COUNT.get().incrementAndGet();
       return new InputStreamReader(in, enc);
     } catch (Exception e) {
-      LOG.error("Exception thrown while getting data", e);
+      log.error("Exception thrown while getting data", e);
       throw new DataImportHandlerException(DataImportHandlerException.SEVERE,
               "Exception in invoking url " + url, e);
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
index 70b9aba..c93b581 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathEntityProcessor.java
@@ -54,8 +54,8 @@ import java.util.concurrent.atomic.AtomicReference;
  * @since solr 1.3
  */
 public class XPathEntityProcessor extends EntityProcessorBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger xmllog = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);
 
   private static final Map<String, Object> END_MARKER = new HashMap<>();
   
@@ -136,7 +136,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           // some XML parsers are broken and don't close the byte stream (but they should according to spec)
           IOUtils.closeQuietly(xsltSource.getInputStream());
         }
-        LOG.info("Using xslTransformer: "
+        log.info("Using xslTransformer: "
                         + xslTransformer.getClass().getName());
       } catch (Exception e) {
         throw new DataImportHandlerException(SEVERE,
@@ -293,10 +293,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
         if (ABORT.equals(onError)) {
           wrapAndThrow(SEVERE, e);
         } else if (SKIP.equals(onError)) {
-          if (LOG.isDebugEnabled()) LOG.debug("Skipping url : " + s, e);
+          if (log.isDebugEnabled()) log.debug("Skipping url : " + s, e);
           wrapAndThrow(DataImportHandlerException.SKIP, e);
         } else {
-          LOG.warn("Failed for url : " + s, e);
+          log.warn("Failed for url : " + s, e);
           rowIterator = Collections.EMPTY_LIST.iterator();
           return;
         }
@@ -313,7 +313,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           } else if (SKIP.equals(onError)) {
             wrapAndThrow(DataImportHandlerException.SKIP, e);
           } else {
-            LOG.warn("Failed for url : " + s, e);
+            log.warn("Failed for url : " + s, e);
             rowIterator = Collections.EMPTY_LIST.iterator();
             return;
           }
@@ -330,12 +330,12 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           if (ABORT.equals(onError)) {
             wrapAndThrow(SEVERE, e, msg);
           } else if (SKIP.equals(onError)) {
-            LOG.warn(msg, e);
+            log.warn(msg, e);
             Map<String, Object> map = new HashMap<>();
             map.put(DocBuilder.SKIP_DOC, Boolean.TRUE);
             rows.add(map);
           } else if (CONTINUE.equals(onError)) {
-            LOG.warn(msg, e);
+            log.warn(msg, e);
           }
         }
         rowIterator = rows.iterator();
@@ -457,7 +457,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
         try {
           while (!blockingQueue.offer(row, blockingQueueTimeOut, blockingQueueTimeOutUnits)) {
             if (isEnd.get()) return;
-            LOG.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
+            log.debug("Timeout elapsed writing records.  Perhaps buffer size should be increased.");
           }
         } catch (InterruptedException e) {
           return;
@@ -488,10 +488,10 @@ public class XPathEntityProcessor extends EntityProcessorBase {
           try {
             row = blockingQueue.poll(blockingQueueTimeOut, blockingQueueTimeOutUnits);
             if (row == null) {
-              LOG.debug("Timeout elapsed reading records.");
+              log.debug("Timeout elapsed reading records.");
             }
           } catch (InterruptedException e) {
-            LOG.debug("Caught InterruptedException while waiting for row.  Aborting.");
+            log.debug("Caught InterruptedException while waiting for row.  Aborting.");
             isEnd.set(true);
             return null;
           }
@@ -507,7 +507,7 @@ public class XPathEntityProcessor extends EntityProcessorBase {
             } else if (SKIP.equals(onError)) {
               wrapAndThrow(DataImportHandlerException.SKIP, exp.get());
             } else {
-              LOG.warn(msg, exp.get());
+              log.warn(msg, exp.get());
             }
           }
           return null;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
index 311215c..13daf49 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/XPathRecordReader.java
@@ -57,8 +57,8 @@ import org.slf4j.LoggerFactory;
  * @since solr 1.3
  */
 public class XPathRecordReader {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(LOG);
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final XMLErrorLogger XMLLOG = new XMLErrorLogger(log);
 
   private Node rootNode = new Node("/", null);
 
@@ -645,7 +645,7 @@ public class XPathRecordReader {
     } catch (IllegalArgumentException ex) {
       // Other implementations will likely throw this exception since "reuse-instance"
       // isimplementation specific.
-      LOG.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory);
+      log.debug("Unable to set the 'reuse-instance' property for the input chain: " + factory);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
index 2b91042..0ba13ea 100644
--- a/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
+++ b/solr/contrib/dataimporthandler/src/java/org/apache/solr/handler/dataimport/config/DIHConfiguration.java
@@ -47,7 +47,7 @@ import org.w3c.dom.Element;
  * @since solr 1.3
  */
 public class DIHConfiguration {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   // TODO - remove from here and add it to entity
   private final String deleteQuery;
@@ -106,7 +106,7 @@ public class DIHConfiguration {
       SchemaField sf = entry.getValue();
       if (!fields.containsKey(sf.getName())) {
         if (sf.isRequired()) {
-          LOG.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig");
+          log.info(sf.getName() + " is a required field in SolrSchema . But not found in DataConfig");
         }
       }
     }
@@ -114,7 +114,7 @@ public class DIHConfiguration {
       EntityField fld = entry.getValue();
       SchemaField field = getSchemaField(fld.getName());
       if (field == null && !isSpecialCommand(fld.getName())) {
-        LOG.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
+        log.info("The field :" + fld.getName() + " present in DataConfig does not have a counterpart in Solr Schema");
       }
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
index 858b9cc..7e08f0e 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
@@ -47,7 +47,7 @@ import java.util.Properties;
  */
 public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTestCase {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String SOLR_CONFIG = "dataimport-solrconfig.xml";
   private static final String SOLR_SCHEMA = "dataimport-schema.xml";
@@ -136,7 +136,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
     try {
       deleteCore();
     } catch (Exception e) {
-      LOG.error("Error deleting core", e);
+      log.error("Error deleting core", e);
     }
     jetty.stop();
     instance.tearDown();
@@ -151,7 +151,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(SOLR_DOCS);
       runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -169,7 +169,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       map.put("rows", "50");
       runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", false), map);
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -184,7 +184,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(generateSolrDocuments(7));
       runFullImport(generateDIHConfig("query='*:*' fl='id' rows='2'"+(random().nextBoolean() ?" cursorMark='true' sort='id asc'":""), false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -221,7 +221,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       addDocumentsToSolr(DOCS);
       runFullImport(getDihConfigTagsInnerEntity());
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     } finally {
       MockDataSource.clearCache();
@@ -244,7 +244,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
     try {
       runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", true /* use dead server */));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -258,7 +258,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       runFullImport(generateDIHConfig("query='bogus:3' rows='2' fl='id,desc' onError='"+
             (random().nextBoolean() ? "abort" : "justtogetcoverage")+"'", false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     
@@ -278,7 +278,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
       runFullImport(generateDIHConfig(attrs,
             false));
     } catch (Exception e) {
-      LOG.error(e.getMessage(), e);
+      log.error(e.getMessage(), e);
       fail(e.getMessage());
     }
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index 14f4dc9..5fdec0f 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -89,17 +89,17 @@ public class JettySolrRunner {
   private final JettyConfig config;
   private final String solrHome;
   private final Properties nodeProperties;
-  
+
   private volatile boolean startedBefore = false;
 
   private LinkedList<FilterHolder> extraFilters;
 
   private static final String excludePatterns = "/css/.+,/js/.+,/img/.+,/tpl/.+";
-  
+
   private int proxyPort = -1;
 
   public static class DebugFilter implements Filter {
-    public final static Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
     private AtomicLong nRequests = new AtomicLong();
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/LockTree.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/LockTree.java b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
index 8ae7f75..af0d30e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/LockTree.java
+++ b/solr/core/src/java/org/apache/solr/cloud/LockTree.java
@@ -36,7 +36,7 @@ import org.slf4j.LoggerFactory;
  * but internally it is synchronized so that only one thread can perform any operation.
  */
 public class LockTree {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private final Node root = new Node(null, LockLevel.CLUSTER, null);
 
   public void clear() {
@@ -141,7 +141,7 @@ public class LockTree {
     void unlock(LockImpl lockObject) {
       if (myLock == lockObject) myLock = null;
       else {
-        LOG.info("Unlocked multiple times : {}", lockObject.toString());
+        log.info("Unlocked multiple times : {}", lockObject.toString());
       }
     }
 
@@ -171,7 +171,7 @@ public class LockTree {
 
     void clear() {
       if (myLock != null) {
-        LOG.warn("lock_is_leaked at" + constructPath(new LinkedList<>()));
+        log.warn("lock_is_leaked at" + constructPath(new LinkedList<>()));
         myLock = null;
       }
       for (Node node : children.values()) node.clear();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
index 3df6501..66a31c5 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskQueue.java
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
  * This is inefficient!  But the API on this class is kind of muddy..
  */
 public class OverseerTaskQueue extends ZkDistributedQueue {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private static final String RESPONSE_PREFIX = "qnr-" ;
 
@@ -70,7 +70,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
           if (data != null) {
             ZkNodeProps message = ZkNodeProps.load(data);
             if (message.containsKey(requestIdKey)) {
-              LOG.debug("Looking for {}, found {}", message.get(requestIdKey), requestId);
+              log.debug("Looking for {}, found {}", message.get(requestIdKey), requestId);
               if(message.get(requestIdKey).equals(requestId)) return true;
             }
           }
@@ -96,7 +96,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
       if (zookeeper.exists(responsePath, true)) {
         zookeeper.setData(responsePath, event.getBytes(), true);
       } else {
-        LOG.info("Response ZK path: " + responsePath + " doesn't exist."
+        log.info("Response ZK path: " + responsePath + " doesn't exist."
             + "  Requestor may have disconnected from ZooKeeper");
       }
       try {
@@ -136,7 +136,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
         return;
       }
       // If latchEventType is not null, only fire if the type matches
-      LOG.debug("{} fired on path {} state {} latchEventType {}", event.getType(), event.getPath(), event.getState(), latchEventType);
+      log.debug("{} fired on path {} state {} latchEventType {}", event.getType(), event.getPath(), event.getState(), latchEventType);
       if (latchEventType == null || event.getType() == latchEventType) {
         lock.lock();
         try {
@@ -234,7 +234,7 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
       throws KeeperException, InterruptedException {
     ArrayList<QueueEvent> topN = new ArrayList<>();
 
-    LOG.debug("Peeking for top {} elements. ExcludeSet: {}", n, excludeSet);
+    log.debug("Peeking for top {} elements. ExcludeSet: {}", n, excludeSet);
     Timer.Context time;
     if (waitMillis == Long.MAX_VALUE) time = stats.time(dir + "_peekTopN_wait_forever");
     else time = stats.time(dir + "_peekTopN_wait" + waitMillis);
@@ -252,13 +252,13 @@ public class OverseerTaskQueue extends ZkDistributedQueue {
   }
 
   private static void printQueueEventsListElementIds(ArrayList<QueueEvent> topN) {
-    if (LOG.isDebugEnabled() && !topN.isEmpty()) {
+    if (log.isDebugEnabled() && !topN.isEmpty()) {
       StringBuilder sb = new StringBuilder("[");
       for (QueueEvent queueEvent : topN) {
         sb.append(queueEvent.getId()).append(", ");
       }
       sb.append("]");
-      LOG.debug("Returning topN elements: {}", sb.toString());
+      log.debug("Returning topN elements: {}", sb.toString());
     }
   }
 


[06/15] lucene-solr:jira/http2: LUCENE-8461: Add Lucene80Codec.

Posted by da...@apache.org.
LUCENE-8461: Add Lucene80Codec.


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/025350ea
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/025350ea
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/025350ea

Branch: refs/heads/jira/http2
Commit: 025350ea12f648b8f5864a0ba6ef85ddff577a2a
Parents: 8cde127
Author: Adrien Grand <jp...@gmail.com>
Authored: Tue Aug 21 14:58:19 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Aug 23 10:51:45 2018 +0200

----------------------------------------------------------------------
 .../lucene/codecs/lucene70/Lucene70Codec.java   | 133 ++++++
 .../apache/lucene/codecs/lucene70/package.html  |  25 ++
 .../services/org.apache.lucene.codecs.Codec     |   1 +
 .../benchmark/byTask/tasks/CreateIndexTask.java |   4 +-
 .../java/org/apache/lucene/codecs/Codec.java    |   2 +-
 .../lucene/codecs/lucene70/Lucene70Codec.java   | 175 --------
 .../lucene/codecs/lucene80/Lucene80Codec.java   | 177 ++++++++
 .../lucene/codecs/lucene80/package-info.java    | 409 +++++++++++++++++++
 .../services/org.apache.lucene.codecs.Codec     |   2 +-
 ...cene50StoredFieldsFormatHighCompression.java |   8 +-
 .../lucene70/TestLucene70NormsFormat.java       |   4 +-
 .../org/apache/lucene/index/Test2BPoints.java   |   2 +-
 .../apache/lucene/index/TestPointValues.java    |   8 +-
 .../org/apache/lucene/search/TestBoolean2.java  |   8 +-
 .../apache/lucene/search/TestPointQueries.java  |   4 +-
 .../document/TestFloatPointNearestNeighbor.java |   2 +-
 .../org/apache/lucene/search/TestNearest.java   |   2 +-
 .../apache/lucene/spatial3d/TestGeo3DPoint.java |   4 +-
 .../suggest/document/TestSuggestField.java      |   4 +-
 .../apache/lucene/geo/BaseGeoPointTestCase.java |   2 +-
 .../util/TestRuleSetupAndRestoreClassEnv.java   |  30 +-
 .../java/org/apache/lucene/util/TestUtil.java   |   4 +-
 .../apache/solr/core/SchemaCodecFactory.java    |   4 +-
 .../solr/collection1/conf/schema_codec.xml      |   2 +-
 24 files changed, 793 insertions(+), 223 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
new file mode 100644
index 0000000..6841345
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene70;
+
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.CompoundFormat;
+import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.FilterCodec;
+import org.apache.lucene.codecs.LiveDocsFormat;
+import org.apache.lucene.codecs.NormsFormat;
+import org.apache.lucene.codecs.PointsFormat;
+import org.apache.lucene.codecs.PostingsFormat;
+import org.apache.lucene.codecs.SegmentInfoFormat;
+import org.apache.lucene.codecs.StoredFieldsFormat;
+import org.apache.lucene.codecs.TermVectorsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
+import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
+import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
+import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
+import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
+import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
+
+/**
+ * Implements the Lucene 7.0 index format, with configurable per-field postings
+ * and docvalues formats.
+ * <p>
+ * If you want to reuse functionality of this codec in another codec, extend
+ * {@link FilterCodec}.
+ *
+ * @see org.apache.lucene.codecs.lucene70 package documentation for file format details.
+ *
+ * @lucene.experimental
+ */
+public class Lucene70Codec extends Codec {
+  private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
+  private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
+  private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
+  private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
+  
+  private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
+    @Override
+    public PostingsFormat getPostingsFormatForField(String field) {
+      throw new IllegalStateException("This codec should only be used for reading, not writing");
+    }
+  };
+  
+  private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
+    @Override
+    public DocValuesFormat getDocValuesFormatForField(String field) {
+      throw new IllegalStateException("This codec should only be used for reading, not writing");
+    }
+  };
+  
+  private final StoredFieldsFormat storedFieldsFormat = new Lucene50StoredFieldsFormat(Mode.BEST_SPEED);
+
+  /** 
+   * Instantiates a new codec.
+   */
+  public Lucene70Codec() {
+    super("Lucene70");
+  }
+  
+  @Override
+  public final StoredFieldsFormat storedFieldsFormat() {
+    return storedFieldsFormat;
+  }
+  
+  @Override
+  public final TermVectorsFormat termVectorsFormat() {
+    return vectorsFormat;
+  }
+
+  @Override
+  public final PostingsFormat postingsFormat() {
+    return postingsFormat;
+  }
+  
+  @Override
+  public final FieldInfosFormat fieldInfosFormat() {
+    return fieldInfosFormat;
+  }
+  
+  @Override
+  public final SegmentInfoFormat segmentInfoFormat() {
+    return segmentInfosFormat;
+  }
+  
+  @Override
+  public final LiveDocsFormat liveDocsFormat() {
+    return liveDocsFormat;
+  }
+
+  @Override
+  public final CompoundFormat compoundFormat() {
+    return compoundFormat;
+  }
+
+  @Override
+  public final PointsFormat pointsFormat() {
+    return new Lucene60PointsFormat();
+  }
+  
+  @Override
+  public final DocValuesFormat docValuesFormat() {
+    return docValuesFormat;
+  }
+
+  private final NormsFormat normsFormat = new Lucene70NormsFormat();
+
+  @Override
+  public final NormsFormat normsFormat() {
+    return normsFormat;
+  }
+}

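As the class javadoc above recommends, codec functionality is reused by extending FilterCodec rather than copying a codec. A hedged sketch of that shape, mirroring how the point-related tests later in this commit wrap the default codec (the overridden format choice is illustrative only):

import org.apache.lucene.codecs.Codec;
import org.apache.lucene.codecs.FilterCodec;
import org.apache.lucene.codecs.PointsFormat;
import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;

public class FilterCodecSketch {
  // Keeping the "Lucene80" name means segments written this way remain readable
  // by the stock codec; every format except points is delegated unchanged.
  static Codec pointsOverridingCodec() {
    return new FilterCodec("Lucene80", Codec.getDefault()) {
      @Override
      public PointsFormat pointsFormat() {
        return new Lucene60PointsFormat();
      }
    };
  }
}
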
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package.html
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package.html b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package.html
new file mode 100644
index 0000000..b324e67
--- /dev/null
+++ b/lucene/backward-codecs/src/java/org/apache/lucene/codecs/lucene70/package.html
@@ -0,0 +1,25 @@
+<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements.  See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<html>
+<head>
+   <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
+</head>
+<body>
+Lucene 7.0 file format.
+</body>
+</html>

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
----------------------------------------------------------------------
diff --git a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
index 4a812de..773c168 100644
--- a/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/lucene/backward-codecs/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -13,3 +13,4 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
+org.apache.lucene.codecs.lucene70.Lucene70Codec

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
----------------------------------------------------------------------
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
index c2c145b..d4d68d6 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/CreateIndexTask.java
@@ -29,7 +29,7 @@ import org.apache.lucene.benchmark.byTask.PerfRunData;
 import org.apache.lucene.benchmark.byTask.utils.Config;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.index.ConcurrentMergeScheduler;
 import org.apache.lucene.index.IndexCommit;
 import org.apache.lucene.index.IndexDeletionPolicy;
@@ -139,7 +139,7 @@ public class CreateIndexTask extends PerfTask {
     if (defaultCodec == null && postingsFormat != null) {
       try {
         final PostingsFormat postingsFormatChosen = PostingsFormat.forName(postingsFormat);
-        iwConf.setCodec(new Lucene70Codec() {
+        iwConf.setCodec(new Lucene80Codec() {
           @Override
           public PostingsFormat getPostingsFormatForField(String field) {
             return postingsFormatChosen;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
index d864710..4aa22a6 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/Codec.java
@@ -57,7 +57,7 @@ public abstract class Codec implements NamedSPILoader.NamedSPI {
     }
     
     // TODO: should we use this, or maybe a system property is better?
-    static Codec defaultCodec = LOADER.lookup("Lucene70");
+    static Codec defaultCodec = LOADER.lookup("Lucene80");
   }
 
   private final String name;

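With the default lookup switched to "Lucene80", the 7.0 codec stays resolvable by name through the backward-codecs service entry added earlier in this commit. A small hypothetical check:

import org.apache.lucene.codecs.Codec;

public class CodecLookupSketch {
  public static void main(String[] args) {
    System.out.println(Codec.getDefault().getName()); // expected: Lucene80
    // Still resolvable for reading existing 7.x segments via SPI.
    System.out.println(Codec.forName("Lucene70").getName());
  }
}
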
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
deleted file mode 100644
index d04d554..0000000
--- a/lucene/core/src/java/org/apache/lucene/codecs/lucene70/Lucene70Codec.java
+++ /dev/null
@@ -1,175 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.lucene.codecs.lucene70;
-
-import java.util.Objects;
-
-import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.CompoundFormat;
-import org.apache.lucene.codecs.DocValuesFormat;
-import org.apache.lucene.codecs.FieldInfosFormat;
-import org.apache.lucene.codecs.FilterCodec;
-import org.apache.lucene.codecs.LiveDocsFormat;
-import org.apache.lucene.codecs.NormsFormat;
-import org.apache.lucene.codecs.PointsFormat;
-import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.SegmentInfoFormat;
-import org.apache.lucene.codecs.StoredFieldsFormat;
-import org.apache.lucene.codecs.TermVectorsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
-import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
-import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
-import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
-import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
-
-/**
- * Implements the Lucene 7.0 index format, with configurable per-field postings
- * and docvalues formats.
- * <p>
- * If you want to reuse functionality of this codec in another codec, extend
- * {@link FilterCodec}.
- *
- * @see org.apache.lucene.codecs.lucene70 package documentation for file format details.
- *
- * @lucene.experimental
- */
-public class Lucene70Codec extends Codec {
-  private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
-  private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
-  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
-  private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
-  private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
-  
-  private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
-    @Override
-    public PostingsFormat getPostingsFormatForField(String field) {
-      return Lucene70Codec.this.getPostingsFormatForField(field);
-    }
-  };
-  
-  private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
-    @Override
-    public DocValuesFormat getDocValuesFormatForField(String field) {
-      return Lucene70Codec.this.getDocValuesFormatForField(field);
-    }
-  };
-  
-  private final StoredFieldsFormat storedFieldsFormat;
-
-  /** 
-   * Instantiates a new codec.
-   */
-  public Lucene70Codec() {
-    this(Mode.BEST_SPEED);
-  }
-  
-  /** 
-   * Instantiates a new codec, specifying the stored fields compression
-   * mode to use.
-   * @param mode stored fields compression mode to use for newly 
-   *             flushed/merged segments.
-   */
-  public Lucene70Codec(Mode mode) {
-    super("Lucene70");
-    this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode));
-  }
-  
-  @Override
-  public final StoredFieldsFormat storedFieldsFormat() {
-    return storedFieldsFormat;
-  }
-  
-  @Override
-  public final TermVectorsFormat termVectorsFormat() {
-    return vectorsFormat;
-  }
-
-  @Override
-  public final PostingsFormat postingsFormat() {
-    return postingsFormat;
-  }
-  
-  @Override
-  public final FieldInfosFormat fieldInfosFormat() {
-    return fieldInfosFormat;
-  }
-  
-  @Override
-  public final SegmentInfoFormat segmentInfoFormat() {
-    return segmentInfosFormat;
-  }
-  
-  @Override
-  public final LiveDocsFormat liveDocsFormat() {
-    return liveDocsFormat;
-  }
-
-  @Override
-  public final CompoundFormat compoundFormat() {
-    return compoundFormat;
-  }
-
-  @Override
-  public final PointsFormat pointsFormat() {
-    return new Lucene60PointsFormat();
-  }
-
-  /** Returns the postings format that should be used for writing 
-   *  new segments of <code>field</code>.
-   *  
-   *  The default implementation always returns "Lucene50".
-   *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
-   */
-  public PostingsFormat getPostingsFormatForField(String field) {
-    return defaultFormat;
-  }
-  
-  /** Returns the docvalues format that should be used for writing 
-   *  new segments of <code>field</code>.
-   *  
-   *  The default implementation always returns "Lucene70".
-   *  <p>
-   *  <b>WARNING:</b> if you subclass, you are responsible for index 
-   *  backwards compatibility: future version of Lucene are only 
-   *  guaranteed to be able to read the default implementation. 
-   */
-  public DocValuesFormat getDocValuesFormatForField(String field) {
-    return defaultDVFormat;
-  }
-  
-  @Override
-  public final DocValuesFormat docValuesFormat() {
-    return docValuesFormat;
-  }
-
-  private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene50");
-  private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70");
-
-  private final NormsFormat normsFormat = new Lucene70NormsFormat();
-
-  @Override
-  public final NormsFormat normsFormat() {
-    return normsFormat;
-  }
-}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/java/org/apache/lucene/codecs/lucene80/Lucene80Codec.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene80/Lucene80Codec.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene80/Lucene80Codec.java
new file mode 100644
index 0000000..1905b70
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene80/Lucene80Codec.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.codecs.lucene80;
+
+import java.util.Objects;
+
+import org.apache.lucene.codecs.Codec;
+import org.apache.lucene.codecs.CompoundFormat;
+import org.apache.lucene.codecs.DocValuesFormat;
+import org.apache.lucene.codecs.FieldInfosFormat;
+import org.apache.lucene.codecs.FilterCodec;
+import org.apache.lucene.codecs.LiveDocsFormat;
+import org.apache.lucene.codecs.NormsFormat;
+import org.apache.lucene.codecs.PointsFormat;
+import org.apache.lucene.codecs.PostingsFormat;
+import org.apache.lucene.codecs.SegmentInfoFormat;
+import org.apache.lucene.codecs.StoredFieldsFormat;
+import org.apache.lucene.codecs.TermVectorsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
+import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
+import org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat;
+import org.apache.lucene.codecs.lucene60.Lucene60FieldInfosFormat;
+import org.apache.lucene.codecs.lucene60.Lucene60PointsFormat;
+import org.apache.lucene.codecs.lucene70.Lucene70NormsFormat;
+import org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat;
+import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
+import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
+
+/**
+ * Implements the Lucene 8.0 index format, with configurable per-field postings
+ * and docvalues formats.
+ * <p>
+ * If you want to reuse functionality of this codec in another codec, extend
+ * {@link FilterCodec}.
+ *
+ * @see org.apache.lucene.codecs.lucene80 package documentation for file format details.
+ *
+ * @lucene.experimental
+ */
+public class Lucene80Codec extends Codec {
+  private final TermVectorsFormat vectorsFormat = new Lucene50TermVectorsFormat();
+  private final FieldInfosFormat fieldInfosFormat = new Lucene60FieldInfosFormat();
+  private final SegmentInfoFormat segmentInfosFormat = new Lucene70SegmentInfoFormat();
+  private final LiveDocsFormat liveDocsFormat = new Lucene50LiveDocsFormat();
+  private final CompoundFormat compoundFormat = new Lucene50CompoundFormat();
+  
+  private final PostingsFormat postingsFormat = new PerFieldPostingsFormat() {
+    @Override
+    public PostingsFormat getPostingsFormatForField(String field) {
+      return Lucene80Codec.this.getPostingsFormatForField(field);
+    }
+  };
+  
+  private final DocValuesFormat docValuesFormat = new PerFieldDocValuesFormat() {
+    @Override
+    public DocValuesFormat getDocValuesFormatForField(String field) {
+      return Lucene80Codec.this.getDocValuesFormatForField(field);
+    }
+  };
+  
+  private final StoredFieldsFormat storedFieldsFormat;
+
+  /** 
+   * Instantiates a new codec.
+   */
+  public Lucene80Codec() {
+    this(Mode.BEST_SPEED);
+  }
+  
+  /** 
+   * Instantiates a new codec, specifying the stored fields compression
+   * mode to use.
+   * @param mode stored fields compression mode to use for newly 
+   *             flushed/merged segments.
+   */
+  public Lucene80Codec(Mode mode) {
+    super("Lucene80");
+    this.storedFieldsFormat = new Lucene50StoredFieldsFormat(Objects.requireNonNull(mode));
+  }
+  
+  @Override
+  public final StoredFieldsFormat storedFieldsFormat() {
+    return storedFieldsFormat;
+  }
+  
+  @Override
+  public final TermVectorsFormat termVectorsFormat() {
+    return vectorsFormat;
+  }
+
+  @Override
+  public final PostingsFormat postingsFormat() {
+    return postingsFormat;
+  }
+  
+  @Override
+  public final FieldInfosFormat fieldInfosFormat() {
+    return fieldInfosFormat;
+  }
+  
+  @Override
+  public final SegmentInfoFormat segmentInfoFormat() {
+    return segmentInfosFormat;
+  }
+  
+  @Override
+  public final LiveDocsFormat liveDocsFormat() {
+    return liveDocsFormat;
+  }
+
+  @Override
+  public final CompoundFormat compoundFormat() {
+    return compoundFormat;
+  }
+
+  @Override
+  public final PointsFormat pointsFormat() {
+    return new Lucene60PointsFormat();
+  }
+
+  /** Returns the postings format that should be used for writing 
+   *  new segments of <code>field</code>.
+   *  
+   *  The default implementation always returns "Lucene50".
+   *  <p>
+   *  <b>WARNING:</b> if you subclass, you are responsible for index 
+   *  backwards compatibility: future versions of Lucene are only 
+   *  guaranteed to be able to read the default implementation. 
+   */
+  public PostingsFormat getPostingsFormatForField(String field) {
+    return defaultFormat;
+  }
+  
+  /** Returns the docvalues format that should be used for writing 
+   *  new segments of <code>field</code>.
+   *  
+   *  The default implementation always returns "Lucene70".
+   *  <p>
+   *  <b>WARNING:</b> if you subclass, you are responsible for index 
+   *  backwards compatibility: future versions of Lucene are only 
+   *  guaranteed to be able to read the default implementation. 
+   */
+  public DocValuesFormat getDocValuesFormatForField(String field) {
+    return defaultDVFormat;
+  }
+  
+  @Override
+  public final DocValuesFormat docValuesFormat() {
+    return docValuesFormat;
+  }
+
+  private final PostingsFormat defaultFormat = PostingsFormat.forName("Lucene50");
+  private final DocValuesFormat defaultDVFormat = DocValuesFormat.forName("Lucene70");
+
+  private final NormsFormat normsFormat = new Lucene70NormsFormat();
+
+  @Override
+  public final NormsFormat normsFormat() {
+    return normsFormat;
+  }
+}

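The new codec keeps the per-field hooks, so the CreateIndexTask pattern above carries over. A hedged sketch of routing one field to a named postings format through IndexWriterConfig (field and format names are whatever the caller supplies; per the WARNING in the javadoc, overriding these hooks makes you responsible for backwards compatibility):

import org.apache.lucene.codecs.PostingsFormat;
import org.apache.lucene.codecs.lucene80.Lucene80Codec;
import org.apache.lucene.index.IndexWriterConfig;

public class PerFieldCodecSketch {
  static IndexWriterConfig configWithPostingsFormat(String formatName, String fieldName) {
    final PostingsFormat chosen = PostingsFormat.forName(formatName);
    IndexWriterConfig iwc = new IndexWriterConfig();
    iwc.setCodec(new Lucene80Codec() {
      @Override
      public PostingsFormat getPostingsFormatForField(String field) {
        // Route the chosen field to the requested format, everything else to the default.
        return fieldName.equals(field) ? chosen : super.getPostingsFormatForField(field);
      }
    });
    return iwc;
  }
}
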
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/java/org/apache/lucene/codecs/lucene80/package-info.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/lucene80/package-info.java b/lucene/core/src/java/org/apache/lucene/codecs/lucene80/package-info.java
new file mode 100644
index 0000000..2b8a7e1
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/codecs/lucene80/package-info.java
@@ -0,0 +1,409 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Lucene 8.0 file format.
+ * 
+ * <h1>Apache Lucene - Index File Formats</h1>
+ * <div>
+ * <ul>
+ * <li><a href="#Introduction">Introduction</a></li>
+ * <li><a href="#Definitions">Definitions</a>
+ *   <ul>
+ *   <li><a href="#Inverted_Indexing">Inverted Indexing</a></li>
+ *   <li><a href="#Types_of_Fields">Types of Fields</a></li>
+ *   <li><a href="#Segments">Segments</a></li>
+ *   <li><a href="#Document_Numbers">Document Numbers</a></li>
+ *   </ul>
+ * </li>
+ * <li><a href="#Overview">Index Structure Overview</a></li>
+ * <li><a href="#File_Naming">File Naming</a></li>
+ * <li><a href="#file-names">Summary of File Extensions</a>
+ *   <ul>
+ *   <li><a href="#Lock_File">Lock File</a></li>
+ *   <li><a href="#History">History</a></li>
+ *   <li><a href="#Limitations">Limitations</a></li>
+ *   </ul>
+ * </li>
+ * </ul>
+ * </div>
+ * <a name="Introduction"></a>
+ * <h2>Introduction</h2>
+ * <div>
+ * <p>This document defines the index file formats used in this version of Lucene.
+ * If you are using a different version of Lucene, please consult the copy of
+ * <code>docs/</code> that was distributed with
+ * the version you are using.</p>
+ * <p>This document attempts to provide a high-level definition of the Apache
+ * Lucene file formats.</p>
+ * </div>
+ * <a name="Definitions"></a>
+ * <h2>Definitions</h2>
+ * <div>
+ * <p>The fundamental concepts in Lucene are index, document, field and term.</p>
+ * <p>An index contains a sequence of documents.</p>
+ * <ul>
+ * <li>A document is a sequence of fields.</li>
+ * <li>A field is a named sequence of terms.</li>
+ * <li>A term is a sequence of bytes.</li>
+ * </ul>
+ * <p>The same sequence of bytes in two different fields is considered a different 
+ * term. Thus terms are represented as a pair: the string naming the field, and the
+ * bytes within the field.</p>
+ * <a name="Inverted_Indexing"></a>
+ * <h3>Inverted Indexing</h3>
+ * <p>The index stores statistics about terms in order to make term-based search
+ * more efficient. Lucene's index falls into the family of indexes known as an
+ * <i>inverted index.</i> This is because it can list, for a term, the documents
+ * that contain it. This is the inverse of the natural relationship, in which
+ * documents list terms.</p>
+ * <a name="Types_of_Fields"></a>
+ * <h3>Types of Fields</h3>
+ * <p>In Lucene, fields may be <i>stored</i>, in which case their text is stored
+ * in the index literally, in a non-inverted manner. Fields that are inverted are
+ * called <i>indexed</i>. A field may be both stored and indexed.</p>
+ * <p>The text of a field may be <i>tokenized</i> into terms to be indexed, or the
+ * text of a field may be used literally as a term to be indexed. Most fields are
+ * tokenized, but sometimes it is useful for certain identifier fields to be
+ * indexed literally.</p>
+ * <p>See the {@link org.apache.lucene.document.Field Field}
+ * java docs for more information on Fields.</p>
+ * <a name="Segments"></a>
+ * <h3>Segments</h3>
+ * <p>Lucene indexes may be composed of multiple sub-indexes, or <i>segments</i>.
+ * Each segment is a fully independent index, which could be searched separately.
+ * Indexes evolve by:</p>
+ * <ol>
+ * <li>Creating new segments for newly added documents.</li>
+ * <li>Merging existing segments.</li>
+ * </ol>
+ * <p>Searches may involve multiple segments and/or multiple indexes, each index
+ * potentially composed of a set of segments.</p>
+ * <a name="Document_Numbers"></a>
+ * <h3>Document Numbers</h3>
+ * <p>Internally, Lucene refers to documents by an integer <i>document number</i>.
+ * The first document added to an index is numbered zero, and each subsequent
+ * document added gets a number one greater than the previous.</p>
+ * <p>Note that a document's number may change, so caution should be taken when
+ * storing these numbers outside of Lucene. In particular, numbers may change in
+ * the following situations:</p>
+ * <ul>
+ * <li>
+ * <p>The numbers stored in each segment are unique only within the segment, and
+ * must be converted before they can be used in a larger context. The standard
+ * technique is to allocate each segment a range of values, based on the range of
+ * numbers used in that segment. To convert a document number from a segment to an
+ * external value, the segment's <i>base</i> document number is added. To convert
+ * an external value back to a segment-specific value, the segment is identified
+ * by the range that the external value is in, and the segment's base value is
+ * subtracted. For example two five document segments might be combined, so that
+ * the first segment has a base value of zero, and the second of five. Document
+ * three from the second segment would have an external value of eight.</p>
+ * </li>
+ * <li>
+ * <p>When documents are deleted, gaps are created in the numbering. These are
+ * eventually removed as the index evolves through merging. Deleted documents are
+ * dropped when segments are merged. A freshly-merged segment thus has no gaps in
+ * its numbering.</p>
+ * </li>
+ * </ul>
+ * </div>
+ * <a name="Overview"></a>
+ * <h2>Index Structure Overview</h2>
+ * <div>
+ * <p>Each segment index maintains the following:</p>
+ * <ul>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment info}.
+ *    This contains metadata about a segment, such as the number of documents,
+ *    and what files it uses.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50FieldInfosFormat Field names}. 
+ *    This contains the set of field names used in the index.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Stored Field values}. 
+ * This contains, for each document, a list of attribute-value pairs, where the attributes 
+ * are field names. These are used to store auxiliary information about the document, such as 
+ * its title, url, or an identifier to access a database. The set of stored fields are what is 
+ * returned for each hit when searching. This is keyed by document number.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Term dictionary}. 
+ * A dictionary containing all of the terms used in all of the
+ * indexed fields of all of the documents. The dictionary also contains the number
+ * of documents which contain the term, and pointers to the term's frequency and
+ * proximity data.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Term Frequency data}. 
+ * For each term in the dictionary, the numbers of all the
+ * documents that contain that term, and the frequency of the term in that
+ * document, unless frequencies are omitted (IndexOptions.DOCS_ONLY)
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Term Proximity data}. 
+ * For each term in the dictionary, the positions that the
+ * term occurs in each document. Note that this will not exist if all fields in
+ * all documents omit position data.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene70.Lucene70NormsFormat Normalization factors}. 
+ * For each field in each document, a value is stored
+ * that is multiplied into the score for hits on that field.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vectors}. 
+ * For each field in each document, the term vector (sometimes
+ * called document vector) may be stored. A term vector consists of term text and
+ * term frequency. To add Term Vectors to your index see the 
+ * {@link org.apache.lucene.document.Field Field} constructors
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene70.Lucene70DocValuesFormat Per-document values}. 
+ * Like stored values, these are also keyed by document
+ * number, but are generally intended to be loaded into main memory for fast
+ * access. Whereas stored values are generally intended for summary results from
+ * searches, per-document values are useful for things like scoring factors.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live documents}. 
+ * An optional file indicating which documents are live.
+ * </li>
+ * <li>
+ * {@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}.
+ * Optional pair of files, recording dimensionally indexed fields, to enable fast
+ * numeric range filtering and large numeric values like BigInteger and BigDecimal (1D)
+ * and geographic shape intersection (2D, 3D).
+ * </li>
+ * </ul>
+ * <p>Details on each of these are provided in their linked pages.</p>
+ * </div>
+ * <a name="File_Naming"></a>
+ * <h2>File Naming</h2>
+ * <div>
+ * <p>All files belonging to a segment have the same name with varying extensions.
+ * The extensions correspond to the different file formats described below. When
+ * using the Compound File format (default for small segments) these files (except
+ * for the Segment info file, the Lock file, and Deleted documents file) are collapsed 
+ * into a single .cfs file (see below for details)</p>
+ * <p>Typically, all segments in an index are stored in a single directory,
+ * although this is not required.</p>
+ * <p>File names are never re-used. That is, when any file is saved
+ * to the Directory it is given a never before used filename. This is achieved
+ * using a simple generations approach. For example, the first segments file is
+ * segments_1, then segments_2, etc. The generation is a sequential long integer
+ * represented in alpha-numeric (base 36) form.</p>
+ * </div>
+ * <a name="file-names"></a>
+ * <h2>Summary of File Extensions</h2>
+ * <div>
+ * <p>The following table summarizes the names and extensions of the files in
+ * Lucene:</p>
+ * <table cellspacing="1" cellpadding="4" summary="lucene filenames by extension">
+ * <tr>
+ * <th>Name</th>
+ * <th>Extension</th>
+ * <th>Brief Description</th>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.index.SegmentInfos Segments File}</td>
+ * <td>segments_N</td>
+ * <td>Stores information about a commit point</td>
+ * </tr>
+ * <tr>
+ * <td><a href="#Lock_File">Lock File</a></td>
+ * <td>write.lock</td>
+ * <td>The Write lock prevents multiple IndexWriters from writing to the same
+ * file.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene70.Lucene70SegmentInfoFormat Segment Info}</td>
+ * <td>.si</td>
+ * <td>Stores metadata about a segment</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50CompoundFormat Compound File}</td>
+ * <td>.cfs, .cfe</td>
+ * <td>An optional "virtual" file consisting of all the other index files for
+ * systems that frequently run out of file handles.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50FieldInfosFormat Fields}</td>
+ * <td>.fnm</td>
+ * <td>Stores information about the fields</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Index}</td>
+ * <td>.fdx</td>
+ * <td>Contains pointers to field data</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat Field Data}</td>
+ * <td>.fdt</td>
+ * <td>The stored fields for documents</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Term Dictionary}</td>
+ * <td>.tim</td>
+ * <td>The term dictionary, stores term info</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Term Index}</td>
+ * <td>.tip</td>
+ * <td>The index into the Term Dictionary</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Frequencies}</td>
+ * <td>.doc</td>
+ * <td>Contains the list of docs which contain each term along with frequency</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Positions}</td>
+ * <td>.pos</td>
+ * <td>Stores position information about where a term occurs in the index</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat Payloads}</td>
+ * <td>.pay</td>
+ * <td>Stores additional per-position metadata information such as character offsets and user payloads</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene70.Lucene70NormsFormat Norms}</td>
+ * <td>.nvd, .nvm</td>
+ * <td>Encodes length and boost factors for docs and fields</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene70.Lucene70DocValuesFormat Per-Document Values}</td>
+ * <td>.dvd, .dvm</td>
+ * <td>Encodes additional scoring factors or other per-document information.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Index}</td>
+ * <td>.tvx</td>
+ * <td>Stores offset into the document data file</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50TermVectorsFormat Term Vector Data}</td>
+ * <td>.tvd</td>
+ * <td>Contains term vector data.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene50.Lucene50LiveDocsFormat Live Documents}</td>
+ * <td>.liv</td>
+ * <td>Info about what documents are live</td>
+ * </tr>
+ * <tr>
+ * <td>{@link org.apache.lucene.codecs.lucene60.Lucene60PointsFormat Point values}</td>
+ * <td>.dii, .dim</td>
+ * <td>Holds indexed points, if any</td>
+ * </tr>
+ * </table>
+ * </div>
+ * <a name="Lock_File"></a>
+ * <h2>Lock File</h2>
+ * The write lock, which is stored in the index directory by default, is named
+ * "write.lock". If the lock directory is different from the index directory then
+ * the write lock will be named "XXXX-write.lock" where XXXX is a unique prefix
+ * derived from the full path to the index directory. When this file is present, a
+ * writer is currently modifying the index (adding or removing documents). This
+ * lock file ensures that only one writer is modifying the index at a time.
+ * <a name="History"></a>
+ * <h2>History</h2>
+ * <p>Compatibility notes are provided in this document, describing how file
+ * formats have changed from prior versions:</p>
+ * <ul>
+ * <li>In version 2.1, the file format was changed to allow lock-less commits (ie,
+ * no more commit lock). The change is fully backwards compatible: you can open a
+ * pre-2.1 index for searching or adding/deleting of docs. When the new segments
+ * file is saved (committed), it will be written in the new file format (meaning
+ * no specific "upgrade" process is needed). But note that once a commit has
+ * occurred, pre-2.1 Lucene will not be able to read the index.</li>
+ * <li>In version 2.3, the file format was changed to allow segments to share a
+ * single set of doc store (vectors &amp; stored fields) files. This allows for
+ * faster indexing in certain cases. The change is fully backwards compatible (in
+ * the same way as the lock-less commits change in 2.1).</li>
+ * <li>In version 2.4, Strings are now written as true UTF-8 byte sequence, not
+ * Java's modified UTF-8. See <a href="http://issues.apache.org/jira/browse/LUCENE-510">
+ * LUCENE-510</a> for details.</li>
+ * <li>In version 2.9, an optional opaque Map&lt;String,String&gt; CommitUserData
+ * may be passed to IndexWriter's commit methods (and later retrieved), which is
+ * recorded in the segments_N file. See <a href="http://issues.apache.org/jira/browse/LUCENE-1382">
+ * LUCENE-1382</a> for details. Also,
+ * diagnostics were added to each segment written recording details about why it
+ * was written (due to flush, merge; which OS/JRE was used; etc.). See issue
+ * <a href="http://issues.apache.org/jira/browse/LUCENE-1654">LUCENE-1654</a> for details.</li>
+ * <li>In version 3.0, compressed fields are no longer written to the index (they
+ * can still be read, but on merge the new segment will write them, uncompressed).
+ * See issue <a href="http://issues.apache.org/jira/browse/LUCENE-1960">LUCENE-1960</a> 
+ * for details.</li>
+ * <li>In version 3.1, segments records the code version that created them. See
+ * <a href="http://issues.apache.org/jira/browse/LUCENE-2720">LUCENE-2720</a> for details. 
+ * Additionally segments track explicitly whether or not they have term vectors. 
+ * See <a href="http://issues.apache.org/jira/browse/LUCENE-2811">LUCENE-2811</a> 
+ * for details.</li>
+ * <li>In version 3.2, numeric fields are written natively to the stored fields
+ * file; previously they were stored in text format only.</li>
+ * <li>In version 3.4, fields can omit position data while still indexing term
+ * frequencies.</li>
+ * <li>In version 4.0, the format of the inverted index became extensible via
+ * the {@link org.apache.lucene.codecs.Codec Codec} api. Fast per-document storage
+ * ({@code DocValues}) was introduced. Normalization factors need no longer be a 
+ * single byte, they can be any {@link org.apache.lucene.index.NumericDocValues NumericDocValues}.
+ * Terms need not be unicode strings, they can be any byte sequence. Term offsets 
+ * can optionally be indexed into the postings lists. Payloads can be stored in the 
+ * term vectors.</li>
+ * <li>In version 4.1, the format of the postings list changed to use either
+ * of FOR compression or variable-byte encoding, depending upon the frequency
+ * of the term. Terms appearing only once were changed to inline directly into
+ * the term dictionary. Stored fields are compressed by default. </li>
+ * <li>In version 4.2, term vectors are compressed by default. DocValues has 
+ * a new multi-valued type (SortedSet), that can be used for faceting/grouping/joining
+ * on multi-valued fields.</li>
+ * <li>In version 4.5, DocValues were extended to explicitly represent missing values.</li>
+ * <li>In version 4.6, FieldInfos were extended to support per-field DocValues generation, to 
+ * allow updating NumericDocValues fields.</li>
+ * <li>In version 4.8, checksum footers were added to the end of each index file 
+ * for improved data integrity. Specifically, the last 8 bytes of every index file
+ * contain the zlib-crc32 checksum of the file.</li>
+ * <li>In version 4.9, DocValues has a new multi-valued numeric type (SortedNumeric)
+ * that is suitable for faceting/sorting/analytics.
+ * <li>In version 5.4, DocValues have been improved to store more information on disk:
+ * addresses for binary fields and ord indexes for multi-valued fields.
+ * <li>In version 6.0, Points were added, for multi-dimensional range/distance search.
+ * <li>In version 6.2, new Segment info format that reads/writes the index sort, to support index sorting.
+ * <li>In version 7.0, DocValues have been improved to better support sparse doc values
+ * thanks to an iterator API.</li>
+ * <li>In version 8.0, postings have been enhanced to record, for each block of
+ * doc ids, the (term freq, normalization factor) pairs that may trigger the
+ * maximum score of the block. This information is recorded alongside skip data
+ * in order to be able to skip blocks of doc ids if they may not produce high
+ * enough scores.</li>
+ * </ul>
+ * <a name="Limitations"></a>
+ * <h2>Limitations</h2>
+ * <div>
+ * <p>Lucene uses a Java <code>int</code> to refer to
+ * document numbers, and the index file format uses an <code>Int32</code>
+ * on-disk to store document numbers. This is a limitation
+ * of both the index file format and the current implementation. Eventually these
+ * should be replaced with either <code>UInt64</code> values, or
+ * better yet, {@link org.apache.lucene.store.DataOutput#writeVInt VInt} values which have no limit.</p>
+ * </div>
+ */
+package org.apache.lucene.codecs.lucene80;

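The "Document Numbers" section above describes per-segment numbering plus a per-segment base. A short hedged sketch of how that base-offset scheme surfaces in the reader API (the directory contents are assumed to exist):

import java.io.IOException;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.store.Directory;

public class DocNumberSketch {
  // An index-wide doc id is the leaf's docBase plus the segment-local id.
  static void printGlobalIds(Directory dir) throws IOException {
    try (DirectoryReader reader = DirectoryReader.open(dir)) {
      for (LeafReaderContext leaf : reader.leaves()) {
        for (int local = 0; local < leaf.reader().maxDoc(); local++) {
          System.out.println("segment-local " + local + " -> index-wide " + (leaf.docBase + local));
        }
      }
    }
  }
}
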
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
----------------------------------------------------------------------
diff --git a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
index 773c168..a818e35 100644
--- a/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
+++ b/lucene/core/src/resources/META-INF/services/org.apache.lucene.codecs.Codec
@@ -13,4 +13,4 @@
 #  See the License for the specific language governing permissions and
 #  limitations under the License.
 
-org.apache.lucene.codecs.lucene70.Lucene70Codec
+org.apache.lucene.codecs.lucene80.Lucene80Codec

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
index 909f658..48864e3 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene50/TestLucene50StoredFieldsFormatHighCompression.java
@@ -19,7 +19,7 @@ package org.apache.lucene.codecs.lucene50;
 
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.StoredField;
 import org.apache.lucene.index.BaseStoredFieldsFormatTestCase;
@@ -33,7 +33,7 @@ import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFieldsFormatTestCase {
   @Override
   protected Codec getCodec() {
-    return new Lucene70Codec(Mode.BEST_COMPRESSION);
+    return new Lucene80Codec(Mode.BEST_COMPRESSION);
   }
   
   /**
@@ -44,7 +44,7 @@ public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFie
     Directory dir = newDirectory();
     for (int i = 0; i < 10; i++) {
       IndexWriterConfig iwc = newIndexWriterConfig();
-      iwc.setCodec(new Lucene70Codec(RandomPicks.randomFrom(random(), Mode.values())));
+      iwc.setCodec(new Lucene80Codec(RandomPicks.randomFrom(random(), Mode.values())));
       IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig());
       Document doc = new Document();
       doc.add(new StoredField("field1", "value1"));
@@ -71,7 +71,7 @@ public class TestLucene50StoredFieldsFormatHighCompression extends BaseStoredFie
   
   public void testInvalidOptions() throws Exception {
     expectThrows(NullPointerException.class, () -> {
-      new Lucene70Codec(null);
+      new Lucene80Codec(null);
     });
     
     expectThrows(NullPointerException.class, () -> {

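As the test above exercises, the stored fields compression mode is now picked through the Lucene80Codec constructor. A minimal hedged sketch of opting into BEST_COMPRESSION when configuring a writer:

import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
import org.apache.lucene.codecs.lucene80.Lucene80Codec;
import org.apache.lucene.index.IndexWriterConfig;

public class BestCompressionSketch {
  static IndexWriterConfig bestCompressionConfig() {
    IndexWriterConfig iwc = new IndexWriterConfig();
    // Trades some stored-field read/write speed for smaller .fdt files.
    iwc.setCodec(new Lucene80Codec(Mode.BEST_COMPRESSION));
    return iwc;
  }
}
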
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70NormsFormat.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70NormsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70NormsFormat.java
index cc07cee..f7d7714 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70NormsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene70/TestLucene70NormsFormat.java
@@ -18,14 +18,14 @@ package org.apache.lucene.codecs.lucene70;
 
 
 import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.index.BaseNormsFormatTestCase;
 
 /**
  * Tests Lucene70NormsFormat
  */
 public class TestLucene70NormsFormat extends BaseNormsFormatTestCase {
-  private final Codec codec = new Lucene70Codec();
+  private final Codec codec = new Lucene80Codec();
   
   @Override
   protected Codec getCodec() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
index a12f31c..23e2c91 100644
--- a/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
+++ b/lucene/core/src/test/org/apache/lucene/index/Test2BPoints.java
@@ -135,6 +135,6 @@ public class Test2BPoints extends LuceneTestCase {
   }
 
   private static Codec getCodec() {
-    return Codec.forName("Lucene70");
+    return Codec.forName("Lucene80");
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
index 7228f37..1ef7abc 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestPointValues.java
@@ -393,11 +393,11 @@ public class TestPointValues extends LuceneTestCase {
     dir.close();
   }
 
-  // Write point values, one segment with Lucene70, another with SimpleText, then forceMerge with SimpleText
+  // Write point values, one segment with Lucene80, another with SimpleText, then forceMerge with SimpleText
   public void testDifferentCodecs1() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     IndexWriter w = new IndexWriter(dir, iwc);
     Document doc = new Document();
     doc.add(new IntPoint("int", 1));
@@ -416,7 +416,7 @@ public class TestPointValues extends LuceneTestCase {
     dir.close();
   }
 
-  // Write point values, one segment with Lucene70, another with SimpleText, then forceMerge with Lucene70
+  // Write point values, one segment with Lucene80, another with SimpleText, then forceMerge with Lucene80
   public void testDifferentCodecs2() throws Exception {
     Directory dir = newDirectory();
     IndexWriterConfig iwc = new IndexWriterConfig(new MockAnalyzer(random()));
@@ -428,7 +428,7 @@ public class TestPointValues extends LuceneTestCase {
     w.close();
     
     iwc = new IndexWriterConfig(new MockAnalyzer(random()));
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     w = new IndexWriter(dir, iwc);
     doc = new Document();
     doc.add(new IntPoint("int", 1));

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
index 9478841..19fa917 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestBoolean2.java
@@ -96,7 +96,7 @@ public class TestBoolean2 extends LuceneTestCase {
 
     IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     iwc.setMergePolicy(newLogMergePolicy());
     RandomIndexWriter writer= new RandomIndexWriter(random(), directory, iwc);
     // we'll make a ton of docs, disable store/norms/vectors
@@ -141,7 +141,7 @@ public class TestBoolean2 extends LuceneTestCase {
     iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     // we need docID order to be preserved:
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     iwc.setMergePolicy(newLogMergePolicy());
     try (IndexWriter w = new IndexWriter(singleSegmentDirectory, iwc)) {
       w.forceMerge(1, true);
@@ -167,7 +167,7 @@ public class TestBoolean2 extends LuceneTestCase {
 
       iwc = newIndexWriterConfig(new MockAnalyzer(random()));
       // randomized codecs are sometimes too costly for this test:
-      iwc.setCodec(Codec.forName("Lucene70"));
+      iwc.setCodec(Codec.forName("Lucene80"));
       RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc);
       w.addIndexes(copy);
       copy.close();
@@ -179,7 +179,7 @@ public class TestBoolean2 extends LuceneTestCase {
     iwc = newIndexWriterConfig(new MockAnalyzer(random()));
     iwc.setMaxBufferedDocs(TestUtil.nextInt(random(), 50, 1000));
     // randomized codecs are sometimes too costly for this test:
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     RandomIndexWriter w = new RandomIndexWriter(random(), dir2, iwc);
 
     doc = new Document();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
----------------------------------------------------------------------
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
index 7ef2565..9a8bde34 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestPointQueries.java
@@ -1161,14 +1161,14 @@ public class TestPointQueries extends LuceneTestCase {
   }
 
   private static Codec getCodec() {
-    if (Codec.getDefault().getName().equals("Lucene70")) {
+    if (Codec.getDefault().getName().equals("Lucene80")) {
       int maxPointsInLeafNode = TestUtil.nextInt(random(), 16, 2048);
       double maxMBSortInHeap = 5.0 + (3*random().nextDouble());
       if (VERBOSE) {
         System.out.println("TEST: using Lucene60PointsFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap);
       }
 
-      return new FilterCodec("Lucene70", Codec.getDefault()) {
+      return new FilterCodec("Lucene80", Codec.getDefault()) {
         @Override
         public PointsFormat pointsFormat() {
           return new PointsFormat() {
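
(The hunk above ends inside the anonymous PointsFormat, which this mail truncates. For orientation only, the override that follows in these point tests conventionally has roughly the shape sketched below; the method bodies are assumed from the surrounding test conventions, not taken from this diff, and the same FilterCodec/PointsFormat shape applies to the TestGeo3DPoint and BaseGeoPointTestCase hunks later in this commit.)

  return new PointsFormat() {
    @Override
    public PointsWriter fieldsWriter(SegmentWriteState writeState) throws IOException {
      // honor the randomized BKD parameters chosen above
      return new Lucene60PointsWriter(writeState, maxPointsInLeafNode, maxMBSortInHeap);
    }

    @Override
    public PointsReader fieldsReader(SegmentReadState readState) throws IOException {
      return new Lucene60PointsReader(readState);
    }
  };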

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
index 72e7900..335ad17 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestFloatPointNearestNeighbor.java
@@ -234,7 +234,7 @@ public class TestFloatPointNearestNeighbor extends LuceneTestCase {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     return iwc;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
----------------------------------------------------------------------
diff --git a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
index cb42f22..40c521e 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/search/TestNearest.java
@@ -247,7 +247,7 @@ public class TestNearest extends LuceneTestCase {
 
   private IndexWriterConfig getIndexWriterConfig() {
     IndexWriterConfig iwc = newIndexWriterConfig();
-    iwc.setCodec(Codec.forName("Lucene70"));
+    iwc.setCodec(Codec.forName("Lucene80"));
     return iwc;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
----------------------------------------------------------------------
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
index 7e26dfa..b77fe5a 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
@@ -86,14 +86,14 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 public class TestGeo3DPoint extends LuceneTestCase {
 
   private static Codec getCodec() {
-    if (Codec.getDefault().getName().equals("Lucene70")) {
+    if (Codec.getDefault().getName().equals("Lucene80")) {
       int maxPointsInLeafNode = TestUtil.nextInt(random(), 16, 2048);
       double maxMBSortInHeap = 3.0 + (3*random().nextDouble());
       if (VERBOSE) {
         System.out.println("TEST: using Lucene60PointsFormat with maxPointsInLeafNode=" + maxPointsInLeafNode + " and maxMBSortInHeap=" + maxMBSortInHeap);
       }
 
-      return new FilterCodec("Lucene70", Codec.getDefault()) {
+      return new FilterCodec("Lucene80", Codec.getDefault()) {
         @Override
         public PointsFormat pointsFormat() {
           return new PointsFormat() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
----------------------------------------------------------------------
diff --git a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
index 2bb4f71..4b8cab3 100644
--- a/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
+++ b/lucene/suggest/src/test/org/apache/lucene/search/suggest/document/TestSuggestField.java
@@ -39,7 +39,7 @@ import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
 import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
 import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.PostingsFormat;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.IntPoint;
@@ -884,7 +884,7 @@ public class TestSuggestField extends LuceneTestCase {
   static IndexWriterConfig iwcWithSuggestField(Analyzer analyzer, final Set<String> suggestFields) {
     IndexWriterConfig iwc = newIndexWriterConfig(random(), analyzer);
     iwc.setMergePolicy(newLogMergePolicy());
-    Codec filterCodec = new Lucene70Codec() {
+    Codec filterCodec = new Lucene80Codec() {
       PostingsFormat postingsFormat = new Completion50PostingsFormat();
 
       @Override
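
(The hunk above is cut off inside the anonymous codec. For orientation, the per-field override that follows conventionally looks roughly like this sketch, with the body assumed rather than taken from this diff: declared suggest fields get the completion postings format, everything else falls through to the delegate.)

  Codec filterCodec = new Lucene80Codec() {
    PostingsFormat postingsFormat = new Completion50PostingsFormat();

    @Override
    public PostingsFormat getPostingsFormatForField(String field) {
      // route declared suggest fields to the completion format, delegate the rest
      return suggestFields.contains(field) ? postingsFormat : super.getPostingsFormatForField(field);
    }
  };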

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
index 3334344..f44e858 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/BaseGeoPointTestCase.java
@@ -1252,7 +1252,7 @@ public abstract class BaseGeoPointTestCase extends LuceneTestCase {
     // Else seeds may not reproduce:
     iwc.setMergeScheduler(new SerialMergeScheduler());
     int pointsInLeaf = 2 + random().nextInt(4);
-    iwc.setCodec(new FilterCodec("Lucene70", TestUtil.getDefaultCodec()) {
+    iwc.setCodec(new FilterCodec("Lucene80", TestUtil.getDefaultCodec()) {
       @Override
       public PointsFormat pointsFormat() {
         return new PointsFormat() {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
index 603bd50..87b8f97 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestRuleSetupAndRestoreClassEnv.java
@@ -16,6 +16,17 @@
  */
 package org.apache.lucene.util;
 
+import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM;
+import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC;
+import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT;
+import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT;
+import static org.apache.lucene.util.LuceneTestCase.VERBOSE;
+import static org.apache.lucene.util.LuceneTestCase.assumeFalse;
+import static org.apache.lucene.util.LuceneTestCase.localeForLanguageTag;
+import static org.apache.lucene.util.LuceneTestCase.random;
+import static org.apache.lucene.util.LuceneTestCase.randomLocale;
+import static org.apache.lucene.util.LuceneTestCase.randomTimeZone;
+
 import java.io.PrintStream;
 import java.util.Arrays;
 import java.util.HashSet;
@@ -32,31 +43,20 @@ import org.apache.lucene.codecs.asserting.AssertingPostingsFormat;
 import org.apache.lucene.codecs.cheapbastard.CheapBastardCodec;
 import org.apache.lucene.codecs.compressing.CompressingCodec;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.codecs.mockrandom.MockRandomPostingsFormat;
 import org.apache.lucene.codecs.simpletext.SimpleTextCodec;
 import org.apache.lucene.index.RandomCodec;
 import org.apache.lucene.search.similarities.AssertingSimilarity;
 import org.apache.lucene.search.similarities.RandomSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
+import org.apache.lucene.util.LuceneTestCase.LiveIWCFlushMode;
 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
 import org.junit.internal.AssumptionViolatedException;
 
 import com.carrotsearch.randomizedtesting.RandomizedContext;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 
-import static org.apache.lucene.util.LuceneTestCase.INFOSTREAM;
-import static org.apache.lucene.util.LuceneTestCase.LiveIWCFlushMode;
-import static org.apache.lucene.util.LuceneTestCase.TEST_CODEC;
-import static org.apache.lucene.util.LuceneTestCase.TEST_DOCVALUESFORMAT;
-import static org.apache.lucene.util.LuceneTestCase.TEST_POSTINGSFORMAT;
-import static org.apache.lucene.util.LuceneTestCase.VERBOSE;
-import static org.apache.lucene.util.LuceneTestCase.assumeFalse;
-import static org.apache.lucene.util.LuceneTestCase.localeForLanguageTag;
-import static org.apache.lucene.util.LuceneTestCase.random;
-import static org.apache.lucene.util.LuceneTestCase.randomLocale;
-import static org.apache.lucene.util.LuceneTestCase.randomTimeZone;
-
 /**
  * Setup and restore suite-level environment (fine grained junk that 
  * doesn't fit anywhere else).
@@ -188,8 +188,8 @@ final class TestRuleSetupAndRestoreClassEnv extends AbstractBeforeAfterRule {
       codec = new AssertingCodec();
     } else if ("Compressing".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 6 && !shouldAvoidCodec("Compressing"))) {
       codec = CompressingCodec.randomInstance(random);
-    } else if ("Lucene70".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 5 && !shouldAvoidCodec("Lucene70"))) {
-      codec = new Lucene70Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values()));
+    } else if ("Lucene80".equals(TEST_CODEC) || ("random".equals(TEST_CODEC) && randomVal == 5 && !shouldAvoidCodec("Lucene80"))) {
+      codec = new Lucene80Codec(RandomPicks.randomFrom(random, Lucene50StoredFieldsFormat.Mode.values()));
     } else if (!"random".equals(TEST_CODEC)) {
       codec = Codec.forName(TEST_CODEC);
     } else if ("random".equals(TEST_POSTINGSFORMAT)) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
----------------------------------------------------------------------
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
index b12d7b8..65329b5 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/TestUtil.java
@@ -53,8 +53,8 @@ import org.apache.lucene.codecs.asserting.AssertingCodec;
 import org.apache.lucene.codecs.blockterms.LuceneFixedGap;
 import org.apache.lucene.codecs.blocktreeords.BlockTreeOrdsPostingsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50PostingsFormat;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
 import org.apache.lucene.codecs.lucene70.Lucene70DocValuesFormat;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat;
 import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat;
 import org.apache.lucene.document.BinaryDocValuesField;
@@ -914,7 +914,7 @@ public final class TestUtil {
    * This may be different than {@link Codec#getDefault()} because that is randomized. 
    */
   public static Codec getDefaultCodec() {
-    return new Lucene70Codec();
+    return new Lucene80Codec();
   }
   
   /** 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
index c69770c..5adc161 100644
--- a/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/SchemaCodecFactory.java
@@ -24,7 +24,7 @@ import org.apache.lucene.codecs.Codec;
 import org.apache.lucene.codecs.DocValuesFormat;
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode;
-import org.apache.lucene.codecs.lucene70.Lucene70Codec;
+import org.apache.lucene.codecs.lucene80.Lucene80Codec;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
@@ -91,7 +91,7 @@ public class SchemaCodecFactory extends CodecFactory implements SolrCoreAware {
       compressionMode = SOLR_DEFAULT_COMPRESSION_MODE;
       log.debug("Using default compressionMode: " + compressionMode);
     }
-    codec = new Lucene70Codec(compressionMode) {
+    codec = new Lucene80Codec(compressionMode) {
       @Override
       public PostingsFormat getPostingsFormatForField(String field) {
         final SchemaField schemaField = core.getLatestSchema().getFieldOrNull(field);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/025350ea/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml b/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
index e259740..5c88fcd 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
@@ -19,7 +19,7 @@
   <fieldType name="string_direct" class="solr.StrField" postingsFormat="Direct" docValuesFormat="Direct"/>
   <fieldType name="string_standard" class="solr.StrField" postingsFormat="Lucene50"/>
 
-  <fieldType name="string_disk" class="solr.StrField" docValuesFormat="Lucene70"/>
+  <fieldType name="string_disk" class="solr.StrField" docValuesFormat="Lucene80"/>
 
   <fieldType name="string" class="solr.StrField"/>
 


[09/15] lucene-solr:jira/http2: LUCENE-8461: Fix test failure.

Posted by da...@apache.org.
LUCENE-8461: Fix test failure.
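
(The failure appears to come from the schema_codec.xml change in the codec-rename commit above: the codec name moved to "Lucene80", but no doc values format of that name exists yet, as the TestUtil hunk there still imports Lucene70DocValuesFormat, so the diff below reverts the fieldType's docValuesFormat to the existing "Lucene70" name.)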


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/4368ad72
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/4368ad72
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/4368ad72

Branch: refs/heads/jira/http2
Commit: 4368ad72d2ccbb40583fa7d2e55464c47e341f8b
Parents: 95cb7aa
Author: Adrien Grand <jp...@gmail.com>
Authored: Thu Aug 23 23:04:19 2018 +0200
Committer: Adrien Grand <jp...@gmail.com>
Committed: Thu Aug 23 23:04:19 2018 +0200

----------------------------------------------------------------------
 solr/core/src/test-files/solr/collection1/conf/schema_codec.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/4368ad72/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
----------------------------------------------------------------------
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml b/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
index 5c88fcd..e259740 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema_codec.xml
@@ -19,7 +19,7 @@
   <fieldType name="string_direct" class="solr.StrField" postingsFormat="Direct" docValuesFormat="Direct"/>
   <fieldType name="string_standard" class="solr.StrField" postingsFormat="Lucene50"/>
 
-  <fieldType name="string_disk" class="solr.StrField" docValuesFormat="Lucene80"/>
+  <fieldType name="string_disk" class="solr.StrField" docValuesFormat="Lucene70"/>
 
   <fieldType name="string" class="solr.StrField"/>
 


[10/15] lucene-solr:jira/http2: SOLR-12028: BadApple and AwaitsFix annotations usage

Posted by da...@apache.org.
SOLR-12028: BadApple and AwaitsFix annotations usage


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/aa10cb78
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/aa10cb78
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/aa10cb78

Branch: refs/heads/jira/http2
Commit: aa10cb7802ca2f2e0159a84c180193db43ca7926
Parents: 4368ad7
Author: Erick Erickson <Er...@gmail.com>
Authored: Thu Aug 23 16:59:58 2018 -0700
Committer: Erick Erickson <Er...@gmail.com>
Committed: Thu Aug 23 16:59:58 2018 -0700

----------------------------------------------------------------------
 .../solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java | 2 +-
 .../src/test/org/apache/solr/cloud/DistribCursorPagingTest.java  | 1 +
 .../core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java | 3 ++-
 .../org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java   | 2 +-
 .../org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java | 1 +
 solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java | 2 ++
 .../api/collections/HdfsCollectionsAPIDistributedZkTest.java     | 3 +--
 .../solr/cloud/api/collections/TestHdfsCloudBackupRestore.java   | 3 +--
 .../cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java    | 2 +-
 .../org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java  | 2 +-
 .../src/test/org/apache/solr/handler/TestReplicationHandler.java | 2 ++
 .../src/test/org/apache/solr/schema/SchemaApiFailureTest.java    | 1 +
 solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java | 2 ++
 .../apache/solr/client/solrj/io/stream/StreamDecoratorTest.java  | 4 ++--
 .../apache/solr/common/cloud/TestCloudCollectionsListeners.java  | 1 +
 15 files changed, 20 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
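
(For readers skimming the diffs, the annotation being toggled throughout this commit is the stock LuceneTestCase one; a minimal sketch of how it is applied, with the class name and test body purely illustrative:)

  import org.apache.lucene.util.LuceneTestCase;
  import org.apache.solr.SolrTestCaseJ4;
  import org.junit.Test;

  public class ExampleFlakyTest extends SolrTestCaseJ4 {
    @Test
    @LuceneTestCase.BadApple(bugUrl = "https://issues.apache.org/jira/browse/SOLR-12028") // ignored when the runner disables bad apples
    public void testSomething() throws Exception {
      // test body elided
    }
  }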


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
index 7e08f0e..0e9cd33 100644
--- a/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
+++ b/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
@@ -143,7 +143,7 @@ public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTe
     super.tearDown();
   }
 
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+  //commented 23-AUG-2018  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void testFullImport() {
     assertQ(req("*:*"), "//result[@numFound='0']");
     

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
index 5de766f..46f77c0 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
@@ -76,6 +76,7 @@ public class DistribCursorPagingTest extends AbstractFullDistribZkTestBase {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void test() throws Exception {
     boolean testFinished = false;
     try {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
index f063b54..4786671 100644
--- a/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/MoveReplicaHDFSTest.java
@@ -67,7 +67,8 @@ public class MoveReplicaHDFSTest extends MoveReplicaTest {
   @Test
   //2018-06-18 (commented) @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 21-May-2018
   //commented 9-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018
+  //commented 23-AUG-2018  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 17-Aug-2018
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testNormalFailedMove() throws Exception {
     inPlaceMove = false;
     testFailedMove();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
index 64c1fc8..4fac532 100644
--- a/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/SharedFSAutoReplicaFailoverTest.java
@@ -140,7 +140,7 @@ public class SharedFSAutoReplicaFailoverTest extends AbstractFullDistribZkTestBa
   @Test
   @ShardsFixed(num = 4)
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+  //commented 23-AUG-2018  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void test() throws Exception {
     try {
       // to keep uncommitted docs during failover

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java
index 7ba11cf..c6aac78 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestSolrCloudWithDelegationTokens.java
@@ -327,6 +327,7 @@ public class TestSolrCloudWithDelegationTokens extends SolrTestCaseJ4 {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testDelegationTokenRenew() throws Exception {
     // test with specifying renewer
     verifyDelegationTokenRenew("bar", "bar");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java b/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java
index 01753e0..1ed985a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestWithCollection.java
@@ -175,6 +175,7 @@ public class TestWithCollection extends SolrCloudTestCase {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testDeleteWithCollection() throws IOException, SolrServerException, InterruptedException {
     String prefix = "testDeleteWithCollection";
     String xyz = prefix + "_xyz";
@@ -346,6 +347,7 @@ public class TestWithCollection extends SolrCloudTestCase {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testMoveReplicaWithCollection() throws Exception {
     String prefix = "testMoveReplicaWithCollection";
     String xyz = prefix + "_xyz";

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCollectionsAPIDistributedZkTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCollectionsAPIDistributedZkTest.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCollectionsAPIDistributedZkTest.java
index aeb97db..ed962ec 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCollectionsAPIDistributedZkTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/HdfsCollectionsAPIDistributedZkTest.java
@@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Metric;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrServerException;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
@@ -54,7 +53,7 @@ import org.junit.Test;
 @ThreadLeakFilters(defaultFilters = true, filters = {
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
+//commented 23-AUG-2018  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
 public class HdfsCollectionsAPIDistributedZkTest extends CollectionsAPIDistributedZkTest {
 
   private static MiniDFSCluster dfsCluster;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
index 8af397f..2700419 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestHdfsCloudBackupRestore.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
-import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.client.solrj.impl.CloudSolrClient;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
@@ -63,7 +62,7 @@ import static org.apache.solr.core.backup.BackupManager.ZK_STATE_DIR;
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 //05-Jul-2018  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 04-May-2018
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
+//commented 23-AUG-2018  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
 public class TestHdfsCloudBackupRestore extends AbstractCloudBackupRestoreTestCase {
   public static final String SOLR_XML = "<solr>\n" +
       "\n" +

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java
index 260f39e..cedf713 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/HdfsAutoAddReplicasIntegrationTest.java
@@ -32,7 +32,7 @@ import org.junit.BeforeClass;
     BadHdfsThreadsFilter.class, // hdfs currently leaks thread(s)
     MoveReplicaHDFSTest.ForkJoinThreadsFilter.class
 })
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+//commented 23-AUG-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
 public class HdfsAutoAddReplicasIntegrationTest extends AutoAddReplicasIntegrationTest {
 
   private static MiniDFSCluster dfsCluster;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java
index 215c12b..bff2f5a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/ScheduledTriggerTest.java
@@ -57,7 +57,7 @@ public class ScheduledTriggerTest extends SolrCloudTestCase {
 
   @Test
 //2018-06-18 (commented)   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 09-Apr-2018
-  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+//commented 23-AUG-2018   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void testTrigger() throws Exception {
     CoreContainer container = cluster.getJettySolrRunners().get(0).getCoreContainer();
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
index 07c7b8f..28ed3d1 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
@@ -42,6 +42,7 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.store.FSDirectory;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.BaseDistributedSearchTestCase;
@@ -87,6 +88,7 @@ import org.slf4j.LoggerFactory;
 @Slow
 @SuppressSSL     // Currently unknown why SSL does not work with this test
 // commented 20-July-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 12-Jun-2018
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
 public class TestReplicationHandler extends SolrTestCaseJ4 {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java b/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java
index cf19d96..bc865ad 100644
--- a/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java
+++ b/solr/core/src/test/org/apache/solr/schema/SchemaApiFailureTest.java
@@ -45,6 +45,7 @@ public class SchemaApiFailureTest extends SolrCloudTestCase {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testAddTheSameFieldTwice() throws Exception {
     CloudSolrClient client = cluster.getSolrClient();
     SchemaRequest.Update fieldAddition = new SchemaRequest.AddField

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java b/solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java
index a1f80ef..100b5f4 100644
--- a/solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java
+++ b/solr/core/src/test/org/apache/solr/update/TestHdfsUpdateLog.java
@@ -23,6 +23,7 @@ import java.net.URISyntaxException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.SolrTestCaseJ4.SuppressObjectReleaseTracker;
 import org.apache.solr.cloud.hdfs.HdfsTestUtil;
@@ -39,6 +40,7 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters;
     BadHdfsThreadsFilter.class // hdfs currently leaks thread(s)
 })
 @SuppressObjectReleaseTracker(bugUrl = "https://issues.apache.org/jira/browse/SOLR-7115")
+@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
 public class TestHdfsUpdateLog extends SolrTestCaseJ4 {
   
   private static MiniDFSCluster dfsCluster;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
index ba050ff..ef5729d 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
@@ -66,7 +66,7 @@ import org.junit.Test;
 
 @Slow
 @LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
-@LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+//commented 23-AUG-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
 public class StreamDecoratorTest extends SolrCloudTestCase {
 
   private static final String COLLECTIONORALIAS = "collection1";
@@ -106,7 +106,7 @@ public class StreamDecoratorTest extends SolrCloudTestCase {
   }
 
   @Test
-  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
+  //commented 23-AUG-2018  @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 2-Aug-2018
   public void testUniqueStream() throws Exception {
 
     new UpdateRequest()

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/aa10cb78/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java
----------------------------------------------------------------------
diff --git a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java
index 32f6146..7006cd8 100644
--- a/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java
+++ b/solr/solrj/src/test/org/apache/solr/common/cloud/TestCloudCollectionsListeners.java
@@ -129,6 +129,7 @@ public class TestCloudCollectionsListeners extends SolrCloudTestCase {
   }
 
   @Test
+  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 23-Aug-2018
   public void testCollectionDeletion() throws Exception {
 
     CloudSolrClient client = cluster.getSolrClient();


[04/15] lucene-solr:jira/http2: SOLR-12690: Regularize LoggerFactory declarations

Posted by da...@apache.org.
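
(The convention being regularized, as applied to RecoveryStrategy below, is a lowercase, reflection-initialized logger field used with parameterized messages; a minimal sketch, with the class name and log message illustrative:)

  import java.lang.invoke.MethodHandles;

  import org.slf4j.Logger;
  import org.slf4j.LoggerFactory;

  public class SomeSolrComponent {
    private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    void recoverExample(String coreName) {
      // lowercase field name plus {} placeholders instead of string concatenation
      log.info("Starting recovery for core [{}]", coreName);
    }
  }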
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
index f361324..24c53bd 100644
--- a/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
+++ b/solr/core/src/java/org/apache/solr/cloud/RecoveryStrategy.java
@@ -96,7 +96,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
     }
   }
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private int waitForUpdatesWithStaleStatePauseMilliSeconds = Integer.getInteger("solr.cloud.wait-for-updates-with-stale-state-pause", 2500);
   private int maxRetries = 500;
@@ -171,13 +171,13 @@ public class RecoveryStrategy implements Runnable, Closeable {
     if (prevSendPreRecoveryHttpUriRequest != null) {
       prevSendPreRecoveryHttpUriRequest.abort();
     }
-    LOG.warn("Stopping recovery for core=[{}] coreNodeName=[{}]", coreName, coreZkNodeName);
+    log.warn("Stopping recovery for core=[{}] coreNodeName=[{}]", coreName, coreZkNodeName);
   }
 
   final private void recoveryFailed(final SolrCore core,
       final ZkController zkController, final String baseUrl,
       final String shardZkNodeName, final CoreDescriptor cd) throws Exception {
-    SolrException.log(LOG, "Recovery failed - I give up.");
+    SolrException.log(log, "Recovery failed - I give up.");
     try {
       zkController.publish(cd, Replica.State.RECOVERY_FAILED);
     } finally {
@@ -200,7 +200,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
 
     final String leaderUrl = getReplicateLeaderUrl(leaderprops);
     
-    LOG.info("Attempting to replicate from [{}].", leaderUrl);
+    log.info("Attempting to replicate from [{}].", leaderUrl);
     
     // send commit
     commitOnLeader(leaderUrl);
@@ -231,14 +231,14 @@ public class RecoveryStrategy implements Runnable, Closeable {
     }
     
     // solrcloud_debug
-    if (LOG.isDebugEnabled()) {
+    if (log.isDebugEnabled()) {
       try {
         RefCounted<SolrIndexSearcher> searchHolder = core
             .getNewestSearcher(false);
         SolrIndexSearcher searcher = searchHolder.get();
         Directory dir = core.getDirectoryFactory().get(core.getIndexDir(), DirContext.META_DATA, null);
         try {
-          LOG.debug(core.getCoreContainer()
+          log.debug(core.getCoreContainer()
               .getZkController().getNodeName()
               + " replicated "
               + searcher.count(new MatchAllDocsQuery())
@@ -255,7 +255,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
           searchHolder.decref();
         }
       } catch (Exception e) {
-        LOG.debug("Error in solrcloud_debug block", e);
+        log.debug("Error in solrcloud_debug block", e);
       }
     }
 
@@ -283,21 +283,21 @@ public class RecoveryStrategy implements Runnable, Closeable {
     try (SolrCore core = cc.getCore(coreName)) {
 
       if (core == null) {
-        SolrException.log(LOG, "SolrCore not found - cannot recover:" + coreName);
+        SolrException.log(log, "SolrCore not found - cannot recover:" + coreName);
         return;
       }
       MDCLoggingContext.setCore(core);
 
-      LOG.info("Starting recovery process. recoveringAfterStartup=" + recoveringAfterStartup);
+      log.info("Starting recovery process. recoveringAfterStartup=" + recoveringAfterStartup);
 
       try {
         doRecovery(core);
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
-        SolrException.log(LOG, "", e);
+        SolrException.log(log, "", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
       } catch (Exception e) {
-        LOG.error("", e);
+        log.error("", e);
         throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR, "", e);
       }
     } finally {
@@ -317,7 +317,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
     boolean successfulRecovery = false;
 
 //  if (core.getUpdateHandler().getUpdateLog() != null) {
-//    SolrException.log(LOG, "'replicate-only' recovery strategy should only be used if no update logs are present, but this core has one: "
+//    SolrException.log(log, "'replicate-only' recovery strategy should only be used if no update logs are present, but this core has one: "
 //        + core.getUpdateHandler().getUpdateLog());
 //    return;
 //  }
@@ -340,50 +340,50 @@ public class RecoveryStrategy implements Runnable, Closeable {
       if (cloudDesc.isLeader()) {
         assert cloudDesc.getReplicaType() != Replica.Type.PULL;
         // we are now the leader - no one else must have been suitable
-        LOG.warn("We have not yet recovered - but we are now the leader!");
-        LOG.info("Finished recovery process.");
+        log.warn("We have not yet recovered - but we are now the leader!");
+        log.info("Finished recovery process.");
         zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
         return;
       }
       
       
-      LOG.info("Publishing state of core [{}] as recovering, leader is [{}] and I am [{}]", core.getName(), leaderUrl,
+      log.info("Publishing state of core [{}] as recovering, leader is [{}] and I am [{}]", core.getName(), leaderUrl,
           ourUrl);
       zkController.publish(core.getCoreDescriptor(), Replica.State.RECOVERING);
       
       if (isClosed()) {
-        LOG.info("Recovery for core {} has been closed", core.getName());
+        log.info("Recovery for core {} has been closed", core.getName());
         break;
       }
-      LOG.info("Starting Replication Recovery.");
+      log.info("Starting Replication Recovery.");
 
       try {
-        LOG.info("Stopping background replicate from leader process");
+        log.info("Stopping background replicate from leader process");
         zkController.stopReplicationFromLeader(coreName);
         replicate(zkController.getNodeName(), core, leaderprops);
 
         if (isClosed()) {
-          LOG.info("Recovery for core {} has been closed", core.getName());
+          log.info("Recovery for core {} has been closed", core.getName());
           break;
         }
 
-        LOG.info("Replication Recovery was successful.");
+        log.info("Replication Recovery was successful.");
         successfulRecovery = true;
       } catch (Exception e) {
-        SolrException.log(LOG, "Error while trying to recover", e);
+        SolrException.log(log, "Error while trying to recover", e);
       }
 
     } catch (Exception e) {
-      SolrException.log(LOG, "Error while trying to recover. core=" + coreName, e);
+      SolrException.log(log, "Error while trying to recover. core=" + coreName, e);
     } finally {
       if (successfulRecovery) {
-        LOG.info("Restaring background replicate from leader process");
+        log.info("Restaring background replicate from leader process");
         zkController.startReplicationFromLeader(coreName, false);
-        LOG.info("Registering as Active after recovery.");
+        log.info("Registering as Active after recovery.");
         try {
           zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
         } catch (Exception e) {
-          LOG.error("Could not publish as ACTIVE after succesful recovery", e);
+          log.error("Could not publish as ACTIVE after succesful recovery", e);
           successfulRecovery = false;
         }
         
@@ -401,24 +401,24 @@ public class RecoveryStrategy implements Runnable, Closeable {
       try {
 
         if (isClosed()) {
-          LOG.info("Recovery for core {} has been closed", core.getName());
+          log.info("Recovery for core {} has been closed", core.getName());
           break;
         }
         
-        LOG.error("Recovery failed - trying again... (" + retries + ")");
+        log.error("Recovery failed - trying again... (" + retries + ")");
         
         retries++;
         if (retries >= maxRetries) {
-          SolrException.log(LOG, "Recovery failed - max retries exceeded (" + retries + ").");
+          SolrException.log(log, "Recovery failed - max retries exceeded (" + retries + ").");
           try {
             recoveryFailed(core, zkController, baseUrl, coreZkNodeName, core.getCoreDescriptor());
           } catch (Exception e) {
-            SolrException.log(LOG, "Could not publish that recovery failed", e);
+            SolrException.log(log, "Could not publish that recovery failed", e);
           }
           break;
         }
       } catch (Exception e) {
-        SolrException.log(LOG, "An error has occurred during recovery", e);
+        SolrException.log(log, "An error has occurred during recovery", e);
       }
 
       try {
@@ -427,25 +427,25 @@ public class RecoveryStrategy implements Runnable, Closeable {
         // will always be the minimum of the two (12). Since we sleep at 5 seconds sub-intervals in
         // order to check if we were closed, 12 is chosen as the maximum loopCount (5s * 12 = 1m).
         int loopCount = retries < 4 ? (int) Math.min(Math.pow(2, retries), 12) : 12;
-        LOG.info("Wait [{}] seconds before trying to recover again (attempt={})",
+        log.info("Wait [{}] seconds before trying to recover again (attempt={})",
             TimeUnit.MILLISECONDS.toSeconds(loopCount * startingRecoveryDelayMilliSeconds), retries);
         for (int i = 0; i < loopCount; i++) {
           if (isClosed()) {
-            LOG.info("Recovery for core {} has been closed", core.getName());
+            log.info("Recovery for core {} has been closed", core.getName());
             break; // check if someone closed us
           }
           Thread.sleep(startingRecoveryDelayMilliSeconds);
         }
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
-        LOG.warn("Recovery was interrupted.", e);
+        log.warn("Recovery was interrupted.", e);
         close = true;
       }
     }
 
   }
   // We skip core.seedVersionBuckets(); We don't have a transaction log
-  LOG.info("Finished recovery process, successful=[{}]", Boolean.toString(successfulRecovery));
+  log.info("Finished recovery process, successful=[{}]", Boolean.toString(successfulRecovery));
 }
 
   // TODO: perhaps make this grab a new core each time through the loop to handle core reloads?
@@ -455,7 +455,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
     UpdateLog ulog;
     ulog = core.getUpdateHandler().getUpdateLog();
     if (ulog == null) {
-      SolrException.log(LOG, "No UpdateLog found - cannot recover.");
+      SolrException.log(log, "No UpdateLog found - cannot recover.");
       recoveryFailed(core, zkController, baseUrl, coreZkNodeName,
           core.getCoreDescriptor());
       return;
@@ -468,7 +468,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
     try (UpdateLog.RecentUpdates recentUpdates = ulog.getRecentUpdates()) {
       recentVersions = recentUpdates.getVersions(ulog.getNumRecordsToKeep());
     } catch (Exception e) {
-      SolrException.log(LOG, "Corrupt tlog - ignoring.", e);
+      SolrException.log(log, "Corrupt tlog - ignoring.", e);
       recentVersions = new ArrayList<>(0);
     }
 
@@ -484,13 +484,13 @@ public class RecoveryStrategy implements Runnable, Closeable {
         }
         
         if (oldIdx > 0) {
-          LOG.info("####### Found new versions added after startup: num=[{}]", oldIdx);
-          LOG.info("###### currentVersions=[{}]",recentVersions);
+          log.info("####### Found new versions added after startup: num=[{}]", oldIdx);
+          log.info("###### currentVersions=[{}]",recentVersions);
         }
         
-        LOG.info("###### startupVersions=[{}]", startingVersions);
+        log.info("###### startupVersions=[{}]", startingVersions);
       } catch (Exception e) {
-        SolrException.log(LOG, "Error getting recent versions.", e);
+        SolrException.log(log, "Error getting recent versions.", e);
         recentVersions = new ArrayList<>(0);
       }
     }
@@ -504,11 +504,11 @@ public class RecoveryStrategy implements Runnable, Closeable {
           // this means we were previously doing a full index replication
           // that probably didn't complete and buffering updates in the
           // meantime.
-          LOG.info("Looks like a previous replication recovery did not complete - skipping peer sync.");
+          log.info("Looks like a previous replication recovery did not complete - skipping peer sync.");
           firstTime = false; // skip peersync
         }
       } catch (Exception e) {
-        SolrException.log(LOG, "Error trying to get ulog starting operation.", e);
+        SolrException.log(log, "Error trying to get ulog starting operation.", e);
         firstTime = false; // skip peersync
       }
     }
@@ -524,7 +524,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
         CloudDescriptor cloudDesc = core.getCoreDescriptor().getCloudDescriptor();
         final Replica leader = pingLeader(ourUrl, core.getCoreDescriptor(), true);
         if (isClosed()) {
-          LOG.info("RecoveryStrategy has been closed");
+          log.info("RecoveryStrategy has been closed");
           break;
         }
 
@@ -534,17 +534,17 @@ public class RecoveryStrategy implements Runnable, Closeable {
         }
         if (cloudDesc.isLeader()) {
           // we are now the leader - no one else must have been suitable
-          LOG.warn("We have not yet recovered - but we are now the leader!");
-          LOG.info("Finished recovery process.");
+          log.warn("We have not yet recovered - but we are now the leader!");
+          log.info("Finished recovery process.");
           zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
           return;
         }
 
-        LOG.info("Begin buffering updates. core=[{}]", coreName);
+        log.info("Begin buffering updates. core=[{}]", coreName);
         // recalling buffer updates will drop the old buffer tlog
         ulog.bufferUpdates();
 
-        LOG.info("Publishing state of core [{}] as recovering, leader is [{}] and I am [{}]", core.getName(), leader.getCoreUrl(),
+        log.info("Publishing state of core [{}] as recovering, leader is [{}] and I am [{}]", core.getName(), leader.getCoreUrl(),
             ourUrl);
         zkController.publish(core.getCoreDescriptor(), Replica.State.RECOVERING);
         
@@ -559,14 +559,14 @@ public class RecoveryStrategy implements Runnable, Closeable {
         }
         
         if (isClosed()) {
-          LOG.info("RecoveryStrategy has been closed");
+          log.info("RecoveryStrategy has been closed");
           break;
         }
 
         sendPrepRecoveryCmd(leader.getBaseUrl(), leader.getCoreName(), slice);
         
         if (isClosed()) {
-          LOG.info("RecoveryStrategy has been closed");
+          log.info("RecoveryStrategy has been closed");
           break;
         }
         
@@ -584,7 +584,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
         // first thing we just try to sync
         if (firstTime) {
           firstTime = false; // only try sync the first time through the loop
-          LOG.info("Attempting to PeerSync from [{}] - recoveringAfterStartup=[{}]", leader.getCoreUrl(), recoveringAfterStartup);
+          log.info("Attempting to PeerSync from [{}] - recoveringAfterStartup=[{}]", leader.getCoreUrl(), recoveringAfterStartup);
           // System.out.println("Attempting to PeerSync from " + leaderUrl
           // + " i am:" + zkController.getNodeName());
           PeerSyncWithLeader peerSyncWithLeader = new PeerSyncWithLeader(core,
@@ -596,12 +596,12 @@ public class RecoveryStrategy implements Runnable, Closeable {
             // force open a new searcher
             core.getUpdateHandler().commit(new CommitUpdateCommand(req, false));
             req.close();
-            LOG.info("PeerSync stage of recovery was successful.");
+            log.info("PeerSync stage of recovery was successful.");
 
             // solrcloud_debug
             cloudDebugLog(core, "synced");
             
-            LOG.info("Replaying updates buffered during PeerSync.");
+            log.info("Replaying updates buffered during PeerSync.");
             replay(core);
 
             // sync success
@@ -609,54 +609,54 @@ public class RecoveryStrategy implements Runnable, Closeable {
             return;
           }
 
-          LOG.info("PeerSync Recovery was not successful - trying replication.");
+          log.info("PeerSync Recovery was not successful - trying replication.");
         }
 
         if (isClosed()) {
-          LOG.info("RecoveryStrategy has been closed");
+          log.info("RecoveryStrategy has been closed");
           break;
         }
         
-        LOG.info("Starting Replication Recovery.");
+        log.info("Starting Replication Recovery.");
 
         try {
 
           replicate(zkController.getNodeName(), core, leader);
 
           if (isClosed()) {
-            LOG.info("RecoveryStrategy has been closed");
+            log.info("RecoveryStrategy has been closed");
             break;
           }
 
           replayFuture = replay(core);
 
           if (isClosed()) {
-            LOG.info("RecoveryStrategy has been closed");
+            log.info("RecoveryStrategy has been closed");
             break;
           }
 
-          LOG.info("Replication Recovery was successful.");
+          log.info("Replication Recovery was successful.");
           successfulRecovery = true;
         } catch (InterruptedException e) {
           Thread.currentThread().interrupt();
-          LOG.warn("Recovery was interrupted", e);
+          log.warn("Recovery was interrupted", e);
           close = true;
         } catch (Exception e) {
-          SolrException.log(LOG, "Error while trying to recover", e);
+          SolrException.log(log, "Error while trying to recover", e);
         }
 
       } catch (Exception e) {
-        SolrException.log(LOG, "Error while trying to recover. core=" + coreName, e);
+        SolrException.log(log, "Error while trying to recover. core=" + coreName, e);
       } finally {
         if (successfulRecovery) {
-          LOG.info("Registering as Active after recovery.");
+          log.info("Registering as Active after recovery.");
           try {
             if (replicaType == Replica.Type.TLOG) {
               zkController.startReplicationFromLeader(coreName, true);
             }
             zkController.publish(core.getCoreDescriptor(), Replica.State.ACTIVE);
           } catch (Exception e) {
-            LOG.error("Could not publish as ACTIVE after succesful recovery", e);
+            log.error("Could not publish as ACTIVE after succesful recovery", e);
             successfulRecovery = false;
           }
           
@@ -674,24 +674,24 @@ public class RecoveryStrategy implements Runnable, Closeable {
         try {
 
           if (isClosed()) {
-            LOG.info("RecoveryStrategy has been closed");
+            log.info("RecoveryStrategy has been closed");
             break;
           }
           
-          LOG.error("Recovery failed - trying again... (" + retries + ")");
+          log.error("Recovery failed - trying again... (" + retries + ")");
           
           retries++;
           if (retries >= maxRetries) {
-            SolrException.log(LOG, "Recovery failed - max retries exceeded (" + retries + ").");
+            SolrException.log(log, "Recovery failed - max retries exceeded (" + retries + ").");
             try {
               recoveryFailed(core, zkController, baseUrl, coreZkNodeName, core.getCoreDescriptor());
             } catch (Exception e) {
-              SolrException.log(LOG, "Could not publish that recovery failed", e);
+              SolrException.log(log, "Could not publish that recovery failed", e);
             }
             break;
           }
         } catch (Exception e) {
-          SolrException.log(LOG, "An error has occurred during recovery", e);
+          SolrException.log(log, "An error has occurred during recovery", e);
         }
 
         try {
@@ -700,17 +700,17 @@ public class RecoveryStrategy implements Runnable, Closeable {
           // will always be the minimum of the two (12). Since we sleep at 5 seconds sub-intervals in
           // order to check if we were closed, 12 is chosen as the maximum loopCount (5s * 12 = 1m).
           double loopCount = retries < 4 ? Math.min(Math.pow(2, retries), 12) : 12;
-          LOG.info("Wait [{}] seconds before trying to recover again (attempt={})", loopCount, retries);
+          log.info("Wait [{}] seconds before trying to recover again (attempt={})", loopCount, retries);
           for (int i = 0; i < loopCount; i++) {
             if (isClosed()) {
-              LOG.info("RecoveryStrategy has been closed");
+              log.info("RecoveryStrategy has been closed");
               break; // check if someone closed us
             }
             Thread.sleep(startingRecoveryDelayMilliSeconds);
           }
         } catch (InterruptedException e) {
           Thread.currentThread().interrupt();
-          LOG.warn("Recovery was interrupted.", e);
+          log.warn("Recovery was interrupted.", e);
           close = true;
         }
       }
@@ -720,11 +720,11 @@ public class RecoveryStrategy implements Runnable, Closeable {
     // if replay was skipped (possibly to due pulling a full index from the leader),
     // then we still need to update version bucket seeds after recovery
     if (successfulRecovery && replayFuture == null) {
-      LOG.info("Updating version bucket highest from index after successful recovery.");
+      log.info("Updating version bucket highest from index after successful recovery.");
       core.seedVersionBuckets();
     }
 
-    LOG.info("Finished recovery process, successful=[{}]", Boolean.toString(successfulRecovery));
+    log.info("Finished recovery process, successful=[{}]", Boolean.toString(successfulRecovery));
   }
 
   private final Replica pingLeader(String ourUrl, CoreDescriptor coreDesc, boolean mayPutReplicaAsDown) throws Exception {
@@ -763,11 +763,11 @@ public class RecoveryStrategy implements Runnable, Closeable {
         SolrPingResponse resp = httpSolrClient.ping();
         return leaderReplica;
       } catch (IOException e) {
-        LOG.info("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
+        log.info("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
         Thread.sleep(500);
       } catch (Exception e) {
         if (e.getCause() instanceof IOException) {
-          LOG.info("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
+          log.info("Failed to connect leader {} on recovery, try again", leaderReplica.getBaseUrl());
           Thread.sleep(500);
         } else {
           return leaderReplica;
@@ -794,13 +794,13 @@ public class RecoveryStrategy implements Runnable, Closeable {
     Future<RecoveryInfo> future = core.getUpdateHandler().getUpdateLog().applyBufferedUpdates();
     if (future == null) {
       // no replay needed\
-      LOG.info("No replay needed.");
+      log.info("No replay needed.");
     } else {
-      LOG.info("Replaying buffered documents.");
+      log.info("Replaying buffered documents.");
       // wait for replay
       RecoveryInfo report = future.get();
       if (report.failed) {
-        SolrException.log(LOG, "Replay failed");
+        SolrException.log(log, "Replay failed");
         throw new SolrException(ErrorCode.SERVER_ERROR, "Replay failed");
       }
     }
@@ -815,7 +815,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
   }
   
   final private void cloudDebugLog(SolrCore core, String op) {
-    if (!LOG.isDebugEnabled()) {
+    if (!log.isDebugEnabled()) {
       return;
     }
     try {
@@ -824,12 +824,12 @@ public class RecoveryStrategy implements Runnable, Closeable {
       try {
         final int totalHits = searcher.count(new MatchAllDocsQuery());
         final String nodeName = core.getCoreContainer().getZkController().getNodeName();
-        LOG.debug("[{}] {} [{} total hits]", nodeName, op, totalHits);
+        log.debug("[{}] {} [{} total hits]", nodeName, op, totalHits);
       } finally {
         searchHolder.decref();
       }
     } catch (Exception e) {
-      LOG.debug("Error in solrcloud_debug block", e);
+      log.debug("Error in solrcloud_debug block", e);
     }
   }
 
@@ -861,7 +861,7 @@ public class RecoveryStrategy implements Runnable, Closeable {
       HttpUriRequestResponse mrr = client.httpUriRequest(prepCmd);
       prevSendPreRecoveryHttpUriRequest = mrr.httpUriRequest;
 
-      LOG.info("Sending prep recovery command to [{}]; [{}]", leaderBaseUrl, prepCmd.toString());
+      log.info("Sending prep recovery command to [{}]; [{}]", leaderBaseUrl, prepCmd.toString());
 
       mrr.future.get();
     }
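
The wait-loop comment in the RecoveryStrategy hunk above describes a capped exponential backoff: for the first few attempts the number of 5-second sub-sleeps doubles each retry, and from the fourth retry on it is pinned at 12 (5s * 12 = about one minute per retry). Purely as an illustration of that arithmetic, and not Solr's actual RecoveryStrategy code, a minimal standalone sketch assuming a 5000 ms sub-interval:

    public class RecoveryBackoffSketch {
      // Assumed sub-sleep interval, mirroring the 5s mentioned in the comment above.
      private static final long SLEEP_INTERVAL_MS = 5000;

      // Number of 5s sub-sleeps for a given retry count: 2^retries, capped at 12.
      static long loopCount(int retries) {
        return retries < 4 ? (long) Math.min(Math.pow(2, retries), 12) : 12;
      }

      public static void main(String[] args) {
        for (int retries = 0; retries < 6; retries++) {
          System.out.println("attempt=" + retries + " waits ~"
              + (loopCount(retries) * SLEEP_INTERVAL_MS / 1000) + "s before retrying");
        }
      }
    }

Running it prints waits of roughly 5s, 10s, 20s, 40s and then a steady 60s, which matches the "maximum loopCount" reasoning in the comment.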

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index aa648dd..5fb0946 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -37,7 +37,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class ReplicateFromLeader {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private CoreContainer cc;
   private String coreName;
@@ -71,7 +71,7 @@ public class ReplicateFromLeader {
       } else if (uinfo.autoSoftCommmitMaxTime != -1) {
         pollIntervalStr = toPollIntervalStr(uinfo.autoSoftCommmitMaxTime/2);
       }
-      LOG.info("Will start replication from leader with poll interval: {}", pollIntervalStr );
+      log.info("Will start replication from leader with poll interval: {}", pollIntervalStr );
 
       NamedList<Object> slaveConfig = new NamedList<>();
       slaveConfig.add("fetchFromLeader", Boolean.TRUE);
@@ -114,7 +114,7 @@ public class ReplicateFromLeader {
       if (commitVersion == null) return null;
       else return commitVersion;
     } catch (Exception e) {
-      LOG.warn("Cannot get commit command version from index commit point ",e);
+      log.warn("Cannot get commit command version from index commit point ",e);
       return null;
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
index 418a5f0..664b541 100644
--- a/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/SolrZkServer.java
@@ -150,7 +150,7 @@ public class SolrZkServer {
 // Allows us to set a default for the data dir before parsing
 // zoo.cfg (which validates that there is a dataDir)
 class SolrZkServerProps extends QuorumPeerConfig {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static final Pattern MISSING_MYID_FILE_PATTERN = Pattern.compile(".*myid file is missing$");
 
   String solrPort; // port that Solr is listening on
@@ -164,7 +164,7 @@ class SolrZkServerProps extends QuorumPeerConfig {
   public static Properties getProperties(String path) throws ConfigException {
     File configFile = new File(path);
 
-    LOG.info("Reading configuration from: " + configFile);
+    log.info("Reading configuration from: " + configFile);
 
     try {
       if (!configFile.exists()) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
index 3a7c750..7acdfef 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ZkDistributedQueue.java
@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
  * the results should be correct but inefficient
  */
 public class ZkDistributedQueue implements DistributedQueue {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   static final String PREFIX = "qn-";
 
@@ -245,7 +245,7 @@ public class ZkDistributedQueue implements DistributedQueue {
             try {
               zookeeper.delete(ops.get(j).getPath(), -1, true);
             } catch (KeeperException.NoNodeException e2) {
-              LOG.debug("Can not remove node which is not exist : " + ops.get(j).getPath());
+              log.debug("Can not remove node which is not exist : " + ops.get(j).getPath());
             }
           }
         }
@@ -412,7 +412,7 @@ public class ZkDistributedQueue implements DistributedQueue {
         for (String childName : childNames) {
           // Check format
           if (!childName.regionMatches(0, PREFIX, 0, PREFIX.length())) {
-            LOG.debug("Found child node with improper name: " + childName);
+            log.debug("Found child node with improper name: " + childName);
             continue;
           }
           orderedChildren.add(childName);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/autoscaling/HttpTriggerListener.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/HttpTriggerListener.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/HttpTriggerListener.java
index e620966..b4f9bf0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/HttpTriggerListener.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/HttpTriggerListener.java
@@ -57,7 +57,7 @@ import org.slf4j.LoggerFactory;
  * </ul>
  */
 public class HttpTriggerListener extends TriggerListenerBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String urlTemplate;
   private String payloadTemplate;
@@ -158,7 +158,7 @@ public class HttpTriggerListener extends TriggerListenerBase {
     try {
       cloudManager.httpRequest(url, SolrRequest.METHOD.POST, headers, payload, timeout, followRedirects);
     } catch (IOException e) {
-      LOG.warn("Exception sending request for event " + event, e);
+      log.warn("Exception sending request for event " + event, e);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/autoscaling/LoggingListener.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/LoggingListener.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/LoggingListener.java
index bfda572..a7dcf63 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/LoggingListener.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/LoggingListener.java
@@ -28,11 +28,11 @@ import org.slf4j.LoggerFactory;
  * events to a log.
  */
 public class LoggingListener extends TriggerListenerBase {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context,
                       Throwable error, String message) {
-    LOG.info("{}: stage={}, actionName={}, event={}, error={}, messsage={}", config.name, stage, actionName, event, error, message);
+    log.info("{}: stage={}, actionName={}, event={}, error={}, messsage={}", config.name, stage, actionName, event, error, message);
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
index 12d95bc..214552e 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerBase.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
  * It handles state snapshot / restore in ZK.
  */
 public abstract class TriggerBase implements AutoScaling.Trigger {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   protected final String name;
   protected SolrCloudManager cloudManager;
@@ -128,7 +128,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
     } catch (AlreadyExistsException e) {
       // ignore
     } catch (InterruptedException | KeeperException | IOException e) {
-      LOG.warn("Exception checking ZK path " + ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH, e);
+      log.warn("Exception checking ZK path " + ZkStateReader.SOLR_AUTOSCALING_TRIGGER_STATE_PATH, e);
       throw e;
     }
     for (TriggerAction action : actions) {
@@ -240,7 +240,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
       }
       lastState = state;
     } catch (InterruptedException | BadVersionException | AlreadyExistsException | IOException | KeeperException e) {
-      LOG.warn("Exception updating trigger state '" + path + "'", e);
+      log.warn("Exception updating trigger state '" + path + "'", e);
     }
   }
 
@@ -254,7 +254,7 @@ public abstract class TriggerBase implements AutoScaling.Trigger {
         data = versionedData.getData();
       }
     } catch (Exception e) {
-      LOG.warn("Exception getting trigger state '" + path + "'", e);
+      log.warn("Exception getting trigger state '" + path + "'", e);
     }
     if (data != null) {
       Map<String, Object> restoredState = (Map<String, Object>)Utils.fromJSON(data);

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
index db76314..fd587de 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/TriggerEventQueue.java
@@ -35,7 +35,7 @@ import org.slf4j.LoggerFactory;
  *
  */
 public class TriggerEventQueue {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final String ENQUEUE_TIME = "_enqueue_time_";
   public static final String DEQUEUE_TIME = "_dequeue_time_";
@@ -58,7 +58,7 @@ public class TriggerEventQueue {
       delegate.offer(data);
       return true;
     } catch (Exception e) {
-      LOG.warn("Exception adding event " + event + " to queue " + triggerName, e);
+      log.warn("Exception adding event " + event + " to queue " + triggerName, e);
       return false;
     }
   }
@@ -68,19 +68,19 @@ public class TriggerEventQueue {
     try {
       while ((data = delegate.peek()) != null) {
         if (data.length == 0) {
-          LOG.warn("ignoring empty data...");
+          log.warn("ignoring empty data...");
           continue;
         }
         try {
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
-          LOG.warn("Invalid event data, ignoring: " + new String(data, StandardCharsets.UTF_8));
+          log.warn("Invalid event data, ignoring: " + new String(data, StandardCharsets.UTF_8));
           continue;
         }
       }
     } catch (Exception e) {
-      LOG.warn("Exception peeking queue of trigger " + triggerName, e);
+      log.warn("Exception peeking queue of trigger " + triggerName, e);
     }
     return null;
   }
@@ -90,19 +90,19 @@ public class TriggerEventQueue {
     try {
       while ((data = delegate.poll()) != null) {
         if (data.length == 0) {
-          LOG.warn("ignoring empty data...");
+          log.warn("ignoring empty data...");
           continue;
         }
         try {
           Map<String, Object> map = (Map<String, Object>) Utils.fromJSON(data);
           return fromMap(map);
         } catch (Exception e) {
-          LOG.warn("Invalid event data, ignoring: " + new String(data, StandardCharsets.UTF_8));
+          log.warn("Invalid event data, ignoring: " + new String(data, StandardCharsets.UTF_8));
           continue;
         }
       }
     } catch (Exception e) {
-      LOG.warn("Exception polling queue of trigger " + triggerName, e);
+      log.warn("Exception polling queue of trigger " + triggerName, e);
     }
     return null;
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
index e4aabb0..13e1de1 100644
--- a/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/HdfsDirectoryFactory.java
@@ -72,7 +72,7 @@ import org.slf4j.LoggerFactory;
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
 
 public class HdfsDirectoryFactory extends CachingDirectoryFactory implements SolrCoreAware, SolrMetricProducer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String BLOCKCACHE_SLAB_COUNT = "solr.hdfs.blockcache.slab.count";
   public static final String BLOCKCACHE_DIRECT_MEMORY_ALLOCATION = "solr.hdfs.blockcache.direct.memory.allocation";
@@ -155,12 +155,12 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     if (this.hdfsDataDir != null && this.hdfsDataDir.length() == 0) {
       this.hdfsDataDir = null;
     } else {
-      LOG.info(HDFS_HOME + "=" + this.hdfsDataDir);
+      log.info(HDFS_HOME + "=" + this.hdfsDataDir);
     }
     cacheMerges = getConfig(CACHE_MERGES, false);
     cacheReadOnce = getConfig(CACHE_READONCE, false);
     boolean kerberosEnabled = getConfig(KERBEROS_ENABLED, false);
-    LOG.info("Solr Kerberos Authentication "
+    log.info("Solr Kerberos Authentication "
         + (kerberosEnabled ? "enabled" : "disabled"));
     if (kerberosEnabled) {
       initKerberos();
@@ -171,7 +171,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   protected LockFactory createLockFactory(String rawLockType) throws IOException {
     if (null == rawLockType) {
       rawLockType = DirectoryFactory.LOCK_TYPE_HDFS;
-      LOG.warn("No lockType configured, assuming '"+rawLockType+"'.");
+      log.warn("No lockType configured, assuming '"+rawLockType+"'.");
     }
     final String lockType = rawLockType.toLowerCase(Locale.ROOT).trim();
     switch (lockType) {
@@ -191,7 +191,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   @SuppressWarnings("resource")
   protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
     assert params != null : "init must be called before create";
-    LOG.info("creating directory factory for path {}", path);
+    log.info("creating directory factory for path {}", path);
     Configuration conf = getConf();
     
     if (metrics == null) {
@@ -215,10 +215,10 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
       boolean directAllocation = getConfig(BLOCKCACHE_DIRECT_MEMORY_ALLOCATION, true);
       
       int slabSize = numberOfBlocksPerBank * blockSize;
-      LOG.info(
+      log.info(
           "Number of slabs of block cache [{}] with direct memory allocation set to [{}]",
           bankCount, directAllocation);
-      LOG.info(
+      log.info(
           "Block cache target memory usage, slab size of [{}] will allocate [{}] slabs and use ~[{}] bytes",
           new Object[] {slabSize, bankCount,
               ((long) bankCount * (long) slabSize)});
@@ -285,13 +285,13 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
   private BlockCache getBlockDirectoryCache(int numberOfBlocksPerBank, int blockSize, int bankCount,
       boolean directAllocation, int slabSize, int bufferSize, int bufferCount, boolean staticBlockCache) {
     if (!staticBlockCache) {
-      LOG.info("Creating new single instance HDFS BlockCache");
+      log.info("Creating new single instance HDFS BlockCache");
       return createBlockCache(numberOfBlocksPerBank, blockSize, bankCount, directAllocation, slabSize, bufferSize, bufferCount);
     }
     synchronized (HdfsDirectoryFactory.class) {
       
       if (globalBlockCache == null) {
-        LOG.info("Creating new global HDFS BlockCache");
+        log.info("Creating new global HDFS BlockCache");
         globalBlockCache = createBlockCache(numberOfBlocksPerBank, blockSize, bankCount,
             directAllocation, slabSize, bufferSize, bufferCount);
       }
@@ -328,7 +328,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     try {
       return fileSystem.exists(hdfsDirPath);
     } catch (IOException e) {
-      LOG.error("Error checking if hdfs path exists", e);
+      log.error("Error checking if hdfs path exists", e);
       throw new RuntimeException("Error checking if hdfs path exists", e);
     }
   }
@@ -351,7 +351,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
         throw new RuntimeException("Could not remove directory");
       }
     } catch (Exception e) {
-      LOG.error("Could not remove directory", e);
+      log.error("Could not remove directory", e);
       throw new SolrException(ErrorCode.SERVER_ERROR,
           "Could not remove directory", e);
     }
@@ -428,7 +428,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     try {
       return fileSystem.getContentSummary(hdfsDirPath).getLength();
     } catch (IOException e) {
-      LOG.error("Error checking if hdfs path exists", e);
+      log.error("Error checking if hdfs path exists", e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error checking if hdfs path exists", e);
     } finally {
       IOUtils.closeQuietly(fileSystem);
@@ -474,7 +474,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
         final Configuration ugiConf = new Configuration(getConf());
         ugiConf.set(HADOOP_SECURITY_AUTHENTICATION, kerberos);
         UserGroupInformation.setConfiguration(ugiConf);
-        LOG.info(
+        log.info(
             "Attempting to acquire kerberos ticket with keytab: {}, principal: {} ",
             keytabFile, principal);
         try {
@@ -482,7 +482,7 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
         } catch (IOException ioe) {
           throw new RuntimeException(ioe);
         }
-        LOG.info("Got Kerberos ticket");
+        log.info("Got Kerberos ticket");
       }
     }
   }
@@ -514,10 +514,10 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     try {
       pathExists = fileSystem.exists(dataDirPath);
     } catch (IOException e) {
-      LOG.error("Error checking if hdfs path "+dataDir+" exists", e);
+      log.error("Error checking if hdfs path "+dataDir+" exists", e);
     }
     if (!pathExists) {
-      LOG.warn("{} does not point to a valid data directory; skipping clean-up of old index directories.", dataDir);
+      log.warn("{} does not point to a valid data directory; skipping clean-up of old index directories.", dataDir);
       return;
     }
 
@@ -534,16 +534,16 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
             accept = fs.isDirectory(path) && !path.equals(currentIndexDirPath) &&
                 (pathName.equals("index") || pathName.matches(INDEX_W_TIMESTAMP_REGEX));
           } catch (IOException e) {
-            LOG.error("Error checking if path {} is an old index directory, caused by: {}", path, e);
+            log.error("Error checking if path {} is an old index directory, caused by: {}", path, e);
           }
           return accept;
         }
       });
     } catch (FileNotFoundException fnfe) {
       // already deleted - ignore
-      LOG.debug("Old index directory already deleted - skipping...", fnfe);
+      log.debug("Old index directory already deleted - skipping...", fnfe);
     } catch (IOException ioExc) {
-      LOG.error("Error checking for old index directories to clean-up.", ioExc);
+      log.error("Error checking for old index directories to clean-up.", ioExc);
     }
 
     if (oldIndexDirs == null || oldIndexDirs.length == 0)
@@ -560,23 +560,23 @@ public class HdfsDirectoryFactory extends CachingDirectoryFactory implements Sol
     
     int i = 0;
     if (afterReload) {
-      LOG.info("Will not remove most recent old directory on reload {}", oldIndexDirs[0]);
+      log.info("Will not remove most recent old directory on reload {}", oldIndexDirs[0]);
       i = 1;
     }
-    LOG.info("Found {} old index directories to clean-up under {} afterReload={}", oldIndexDirs.length - i, dataDirPath, afterReload);
+    log.info("Found {} old index directories to clean-up under {} afterReload={}", oldIndexDirs.length - i, dataDirPath, afterReload);
     for (; i < oldIndexPaths.size(); i++) {
       Path oldDirPath = oldIndexPaths.get(i);
       if (livePaths.contains(oldDirPath.toString())) {
-        LOG.warn("Cannot delete directory {} because it is still being referenced in the cache.", oldDirPath);
+        log.warn("Cannot delete directory {} because it is still being referenced in the cache.", oldDirPath);
       } else {
         try {
           if (fileSystem.delete(oldDirPath, true)) {
-            LOG.info("Deleted old index directory {}", oldDirPath);
+            log.info("Deleted old index directory {}", oldDirPath);
           } else {
-            LOG.warn("Failed to delete old index directory {}", oldDirPath);
+            log.warn("Failed to delete old index directory {}", oldDirPath);
           }
         } catch (IOException e) {
-          LOG.error("Failed to delete old index directory {} due to: {}", oldDirPath, e);
+          log.error("Failed to delete old index directory {} due to: {}", oldDirPath, e);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
index 2969d5b..40e65b7 100644
--- a/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
+++ b/solr/core/src/java/org/apache/solr/core/IndexDeletionPolicyWrapper.java
@@ -50,7 +50,7 @@ import org.slf4j.LoggerFactory;
  * @see org.apache.lucene.index.IndexDeletionPolicy
  */
 public final class IndexDeletionPolicyWrapper extends IndexDeletionPolicy {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   private final IndexDeletionPolicy deletionPolicy;
   private volatile Map<Long, IndexCommit> solrVersionVsCommits = new ConcurrentHashMap<>();
@@ -94,7 +94,7 @@ public final class IndexDeletionPolicyWrapper extends IndexDeletionPolicy {
       // this is the common success case: the older time didn't exist, or
       // came before the new time.
       if (previousTime == null || previousTime <= timeToSet) {
-        LOG.debug("Commit point reservation for generation {} set to {} (requested reserve time of {})",
+        log.debug("Commit point reservation for generation {} set to {} (requested reserve time of {})",
             indexGen, timeToSet, reserveTime);
         break;
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
index 22a4895..9e02b21 100644
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
+++ b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
@@ -31,7 +31,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class BackupRepositoryFactory {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String,PluginInfo> backupRepoPluginByName = new HashMap<>();
   private PluginInfo defaultBackupRepoPlugin = null;
@@ -52,14 +52,14 @@ public class BackupRepositoryFactory {
           this.defaultBackupRepoPlugin = backupRepoPlugins[i];
         }
         backupRepoPluginByName.put(name, backupRepoPlugins[i]);
-        LOG.info("Added backup repository with configuration params {}", backupRepoPlugins[i]);
+        log.info("Added backup repository with configuration params {}", backupRepoPlugins[i]);
       }
       if (backupRepoPlugins.length == 1) {
         this.defaultBackupRepoPlugin = backupRepoPlugins[0];
       }
 
       if (this.defaultBackupRepoPlugin != null) {
-        LOG.info("Default configuration for backup repository is with configuration params {}",
+        log.info("Default configuration for backup repository is with configuration params {}",
             defaultBackupRepoPlugin);
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index 4b4145d..a34ad7a 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -120,7 +120,7 @@ public class IndexFetcher {
 
   public static final String INDEX_PROPERTIES = "index.properties";
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private String masterUrl;
 
@@ -242,7 +242,7 @@ public class IndexFetcher {
               "'masterUrl' is required for a slave");
     if (masterUrl != null && masterUrl.endsWith(ReplicationHandler.PATH)) {
       masterUrl = masterUrl.substring(0, masterUrl.length()-12);
-      LOG.warn("'masterUrl' must be specified without the "+ReplicationHandler.PATH+" suffix");
+      log.warn("'masterUrl' must be specified without the "+ReplicationHandler.PATH+" suffix");
     }
     this.masterUrl = masterUrl;
 
@@ -327,7 +327,7 @@ public class IndexFetcher {
         filesToDownload = Collections.synchronizedList(files);
       else {
         filesToDownload = Collections.emptyList();
-        LOG.error("No files to download for index generation: "+ gen);
+        log.error("No files to download for index generation: "+ gen);
       }
 
       files = (List<Map<String,Object>>) response.get(CONF_FILES);
@@ -373,7 +373,7 @@ public class IndexFetcher {
       // when we are a bit more confident we may want to try a partial replication
       // if the error is connection related or something, but we have to be careful
       forceReplication = true;
-      LOG.info("Last replication failed, so I'll force replication");
+      log.info("Last replication failed, so I'll force replication");
     }
 
     try {
@@ -385,19 +385,19 @@ public class IndexFetcher {
           return IndexFetchResult.EXPECTING_NON_LEADER;
         }
         if (replica.getState() != Replica.State.ACTIVE) {
-          LOG.info("Replica {} is leader but it's state is {}, skipping replication", replica.getName(), replica.getState());
+          log.info("Replica {} is leader but it's state is {}, skipping replication", replica.getName(), replica.getState());
           return IndexFetchResult.LEADER_IS_NOT_ACTIVE;
         }
         if (!solrCore.getCoreContainer().getZkController().getClusterState().liveNodesContain(replica.getNodeName())) {
-          LOG.info("Replica {} is leader but it's not hosted on a live node, skipping replication", replica.getName());
+          log.info("Replica {} is leader but it's not hosted on a live node, skipping replication", replica.getName());
           return IndexFetchResult.LEADER_IS_NOT_ACTIVE;
         }
         if (!replica.getCoreUrl().equals(masterUrl)) {
           masterUrl = replica.getCoreUrl();
-          LOG.info("Updated masterUrl to {}", masterUrl);
+          log.info("Updated masterUrl to {}", masterUrl);
           // TODO: Do we need to set forceReplication = true?
         } else {
-          LOG.debug("masterUrl didn't change");
+          log.debug("masterUrl didn't change");
         }
       }
       //get the current 'replicateable' index version in the master
@@ -407,10 +407,10 @@ public class IndexFetcher {
       } catch (Exception e) {
         final String errorMsg = e.toString();
         if (!Strings.isNullOrEmpty(errorMsg) && errorMsg.contains(INTERRUPT_RESPONSE_MESSAGE)) {
-            LOG.warn("Master at: " + masterUrl + " is not available. Index fetch failed by interrupt. Exception: " + errorMsg);
+            log.warn("Master at: " + masterUrl + " is not available. Index fetch failed by interrupt. Exception: " + errorMsg);
             return new IndexFetchResult(IndexFetchResult.FAILED_BY_INTERRUPT_MESSAGE, false, e);
         } else {
-            LOG.warn("Master at: " + masterUrl + " is not available. Index fetch failed by exception: " + errorMsg);
+            log.warn("Master at: " + masterUrl + " is not available. Index fetch failed by exception: " + errorMsg);
             return new IndexFetchResult(IndexFetchResult.FAILED_BY_EXCEPTION_MESSAGE, false, e);
         }
     }
@@ -418,8 +418,8 @@ public class IndexFetcher {
       long latestVersion = (Long) response.get(CMD_INDEX_VERSION);
       long latestGeneration = (Long) response.get(GENERATION);
 
-      LOG.info("Master's generation: " + latestGeneration);
-      LOG.info("Master's version: " + latestVersion);
+      log.info("Master's generation: " + latestGeneration);
+      log.info("Master's version: " + latestVersion);
 
       // TODO: make sure that getLatestCommit only returns commit points for the main index (i.e. no side-car indexes)
       IndexCommit commit = solrCore.getDeletionPolicy().getLatestCommit();
@@ -429,7 +429,7 @@ public class IndexFetcher {
         try {
           searcherRefCounted = solrCore.getNewestSearcher(false);
           if (searcherRefCounted == null) {
-            LOG.warn("No open searcher found - fetch aborted");
+            log.warn("No open searcher found - fetch aborted");
             return IndexFetchResult.NO_INDEX_COMMIT_EXIST;
           }
           commit = searcherRefCounted.get().getIndexReader().getIndexCommit();
@@ -439,14 +439,14 @@ public class IndexFetcher {
         }
       }
 
-      LOG.info("Slave's generation: " + commit.getGeneration());
-      LOG.info("Slave's version: " + IndexDeletionPolicyWrapper.getCommitTimestamp(commit));
+      log.info("Slave's generation: " + commit.getGeneration());
+      log.info("Slave's version: " + IndexDeletionPolicyWrapper.getCommitTimestamp(commit));
 
       if (latestVersion == 0L) {
         if (commit.getGeneration() != 0) {
           // since we won't get the files for an empty index,
           // we just clear ours and commit
-          LOG.info("New index in Master. Deleting mine...");
+          log.info("New index in Master. Deleting mine...");
           RefCounted<IndexWriter> iw = solrCore.getUpdateHandler().getSolrCoreState().getIndexWriter(solrCore);
           try {
             iw.get().deleteAll();
@@ -464,27 +464,27 @@ public class IndexFetcher {
 
         //there is nothing to be replicated
         successfulInstall = true;
-        LOG.debug("Nothing to replicate, master's version is 0");
+        log.debug("Nothing to replicate, master's version is 0");
         return IndexFetchResult.MASTER_VERSION_ZERO;
       }
 
       // TODO: Should we be comparing timestamps (across machines) here?
       if (!forceReplication && IndexDeletionPolicyWrapper.getCommitTimestamp(commit) == latestVersion) {
         //master and slave are already in sync just return
-        LOG.info("Slave in sync with master.");
+        log.info("Slave in sync with master.");
         successfulInstall = true;
         return IndexFetchResult.ALREADY_IN_SYNC;
       }
-      LOG.info("Starting replication process");
+      log.info("Starting replication process");
       // get the list of files first
       fetchFileList(latestGeneration);
       // this can happen if the commit point is deleted before we fetch the file list.
       if (filesToDownload.isEmpty()) {
         return IndexFetchResult.PEER_INDEX_COMMIT_DELETED;
       }
-      LOG.info("Number of files in latest index in master: " + filesToDownload.size());
+      log.info("Number of files in latest index in master: " + filesToDownload.size());
       if (tlogFilesToDownload != null) {
-        LOG.info("Number of tlog files in master: " + tlogFilesToDownload.size());
+        log.info("Number of tlog files in master: " + tlogFilesToDownload.size());
       }
 
       // Create the sync service
@@ -540,17 +540,17 @@ public class IndexFetcher {
             indexWriter.deleteUnusedFiles();
             while (hasUnusedFiles(indexDir, commit)) {
               indexWriter.deleteUnusedFiles();
-              LOG.info("Sleeping for 1000ms to wait for unused lucene index files to be delete-able");
+              log.info("Sleeping for 1000ms to wait for unused lucene index files to be delete-able");
               Thread.sleep(1000);
               c++;
               if (c >= 30)  {
-                LOG.warn("IndexFetcher unable to cleanup unused lucene index files so we must do a full copy instead");
+                log.warn("IndexFetcher unable to cleanup unused lucene index files so we must do a full copy instead");
                 isFullCopyNeeded = true;
                 break;
               }
             }
             if (c > 0)  {
-              LOG.info("IndexFetcher slept for " + (c * 1000) + "ms for unused lucene index files to be delete-able");
+              log.info("IndexFetcher slept for " + (c * 1000) + "ms for unused lucene index files to be delete-able");
             }
           } finally {
             writer.decref();
@@ -564,7 +564,7 @@ public class IndexFetcher {
             solrCore.getUpdateHandler().getSolrCoreState().closeIndexWriter(solrCore, true);
           }
 
-          LOG.info("Starting download (fullCopy={}) to {}", isFullCopyNeeded, tmpIndexDir);
+          log.info("Starting download (fullCopy={}) to {}", isFullCopyNeeded, tmpIndexDir);
           successfulInstall = false;
 
           long bytesDownloaded = downloadIndexFiles(isFullCopyNeeded, indexDir,
@@ -575,7 +575,7 @@ public class IndexFetcher {
           }
           final long timeTakenSeconds = getReplicationTimeElapsed();
           final Long bytesDownloadedPerSecond = (timeTakenSeconds != 0 ? Long.valueOf(bytesDownloaded / timeTakenSeconds) : null);
-          LOG.info("Total time taken for download (fullCopy={},bytesDownloaded={}) : {} secs ({} bytes/sec) to {}",
+          log.info("Total time taken for download (fullCopy={},bytesDownloaded={}) : {} secs ({} bytes/sec) to {}",
               isFullCopyNeeded, bytesDownloaded, timeTakenSeconds, bytesDownloadedPerSecond, tmpIndexDir);
 
           Collection<Map<String,Object>> modifiedConfFiles = getModifiedConfFiles(confFilesToDownload);
@@ -603,7 +603,7 @@ public class IndexFetcher {
                 }
               }
 
-              LOG.info("Configuration files are modified, core will be reloaded");
+              log.info("Configuration files are modified, core will be reloaded");
               logReplicationTimeAndConfFiles(modifiedConfFiles,
                   successfulInstall);// write to a file time of replication and
                                      // conf files.
@@ -633,7 +633,7 @@ public class IndexFetcher {
 
         // we must reload the core after we open the IW back up
        if (successfulInstall && (reloadCore || forceCoreReload)) {
-         LOG.info("Reloading SolrCore {}", solrCore.getName());
+         log.info("Reloading SolrCore {}", solrCore.getName());
           reloadCore();
         }
 
@@ -642,7 +642,7 @@ public class IndexFetcher {
             // let the system know we are changing dir's and the old one
             // may be closed
             if (indexDir != null) {
-              LOG.info("removing old index directory " + indexDir);
+              log.info("removing old index directory " + indexDir);
               solrCore.getDirectoryFactory().doneWithDirectory(indexDir);
               solrCore.getDirectoryFactory().remove(indexDir);
             }
@@ -658,7 +658,7 @@ public class IndexFetcher {
           cleanup(solrCore, tmpIndexDir, indexDir, deleteTmpIdxDir, tmpTlogDir, successfulInstall);
           cleanupDone = true;
           // we try with a full copy of the index
-          LOG.warn(
+          log.warn(
               "Replication attempt was not successful - trying a full index replication reloadCore={}",
               reloadCore);
           successfulInstall = fetchLatestIndex(true, reloadCore).getSuccessful();
@@ -667,7 +667,7 @@ public class IndexFetcher {
         markReplicationStop();
         return successfulInstall ? IndexFetchResult.INDEX_FETCH_SUCCESS : IndexFetchResult.INDEX_FETCH_FAILURE;
       } catch (ReplicationHandlerException e) {
-        LOG.error("User aborted Replication");
+        log.error("User aborted Replication");
         return new IndexFetchResult(IndexFetchResult.FAILED_BY_EXCEPTION_MESSAGE, false, e);
       } catch (SolrException e) {
         throw e;
@@ -699,7 +699,7 @@ public class IndexFetcher {
           logReplicationTimeAndConfFiles(null, successfulInstall);
         } catch (Exception e) {
           // this can happen on shutdown, a fetch may be running in a thread after DirectoryFactory is closed
-          LOG.warn("Could not log failed replication details", e);
+          log.warn("Could not log failed replication details", e);
         }
       }
 
@@ -724,24 +724,24 @@ public class IndexFetcher {
           core.getDirectoryFactory().remove(tmpIndexDir);
         }
       } catch (Exception e) {
-        SolrException.log(LOG, e);
+        SolrException.log(log, e);
       } finally {
         try {
           if (tmpIndexDir != null) core.getDirectoryFactory().release(tmpIndexDir);
         } catch (Exception e) {
-          SolrException.log(LOG, e);
+          SolrException.log(log, e);
         }
         try {
           if (indexDir != null) {
             core.getDirectoryFactory().release(indexDir);
           }
         } catch (Exception e) {
-          SolrException.log(LOG, e);
+          SolrException.log(log, e);
         }
         try {
           if (tmpTlogDir != null) delTree(tmpTlogDir);
         } catch (Exception e) {
-          SolrException.log(LOG, e);
+          SolrException.log(log, e);
         }
       }
     }
@@ -754,7 +754,7 @@ public class IndexFetcher {
     String[] allFiles = indexDir.listAll();
     for (String file : allFiles) {
       if (!file.equals(segmentsFileName) && !currentFiles.contains(file) && !file.endsWith(".lock")) {
-        LOG.info("Found unused file: " + file);
+        log.info("Found unused file: " + file);
         return true;
       }
     }
@@ -838,7 +838,7 @@ public class IndexFetcher {
       
       solrCore.getDirectoryFactory().renameWithOverwrite(dir, tmpFileName, REPLICATION_PROPERTIES);
     } catch (Exception e) {
-      LOG.warn("Exception while updating statistics", e);
+      log.warn("Exception while updating statistics", e);
     } finally {
       if (dir != null) {
         solrCore.getDirectoryFactory().release(dir);
@@ -899,7 +899,7 @@ public class IndexFetcher {
         try {
           waitSearcher[0].get();
         } catch (InterruptedException | ExecutionException e) {
-          SolrException.log(LOG, e);
+          SolrException.log(log, e);
         }
       }
       commitPoint = searcher.get().getIndexReader().getIndexCommit();
@@ -921,7 +921,7 @@ public class IndexFetcher {
       try {
         solrCore.getCoreContainer().reload(solrCore.getName());
       } catch (Exception e) {
-        LOG.error("Could not reload core ", e);
+        log.error("Could not reload core ", e);
       } finally {
         latch.countDown();
       }
@@ -935,7 +935,7 @@ public class IndexFetcher {
   }
 
   private void downloadConfFiles(List<Map<String, Object>> confFilesToDownload, long latestGeneration) throws Exception {
-    LOG.info("Starting download of configuration files from master: " + confFilesToDownload);
+    log.info("Starting download of configuration files from master: " + confFilesToDownload);
     confFilesDownloaded = Collections.synchronizedList(new ArrayList<>());
     File tmpconfDir = new File(solrCore.getResourceLoader().getConfigDir(), "conf." + getDateAsStr(new Date()));
     try {
@@ -964,7 +964,7 @@ public class IndexFetcher {
    * Download all the tlog files to the temp tlog directory.
    */
   private long downloadTlogFiles(File tmpTlogDir, long latestGeneration) throws Exception {
-    LOG.info("Starting download of tlog files from master: " + tlogFilesToDownload);
+    log.info("Starting download of tlog files from master: " + tlogFilesToDownload);
     tlogFilesDownloaded = Collections.synchronizedList(new ArrayList<>());
     long bytesDownloaded = 0;
 
@@ -998,8 +998,8 @@ public class IndexFetcher {
   private long downloadIndexFiles(boolean downloadCompleteIndex, Directory indexDir, Directory tmpIndexDir,
                                   String indexDirPath, String tmpIndexDirPath, long latestGeneration)
       throws Exception {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("Download files to dir: " + Arrays.asList(indexDir.listAll()));
+    if (log.isDebugEnabled()) {
+      log.debug("Download files to dir: " + Arrays.asList(indexDir.listAll()));
     }
     long bytesDownloaded = 0;
     long bytesSkippedCopying = 0;
@@ -1013,12 +1013,12 @@ public class IndexFetcher {
       long size = (Long) file.get(SIZE);
       CompareResult compareResult = compareFile(indexDir, filename, size, (Long) file.get(CHECKSUM));
       boolean alwaysDownload = filesToAlwaysDownloadIfNoChecksums(filename, size, compareResult);
-      LOG.debug("Downloading file={} size={} checksum={} alwaysDownload={}", filename, size, file.get(CHECKSUM), alwaysDownload);
+      log.debug("Downloading file={} size={} checksum={} alwaysDownload={}", filename, size, file.get(CHECKSUM), alwaysDownload);
       if (!compareResult.equal || downloadCompleteIndex || alwaysDownload) {
         File localFile = new File(indexDirPath, filename);
         if (downloadCompleteIndex && doDifferentialCopy && compareResult.equal && compareResult.checkSummed
             && localFile.exists()) {
-          LOG.info("Don't need to download this file. Local file's path is: {}, checksum is: {}",
+          log.info("Don't need to download this file. Local file's path is: {}, checksum is: {}",
               localFile.getAbsolutePath(), file.get(CHECKSUM));
           // A hard link here should survive the eventual directory move, and should be more space efficient as
           // compared to a file copy. TODO: Maybe we could do a move safely here?
@@ -1033,10 +1033,10 @@ public class IndexFetcher {
         }
         filesDownloaded.add(new HashMap<>(file));
       } else {
-        LOG.info("Skipping download for {} because it already exists", file.get(NAME));
+        log.info("Skipping download for {} because it already exists", file.get(NAME));
       }
     }
-    LOG.info("Bytes downloaded: {}, Bytes skipped downloading: {}", bytesDownloaded, bytesSkippedCopying);
+    log.info("Bytes downloaded: {}, Bytes skipped downloading: {}", bytesDownloaded, bytesSkippedCopying);
     return bytesDownloaded;
   }
   
@@ -1065,7 +1065,7 @@ public class IndexFetcher {
             indexFileChecksum = CodecUtil.retrieveChecksum(indexInput);
             compareResult.checkSummed = true;
           } catch (Exception e) {
-            LOG.warn("Could not retrieve checksum from file.", e);
+            log.warn("Could not retrieve checksum from file.", e);
           }
         }
 
@@ -1076,7 +1076,7 @@ public class IndexFetcher {
             compareResult.equal = true;
             return compareResult;
           } else {
-            LOG.info(
+            log.info(
                 "File {} did not match. expected length is {} and actual length is {}", filename, backupIndexFileLen, indexFileLen);
             compareResult.equal = false;
             return compareResult;
@@ -1089,7 +1089,7 @@ public class IndexFetcher {
           compareResult.equal = true;
           return compareResult;
         } else {
-          LOG.warn("File {} did not match. expected checksum is {} and actual is checksum {}. " +
+          log.warn("File {} did not match. expected checksum is {} and actual is checksum {}. " +
               "expected length is {} and actual length is {}", filename, backupIndexFileChecksum, indexFileChecksum,
               backupIndexFileLen, indexFileLen);
           compareResult.equal = false;
@@ -1100,7 +1100,7 @@ public class IndexFetcher {
       compareResult.equal = false;
       return compareResult;
     } catch (IOException e) {
-      LOG.error("Could not read file " + filename + ". Downloading it again", e);
+      log.error("Could not read file " + filename + ". Downloading it again", e);
       compareResult.equal = false;
       return compareResult;
     }
@@ -1139,7 +1139,7 @@ public class IndexFetcher {
           }
         } else {
           if (length != dir.fileLength(filename)) {
-            LOG.warn("File {} did not match. expected length is {} and actual length is {}",
+            log.warn("File {} did not match. expected length is {} and actual length is {}",
                 filename, length, dir.fileLength(filename));
             return true;
           }
@@ -1154,25 +1154,25 @@ public class IndexFetcher {
    * <p/>
    */
   private boolean moveAFile(Directory tmpIdxDir, Directory indexDir, String fname) {
-    LOG.debug("Moving file: {}", fname);
+    log.debug("Moving file: {}", fname);
     boolean success = false;
     try {
       if (slowFileExists(indexDir, fname)) {
-        LOG.warn("Cannot complete replication attempt because file already exists:" + fname);
+        log.warn("Cannot complete replication attempt because file already exists:" + fname);
         
         // we fail - we downloaded the files we need, if we can't move one in, we can't
         // count on the correct index
         return false;
       }
     } catch (IOException e) {
-      SolrException.log(LOG, "could not check if a file exists", e);
+      SolrException.log(log, "could not check if a file exists", e);
       return false;
     }
     try {
       solrCore.getDirectoryFactory().move(tmpIdxDir, indexDir, fname, DirectoryFactory.IOCONTEXT_NO_CACHE);
       success = true;
     } catch (IOException e) {
-      SolrException.log(LOG, "Could not move file", e);
+      SolrException.log(log, "Could not move file", e);
     }
     return success;
   }
@@ -1181,10 +1181,10 @@ public class IndexFetcher {
    * Copy all index files from the temp index dir to the actual index. The segments_N file is copied last.
    */
   private boolean moveIndexFiles(Directory tmpIdxDir, Directory indexDir) {
-    if (LOG.isDebugEnabled()) {
+    if (log.isDebugEnabled()) {
       try {
-        LOG.info("From dir files:" + Arrays.asList(tmpIdxDir.listAll()));
-        LOG.info("To dir files:" + Arrays.asList(indexDir.listAll()));
+        log.info("From dir files:" + Arrays.asList(tmpIdxDir.listAll()));
+        log.info("To dir files:" + Arrays.asList(indexDir.listAll()));
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
@@ -1245,7 +1245,7 @@ public class IndexFetcher {
       ((CdcrUpdateLog) ulog).initForRecovery(bufferedUpdates.tlog, bufferedUpdates.offset);
     }
     catch (Exception e) {
-      LOG.error("Unable to copy tlog files", e);
+      log.error("Unable to copy tlog files", e);
       return false;
     }
     finally {
@@ -1319,7 +1319,7 @@ public class IndexFetcher {
     try {
       Files.move(tlogDir, backupTlogDir, StandardCopyOption.ATOMIC_MOVE);
     } catch (IOException e) {
-      SolrException.log(LOG, "Unable to rename: " + tlogDir + " to: " + backupTlogDir, e);
+      SolrException.log(log, "Unable to rename: " + tlogDir + " to: " + backupTlogDir, e);
       return false;
     }
 
@@ -1327,7 +1327,7 @@ public class IndexFetcher {
     try {
       Files.move(src, tlogDir, StandardCopyOption.ATOMIC_MOVE);
     } catch (IOException e) {
-      SolrException.log(LOG, "Unable to rename: " + src + " to: " + tlogDir, e);
+      SolrException.log(log, "Unable to rename: " + src + " to: " + tlogDir, e);
 
       // In case of error, try to revert back the original tlog directory
       try {
@@ -1404,7 +1404,7 @@ public class IndexFetcher {
       org.apache.lucene.util.IOUtils.rm(dir.toPath());
       return true;
     } catch (IOException e) {
-      LOG.warn("Unable to delete directory : " + dir, e);
+      log.warn("Unable to delete directory : " + dir, e);
       return false;
     }
   }
@@ -1551,7 +1551,7 @@ public class IndexFetcher {
         fetch();
       } catch(Exception e) {
         if (!aborted) {
-          SolrException.log(IndexFetcher.LOG, "Error fetching file, doing one retry...", e);
+          SolrException.log(IndexFetcher.log, "Error fetching file, doing one retry...", e);
           // one retry
           fetch();
         } else {
@@ -1605,7 +1605,7 @@ public class IndexFetcher {
           //read the size of the packet
           int packetSize = readInt(intbytes);
           if (packetSize <= 0) {
-            LOG.warn("No content received for file: {}", fileName);
+            log.warn("No content received for file: {}", fileName);
             return NO_CONTENT;
           }
           //TODO consider recoding the remaining logic to not use/need buf[]; instead use the internal buffer of fis
@@ -1626,7 +1626,7 @@ public class IndexFetcher {
             checksum.update(buf, 0, packetSize);
             long checkSumClient = checksum.getValue();
             if (checkSumClient != checkSumServer) {
-              LOG.error("Checksum not matched between client and server for file: {}", fileName);
+              log.error("Checksum not matched between client and server for file: {}", fileName);
               //if checksum is wrong it is a problem return for retry
               return 1;
             }
@@ -1634,7 +1634,7 @@ public class IndexFetcher {
           //if everything is fine, write down the packet to the file
           file.write(buf, packetSize);
           bytesDownloaded += packetSize;
-          LOG.debug("Fetched and wrote {} bytes of file: {}", bytesDownloaded, fileName);
+          log.debug("Fetched and wrote {} bytes of file: {}", bytesDownloaded, fileName);
           if (bytesDownloaded >= size)
             return 0;
           //errorCount is always set to zero after a successful packet
@@ -1643,7 +1643,7 @@ public class IndexFetcher {
       } catch (ReplicationHandlerException e) {
         throw e;
       } catch (Exception e) {
-        LOG.warn("Error in fetching file: {} (downloaded {} of {} bytes)",
+        log.warn("Error in fetching file: {} (downloaded {} of {} bytes)",
             fileName, bytesDownloaded, size, e);
         //for any failure, increment the error count
         errorCount++;
@@ -1686,7 +1686,7 @@ public class IndexFetcher {
       try {
         file.close();
       } catch (Exception e) {/* no-op */
-        LOG.error("Error closing file: {}", this.saveAs, e);
+        log.error("Error closing file: {}", this.saveAs, e);
       }
       if (bytesDownloaded != size) {
         //if the download is not complete then
@@ -1694,7 +1694,7 @@ public class IndexFetcher {
         try {
           file.delete();
         } catch (Exception e) {
-          LOG.error("Error deleting file: {}", this.saveAs, e);
+          log.error("Error deleting file: {}", this.saveAs, e);
         }
         //if the failure is due to a user abort it is returned normally else an exception is thrown
         if (!aborted)


[12/15] lucene-solr:jira/http2: SOLR-12572: revert numDocs to be indexed to 1k

Posted by da...@apache.org.
SOLR-12572: revert numDocs to be indexed to 1k


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/cee309a6
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/cee309a6
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/cee309a6

Branch: refs/heads/jira/http2
Commit: cee309a6f76429bfba4334f78786a9c6eb97c3c0
Parents: 9e78be4
Author: Varun Thacker <va...@apache.org>
Authored: Fri Aug 24 01:02:35 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Fri Aug 24 01:02:35 2018 -0700

----------------------------------------------------------------------
 .../src/test/org/apache/solr/handler/export/TestExportWriter.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/cee309a6/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
index 206132e..6328a3f 100644
--- a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
+++ b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
@@ -618,7 +618,7 @@ public class TestExportWriter extends SolrTestCaseJ4 {
     assertU(delQ("*:*"));
     assertU(commit());
 
-    int numDocs = 1000*40;
+    int numDocs = 1000;
 
     //10 unique values
     String[] str_vals = new String[10];


[07/15] lucene-solr:jira/http2: SOLR-12572: While exporting documents using the export writer, if a field is specified as a sort parameter and also in the fl (field list) parameter, we save on one doc-value lookup. This can bring performance improvements

Posted by da...@apache.org.
SOLR-12572: While exporting documents using the export writer, if a field is specified as a sort parameter and also in the fl (field list) parameter, we save on one doc-value lookup. This can bring performance improvements of 15% and upwards depending on how many fields are in common
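
The hunks below apply this change per field type. As a rough, self-contained sketch of the idea in plain Java (the names SortedDoc and Column below are invented stand-ins for Solr's SortDoc/SortValue and doc-values columns, not the committed code; the committed code also handles the missing-value case via SortValue.isPresent(), which this sketch omits):

    import java.util.HashMap;
    import java.util.Map;

    public class SortValueReuseSketch {

      /** Values already materialized while sorting one document (stand-in for SortDoc/SortValue). */
      static final class SortedDoc {
        final int docId;
        final Map<String, Long> sortValues; // field -> value read during sorting
        SortedDoc(int docId, Map<String, Long> sortValues) {
          this.docId = docId;
          this.sortValues = sortValues;
        }
      }

      /** Stand-in for a doc-values column; counts how often it is consulted. */
      static final class Column {
        final Map<Integer, Long> byDoc = new HashMap<>();
        int lookups = 0;
        Long get(int docId) { lookups++; return byDoc.get(docId); }
      }

      /** Write one field of one document, preferring the value cached by the sort. */
      static Long writeField(String field, SortedDoc doc, Column column) {
        Long cached = doc.sortValues.get(field);
        if (cached != null) {
          return cached;              // field was also a sort field: no extra lookup
        }
        return column.get(doc.docId); // fl-only field: fall back to the column
      }

      public static void main(String[] args) {
        Column priceCol = new Column();
        Column qtyCol = new Column();
        priceCol.byDoc.put(7, 42L);
        qtyCol.byDoc.put(7, 3L);

        // Suppose the export sorted on "price", so its value is already in hand.
        SortedDoc doc = new SortedDoc(7, Map.of("price", 42L));

        System.out.println("price = " + writeField("price", doc, priceCol)); // 42
        System.out.println("qty   = " + writeField("qty", doc, qtyCol));     // 3
        System.out.println("price column lookups = " + priceCol.lookups);    // 0: reused sort value
        System.out.println("qty   column lookups = " + qtyCol.lookups);      // 1: fallback path
      }
    }

The more fields the 'fl' list shares with the 'sort' clause, the more per-document lookups are skipped, which is where the quoted 15%-and-up improvement comes from.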


Project: http://git-wip-us.apache.org/repos/asf/lucene-solr/repo
Commit: http://git-wip-us.apache.org/repos/asf/lucene-solr/commit/dfd2801c
Tree: http://git-wip-us.apache.org/repos/asf/lucene-solr/tree/dfd2801c
Diff: http://git-wip-us.apache.org/repos/asf/lucene-solr/diff/dfd2801c

Branch: refs/heads/jira/http2
Commit: dfd2801cd27ccc1e24179cc0ee5768a22bb2e64c
Parents: 025350e
Author: Varun Thacker <va...@apache.org>
Authored: Thu Aug 23 10:27:15 2018 -0700
Committer: Varun Thacker <va...@apache.org>
Committed: Thu Aug 23 10:48:10 2018 -0700

----------------------------------------------------------------------
 solr/CHANGES.txt                                |  4 +++
 .../solr/handler/export/BoolFieldWriter.java    | 23 +++++++++++-----
 .../solr/handler/export/DateFieldWriter.java    | 22 ++++++++++-----
 .../solr/handler/export/DoubleFieldWriter.java  | 27 +++++++++++++------
 .../apache/solr/handler/export/DoubleValue.java | 20 ++++++++++++++
 .../solr/handler/export/DoubleValueSortDoc.java |  9 +++++++
 .../solr/handler/export/ExportWriter.java       |  2 +-
 .../apache/solr/handler/export/FieldWriter.java |  2 +-
 .../solr/handler/export/FloatFieldWriter.java   | 27 +++++++++++++------
 .../apache/solr/handler/export/FloatValue.java  | 20 ++++++++++++++
 .../solr/handler/export/IntFieldWriter.java     | 22 ++++++++++-----
 .../apache/solr/handler/export/IntValue.java    | 25 +++++++++++++++--
 .../solr/handler/export/LongFieldWriter.java    | 20 ++++++++++----
 .../apache/solr/handler/export/LongValue.java   | 20 ++++++++++++++
 .../solr/handler/export/MultiFieldWriter.java   |  6 ++---
 .../solr/handler/export/QuadValueSortDoc.java   | 13 +++++++++
 .../solr/handler/export/SingleValueSortDoc.java |  7 +++++
 .../org/apache/solr/handler/export/SortDoc.java | 10 +++++++
 .../apache/solr/handler/export/SortValue.java   |  8 ++++++
 .../solr/handler/export/StringFieldWriter.java  | 24 ++++++++++++-----
 .../apache/solr/handler/export/StringValue.java | 20 ++++++++++++++
 .../solr/handler/export/TripleValueSortDoc.java | 11 ++++++++
 .../solr/handler/export/TestExportWriter.java   | 28 +++++++++++++++++++-
 23 files changed, 316 insertions(+), 54 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/CHANGES.txt
----------------------------------------------------------------------
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index a902422..48ed840 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -280,6 +280,10 @@ Optimizations
 * SOLR-12616: Optimize Export writer upto 4 sort fields to get better performance.
   This was removed in SOLR-11598 but brought back in the same version (Amrit Sarkar, Varun Thacker)
 
+* SOLR-12572: While exporting documents using the export writer, if a field is specified as a sort parameter and also
+  in the fl (field list) parameter, we save on one doc-value lookup. This can bring performance improvements of 15%
+  and upwards depending on how many fields are in common. (Amrit Sarkar, Varun Thacker)
+
 Other Changes
 ----------------------
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java
index e67f5ed..20b1598 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/BoolFieldWriter.java
@@ -37,14 +37,25 @@ class BoolFieldWriter extends FieldWriter {
     this.fieldType = fieldType;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    SortedDocValues vals = DocValues.getSorted(reader, this.field);
-    if (vals.advance(docId) != docId) {
-      return false;
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
+    BytesRef ref;
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        ref = (BytesRef) sortValue.getCurrentValue();
+      } else { //empty-value
+        return false;
+      }
+    } else {
+      // field is not part of 'sort' param, but part of 'fl' param
+      SortedDocValues vals = DocValues.getSorted(reader, this.field);
+      if (vals.advance(sortDoc.docId) != sortDoc.docId) {
+        return false;
+      }
+      int ord = vals.ordValue();
+      ref = vals.lookupOrd(ord);
     }
-    int ord = vals.ordValue();
 
-    BytesRef ref = vals.lookupOrd(ord);
     fieldType.indexedToReadable(ref, cref);
     ew.put(this.field, "true".equals(cref.toString()));
     return true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java
index 3584317..e585a06 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/DateFieldWriter.java
@@ -32,13 +32,23 @@ class DateFieldWriter extends FieldWriter {
     this.field = field;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    NumericDocValues vals = DocValues.getNumeric(reader, this.field);
-    long val;
-    if (vals.advance(docId) == docId) {
-      val = vals.longValue();
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
+    Long val;
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        val = (long) sortValue.getCurrentValue();
+      } else { //empty-value
+        return false;
+      }
     } else {
-      return false;
+      // field is not part of 'sort' param, but part of 'fl' param
+      NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+      if (vals.advance(sortDoc.docId) == sortDoc.docId) {
+        val = vals.longValue();
+      } else {
+        return false;
+      }
     }
     ew.put(this.field, new Date(val));
     return true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java
index 1b953c5..8211269 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleFieldWriter.java
@@ -31,15 +31,26 @@ class DoubleFieldWriter extends FieldWriter {
     this.field = field;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    NumericDocValues vals = DocValues.getNumeric(reader, this.field);
-    long val;
-    if (vals.advance(docId) == docId) {
-      val = vals.longValue();
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        double val = (double) sortValue.getCurrentValue();
+        ew.put(this.field, val);
+        return true;
+      } else { //empty-value
+        return false;
+      }
     } else {
-      return false;
+      // field is not part of 'sort' param, but part of 'fl' param
+      NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+      if (vals.advance(sortDoc.docId) == sortDoc.docId) {
+        long val = vals.longValue();
+        ew.put(this.field, Double.longBitsToDouble(val));
+        return true;
+      } else {
+        return false;
+      }
     }
-    ew.put(this.field, Double.longBitsToDouble(val));
-    return true;
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/DoubleValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleValue.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleValue.java
index a37e13d..d85bbc3 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/DoubleValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleValue.java
@@ -32,11 +32,22 @@ class DoubleValue implements SortValue {
   protected DoubleComp comp;
   private int lastDocID;
   private LeafReader reader;
+  private boolean present;
 
   public DoubleValue(String field, DoubleComp comp) {
     this.field = field;
     this.comp = comp;
     this.currentValue = comp.resetValue();
+    this.present = false;
+  }
+
+  public Object getCurrentValue() {
+    assert present == true;
+    return currentValue;
+  }
+
+  public String getField() {
+    return field;
   }
 
   public DoubleValue copy() {
@@ -59,19 +70,28 @@ class DoubleValue implements SortValue {
       curDocID = vals.advance(docId);
     }
     if (docId == curDocID) {
+      present = true;
       currentValue = Double.longBitsToDouble(vals.longValue());
     } else {
+      present = false;
       currentValue = 0f;
     }
   }
 
+  @Override
+  public boolean isPresent() {
+    return present;
+  }
+
   public void setCurrentValue(SortValue sv) {
     DoubleValue dv = (DoubleValue)sv;
     this.currentValue = dv.currentValue;
+    this.present = dv.present;
   }
 
   public void reset() {
     this.currentValue = comp.resetValue();
+    this.present = false;
   }
 
   public int compareTo(SortValue o) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/DoubleValueSortDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/DoubleValueSortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/DoubleValueSortDoc.java
index 30fff33..25899f4 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/DoubleValueSortDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/DoubleValueSortDoc.java
@@ -25,6 +25,15 @@ class DoubleValueSortDoc extends SingleValueSortDoc {
 
   protected SortValue value2;
 
+  public SortValue getSortValue(String field) {
+    if (value1.getField().equals(field)) {
+      return value1;
+    } else if (value2.getField().equals(field)) {
+      return value2;
+    }
+    return null;
+  }
+
   public void setNextReader(LeafReaderContext context) throws IOException {
     this.ord = context.ord;
     this.docBase = context.docBase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
index 9cacfcc..4aaa6d8 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/ExportWriter.java
@@ -290,7 +290,7 @@ public class ExportWriter implements SolrCore.RawWriter, Closeable {
     LeafReaderContext context = leaves.get(ord);
     int fieldIndex = 0;
     for (FieldWriter fieldWriter : fieldWriters) {
-      if (fieldWriter.write(sortDoc.docId, context.reader(), ew, fieldIndex)) {
+      if (fieldWriter.write(sortDoc, context.reader(), ew, fieldIndex)) {
         ++fieldIndex;
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/FieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/FieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/FieldWriter.java
index bd69fc0..9c1361c 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/FieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/FieldWriter.java
@@ -23,5 +23,5 @@ import org.apache.lucene.index.LeafReader;
 import org.apache.solr.common.MapWriter;
 
 abstract class FieldWriter {
-  public abstract boolean write(int docId, LeafReader reader, MapWriter.EntryWriter out, int fieldIndex) throws IOException;
+  public abstract boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter out, int fieldIndex) throws IOException;
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java
index cfb2e25..e482ebc 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/FloatFieldWriter.java
@@ -31,15 +31,26 @@ class FloatFieldWriter extends FieldWriter {
     this.field = field;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    NumericDocValues vals = DocValues.getNumeric(reader, this.field);
-    int val;
-    if (vals.advance(docId) == docId) {
-      val = (int)vals.longValue();
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        float val = (float) sortValue.getCurrentValue();
+        ew.put(this.field, val);
+        return true;
+      } else { //empty-value
+        return false;
+      }
     } else {
-      return false;
+      // field is not part of 'sort' param, but part of 'fl' param
+      NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+      if (vals.advance(sortDoc.docId) == sortDoc.docId) {
+        int val = (int) vals.longValue();
+        ew.put(this.field, Float.intBitsToFloat(val));
+        return true;
+      } else {
+        return false;
+      }
     }
-    ew.put(this.field, Float.intBitsToFloat(val));
-    return true;
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/FloatValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/FloatValue.java b/solr/core/src/java/org/apache/solr/handler/export/FloatValue.java
index f6a5bbd..6d0d73d 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/FloatValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/FloatValue.java
@@ -30,11 +30,22 @@ class FloatValue implements SortValue {
   protected float currentValue;
   protected FloatComp comp;
   private int lastDocID;
+  private boolean present;
 
   public FloatValue(String field, FloatComp comp) {
     this.field = field;
     this.comp = comp;
     this.currentValue = comp.resetValue();
+    this.present = false;
+  }
+
+  public Object getCurrentValue() {
+    assert present == true;
+    return currentValue;
+  }
+
+  public String getField() {
+    return field;
   }
 
   public FloatValue copy() {
@@ -56,19 +67,28 @@ class FloatValue implements SortValue {
       curDocID = vals.advance(docId);
     }
     if (docId == curDocID) {
+      present = true;
       currentValue = Float.intBitsToFloat((int)vals.longValue());
     } else {
+      present = false;
       currentValue = 0f;
     }
   }
 
+  @Override
+  public boolean isPresent() {
+    return present;
+  }
+
   public void setCurrentValue(SortValue sv) {
     FloatValue fv = (FloatValue)sv;
     this.currentValue = fv.currentValue;
+    this.present = fv.present;
   }
 
   public void reset() {
     this.currentValue = comp.resetValue();
+    this.present = false;
   }
 
   public int compareTo(SortValue o) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java
index 66e5936..f78f098 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/IntFieldWriter.java
@@ -31,15 +31,25 @@ class IntFieldWriter extends FieldWriter {
     this.field = field;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
     int val;
-    if (vals.advance(docId) == docId) {
-      val = (int) vals.longValue();
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        val = (int) sortValue.getCurrentValue();
+      } else { //empty-value
+        return false;
+      }
     } else {
-      return false;
+      // field is not part of 'sort' param, but part of 'fl' param
+      NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+      if (vals.advance(sortDoc.docId) == sortDoc.docId) {
+        val = (int) vals.longValue();
+      } else {
+        return false;
+      }
     }
     ew.put(this.field, val);
     return true;
   }
-}
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/IntValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/IntValue.java b/solr/core/src/java/org/apache/solr/handler/export/IntValue.java
index d222aa3..bae23f9 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/IntValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/IntValue.java
@@ -30,6 +30,16 @@ public class IntValue implements SortValue {
   protected int currentValue;
   protected IntComp comp;
   private int lastDocID;
+  protected boolean present;
+
+  public Object getCurrentValue() {
+    assert present == true;
+    return currentValue;
+  }
+
+  public String getField() {
+    return field;
+  }
 
   public IntValue copy() {
     return new IntValue(field, comp);
@@ -39,6 +49,7 @@ public class IntValue implements SortValue {
     this.field = field;
     this.comp = comp;
     this.currentValue = comp.resetValue();
+    this.present = false;
   }
 
   public void setNextReader(LeafReaderContext context) throws IOException {
@@ -56,22 +67,32 @@ public class IntValue implements SortValue {
       curDocID = vals.advance(docId);
     }
     if (docId == curDocID) {
+      present = true;
       currentValue = (int) vals.longValue();
     } else {
+      present = false;
       currentValue = 0;
     }
   }
 
+  @Override
+  public boolean isPresent() {
+    return this.present;
+  }
+
   public int compareTo(SortValue o) {
     IntValue iv = (IntValue)o;
     return comp.compare(currentValue, iv.currentValue);
   }
 
-  public void setCurrentValue (SortValue value) {
-    currentValue = ((IntValue)value).currentValue;
+  public void setCurrentValue(SortValue sv) {
+    IntValue iv = (IntValue)sv;
+    this.currentValue = iv.currentValue;
+    this.present = iv.present;
   }
 
   public void reset() {
     currentValue = comp.resetValue();
+    this.present = false;
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java
index 87f2a32..6c1b4fb 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/LongFieldWriter.java
@@ -31,13 +31,23 @@ class LongFieldWriter extends FieldWriter {
     this.field = field;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
     long val;
-    if (vals.advance(docId) == docId) {
-      val = vals.longValue();
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        val = (long) sortValue.getCurrentValue();
+      } else { //empty-value
+        return false;
+      }
     } else {
-      return false;
+      // field is not part of 'sort' param, but part of 'fl' param
+      NumericDocValues vals = DocValues.getNumeric(reader, this.field);
+      if (vals.advance(sortDoc.docId) == sortDoc.docId) {
+        val = vals.longValue();
+      } else {
+        return false;
+      }
     }
     ew.put(field, val);
     return true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/LongValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/LongValue.java b/solr/core/src/java/org/apache/solr/handler/export/LongValue.java
index 28b9a07..dce365b 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/LongValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/LongValue.java
@@ -30,11 +30,22 @@ public class LongValue implements SortValue {
   protected long currentValue;
   protected LongComp comp;
   private int lastDocID;
+  private boolean present;
 
   public LongValue(String field, LongComp comp) {
     this.field = field;
     this.comp = comp;
     this.currentValue = comp.resetValue();
+    this.present = false;
+  }
+
+  public Object getCurrentValue() {
+    assert present == true;
+    return currentValue;
+  }
+
+  public String getField() {
+    return field;
   }
 
   public LongValue copy() {
@@ -56,15 +67,23 @@ public class LongValue implements SortValue {
       curDocID = vals.advance(docId);
     }
     if (docId == curDocID) {
+      present = true;
       currentValue = vals.longValue();
     } else {
+      present = false;
       currentValue = 0;
     }
   }
 
+  @Override
+  public boolean isPresent() {
+    return present;
+  }
+
   public void setCurrentValue(SortValue sv) {
     LongValue lv = (LongValue)sv;
     this.currentValue = lv.currentValue;
+    this.present = lv.present;
   }
 
   public int compareTo(SortValue o) {
@@ -74,5 +93,6 @@ public class LongValue implements SortValue {
 
   public void reset() {
     this.currentValue = comp.resetValue();
+    this.present = false;
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java
index 2e43207..72135b5 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/MultiFieldWriter.java
@@ -54,10 +54,10 @@ class MultiFieldWriter extends FieldWriter {
     }
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter out, int fieldIndex) throws IOException {
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter out, int fieldIndex) throws IOException {
     if (this.fieldType.isPointField()) {
       SortedNumericDocValues vals = DocValues.getSortedNumeric(reader, this.field);
-      if (!vals.advanceExact(docId)) return false;
+      if (!vals.advanceExact(sortDoc.docId)) return false;
       out.put(this.field,
           (IteratorWriter) w -> {
             for (int i = 0; i < vals.docValueCount(); i++) {
@@ -67,7 +67,7 @@ class MultiFieldWriter extends FieldWriter {
       return true;
     } else {
       SortedSetDocValues vals = DocValues.getSortedSet(reader, this.field);
-      if (vals.advance(docId) != docId) return false;
+      if (vals.advance(sortDoc.docId) != sortDoc.docId) return false;
       out.put(this.field,
           (IteratorWriter) w -> {
             long o;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/QuadValueSortDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/QuadValueSortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/QuadValueSortDoc.java
index c2dfcba..e4f5a02 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/QuadValueSortDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/QuadValueSortDoc.java
@@ -25,6 +25,19 @@ class QuadValueSortDoc extends TripleValueSortDoc {
 
   protected SortValue value4;
 
+  public SortValue getSortValue(String field) {
+    if (value1.getField().equals(field)) {
+      return value1;
+    } else if (value2.getField().equals(field)) {
+      return value2;
+    } else if (value3.getField().equals(field)) {
+      return value3;
+    } else if (value4.getField().equals(field)) {
+      return value4;
+    }
+    return null;
+  }
+
   public void setNextReader(LeafReaderContext context) throws IOException {
     this.ord = context.ord;
     this.docBase = context.docBase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java
index 88fc799..963901c 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/SingleValueSortDoc.java
@@ -25,6 +25,13 @@ class SingleValueSortDoc extends SortDoc {
 
   protected SortValue value1;
 
+  public SortValue getSortValue(String field) {
+    if (value1.getField().equals(field)) {
+      return value1;
+    }
+    return null;
+  }
+
   public void setNextReader(LeafReaderContext context) throws IOException {
     this.ord = context.ord;
     this.docBase = context.docBase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java
index 1302332..d26746c 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/SortDoc.java
@@ -32,9 +32,19 @@ class SortDoc {
   public SortDoc(SortValue[] sortValues) {
     this.sortValues = sortValues;
   }
+
   public SortDoc() {
   }
 
+  public SortValue getSortValue(String field) {
+    for (SortValue value : sortValues) {
+      if (value.getField().equals(field)) {
+        return value;
+      }
+    }
+    return null;
+  }
+
   public void setNextReader(LeafReaderContext context) throws IOException {
     this.ord = context.ord;
     this.docBase = context.docBase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/SortValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/SortValue.java b/solr/core/src/java/org/apache/solr/handler/export/SortValue.java
index 98afd0a..ad958c0 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/SortValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/SortValue.java
@@ -27,4 +27,12 @@ public interface SortValue extends Comparable<SortValue> {
   public void setCurrentValue(SortValue value);
   public void reset();
   public SortValue copy();
+  public Object getCurrentValue() throws IOException;
+  public String getField();
+
+  /**
+   *
+   * @return true if document has a value for the specified field
+   */
+  public boolean isPresent();
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java
index 9008f71..60718702 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/StringFieldWriter.java
@@ -37,14 +37,24 @@ class StringFieldWriter extends FieldWriter {
     this.fieldType = fieldType;
   }
 
-  public boolean write(int docId, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
-    SortedDocValues vals = DocValues.getSorted(reader, this.field);
-    if (vals.advance(docId) != docId) {
-      return false;
+  public boolean write(SortDoc sortDoc, LeafReader reader, MapWriter.EntryWriter ew, int fieldIndex) throws IOException {
+    BytesRef ref;
+    SortValue sortValue = sortDoc.getSortValue(this.field);
+    if (sortValue != null) {
+      if (sortValue.isPresent()) {
+        ref = (BytesRef) sortValue.getCurrentValue();
+      } else { //empty-value
+        return false;
+      }
+    } else {
+      // field is not part of 'sort' param, but part of 'fl' param
+      SortedDocValues vals = DocValues.getSorted(reader, this.field);
+      if (vals.advance(sortDoc.docId) != sortDoc.docId) {
+        return false;
+      }
+      int ord = vals.ordValue();
+      ref = vals.lookupOrd(ord);
     }
-    int ord = vals.ordValue();
-
-    BytesRef ref = vals.lookupOrd(ord);
     fieldType.indexedToReadable(ref, cref);
     ew.put(this.field, cref.toString());
     return true;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
index 23b1413..b7f020b 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/StringValue.java
@@ -38,6 +38,7 @@ class StringValue implements SortValue {
   protected int currentOrd;
   protected IntComp comp;
   protected int lastDocID;
+  private boolean present;
 
   public StringValue(SortedDocValues globalDocValues, String field, IntComp comp)  {
     this.globalDocValues = globalDocValues;
@@ -48,6 +49,7 @@ class StringValue implements SortValue {
     this.field = field;
     this.comp = comp;
     this.currentOrd = comp.resetValue();
+    this.present = false;
   }
 
   public StringValue copy() {
@@ -65,15 +67,32 @@ class StringValue implements SortValue {
       docValues.advance(docId);
     }
     if (docId == docValues.docID()) {
+      present = true;
       currentOrd = (int) toGlobal.get(docValues.ordValue());
     } else {
+      present = false;
       currentOrd = -1;
     }
   }
 
+  @Override
+  public boolean isPresent() {
+    return present;
+  }
+
   public void setCurrentValue(SortValue sv) {
     StringValue v = (StringValue)sv;
     this.currentOrd = v.currentOrd;
+    this.present = v.present;
+  }
+
+  public Object getCurrentValue() throws IOException {
+    assert present == true;
+    return docValues.lookupOrd(currentOrd);
+  }
+
+  public String getField() {
+    return field;
   }
 
   public void setNextReader(LeafReaderContext context) throws IOException {
@@ -86,6 +105,7 @@ class StringValue implements SortValue {
 
   public void reset() {
     this.currentOrd = comp.resetValue();
+    this.present = false;
   }
 
   public int compareTo(SortValue o) {

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/java/org/apache/solr/handler/export/TripleValueSortDoc.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/export/TripleValueSortDoc.java b/solr/core/src/java/org/apache/solr/handler/export/TripleValueSortDoc.java
index 0241ebc..3e8bfd8 100644
--- a/solr/core/src/java/org/apache/solr/handler/export/TripleValueSortDoc.java
+++ b/solr/core/src/java/org/apache/solr/handler/export/TripleValueSortDoc.java
@@ -25,6 +25,17 @@ class TripleValueSortDoc extends DoubleValueSortDoc {
 
   protected SortValue value3;
 
+  public SortValue getSortValue(String field) {
+    if (value1.getField().equals(field)) {
+      return value1;
+    } else if (value2.getField().equals(field)) {
+      return value2;
+    } else if (value3.getField().equals(field)) {
+      return value3;
+    }
+    return null;
+  }
+
   public void setNextReader(LeafReaderContext context) throws IOException {
     this.ord = context.ord;
     this.docBase = context.docBase;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/dfd2801c/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
index 0d1a9a0..6bb1bdb 100644
--- a/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
+++ b/solr/core/src/test/org/apache/solr/handler/export/TestExportWriter.java
@@ -58,6 +58,32 @@ public class TestExportWriter extends SolrTestCaseJ4 {
 
   }
 
+  @Test
+  public void testEmptyValues() throws Exception {
+    //Index 2 document with one document that doesn't have field2_i_p
+    //Sort and return field2_i_p
+    //Test SOLR-12572 for potential NPEs
+    assertU(delQ("*:*"));
+    assertU(commit());
+
+
+    assertU(adoc("id","1", "field2_i_p","1"));
+    assertU(adoc("id","2"));
+    assertU(commit());
+
+    String resp = h.query(req("q", "*:*", "qt", "/export", "fl", "id,field2_i_p", "sort", "field2_i_p asc"));
+    assertJsonEquals(resp, "{\n" +
+        "  \"responseHeader\":{\"status\":0},\n" +
+        "  \"response\":{\n" +
+        "    \"numFound\":2,\n" +
+        "    \"docs\":[{\n" +
+        "        \"id\":\"2\"}\n" +
+        "      ,{\n" +
+        "        \"id\":\"1\",\n" +
+        "        \"field2_i_p\":1}]}}");
+
+  }
+
   public static void createIndex() {
     assertU(adoc("id","1",
                  "floatdv","2.1",
@@ -569,7 +595,7 @@ public class TestExportWriter extends SolrTestCaseJ4 {
     assertU(delQ("*:*"));
     assertU(commit());
 
-    int numDocs = 1000;
+    int numDocs = 1000*40;
 
     //10 unique values
     String[] str_vals = new String[10];


[03/15] lucene-solr:jira/http2: SOLR-12690: Regularize LoggerFactory declarations

Posted by da...@apache.org.
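
The hunks in this part are mechanical renames of the logger field from LOG to log. The declaration they standardize on looks roughly like the following minimal sketch (placeholder class name, slf4j-api assumed on the classpath; not a file from this commit):

    import java.lang.invoke.MethodHandles;

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    public class SomeSolrComponent { // placeholder class name, for illustration only
      // The regularized form: a lower-case 'log' field, resolved via MethodHandles
      // so the declaration can be copied between classes without editing a class literal.
      private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

      public void doWork() {
        log.info("Doing work in {}", getClass().getSimpleName());
      }
    }
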
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 5041841..654b166 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -131,7 +131,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
 
   public static final String PATH = "/replication";
 
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   SolrCore core;
   
   private volatile boolean closed = false;
@@ -158,11 +158,11 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           try {
             version = Long.parseLong(commitTime);
           } catch (NumberFormatException e) {
-            LOG.warn("Version in commitData was not formatted correctly: " + commitTime, e);
+            log.warn("Version in commitData was not formatted correctly: " + commitTime, e);
           }
         }
       } catch (IOException e) {
-        LOG.warn("Unable to get version from commitData, commit: " + commit, e);
+        log.warn("Unable to get version from commitData, commit: " + commit, e);
       }
       return new CommitVersionInfo(generation, version);
     }
@@ -375,7 +375,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         nl.add(CMD_GET_FILE_LIST, commitList);
         l.add(nl);
       } catch (IOException e) {
-        LOG.warn("Exception while reading files for commit " + c, e);
+        log.warn("Exception while reading files for commit " + c, e);
       }
     }
     return l;
@@ -392,7 +392,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         checksum.update(buffer, 0, bytesRead);
       return checksum.getValue();
     } catch (Exception e) {
-      LOG.warn("Exception in finding checksum of " + f, e);
+      log.warn("Exception in finding checksum of " + f, e);
     } finally {
       IOUtils.closeQuietly(fis);
     }
@@ -406,7 +406,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     if (!indexFetchLock.tryLock())
       return IndexFetchResult.LOCK_OBTAIN_FAILED;
     if (core.getCoreContainer().isShutDown()) {
-      LOG.warn("I was asked to replicate but CoreContainer is shutting down");
+      log.warn("I was asked to replicate but CoreContainer is shutting down");
       return IndexFetchResult.CONTAINER_IS_SHUTTING_DOWN; 
     }
     try {
@@ -420,7 +420,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       }
       return currentIndexFetcher.fetchLatestIndex(forceReplication);
     } catch (Exception e) {
-      SolrException.log(LOG, "Index fetch failed ", e);
+      SolrException.log(log, "Index fetch failed ", e);
       if (currentIndexFetcher != pollingIndexFetcher) {
         currentIndexFetcher.destroy();
       }
@@ -568,7 +568,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
       snapShooter.validateCreateSnapshot();
       snapShooter.createSnapAsync(numberToKeep, (nl) -> snapShootDetails = nl);
     } catch (Exception e) {
-      LOG.error("Exception during creating a snapshot", e);
+      log.error("Exception during creating a snapshot", e);
       rsp.add("exception", e);
     }
   }
@@ -629,7 +629,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
               long checksum = CodecUtil.retrieveChecksum(in);
               fileMeta.put(CHECKSUM, checksum);
             } catch (Exception e) {
-              LOG.warn("Could not read checksum from index file: " + file, e);
+              log.warn("Could not read checksum from index file: " + file, e);
             }
           }
 
@@ -647,7 +647,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           try {
             fileMeta.put(CHECKSUM, CodecUtil.retrieveChecksum(in));
           } catch (Exception e) {
-            LOG.warn("Could not read checksum from index file: " + infos.getSegmentsFileName(), e);
+            log.warn("Could not read checksum from index file: " + infos.getSegmentsFileName(), e);
           }
         }
       }
@@ -655,13 +655,13 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     } catch (IOException e) {
       rsp.add("status", "unable to get file names for given index generation");
       rsp.add(EXCEPTION, e);
-      LOG.error("Unable to get file names for indexCommit generation: " + gen, e);
+      log.error("Unable to get file names for indexCommit generation: " + gen, e);
     } finally {
       if (dir != null) {
         try {
           core.getDirectoryFactory().release(dir);
         } catch (IOException e) {
-          SolrException.log(LOG, "Could not release directory after fetching file list", e);
+          SolrException.log(log, "Could not release directory after fetching file list", e);
         }
       }
     }
@@ -670,19 +670,19 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     if (solrParams.getBool(TLOG_FILES, false)) {
       try {
         List<Map<String, Object>> tlogfiles = getTlogFileList(commit);
-        LOG.info("Adding tlog files to list: " + tlogfiles);
+        log.info("Adding tlog files to list: " + tlogfiles);
         rsp.add(TLOG_FILES, tlogfiles);
       }
       catch (IOException e) {
         rsp.add("status", "unable to get tlog file names for given index generation");
         rsp.add(EXCEPTION, e);
-        LOG.error("Unable to get tlog file names for indexCommit generation: " + gen, e);
+        log.error("Unable to get tlog file names for indexCommit generation: " + gen, e);
       }
     }
 
     if (confFileNameAlias.size() < 1 || core.getCoreContainer().isZooKeeperAware())
       return;
-    LOG.debug("Adding config files to list: " + includeConfFiles);
+    log.debug("Adding config files to list: " + includeConfFiles);
     //if configuration files need to be included get their details
     rsp.add(CONF_FILES, getConfFileInfoFromCache(confFileNameAlias, confFileInfoCache));
   }
@@ -776,14 +776,14 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
   void disablePoll() {
     if (isSlave) {
       pollDisabled.set(true);
-      LOG.info("inside disable poll, value of pollDisabled = " + pollDisabled);
+      log.info("inside disable poll, value of pollDisabled = " + pollDisabled);
     }
   }
 
   void enablePoll() {
     if (isSlave) {
       pollDisabled.set(false);
-      LOG.info("inside enable poll, value of pollDisabled = " + pollDisabled);
+      log.info("inside enable poll, value of pollDisabled = " + pollDisabled);
     }
   }
 
@@ -828,7 +828,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     try {
       return core.withSearcher(searcher -> CommitVersionInfo.build(searcher.getIndexReader().getIndexCommit()));
     } catch (IOException e) {
-      LOG.warn("Unable to get index commit: ", e);
+      log.warn("Unable to get index commit: ", e);
       return null;
     }
   }
@@ -925,7 +925,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           NamedList nl = fetcher.getDetails();
           slave.add("masterDetails", nl.get(CMD_DETAILS));
         } catch (Exception e) {
-          LOG.warn(
+          log.warn(
               "Exception while invoking 'details' method for replication on master ",
               e);
           slave.add(ERR_STATUS, "invalid_master");
@@ -1033,7 +1033,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           slave.add("timeRemaining", String.valueOf(estimatedTimeRemaining) + "s");
           slave.add("downloadSpeed", NumberUtils.readableSize(downloadSpeed));
         } catch (Exception e) {
-          LOG.error("Exception while writing replication details: ", e);
+          log.error("Exception while writing replication details: ", e);
         }
       }
     }
@@ -1141,22 +1141,22 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     pollIntervalStr = intervalStr;
     pollIntervalNs = readIntervalNs(pollIntervalStr);
     if (pollIntervalNs == null || pollIntervalNs <= 0) {
-      LOG.info(" No value set for 'pollInterval'. Timer Task not started.");
+      log.info(" No value set for 'pollInterval'. Timer Task not started.");
       return;
     }
 
     Runnable task = () -> {
       if (pollDisabled.get()) {
-        LOG.info("Poll disabled");
+        log.info("Poll disabled");
         return;
       }
       try {
-        LOG.debug("Polling for index modifications");
+        log.debug("Polling for index modifications");
         markScheduledExecutionStart();
         IndexFetchResult fetchResult = doFetch(null, false);
         if (pollListener != null) pollListener.onComplete(core, fetchResult);
       } catch (Exception e) {
-        LOG.error("Exception in fetching index", e);
+        log.error("Exception in fetching index", e);
       }
     };
     executorService = Executors.newSingleThreadScheduledExecutor(
@@ -1165,7 +1165,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
     long initialDelayNs = new Random().nextLong() % pollIntervalNs
         + TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
     executorService.scheduleAtFixedRate(task, initialDelayNs, pollIntervalNs, TimeUnit.NANOSECONDS);
-    LOG.info("Poll scheduled at an interval of {}ms",
+    log.info("Poll scheduled at an interval of {}ms",
         TimeUnit.MILLISECONDS.convert(pollIntervalNs, TimeUnit.NANOSECONDS));
   }
 
@@ -1193,7 +1193,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
 
     if (enableMaster || (enableSlave && !currentIndexFetcher.fetchFromLeader)) {
       if (core.getCoreContainer().getZkController() != null) {
-        LOG.warn("SolrCloud is enabled for core " + core.getName() + " but so is old-style replication. Make sure you" +
+        log.warn("SolrCloud is enabled for core " + core.getName() + " but so is old-style replication. Make sure you" +
             " intend this behavior, it usually indicates a mis-configuration. Master setting is " +
             Boolean.toString(enableMaster) + " and slave setting is " + Boolean.toString(enableSlave));
       }
@@ -1214,7 +1214,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           // if there is an alias add it or it is null
           confFileNameAlias.add(strs[0], strs.length > 1 ? strs[1] : null);
         }
-        LOG.info("Replication enabled for following config files: " + includeConfFiles);
+        log.info("Replication enabled for following config files: " + includeConfFiles);
       }
       List backup = master.getAll("backupAfter");
       boolean backupOnCommit = backup.contains("commit");
@@ -1238,7 +1238,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
             solrPolicy.setMaxOptimizedCommitsToKeep(1);
           }
         } else {
-          LOG.warn("Replication can't call setMaxOptimizedCommitsToKeep on " + policy);
+          log.warn("Replication can't call setMaxOptimizedCommitsToKeep on " + policy);
         }
       }
 
@@ -1282,7 +1282,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           iw.decref();
 
         } catch (IOException e) {
-          LOG.warn("Unable to get IndexCommit on startup", e);
+          log.warn("Unable to get IndexCommit on startup", e);
         } finally {
           if (s!=null) s.decref();
         }
@@ -1309,7 +1309,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
         }
       }
     }
-    LOG.info("Commits will be reserved for " + reserveCommitDuration + "ms.");
+    log.info("Commits will be reserved for " + reserveCommitDuration + "ms.");
   }
 
   // check master or slave is enabled
@@ -1416,7 +1416,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
             snapShooter.validateCreateSnapshot();
             snapShooter.createSnapAsync(numberToKeep, (nl) -> snapShootDetails = nl);
           } catch (Exception e) {
-            LOG.error("Exception while snapshooting", e);
+            log.error("Exception while snapshooting", e);
           }
         }
       }
@@ -1560,7 +1560,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           }
           fos.write(buf, 0, read);
           fos.flush();
-          LOG.debug("Wrote {} bytes for file {}", offset + read, fileName);
+          log.debug("Wrote {} bytes for file {}", offset + read, fileName);
 
           //Pause if necessary
           maxBytesBeforePause += read;
@@ -1577,7 +1577,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           in.seek(offset);
         }
       } catch (IOException e) {
-        LOG.warn("Exception while writing response for params: " + params, e);
+        log.warn("Exception while writing response for params: " + params, e);
       } finally {
         if (in != null) {
           in.close();
@@ -1645,7 +1645,7 @@ public class ReplicationHandler extends RequestHandlerBase implements SolrCoreAw
           writeNothingAndFlush();
         }
       } catch (IOException e) {
-        LOG.warn("Exception while writing response for params: " + params, e);
+        log.warn("Exception while writing response for params: " + params, e);
       } finally {
         IOUtils.closeQuietly(inputStream);
         extendReserveAndReleaseCommitPoint();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
index b955554..2c3c691 100644
--- a/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
+++ b/solr/core/src/java/org/apache/solr/handler/SnapShooter.java
@@ -54,7 +54,7 @@ import org.slf4j.LoggerFactory;
  * @since solr 1.4
  */
 public class SnapShooter {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private SolrCore solrCore;
   private String snapshotName = null;
   private String directoryName = null;
@@ -204,7 +204,7 @@ public class SnapShooter {
       try {
         result.accept(createSnapshot(indexCommit));
       } catch (Exception e) {
-        LOG.error("Exception while creating snapshot", e);
+        log.error("Exception while creating snapshot", e);
         NamedList snapShootDetails = new NamedList<>();
         snapShootDetails.add("exception", e.getMessage());
         result.accept(snapShootDetails);
@@ -215,7 +215,7 @@ public class SnapShooter {
         try {
           deleteOldBackups(numberToKeep);
         } catch (IOException e) {
-          LOG.warn("Unable to delete old snapshots ", e);
+          log.warn("Unable to delete old snapshots ", e);
         }
       }
     }).start();
@@ -225,7 +225,7 @@ public class SnapShooter {
   // note: remember to reserve the indexCommit first so it won't get deleted concurrently
   protected NamedList createSnapshot(final IndexCommit indexCommit) throws Exception {
     assert indexCommit != null;
-    LOG.info("Creating backup snapshot " + (snapshotName == null ? "<not named>" : snapshotName) + " at " + baseSnapDirPath);
+    log.info("Creating backup snapshot " + (snapshotName == null ? "<not named>" : snapshotName) + " at " + baseSnapDirPath);
     boolean success = false;
     try {
       NamedList<Object> details = new NamedList<>();
@@ -245,7 +245,7 @@ public class SnapShooter {
       details.add("status", "success");
       details.add("snapshotCompletedAt", new Date().toString());//bad; should be Instant.now().toString()
       details.add("snapshotName", snapshotName);
-      LOG.info("Done creating backup snapshot: " + (snapshotName == null ? "<not named>" : snapshotName) +
+      log.info("Done creating backup snapshot: " + (snapshotName == null ? "<not named>" : snapshotName) +
           " at " + baseSnapDirPath);
       success = true;
       return details;
@@ -254,7 +254,7 @@ public class SnapShooter {
         try {
           backupRepo.deleteDirectory(snapshotDirPath);
         } catch (Exception excDuringDelete) {
-          LOG.warn("Failed to delete "+snapshotDirPath+" after snapshot creation failed due to: "+excDuringDelete);
+          log.warn("Failed to delete "+snapshotDirPath+" after snapshot creation failed due to: "+excDuringDelete);
         }
       }
     }
@@ -284,7 +284,7 @@ public class SnapShooter {
   }
 
   protected void deleteNamedSnapshot(ReplicationHandler replicationHandler) {
-    LOG.info("Deleting snapshot: " + snapshotName);
+    log.info("Deleting snapshot: " + snapshotName);
 
     NamedList<Object> details = new NamedList<>();
 
@@ -297,7 +297,7 @@ public class SnapShooter {
 
     } catch (IOException e) {
       details.add("status", "Unable to delete snapshot: " + snapshotName);
-      LOG.warn("Unable to delete snapshot: " + snapshotName, e);
+      log.warn("Unable to delete snapshot: " + snapshotName, e);
     }
 
     replicationHandler.snapShootDetails = details;
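
For reference, the declaration pattern these hunks converge on is a lowercase "log" field resolved through MethodHandles, as in this minimal sketch (the class name ExampleHandler is hypothetical, not from the patch):

import java.lang.invoke.MethodHandles;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleHandler {
  // Lowercase field name; lookupClass() resolves to ExampleHandler at class-initialization time.
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  public void doWork(String path) {
    // Parameterized message; the formatted string is only built if DEBUG is enabled.
    log.debug("Processing path [{}]", path);
  }
}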

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
index 69fac0b..ae99453 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/AutoscalingHistoryHandler.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
  * collection.
  */
 public class AutoscalingHistoryHandler extends RequestHandlerBase implements PermissionNameProvider {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final String SYSTEM_COLLECTION_PARAM = "systemCollection";
 
@@ -134,7 +134,7 @@ public class AutoscalingHistoryHandler extends RequestHandlerBase implements Per
       if ((e instanceof SolrException) && e.getMessage().contains("Collection not found")) {
         // relatively benign
         String msg = "Collection " + collection + " does not exist.";
-        LOG.info(msg);
+        log.info(msg);
         rsp.getValues().add("error", msg);
       } else {
         throw e;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index 2ec8cc4..27eaedd 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -122,7 +122,7 @@ import org.slf4j.LoggerFactory;
 public class QueryComponent extends SearchComponent
 {
   public static final String COMPONENT_NAME = "query";
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public void prepare(ResponseBuilder rb) throws IOException
@@ -299,7 +299,7 @@ public class QueryComponent extends SearchComponent
   @Override
   public void process(ResponseBuilder rb) throws IOException
   {
-    LOG.debug("process: {}", rb.req.getParams());
+    log.debug("process: {}", rb.req.getParams());
   
     SolrQueryRequest req = rb.req;
     SolrParams params = req.getParams();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
index b207384..6e0323c 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SpellCheckComponent.java
@@ -87,7 +87,7 @@ import org.slf4j.LoggerFactory;
  * @since solr 1.3
  */
 public class SpellCheckComponent extends SearchComponent implements SolrCoreAware, SpellingParams {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final boolean DEFAULT_ONLY_MORE_POPULAR = false;
 
@@ -258,10 +258,10 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
             }
           }
         } catch (IOException e){
-          LOG.error(e.toString());
+          log.error(e.toString());
           return null;
         } catch (SyntaxError e) {
-          LOG.error(e.toString());
+          log.error(e.toString());
           return null;
         }
         
@@ -407,7 +407,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
             throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
                 "Unable to read spelling info for shard: " + srsp.getShard(), e);
           }
-          LOG.info(srsp.getShard() + " " + nl);
+          log.info(srsp.getShard() + " " + nl);
           if (nl != null) {
             mergeData.totalNumberShardResponses++;
             collectShardSuggestions(nl, mergeData);
@@ -704,7 +704,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
   @Override
   public void inform(SolrCore core) {
     if (initParams != null) {
-      LOG.info("Initializing spell checkers");
+      log.info("Initializing spell checkers");
       boolean hasDefault = false;
       for (int i = 0; i < initParams.size(); i++) {
         if (initParams.getName(i).equals("spellchecker")) {
@@ -728,7 +728,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
 
       //ensure that there is at least one query converter defined
       if (queryConverters.size() == 0) {
-        LOG.trace("No queryConverter defined, using default converter");
+        log.trace("No queryConverter defined, using default converter");
         queryConverters.put("queryConverter", new SpellingQueryConverter());
       }
 
@@ -778,7 +778,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
       boolean buildOnCommit = Boolean.parseBoolean((String) spellchecker.get("buildOnCommit"));
       boolean buildOnOptimize = Boolean.parseBoolean((String) spellchecker.get("buildOnOptimize"));
       if (buildOnCommit || buildOnOptimize) {
-        LOG.info("Registering newSearcher listener for spellchecker: " + checker.getDictionaryName());
+        log.info("Registering newSearcher listener for spellchecker: " + checker.getDictionaryName());
         core.registerNewSearcherListener(new SpellCheckerListener(core, checker, buildOnCommit, buildOnOptimize));
       }
     } else {
@@ -810,11 +810,11 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
       if (currentSearcher == null) {
         // firstSearcher event
         try {
-          LOG.info("Loading spell index for spellchecker: "
+          log.info("Loading spell index for spellchecker: "
                   + checker.getDictionaryName());
           checker.reload(core, newSearcher);
         } catch (IOException e) {
-          LOG.error( "Exception in reloading spell check index for spellchecker: " + checker.getDictionaryName(), e);
+          log.error( "Exception in reloading spell check index for spellchecker: " + checker.getDictionaryName(), e);
         }
       } else {
         // newSearcher event
@@ -824,7 +824,7 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
           if (newSearcher.getIndexReader().leaves().size() == 1)  {
             buildSpellIndex(newSearcher);
           } else  {
-            LOG.info("Index is not optimized therefore skipping building spell check index for: " + checker.getDictionaryName());
+            log.info("Index is not optimized therefore skipping building spell check index for: " + checker.getDictionaryName());
           }
         }
       }
@@ -833,10 +833,10 @@ public class SpellCheckComponent extends SearchComponent implements SolrCoreAwar
 
     private void buildSpellIndex(SolrIndexSearcher newSearcher) {
       try {
-        LOG.info("Building spell index for spell checker: " + checker.getDictionaryName());
+        log.info("Building spell index for spell checker: " + checker.getDictionaryName());
         checker.build(core, newSearcher);
       } catch (Exception e) {
-        LOG.error(
+        log.error(
                 "Exception in building spell check index for spellchecker: " + checker.getDictionaryName(), e);
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index cba7e93..2d6fdb1 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -65,7 +65,7 @@ import org.slf4j.LoggerFactory;
  * and for initializing them as specified by SolrConfig
  */
 public class SuggestComponent extends SearchComponent implements SolrCoreAware, SuggesterParams, Accountable, SolrMetricProducer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /** Name used to identify whether the user query concerns this component */
   public static final String COMPONENT_NAME = "suggest";
@@ -116,7 +116,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @Override
   public void inform(SolrCore core) {
     if (initParams != null) {
-      LOG.info("Initializing SuggestComponent");
+      log.info("Initializing SuggestComponent");
       boolean hasDefault = false;
       for (int i = 0; i < initParams.size(); i++) {
         if (initParams.getName(i).equals(CONFIG_PARAM_LABEL)) {
@@ -152,7 +152,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
           
           if (buildOnCommit || buildOnOptimize || buildOnStartup) {
             SuggesterListener listener = new SuggesterListener(core, suggester, buildOnCommit, buildOnOptimize, buildOnStartup, core.isReloaded());
-            LOG.info("Registering searcher listener for suggester: " + suggester.getName() + " - " + listener);
+            log.info("Registering searcher listener for suggester: " + suggester.getName() + " - " + listener);
             core.registerFirstSearcherListener(listener);
             core.registerNewSearcherListener(listener);
           }
@@ -165,7 +165,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @Override
   public void prepare(ResponseBuilder rb) throws IOException {
     SolrParams params = rb.req.getParams();
-    LOG.info("SuggestComponent prepare with : " + params);
+    log.info("SuggestComponent prepare with : " + params);
     if (!params.getBool(COMPONENT_NAME, false)) {
       return;
     }
@@ -197,7 +197,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @Override
   public int distributedProcess(ResponseBuilder rb) {
     SolrParams params = rb.req.getParams();
-    LOG.info("SuggestComponent distributedProcess with : " + params);
+    log.info("SuggestComponent distributedProcess with : " + params);
     if (rb.stage < ResponseBuilder.STAGE_EXECUTE_QUERY) 
       return ResponseBuilder.STAGE_EXECUTE_QUERY;
     if (rb.stage == ResponseBuilder.STAGE_EXECUTE_QUERY) {
@@ -219,7 +219,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @Override
   public void process(ResponseBuilder rb) throws IOException {
     SolrParams params = rb.req.getParams();
-    LOG.info("SuggestComponent process with : " + params);
+    log.info("SuggestComponent process with : " + params);
     if (!params.getBool(COMPONENT_NAME, false) || suggesters.isEmpty()) {
       return;
     }
@@ -274,7 +274,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
   @Override
   public void finishStage(ResponseBuilder rb) {
     SolrParams params = rb.req.getParams();
-    LOG.info("SuggestComponent finishStage with : " + params);
+    log.info("SuggestComponent finishStage with : " + params);
     if (!params.getBool(COMPONENT_NAME, false) || rb.stage != ResponseBuilder.STAGE_GET_FIELDS)
       return;
     int count = params.getInt(SUGGEST_COUNT, 1);
@@ -289,7 +289,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
           @SuppressWarnings("unchecked")
           Map<String, SimpleOrderedMap<NamedList<Object>>> namedList = 
               (Map<String, SimpleOrderedMap<NamedList<Object>>>) resp.get(SuggesterResultLabels.SUGGEST);
-          LOG.info(srsp.getShard() + " : " + namedList);
+          log.info(srsp.getShard() + " : " + namedList);
           suggesterResults.add(toSuggesterResult(namedList));
         }
       }
@@ -508,20 +508,20 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
                             SolrIndexSearcher currentSearcher) {
       long thisCallCount = callCount.incrementAndGet();
       if (isCoreReload && thisCallCount == 1) {
-        LOG.info("Skipping first newSearcher call for suggester " + suggester + " in core reload");
+        log.info("Skipping first newSearcher call for suggester " + suggester + " in core reload");
         return;
       } else if (thisCallCount == 1 || (isCoreReload && thisCallCount == 2)) {
         if (buildOnStartup) {
-          LOG.info("buildOnStartup: " + suggester.getName());
+          log.info("buildOnStartup: " + suggester.getName());
           buildSuggesterIndex(newSearcher);
         }
       } else {
         if (buildOnCommit)  {
-          LOG.info("buildOnCommit: " + suggester.getName());
+          log.info("buildOnCommit: " + suggester.getName());
           buildSuggesterIndex(newSearcher);
         } else if (buildOnOptimize) {
           if (newSearcher.getIndexReader().leaves().size() == 1)  {
-            LOG.info("buildOnOptimize: " + suggester.getName());
+            log.info("buildOnOptimize: " + suggester.getName());
             buildSuggesterIndex(newSearcher);
           }
         }
@@ -533,7 +533,7 @@ public class SuggestComponent extends SearchComponent implements SolrCoreAware,
       try {
         suggester.build(core, newSearcher);
       } catch (Exception e) {
-        LOG.error("Exception in building suggester index for: " + suggester.getName(), e);
+        log.error("Exception in building suggester index for: " + suggester.getName(), e);
       }
     }
 

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java b/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
index 5745dec..4cdb78f 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/ReporterClientCache.java
@@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory;
  * {@link org.apache.solr.metrics.SolrMetricReporter}.
  */
 public class ReporterClientCache<T> implements Closeable {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private final Map<String, T> cache = new ConcurrentHashMap<>();
 
@@ -59,7 +59,7 @@ public class ReporterClientCache<T> implements Closeable {
         item = clientProvider.get();
         cache.put(id, item);
       } catch (Exception e) {
-        LOG.warn("Error providing a new client for id=" + id, e);
+        log.warn("Error providing a new client for id=" + id, e);
         item = null;
       }
     }
@@ -75,7 +75,7 @@ public class ReporterClientCache<T> implements Closeable {
         try {
           ((Closeable)client).close();
         } catch (Exception e) {
-          LOG.warn("Error closing client " + client + ", ignoring...", e);
+          log.warn("Error closing client " + client + ", ignoring...", e);
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
index 54da5fa..ebf47e6 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
@@ -64,7 +64,7 @@ import org.slf4j.LoggerFactory;
  * </ul>
  */
 public class JmxMetricsReporter implements Reporter, Closeable {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final String INSTANCE_TAG = "_instanceTag";
 
@@ -520,11 +520,11 @@ public class JmxMetricsReporter implements Reporter, Closeable {
     private void registerMBean(Object mBean, ObjectName objectName) throws InstanceAlreadyExistsException, JMException {
       // remove previous bean if exists
       if (mBeanServer.isRegistered(objectName)) {
-        if (LOG.isDebugEnabled()) {
+        if (log.isDebugEnabled()) {
           Set<ObjectInstance> objects = mBeanServer.queryMBeans(objectName, null);
-          LOG.debug("## removing existing " + objects.size() + " bean(s) for " + objectName.getCanonicalName() + ", current tag=" + tag + ":");
+          log.debug("## removing existing " + objects.size() + " bean(s) for " + objectName.getCanonicalName() + ", current tag=" + tag + ":");
           for (ObjectInstance inst : objects) {
-            LOG.debug("## - tag=" + mBeanServer.getAttribute(inst.getObjectName(), INSTANCE_TAG));
+            log.debug("## - tag=" + mBeanServer.getAttribute(inst.getObjectName(), INSTANCE_TAG));
           }
         }
         mBeanServer.unregisterMBean(objectName);
@@ -538,7 +538,7 @@ public class JmxMetricsReporter implements Reporter, Closeable {
       } else {
         registered.put(objectName, objectName);
       }
-      LOG.debug("## registered " + objectInstance.getObjectName().getCanonicalName() + ", tag=" + tag);
+      log.debug("## registered " + objectInstance.getObjectName().getCanonicalName() + ", tag=" + tag);
     }
 
     private void unregisterMBean(ObjectName originalObjectName) throws InstanceNotFoundException, MBeanRegistrationException {
@@ -548,7 +548,7 @@ public class JmxMetricsReporter implements Reporter, Closeable {
       }
       Set<ObjectInstance> objects = mBeanServer.queryMBeans(objectName, exp);
       for (ObjectInstance o : objects) {
-        LOG.debug("## Unregistered " + o.getObjectName().getCanonicalName() + ", tag=" + tag);
+        log.debug("## Unregistered " + o.getObjectName().getCanonicalName() + ", tag=" + tag);
         mBeanServer.unregisterMBean(o.getObjectName());
       }
     }
@@ -566,9 +566,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
           }
         }
       } catch (InstanceAlreadyExistsException e) {
-        LOG.debug("Unable to register gauge", e);
+        log.debug("Unable to register gauge", e);
       } catch (JMException e) {
-        LOG.warn("Unable to register gauge", e);
+        log.warn("Unable to register gauge", e);
       }
     }
 
@@ -578,9 +578,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         final ObjectName objectName = createName("gauges", name);
         unregisterMBean(objectName);
       } catch (InstanceNotFoundException e) {
-        LOG.debug("Unable to unregister gauge", e);
+        log.debug("Unable to unregister gauge", e);
       } catch (MBeanRegistrationException e) {
-        LOG.warn("Unable to unregister gauge", e);
+        log.warn("Unable to unregister gauge", e);
       }
     }
 
@@ -592,9 +592,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
           registerMBean(new JmxCounter(counter, objectName, tag), objectName);
         }
       } catch (InstanceAlreadyExistsException e) {
-        LOG.debug("Unable to register counter", e);
+        log.debug("Unable to register counter", e);
       } catch (JMException e) {
-        LOG.warn("Unable to register counter", e);
+        log.warn("Unable to register counter", e);
       }
     }
 
@@ -604,9 +604,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         final ObjectName objectName = createName("counters", name);
         unregisterMBean(objectName);
       } catch (InstanceNotFoundException e) {
-        LOG.debug("Unable to unregister counter", e);
+        log.debug("Unable to unregister counter", e);
       } catch (MBeanRegistrationException e) {
-        LOG.warn("Unable to unregister counter", e);
+        log.warn("Unable to unregister counter", e);
       }
     }
 
@@ -618,9 +618,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
           registerMBean(new JmxHistogram(histogram, objectName, tag), objectName);
         }
       } catch (InstanceAlreadyExistsException e) {
-        LOG.debug("Unable to register histogram", e);
+        log.debug("Unable to register histogram", e);
       } catch (JMException e) {
-        LOG.warn("Unable to register histogram", e);
+        log.warn("Unable to register histogram", e);
       }
     }
 
@@ -630,9 +630,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         final ObjectName objectName = createName("histograms", name);
         unregisterMBean(objectName);
       } catch (InstanceNotFoundException e) {
-        LOG.debug("Unable to unregister histogram", e);
+        log.debug("Unable to unregister histogram", e);
       } catch (MBeanRegistrationException e) {
-        LOG.warn("Unable to unregister histogram", e);
+        log.warn("Unable to unregister histogram", e);
       }
     }
 
@@ -644,9 +644,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
           registerMBean(new JmxMeter(meter, objectName, rateUnit, tag), objectName);
         }
       } catch (InstanceAlreadyExistsException e) {
-        LOG.debug("Unable to register meter", e);
+        log.debug("Unable to register meter", e);
       } catch (JMException e) {
-        LOG.warn("Unable to register meter", e);
+        log.warn("Unable to register meter", e);
       }
     }
 
@@ -656,9 +656,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         final ObjectName objectName = createName("meters", name);
         unregisterMBean(objectName);
       } catch (InstanceNotFoundException e) {
-        LOG.debug("Unable to unregister meter", e);
+        log.debug("Unable to unregister meter", e);
       } catch (MBeanRegistrationException e) {
-        LOG.warn("Unable to unregister meter", e);
+        log.warn("Unable to unregister meter", e);
       }
     }
 
@@ -670,9 +670,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
           registerMBean(new JmxTimer(timer, objectName, rateUnit, durationUnit, tag), objectName);
         }
       } catch (InstanceAlreadyExistsException e) {
-        LOG.debug("Unable to register timer", e);
+        log.debug("Unable to register timer", e);
       } catch (JMException e) {
-        LOG.warn("Unable to register timer", e);
+        log.warn("Unable to register timer", e);
       }
     }
 
@@ -682,9 +682,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         final ObjectName objectName = createName("timers", name);
         unregisterMBean(objectName);
       } catch (InstanceNotFoundException e) {
-        LOG.debug("Unable to unregister timer", e);
+        log.debug("Unable to unregister timer", e);
       } catch (MBeanRegistrationException e) {
-        LOG.warn("Unable to unregister timer", e);
+        log.warn("Unable to unregister timer", e);
       }
     }
 
@@ -697,9 +697,9 @@ public class JmxMetricsReporter implements Reporter, Closeable {
         try {
           unregisterMBean(name);
         } catch (InstanceNotFoundException e) {
-          LOG.debug("Unable to unregister metric", e);
+          log.debug("Unable to unregister metric", e);
         } catch (MBeanRegistrationException e) {
-          LOG.warn("Unable to unregister metric", e);
+          log.warn("Unable to unregister metric", e);
         }
       }
       registered.clear();
@@ -737,7 +737,7 @@ public class JmxMetricsReporter implements Reporter, Closeable {
       } else if (v instanceof Gauge) {
         listener.onGaugeAdded(k, (Gauge)v);
       } else {
-        LOG.warn("Unknown metric type " + v.getClass().getName() + " for metric '" + k + "', ignoring");
+        log.warn("Unknown metric type " + v.getClass().getName() + " for metric '" + k + "', ignoring");
       }
     });
   }
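
The register/unregister hunks above keep a consistent level split: the benign cases (InstanceAlreadyExistsException, InstanceNotFoundException) are logged at DEBUG, while unexpected JMX failures are logged at WARN. A minimal sketch of that split, not taken from the patch (class and method names hypothetical):

import java.lang.invoke.MethodHandles;

import javax.management.InstanceAlreadyExistsException;
import javax.management.JMException;
import javax.management.MBeanServer;
import javax.management.ObjectName;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class JmxRegistrationSketch {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  void register(MBeanServer server, Object mBean, ObjectName name) {
    try {
      server.registerMBean(mBean, name);
    } catch (InstanceAlreadyExistsException e) {
      // Benign: the bean is already there, e.g. registered by another thread.
      log.debug("Unable to register bean", e);
    } catch (JMException e) {
      // Unexpected: surface it so operators can see it.
      log.warn("Unable to register bean", e);
    }
  }
}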

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
index af794fc..e1f0afa 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrShardReporter.java
@@ -63,7 +63,7 @@ public class SolrShardReporter extends SolrCoreReporter {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   public static final List<String> DEFAULT_FILTERS = new ArrayList(){{
-    add("TLOG.*");
+    add("Tlog.*");
     add("CORE\\.fs.*");
     add("REPLICATION.*");
     add("INDEX\\.flush.*");

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
index 2cc6ec4..c054757 100644
--- a/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/SimpleFacets.java
@@ -107,7 +107,7 @@ import static org.apache.solr.common.params.CommonParams.SORT;
  * to leverage any of its functionality.
  */
 public class SimpleFacets {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /** The main set of documents all facet counts should be relative to */
   protected DocSet docsOrig;
@@ -518,7 +518,7 @@ public class SimpleFacets {
             if (ft.isPointField() && mincount <= 0) { // default is mincount=0.  See SOLR-10033 & SOLR-11174.
               String warningMessage 
                   = "Raising facet.mincount from " + mincount + " to 1, because field " + field + " is Points-based.";
-              LOG.warn(warningMessage);
+              log.warn(warningMessage);
               List<String> warnings = (List<String>)rb.rsp.getResponseHeader().get("warnings");
               if (null == warnings) {
                 warnings = new ArrayList<>();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
index b841a03..81172f3 100644
--- a/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/BinaryResponseWriter.java
@@ -42,7 +42,7 @@ import org.slf4j.LoggerFactory;
 
 
 public class BinaryResponseWriter implements BinaryQueryResponseWriter {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public void write(OutputStream out, SolrQueryRequest req, SolrQueryResponse response) throws IOException {
@@ -103,7 +103,7 @@ public class BinaryResponseWriter implements BinaryQueryResponseWriter {
         try {
           o = DocsStreamer.getValue(sf, f);
         } catch (Exception e) {
-          LOG.warn("Error reading a field : " + o, e);
+          log.warn("Error reading a field : " + o, e);
         }
       }
       return o;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java b/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
index dd2c297..b937faa 100644
--- a/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
+++ b/solr/core/src/java/org/apache/solr/schema/JsonPreAnalyzedParser.java
@@ -49,7 +49,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class JsonPreAnalyzedParser implements PreAnalyzedParser {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   public static final String VERSION = "1";
   
@@ -132,7 +132,7 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
             try {
               tokenStart = Integer.parseInt(String.valueOf(obj));
             } catch (NumberFormatException nfe) {
-              LOG.warn("Invalid " + OFFSET_START_KEY + " attribute, skipped: '" + obj + "'");
+              log.warn("Invalid " + OFFSET_START_KEY + " attribute, skipped: '" + obj + "'");
               hasOffsetStart = false;
             }
           }
@@ -145,7 +145,7 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
             try {
               tokenEnd = Integer.parseInt(String.valueOf(obj));
             } catch (NumberFormatException nfe) {
-              LOG.warn("Invalid " + OFFSET_END_KEY + " attribute, skipped: '" + obj + "'");
+              log.warn("Invalid " + OFFSET_END_KEY + " attribute, skipped: '" + obj + "'");
               hasOffsetEnd = false;
             }
           }
@@ -158,7 +158,7 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
             try {
               posIncr = Integer.parseInt(String.valueOf(obj));
             } catch (NumberFormatException nfe) {
-              LOG.warn("Invalid " + POSINCR_KEY + " attribute, skipped: '" + obj + "'");
+              log.warn("Invalid " + POSINCR_KEY + " attribute, skipped: '" + obj + "'");
             }
           }
           PositionIncrementAttribute patt = parent.addAttribute(PositionIncrementAttribute.class);
@@ -178,13 +178,13 @@ public class JsonPreAnalyzedParser implements PreAnalyzedParser {
             FlagsAttribute flags = parent.addAttribute(FlagsAttribute.class);
             flags.setFlags(f);
           } catch (NumberFormatException nfe) {
-            LOG.warn("Invalid " + FLAGS_KEY + " attribute, skipped: '" + e.getValue() + "'");            
+            log.warn("Invalid " + FLAGS_KEY + " attribute, skipped: '" + e.getValue() + "'");
           }
         } else if (key.equals(TYPE_KEY)) {
           TypeAttribute tattr = parent.addAttribute(TypeAttribute.class);
           tattr.setType(String.valueOf(e.getValue()));
         } else {
-          LOG.warn("Unknown attribute, skipped: " + e.getKey() + "=" + e.getValue());
+          log.warn("Unknown attribute, skipped: " + e.getKey() + "=" + e.getValue());
         }
       }
       // handle offset attr

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
index f5affe1..368bfde 100644
--- a/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
+++ b/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
@@ -52,7 +52,7 @@ import static org.apache.solr.common.params.CommonParams.JSON;
  * optionally with an independent stored value of a field.
  */
 public class PreAnalyzedField extends TextField implements HasImplicitIndexAnalyzer {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   /** Init argument name. Value is a fully-qualified class name of the parser
    * that implements {@link PreAnalyzedParser}.
@@ -83,7 +83,7 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
           Constructor<?> c = implClazz.getConstructor(new Class<?>[0]);
           parser = (PreAnalyzedParser) c.newInstance(new Object[0]);
         } catch (Exception e) {
-          LOG.warn("Can't use the configured PreAnalyzedParser class '" + implName +
+          log.warn("Can't use the configured PreAnalyzedParser class '" + implName +
               "', using default " + DEFAULT_IMPL, e);
           parser = new JsonPreAnalyzedParser();
         }
@@ -124,7 +124,7 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
     try {
       f = fromString(field, String.valueOf(value));
     } catch (Exception e) {
-      LOG.warn("Error parsing pre-analyzed field '" + field.getName() + "'", e);
+      log.warn("Error parsing pre-analyzed field '" + field.getName() + "'", e);
       return null;
     }
     return f;
@@ -168,8 +168,8 @@ public class PreAnalyzedField extends TextField implements HasImplicitIndexAnaly
    */
   public static org.apache.lucene.document.FieldType createFieldType(SchemaField field) {
     if (!field.indexed() && !field.stored()) {
-      if (LOG.isTraceEnabled())
-        LOG.trace("Ignoring unindexed/unstored field: " + field);
+      if (log.isTraceEnabled())
+        log.trace("Ignoring unindexed/unstored field: " + field);
       return null;
     }
     org.apache.lucene.document.FieldType newType = new org.apache.lucene.document.FieldType();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/SurroundQParserPlugin.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/SurroundQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/SurroundQParserPlugin.java
index 4654a5c..9b48116 100644
--- a/solr/core/src/java/org/apache/solr/search/SurroundQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/SurroundQParserPlugin.java
@@ -79,7 +79,7 @@ public class SurroundQParserPlugin extends QParserPlugin {
         try {
           this.maxBasicQueries = Integer.parseInt(mbqparam);
         } catch (Exception e) {
-          LOG.warn("Couldn't parse maxBasicQueries value " + mbqparam +", using default of 1000");
+          log.warn("Couldn't parse maxBasicQueries value " + mbqparam +", using default of 1000");
           this.maxBasicQueries = DEFMAXBASICQUERIES;
         }
       }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
index de4f7ec..c7758ff 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
@@ -29,7 +29,7 @@ import org.slf4j.LoggerFactory;
 
 
 public class ExactSharedStatsCache extends ExactStatsCache {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   // local stats obtained from shard servers
   private final Map<String,Map<String,TermStats>> perShardTermStats = new ConcurrentHashMap<>();
@@ -40,7 +40,7 @@ public class ExactSharedStatsCache extends ExactStatsCache {
 
   @Override
   public StatsSource get(SolrQueryRequest req) {
-    LOG.debug("total={}, cache {}", currentGlobalColStats, currentGlobalTermStats.size());
+    log.debug("total={}, cache {}", currentGlobalColStats, currentGlobalTermStats.size());
     return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats);
   }
   
@@ -55,7 +55,7 @@ public class ExactSharedStatsCache extends ExactStatsCache {
 
   @Override
   protected void printStats(SolrQueryRequest req) {
-    LOG.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
+    log.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
index 819d371..290b7c1 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
@@ -55,7 +55,7 @@ import org.slf4j.LoggerFactory;
  * query terms (and collection statistics for term fields).
  */
 public class ExactStatsCache extends StatsCache {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   // experimenting with strategy that takes more RAM, but also doesn't share memory
   // across threads
@@ -74,7 +74,7 @@ public class ExactStatsCache extends StatsCache {
     if (currentGlobalTermStats == null) {
       currentGlobalTermStats = Collections.emptyMap();
     }
-    LOG.debug("Returning StatsSource. Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size());
+    log.debug("Returning StatsSource. Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size());
     return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats);
   }
 
@@ -95,7 +95,7 @@ public class ExactStatsCache extends StatsCache {
   public void mergeToGlobalStats(SolrQueryRequest req, List<ShardResponse> responses) {
     Set<Object> allTerms = new HashSet<>();
     for (ShardResponse r : responses) {
-      LOG.debug("Merging to global stats, shard={}, response={}", r.getShard(), r.getSolrResponse().getResponse());
+      log.debug("Merging to global stats, shard={}, response={}", r.getShard(), r.getSolrResponse().getResponse());
       String shard = r.getShard();
       SolrResponse res = r.getSolrResponse();
       NamedList<Object> nl = res.getResponse();
@@ -116,7 +116,7 @@ public class ExactStatsCache extends StatsCache {
     if (allTerms.size() > 0) {
       req.getContext().put(TERMS_KEY, Lists.newArrayList(allTerms));
     }
-    if (LOG.isDebugEnabled()) printStats(req);
+    if (log.isDebugEnabled()) printStats(req);
   }
 
   protected void addToPerShardColStats(SolrQueryRequest req, String shard, Map<String,CollectionStats> colStats) {
@@ -137,7 +137,7 @@ public class ExactStatsCache extends StatsCache {
     if (perShardColStats == null) {
       perShardColStats = Collections.emptyMap();
     }
-    LOG.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
+    log.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
   }
 
   protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) {
@@ -182,19 +182,19 @@ public class ExactStatsCache extends StatsCache {
       if (statsMap.size() != 0) { //Don't add empty keys
         String termStatsString = StatsUtil.termStatsMapToString(statsMap);
         rb.rsp.add(TERM_STATS_KEY, termStatsString);
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("termStats={}, terms={}, numDocs={}", termStatsString, terms, searcher.maxDoc());
+        if (log.isDebugEnabled()) {
+          log.debug("termStats={}, terms={}, numDocs={}", termStatsString, terms, searcher.maxDoc());
         }
       }
       if (colMap.size() != 0){
         String colStatsString = StatsUtil.colStatsMapToString(colMap);
         rb.rsp.add(COL_STATS_KEY, colStatsString);
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("collectionStats={}, terms={}, numDocs={}", colStatsString, terms, searcher.maxDoc());
+        if (log.isDebugEnabled()) {
+          log.debug("collectionStats={}, terms={}, numDocs={}", colStatsString, terms, searcher.maxDoc());
         }
       }
     } catch (IOException e) {
-      LOG.error("Error collecting local stats, query='" + q.toString() + "'", e);
+      log.error("Error collecting local stats, query='" + q.toString() + "'", e);
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error collecting local stats.", e);
     }
   }
@@ -248,7 +248,7 @@ public class ExactStatsCache extends StatsCache {
           g.add(termStats);
         }
       }
-      LOG.debug("terms={}, termStats={}", terms, globalTermStats);
+      log.debug("terms={}, termStats={}", terms, globalTermStats);
       // need global TermStats here...
       params.add(TERM_STATS_KEY, StatsUtil.termStatsMapToString(globalTermStats));
     }
@@ -283,7 +283,7 @@ public class ExactStatsCache extends StatsCache {
         }
       }
     }
-    LOG.debug("Global collection stats={}", globalColStats);
+    log.debug("Global collection stats={}", globalColStats);
     if (globalTermStats == null) return;
     Map<String,TermStats> termStats = StatsUtil.termStatsMapFromString(globalTermStats);
     if (termStats != null) {
@@ -329,7 +329,7 @@ public class ExactStatsCache extends StatsCache {
       // see returnLocalStats, if docFreq == 0, they are not added anyway
       // Not sure we need a warning here
       if (termStats == null) {
-        LOG.debug("Missing global termStats info for term={}, using local stats", term);
+        log.debug("Missing global termStats info for term={}, using local stats", term);
         return localSearcher.localTermStatistics(term, context);
       } else {
         return termStats.toTermStatistics();
@@ -341,7 +341,7 @@ public class ExactStatsCache extends StatsCache {
         throws IOException {
       CollectionStats colStats = colStatsCache.get(field);
       if (colStats == null) {
-        LOG.debug("Missing global colStats info for field={}, using local", field);
+        log.debug("Missing global colStats info for field={}, using local", field);
         return localSearcher.localCollectionStatistics(field);
       } else {
         return colStats.toCollectionStatistics();
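
These hunks keep two idioms side by side: parameterized {} messages for ordinary debug lines, and an explicit log.isDebugEnabled() guard around debug-only work (such as the printStats(req) call). A minimal illustrative sketch of when the guard pays off, with hypothetical names:

import java.lang.invoke.MethodHandles;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

class StatsLoggingSketch {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  void report(String shard, Map<String, Long> termStats) {
    // Cheap argument: a {} placeholder is enough, no guard needed.
    log.debug("Merging to global stats, shard={}", shard);

    // Expensive argument: guard so the map is only serialized when DEBUG is on.
    if (log.isDebugEnabled()) {
      log.debug("termStats={}", serialize(termStats));
    }
  }

  private String serialize(Map<String, Long> termStats) {
    // Stand-in for a costly formatting step (StatsUtil.termStatsMapToString in the patch).
    return termStats.toString();
  }
}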

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
index 94e3a5f..c94695a 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
@@ -49,7 +49,7 @@ import org.slf4j.LoggerFactory;
  * that is updated with the global statistics on every request.
  */
 public class LRUStatsCache extends ExactStatsCache {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   // local stats obtained from shard servers
   private final Map<String,SolrCache<String,TermStats>> perShardTermStats = new ConcurrentHashMap<>();
@@ -65,7 +65,7 @@ public class LRUStatsCache extends ExactStatsCache {
   
   @Override
   public StatsSource get(SolrQueryRequest req) {
-    LOG.debug("## GET total={}, cache {}", currentGlobalColStats , currentGlobalTermStats.size());
+    log.debug("## GET total={}, cache {}", currentGlobalColStats , currentGlobalTermStats.size());
     return new LRUStatsSource(currentGlobalTermStats, currentGlobalColStats);
   }
   
@@ -120,7 +120,7 @@ public class LRUStatsCache extends ExactStatsCache {
 
   @Override
   protected void printStats(SolrQueryRequest req) {
-    LOG.debug("## MERGED: perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
+    log.debug("## MERGED: perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
   }
   
   static class LRUStatsSource extends StatsSource {
@@ -136,7 +136,7 @@ public class LRUStatsCache extends ExactStatsCache {
         throws IOException {
       TermStats termStats = termStatsCache.get(term.toString());
       if (termStats == null) {
-        LOG.debug("## Missing global termStats info: {}, using local", term);
+        log.debug("## Missing global termStats info: {}, using local", term);
         return localSearcher.localTermStatistics(term, context);
       } else {
         return termStats.toTermStatistics();
@@ -148,7 +148,7 @@ public class LRUStatsCache extends ExactStatsCache {
         throws IOException {
       CollectionStats colStats = colStatsCache.get(field);
       if (colStats == null) {
-        LOG.debug("## Missing global colStats info: {}, using local", field);
+        log.debug("## Missing global colStats info: {}, using local", field);
         return localSearcher.localCollectionStatistics(field);
       } else {
         return colStats.toCollectionStatistics();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
index 90395f5..a0fb5b6 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
@@ -34,11 +34,11 @@ import org.slf4j.LoggerFactory;
  * uses local term statistics.
  */
 public class LocalStatsCache extends StatsCache {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
   public StatsSource get(SolrQueryRequest req) {
-    LOG.debug("## GET {}", req);
+    log.debug("## GET {}", req);
     return new LocalStatsSource();
   }
 
@@ -49,33 +49,33 @@ public class LocalStatsCache extends StatsCache {
   // by returning null we don't create additional round-trip request.
   @Override
   public ShardRequest retrieveStatsRequest(ResponseBuilder rb) {
-    LOG.debug("## RDR {}", rb.req);
+    log.debug("## RDR {}", rb.req);
     return null;
   }
 
   @Override
   public void mergeToGlobalStats(SolrQueryRequest req,
           List<ShardResponse> responses) {
-    if (LOG.isDebugEnabled()) {
-      LOG.debug("## MTGD {}", req);
+    if (log.isDebugEnabled()) {
+      log.debug("## MTGD {}", req);
       for (ShardResponse r : responses) {
-        LOG.debug(" - {}", r);
+        log.debug(" - {}", r);
       }
     }
   }
 
   @Override
   public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
-    LOG.debug("## RLD {}", rb.req);
+    log.debug("## RLD {}", rb.req);
   }
 
   @Override
   public void receiveGlobalStats(SolrQueryRequest req) {
-    LOG.debug("## RGD {}", req);
+    log.debug("## RGD {}", req);
   }
 
   @Override
   public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
-    LOG.debug("## SGD {}", outgoing);
+    log.debug("## SGD {}", outgoing);
   }
 }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
index 3e82e27..21377d0 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
@@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory;
  */
 public class StatsUtil {
   
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /**
    * Make a String representation of {@link CollectionStats}
@@ -54,12 +54,12 @@ public class StatsUtil {
   
   private static CollectionStats colStatsFromString(String data) {
     if (data == null || data.trim().length() == 0) {
-      LOG.warn("Invalid empty collection stats string");
+      log.warn("Invalid empty collection stats string");
       return null;
     }
     String[] vals = data.split(",");
     if (vals.length != 5) {
-      LOG.warn("Invalid collection stats string, num fields " + vals.length
+      log.warn("Invalid collection stats string, num fields " + vals.length
           + " != 5, '" + data + "'");
       return null;
     }
@@ -72,7 +72,7 @@ public class StatsUtil {
       return new CollectionStats(field, maxDoc, docCount, sumTotalTermFreq,
           sumDocFreq);
     } catch (Exception e) {
-      LOG.warn("Invalid collection stats string '" + data + "': "
+      log.warn("Invalid collection stats string '" + data + "': "
           + e.toString());
       return null;
     }
@@ -88,12 +88,12 @@ public class StatsUtil {
   
   private static Term termFromString(String data) {
     if (data == null || data.trim().length() == 0) {
-      LOG.warn("Invalid empty term value");
+      log.warn("Invalid empty term value");
       return null;
     }
     int idx = data.indexOf(':');
     if (idx == -1) {
-      LOG.warn("Invalid term data without ':': '" + data + "'");
+      log.warn("Invalid term data without ':': '" + data + "'");
       return null;
     }
     String field = data.substring(0, idx);
@@ -104,7 +104,7 @@ public class StatsUtil {
       // byte[] bytes = Base64.base64ToByteArray(value);
       // return new Term(field, new BytesRef(bytes));
     } catch (Exception e) {
-      LOG.warn("Invalid term value '" + value + "'");
+      log.warn("Invalid term value '" + value + "'");
       return null;
     }
   }
@@ -123,12 +123,12 @@ public class StatsUtil {
   
   private static TermStats termStatsFromString(String data, Term t) {
     if (data == null || data.trim().length() == 0) {
-      LOG.warn("Invalid empty term stats string");
+      log.warn("Invalid empty term stats string");
       return null;
     }
     String[] vals = data.split(",");
     if (vals.length < 2) {
-      LOG.warn("Invalid term stats string, num fields " + vals.length
+      log.warn("Invalid term stats string, num fields " + vals.length
           + " < 2, '" + data + "'");
       return null;
     }
@@ -150,7 +150,7 @@ public class StatsUtil {
       termToUse = t;
     }
     if (termToUse == null) {
-      LOG.warn("Missing term in termStats '" + data + "'");
+      log.warn("Missing term in termStats '" + data + "'");
       return null;
     }
     try {
@@ -158,7 +158,7 @@ public class StatsUtil {
       long totalTermFreq = Long.parseLong(vals[idx]);
       return new TermStats(termToUse.toString(), docFreq, totalTermFreq);
     } catch (Exception e) {
-      LOG.warn("Invalid termStats string '" + data + "'");
+      log.warn("Invalid termStats string '" + data + "'");
       return null;
     }
   }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java b/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java
index 1d874e6..202696f 100644
--- a/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java
+++ b/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java
@@ -16,13 +16,15 @@
  */
 package org.apache.solr.servlet;
 
+import java.lang.invoke.MethodHandles;
+
 import org.slf4j.LoggerFactory;
 
 final class CheckLoggingConfiguration {
   
   static void check() {
     try {
-      LoggerFactory.getLogger(CheckLoggingConfiguration.class);
+      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
     } catch (NoClassDefFoundError e) {
       throw new NoClassDefFoundError("Failed to initialize Apache Solr: "
           +"Could not find necessary SLF4j logging jars. If using Jetty, the SLF4j logging jars need to go in "

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java
index 9188f54..a29d80d 100644
--- a/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java
+++ b/solr/core/src/java/org/apache/solr/spelling/DirectSolrSpellChecker.java
@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
  * @see DirectSpellChecker
  */
 public class DirectSolrSpellChecker extends SolrSpellChecker {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   // configuration params shared with other spellcheckers
   public static final String COMPARATOR_CLASS = AbstractLuceneSpellChecker.COMPARATOR_CLASS;
@@ -96,7 +96,7 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
 
     SolrParams params = config.toSolrParams();
 
-    LOG.info("init: " + config);
+    log.info("init: " + config);
     String name = super.init(config, core);
     
     Comparator<SuggestWord> comp = SuggestWordQueue.DEFAULT_COMPARATOR;
@@ -173,7 +173,7 @@ public class DirectSolrSpellChecker extends SolrSpellChecker {
   @Override
   public SpellingResult getSuggestions(SpellingOptions options)
       throws IOException {
-    LOG.debug("getSuggestions: " + options.tokens);
+    log.debug("getSuggestions: " + options.tokens);
         
     SpellingResult result = new SpellingResult();
     float accuracy = (options.accuracy == Float.MIN_VALUE) ? checker.getAccuracy() : options.accuracy;

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index ad3accf..8b7ce53 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
 import static org.apache.solr.common.params.CommonParams.ID;
 
 public class SpellCheckCollator {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private int maxCollations = 1;
   private int maxCollationTries = 0;
   private int maxCollationEvaluations = 10000;
@@ -73,7 +73,7 @@ public class SpellCheckCollator {
       verifyCandidateWithQuery = false;
     }
     if (queryComponent == null && verifyCandidateWithQuery) {
-      LOG.info("Could not find an instance of QueryComponent.  Disabling collation verification against the index.");
+      log.info("Could not find an instance of QueryComponent.  Disabling collation verification against the index.");
       maxTries = 1;
       verifyCandidateWithQuery = false;
     }
@@ -174,7 +174,7 @@ public class SpellCheckCollator {
                            / (float)etce.getNumberScanned() );
           }
         } catch (Exception e) {
-          LOG.warn("Exception trying to re-query to check if a spell check possibility would return any hits.", e);
+          log.warn("Exception trying to re-query to check if a spell check possibility would return any hits.", e);
         } finally {
           checkResponse.req.close();  
         }
@@ -193,8 +193,8 @@ public class SpellCheckCollator {
         collation.setMisspellingsAndCorrections(misspellingsAndCorrections);
         collations.add(collation);
       }
-      if (LOG.isDebugEnabled()) {
-        LOG.debug("Collation: " + collationQueryStr + (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : ""));
+      if (log.isDebugEnabled()) {
+        log.debug("Collation: " + collationQueryStr + (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : ""));
       }
     }
     return collations;
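
The isDebugEnabled() guard kept in the hunk above is there because the message is assembled by string concatenation even when DEBUG is off. As a side note not taken by this patch, SLF4J's parameterized form defers formatting until the level is enabled, so an equivalent unguarded call (illustrative only) could look like:

    // Formatting happens only if DEBUG is enabled; no guard needed.
    log.debug("Collation: {}{}", collationQueryStr,
        verifyCandidateWithQuery ? " will return " + hits + " hits." : "");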

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
index 267d9ad..f33298d 100644
--- a/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
+++ b/solr/core/src/java/org/apache/solr/spelling/suggest/SolrSuggester.java
@@ -58,7 +58,7 @@ import static org.apache.solr.spelling.suggest.fst.AnalyzingInfixLookupFactory.C
  * {@link Dictionary}
  * */
 public class SolrSuggester implements Accountable {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /** Name used when an unnamed suggester config is passed */
   public static final String DEFAULT_DICT_NAME = "default";
@@ -100,7 +100,7 @@ public class SolrSuggester implements Accountable {
    * Lucene suggester
    * */
   public String init(NamedList<?> config, SolrCore core) {
-    LOG.info("init: " + config);
+    log.info("init: " + config);
     
     // read the config
     name = config.get(NAME) != null ? (String) config.get(NAME)
@@ -112,7 +112,7 @@ public class SolrSuggester implements Accountable {
 
     if (lookupImpl == null) {
       lookupImpl = LookupFactory.DEFAULT_FILE_BASED_DICT;
-      LOG.info("No " + LOOKUP_IMPL + " parameter was provided falling back to " + lookupImpl);
+      log.info("No " + LOOKUP_IMPL + " parameter was provided falling back to " + lookupImpl);
     }
 
     contextFilterQueryAnalyzer = new TokenizerChain(new StandardTokenizerFactory(Collections.EMPTY_MAP), null);
@@ -128,7 +128,7 @@ public class SolrSuggester implements Accountable {
           try {
             ((Closeable) lookup).close();
           } catch (IOException e) {
-            LOG.warn("Could not close the suggester lookup.", e);
+            log.warn("Could not close the suggester lookup.", e);
           }
         }
 
@@ -147,13 +147,13 @@ public class SolrSuggester implements Accountable {
       if (!storeDir.exists()) {
         storeDir.mkdirs();
       } else if (getStoreFile().exists()) {
-        if (LOG.isDebugEnabled()) {
-          LOG.debug("attempt reload of the stored lookup from file " + getStoreFile());
+        if (log.isDebugEnabled()) {
+          log.debug("attempt reload of the stored lookup from file " + getStoreFile());
         }
         try {
           lookup.load(new FileInputStream(getStoreFile()));
         } catch (IOException e) {
-          LOG.warn("Loading stored lookup data failed, possibly not cached yet");
+          log.warn("Loading stored lookup data failed, possibly not cached yet");
         }
       }
     }
@@ -162,19 +162,19 @@ public class SolrSuggester implements Accountable {
     if (dictionaryImpl == null) {
       dictionaryImpl = (sourceLocation == null) ? DictionaryFactory.DEFAULT_INDEX_BASED_DICT : 
         DictionaryFactory.DEFAULT_FILE_BASED_DICT;
-      LOG.info("No " + DICTIONARY_IMPL + " parameter was provided falling back to " + dictionaryImpl);
+      log.info("No " + DICTIONARY_IMPL + " parameter was provided falling back to " + dictionaryImpl);
     }
 
     dictionaryFactory = core.getResourceLoader().newInstance(dictionaryImpl, DictionaryFactory.class);
     dictionaryFactory.setParams(config);
-    LOG.info("Dictionary loaded with params: " + config);
+    log.info("Dictionary loaded with params: " + config);
 
     return name;
   }
 
   /** Build the underlying Lucene Suggester */
   public void build(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("SolrSuggester.build(" + name + ")");
+    log.info("SolrSuggester.build(" + name + ")");
 
     dictionary = dictionaryFactory.create(core, searcher);
     try {
@@ -188,16 +188,16 @@ public class SolrSuggester implements Accountable {
     if (storeDir != null) {
       File target = getStoreFile();
       if(!lookup.store(new FileOutputStream(target))) {
-        LOG.error("Store Lookup build failed");
+        log.error("Store Lookup build failed");
       } else {
-        LOG.info("Stored suggest data to: " + target.getAbsolutePath());
+        log.info("Stored suggest data to: " + target.getAbsolutePath());
       }
     }
   }
 
   /** Reloads the underlying Lucene Suggester */
   public void reload(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("SolrSuggester.reload(" + name + ")");
+    log.info("SolrSuggester.reload(" + name + ")");
     if (dictionary == null && storeDir != null) {
       File lookupFile = getStoreFile();
       if (lookupFile.exists()) {
@@ -211,7 +211,7 @@ public class SolrSuggester implements Accountable {
           IOUtils.closeWhileHandlingException(is);
         }
       } else {
-        LOG.info("lookup file doesn't exist");
+        log.info("lookup file doesn't exist");
       }
     }
   }
@@ -230,9 +230,9 @@ public class SolrSuggester implements Accountable {
 
   /** Returns suggestions based on the {@link SuggesterOptions} passed */
   public SuggesterResult getSuggestions(SuggesterOptions options) throws IOException {
-    LOG.debug("getSuggestions: " + options.token);
+    log.debug("getSuggestions: " + options.token);
     if (lookup == null) {
-      LOG.info("Lookup is null - invoke suggest.build first");
+      log.info("Lookup is null - invoke suggest.build first");
       return EMPTY_RESULT;
     }
     
@@ -247,7 +247,7 @@ public class SolrSuggester implements Accountable {
       if(suggestions == null){
         // Context filtering not supported/configured by lookup
         // Silently ignore filtering and serve a result by querying without context filtering
-        LOG.debug("Context Filtering Query not supported by {}", lookup.getClass());
+        log.debug("Context Filtering Query not supported by {}", lookup.getClass());
         suggestions = lookup.lookup(options.token, false, options.count);
       }
     }

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
index c0e7709..25e894d 100644
--- a/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
+++ b/solr/core/src/java/org/apache/solr/spelling/suggest/Suggester.java
@@ -54,7 +54,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 public class Suggester extends SolrSpellChecker {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
   /** Location of the source data - either a path to a file, or null for the
    * current IndexReader.
@@ -86,7 +86,7 @@ public class Suggester extends SolrSpellChecker {
   
   @Override
   public String init(NamedList config, SolrCore core) {
-    LOG.info("init: " + config);
+    log.info("init: " + config);
     String name = super.init(config, core);
     threshold = config.get(THRESHOLD_TOKEN_FREQUENCY) == null ? 0.0f
             : (Float)config.get(THRESHOLD_TOKEN_FREQUENCY);
@@ -112,7 +112,7 @@ public class Suggester extends SolrSpellChecker {
           try {
             ((Closeable) lookup).close();
           } catch (IOException e) {
-            LOG.warn("Could not close the suggester lookup.", e);
+            log.warn("Could not close the suggester lookup.", e);
           }
         }
       }
@@ -134,7 +134,7 @@ public class Suggester extends SolrSpellChecker {
         try {
           lookup.load(new FileInputStream(new File(storeDir, factory.storeFileName())));
         } catch (IOException e) {
-          LOG.warn("Loading stored lookup data failed", e);
+          log.warn("Loading stored lookup data failed", e);
         }
       }
     }
@@ -144,7 +144,7 @@ public class Suggester extends SolrSpellChecker {
   
   @Override
   public void build(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("build()");
+    log.info("build()");
     if (sourceLocation == null) {
       reader = searcher.getIndexReader();
       dictionary = new HighFrequencyDictionary(reader, field, threshold);
@@ -154,7 +154,7 @@ public class Suggester extends SolrSpellChecker {
                 core.getResourceLoader().openResource(sourceLocation), StandardCharsets.UTF_8));
       } catch (UnsupportedEncodingException e) {
         // should not happen
-        LOG.error("should not happen", e);
+        log.error("should not happen", e);
       }
     }
 
@@ -164,19 +164,19 @@ public class Suggester extends SolrSpellChecker {
       if(!lookup.store(new FileOutputStream(target))) {
         if (sourceLocation == null) {
           assert reader != null && field != null;
-          LOG.error("Store Lookup build from index on field: " + field + " failed reader has: " + reader.maxDoc() + " docs");
+          log.error("Store Lookup build from index on field: " + field + " failed reader has: " + reader.maxDoc() + " docs");
         } else {
-          LOG.error("Store Lookup build from sourceloaction: " + sourceLocation + " failed");
+          log.error("Store Lookup build from sourceloaction: " + sourceLocation + " failed");
         }
       } else {
-        LOG.info("Stored suggest data to: " + target.getAbsolutePath());
+        log.info("Stored suggest data to: " + target.getAbsolutePath());
       }
     }
   }
 
   @Override
   public void reload(SolrCore core, SolrIndexSearcher searcher) throws IOException {
-    LOG.info("reload()");
+    log.info("reload()");
     if (dictionary == null && storeDir != null) {
       // this may be a firstSearcher event, try loading it
       FileInputStream is = new FileInputStream(new File(storeDir, factory.storeFileName()));
@@ -187,7 +187,7 @@ public class Suggester extends SolrSpellChecker {
       } finally {
         IOUtils.closeWhileHandlingException(is);
       }
-      LOG.debug("load failed, need to build Lookup again");
+      log.debug("load failed, need to build Lookup again");
     }
     // loading was unsuccessful - build it again
     build(core, searcher);
@@ -197,9 +197,9 @@ public class Suggester extends SolrSpellChecker {
 
   @Override
   public SpellingResult getSuggestions(SpellingOptions options) throws IOException {
-    LOG.debug("getSuggestions: " + options.tokens);
+    log.debug("getSuggestions: " + options.tokens);
     if (lookup == null) {
-      LOG.info("Lookup is null - invoke spellchecker.build first");
+      log.info("Lookup is null - invoke spellchecker.build first");
       return EMPTY_RESULT;
     }
     SpellingResult res = new SpellingResult();

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/8cde1277/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java b/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java
index 0e613f7..4d2e2e0 100644
--- a/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java
+++ b/solr/core/src/java/org/apache/solr/spelling/suggest/jaspell/JaspellLookupFactory.java
@@ -31,12 +31,12 @@ import org.slf4j.LoggerFactory;
  * <b>Note:</b> This Suggester is not very RAM efficient.
  */
 public class JaspellLookupFactory extends LookupFactory {
-  private static final Logger LOG = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private static final String FILENAME = "jaspell.dat";
 
   @Override
   public Lookup create(NamedList params, SolrCore core) {
-    LOG.info("init: " + params);
+    log.info("init: " + params);
     return new JaspellLookup();
   }