You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by da...@apache.org on 2018/10/23 00:05:48 UTC

[27/52] [abbrv] [partial] lucene-solr:jira/gradle: Add gradle support for Solr

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/SolrCore.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
deleted file mode 100644
index 6e13039..0000000
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ /dev/null
@@ -1,3154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import java.io.Closeable;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.lang.invoke.MethodHandles;
-import java.lang.reflect.Constructor;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.NoSuchFileException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Optional;
-import java.util.Properties;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.locks.ReentrantLock;
-
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.Timer;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.MapMaker;
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.analysis.util.ResourceLoader;
-import org.apache.lucene.codecs.Codec;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexDeletionPolicy;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.lucene.store.IndexOutput;
-import org.apache.lucene.store.LockObtainFailedException;
-import org.apache.solr.client.solrj.impl.BinaryResponseParser;
-import org.apache.solr.cloud.CloudDescriptor;
-import org.apache.solr.cloud.RecoveryStrategy;
-import org.apache.solr.cloud.ZkSolrResourceLoader;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.CommonParams.EchoParamStyle;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.params.UpdateParams;
-import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.common.util.IOUtils;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.ObjectReleaseTracker;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.DirectoryFactory.DirContext;
-import org.apache.solr.core.snapshots.SolrSnapshotManager;
-import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager;
-import org.apache.solr.core.snapshots.SolrSnapshotMetaDataManager.SnapshotMetaData;
-import org.apache.solr.handler.IndexFetcher;
-import org.apache.solr.handler.ReplicationHandler;
-import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.handler.SolrConfigHandler;
-import org.apache.solr.handler.component.HighlightComponent;
-import org.apache.solr.handler.component.SearchComponent;
-import org.apache.solr.logging.MDCLoggingContext;
-import org.apache.solr.metrics.SolrCoreMetricManager;
-import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.metrics.SolrMetricProducer;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.response.BinaryResponseWriter;
-import org.apache.solr.response.CSVResponseWriter;
-import org.apache.solr.response.GeoJSONResponseWriter;
-import org.apache.solr.response.GraphMLResponseWriter;
-import org.apache.solr.response.JSONResponseWriter;
-import org.apache.solr.response.PHPResponseWriter;
-import org.apache.solr.response.PHPSerializedResponseWriter;
-import org.apache.solr.response.PythonResponseWriter;
-import org.apache.solr.response.QueryResponseWriter;
-import org.apache.solr.response.RawResponseWriter;
-import org.apache.solr.response.RubyResponseWriter;
-import org.apache.solr.response.SchemaXmlResponseWriter;
-import org.apache.solr.response.SmileResponseWriter;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.response.XMLResponseWriter;
-import org.apache.solr.response.transform.TransformerFactory;
-import org.apache.solr.rest.ManagedResourceStorage;
-import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
-import org.apache.solr.rest.RestManager;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.schema.IndexSchema;
-import org.apache.solr.schema.IndexSchemaFactory;
-import org.apache.solr.schema.ManagedIndexSchema;
-import org.apache.solr.schema.SimilarityFactory;
-import org.apache.solr.search.QParserPlugin;
-import org.apache.solr.search.SolrFieldCacheBean;
-import org.apache.solr.search.SolrIndexSearcher;
-import org.apache.solr.search.ValueSourceParser;
-import org.apache.solr.search.stats.LocalStatsCache;
-import org.apache.solr.search.stats.StatsCache;
-import org.apache.solr.update.DefaultSolrCoreState;
-import org.apache.solr.update.DirectUpdateHandler2;
-import org.apache.solr.update.IndexFingerprint;
-import org.apache.solr.update.SolrCoreState;
-import org.apache.solr.update.SolrCoreState.IndexWriterCloser;
-import org.apache.solr.update.SolrIndexWriter;
-import org.apache.solr.update.UpdateHandler;
-import org.apache.solr.update.VersionInfo;
-import org.apache.solr.update.processor.DistributedUpdateProcessorFactory;
-import org.apache.solr.update.processor.LogUpdateProcessorFactory;
-import org.apache.solr.update.processor.RunUpdateProcessorFactory;
-import org.apache.solr.update.processor.UpdateRequestProcessorChain;
-import org.apache.solr.update.processor.UpdateRequestProcessorChain.ProcessorInfo;
-import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
-import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.apache.solr.util.IOFunction;
-import org.apache.solr.util.NumberUtils;
-import org.apache.solr.util.PropertiesInputStream;
-import org.apache.solr.util.PropertiesOutputStream;
-import org.apache.solr.util.RefCounted;
-import org.apache.solr.util.plugin.NamedListInitializedPlugin;
-import org.apache.solr.util.plugin.PluginInfoInitialized;
-import org.apache.solr.util.plugin.SolrCoreAware;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.data.Stat;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.common.params.CommonParams.PATH;
-
-/**
- * SolrCore got its name because it represents the "core" of Solr -- one index and everything needed to make it work.
- * When multi-core support was added to Solr way back in version 1.3, this class was required so that the core
- * functionality could be re-used multiple times.
- */
-public final class SolrCore implements SolrInfoBean, SolrMetricProducer, Closeable {
-
-  public static final String version="1.0";
-
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final Logger requestLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Request");
-  private static final Logger slowLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".SlowRequest");
-
-  private String name;
-  private String logid; // used to show what name is set
-
-  private boolean isReloaded = false;
-
-  private StatsCache statsCache;
-
-  private final SolrConfig solrConfig;
-  private final SolrResourceLoader resourceLoader;
-  private volatile IndexSchema schema;
-  private final NamedList configSetProperties;
-  private final String dataDir;
-  private final String ulogDir;
-  private final UpdateHandler updateHandler;
-  private final SolrCoreState solrCoreState;
-
-  private final Date startTime = new Date();
-  private final long startNanoTime = System.nanoTime();
-  private final RequestHandlers reqHandlers;
-  private final PluginBag<SearchComponent> searchComponents = new PluginBag<>(SearchComponent.class, this);
-  private final PluginBag<UpdateRequestProcessorFactory> updateProcessors = new PluginBag<>(UpdateRequestProcessorFactory.class, this, true);
-  private final Map<String,UpdateRequestProcessorChain> updateProcessorChains;
-  private final SolrCoreMetricManager coreMetricManager;
-  private final Map<String, SolrInfoBean> infoRegistry = new ConcurrentHashMap<>();
-  private final IndexDeletionPolicyWrapper solrDelPolicy;
-  private final SolrSnapshotMetaDataManager snapshotMgr;
-  private final DirectoryFactory directoryFactory;
-  private final RecoveryStrategy.Builder recoveryStrategyBuilder;
-  private IndexReaderFactory indexReaderFactory;
-  private final Codec codec;
-  private final MemClassLoader memClassLoader;
-
-  private final List<Runnable> confListeners = new CopyOnWriteArrayList<>();
-
-  private final ReentrantLock ruleExpiryLock;
-  private final ReentrantLock snapshotDelLock; // A lock instance to guard against concurrent deletions.
-
-  private Timer newSearcherTimer;
-  private Timer newSearcherWarmupTimer;
-  private Counter newSearcherCounter;
-  private Counter newSearcherMaxReachedCounter;
-  private Counter newSearcherOtherErrorsCounter;
-  private final CoreContainer coreContainer;
-
-  private Set<String> metricNames = ConcurrentHashMap.newKeySet();
-  private String metricTag = Integer.toHexString(hashCode());
-
-  public Set<String> getMetricNames() {
-    return metricNames;
-  }
-
-
-  public Date getStartTimeStamp() { return startTime; }
-
-  private final Map<IndexReader.CacheKey, IndexFingerprint> perSegmentFingerprintCache = new MapMaker().weakKeys().makeMap();
-
-  public long getStartNanoTime() {
-    return startNanoTime;
-  }
-
-  public long getUptimeMs() {
-    return TimeUnit.MILLISECONDS.convert(System.nanoTime() - startNanoTime, TimeUnit.NANOSECONDS);
-  }
-
-  private final RestManager restManager;
-
-  public RestManager getRestManager() {
-    return restManager;
-  }
-
-  static int boolean_query_max_clause_count = Integer.MIN_VALUE;
-
-
-  /**
-   * The SolrResourceLoader used to load all resources for this core.
-   * @since solr 1.3
-   */
-  public SolrResourceLoader getResourceLoader() {
-    return resourceLoader;
-  }
-
-  /**
-   * Gets the configuration resource name used by this core instance.
-   * @since solr 1.3
-   */
-  public String getConfigResource() {
-    return solrConfig.getResourceName();
-  }
-
-  /**
-   * Gets the configuration object used by this core instance.
-   */
-  public SolrConfig getSolrConfig() {
-    return solrConfig;
-  }
-
-  /**
-   * Gets the schema resource name used by this core instance.
-   * @since solr 1.3
-   */
-  public String getSchemaResource() {
-    return getLatestSchema().getResourceName();
-  }
-  
-  /** 
-   * @return the latest snapshot of the schema used by this core instance. 
-   * @see #setLatestSchema 
-   */
-  public IndexSchema getLatestSchema() {
-    return schema;
-  }
-  
-  /** 
-   * Sets the latest schema snapshot to be used by this core instance. 
-   * If the specified <code>replacementSchema</code> uses a {@link SimilarityFactory} which is 
-   * {@link SolrCoreAware} then this method will {@link SolrCoreAware#inform} that factory about 
-   * this SolrCore prior to using the <code>replacementSchema</code>
-   * @see #getLatestSchema
-   */
-  public void setLatestSchema(IndexSchema replacementSchema) {
-    // 1) For a newly instantiated core, the Similarity needs SolrCore before inform() is called on
-    // any registered SolrCoreAware listeners (which will likely need to use the SolrIndexSearcher).
-    //
-    // 2) If a new IndexSchema is assigned to an existing live SolrCore (ie: managed schema
-    // replacement via SolrCloud) then we need to explicitly inform() the similarity because
-    // we can't rely on the normal SolrResourceLoader lifecycle because the sim was instantiated
-    // after the SolrCore was already live (see: SOLR-8311 + SOLR-8280)
-    final SimilarityFactory similarityFactory = replacementSchema.getSimilarityFactory();
-    if (similarityFactory instanceof SolrCoreAware) {
-      ((SolrCoreAware) similarityFactory).inform(this);
-    }
-    this.schema = replacementSchema;
-  }
-  
-  public NamedList getConfigSetProperties() {
-    return configSetProperties;
-  }
-
-  public String getDataDir() {
-    return dataDir;
-  }
-
-  public String getUlogDir() {
-    return ulogDir;
-  }
-
-  public String getIndexDir() {
-    synchronized (searcherLock) {
-      if (_searcher == null) return getNewIndexDir();
-      SolrIndexSearcher searcher = _searcher.get();
-      return searcher.getPath() == null ? dataDir + "index/" : searcher
-          .getPath();
-    }
-  }
-
-
-  /**
-   * Returns the indexdir as given in index.properties. If index.properties exists in dataDir and
-   * there is a property <i>index</i> available and it points to a valid directory
-   * in dataDir that is returned. Else dataDir/index is returned. Only called for creating new indexSearchers
-   * and indexwriters. Use the getIndexDir() method to know the active index directory
-   *
-   * @return the indexdir as given in index.properties
-   *
- * @throws SolrException if for any reason a reasonable index directory cannot be determined.
-   */
-  public String getNewIndexDir() {
-    Directory dir = null;
-    try {
-      dir = getDirectoryFactory().get(getDataDir(), DirContext.META_DATA, getSolrConfig().indexConfig.lockType);
-      String result = getIndexPropertyFromPropFile(dir);
-      if (!result.equals(lastNewIndexDir)) {
-        log.debug("New index directory detected: old={} new={}", lastNewIndexDir, result);
-      }
-      lastNewIndexDir = result;
-      return result;
-    } catch (IOException e) {
-      SolrException.log(log, "", e);
-      // See SOLR-11687. It is inadvisable to assume we can do the right thing for any but a small
-      // number of exceptions that were caught and swallowed in getIndexProperty.
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Error in getNewIndexDir, exception: ", e);
-    } finally {
-      if (dir != null) {
-        try {
-          getDirectoryFactory().release(dir);
-        } catch (IOException e) {
-          SolrException.log(log, "", e);
-        }
-      }
-    }
-  }
-
-  // This is guaranteed to return a string or throw an exception.
-  //
-  // NOTE: Not finding the index.properties file is normal.
-  //
-  // We return dataDir/index if there is an index.properties file with no value for "index"
-  // See SOLR-11687
-  //
-
-  private String getIndexPropertyFromPropFile(Directory dir) throws IOException {
-    IndexInput input;
-    try {
-      input = dir.openInput(IndexFetcher.INDEX_PROPERTIES, IOContext.DEFAULT);
-    } catch (FileNotFoundException | NoSuchFileException e) {
-      // Swallow this error, dataDir/index is the right thing to return
-      // if there is no index.properties file
-      // All other exceptions will propagate to the caller.
-      return dataDir + "index/";
-    }
-    final InputStream is = new PropertiesInputStream(input); // c'tor just assigns a variable here, no exception thrown.
-    try {
-      Properties p = new Properties();
-      p.load(new InputStreamReader(is, StandardCharsets.UTF_8));
-
-      String s = p.getProperty("index");
-      if (s != null && s.trim().length() > 0) {
-        return dataDir + s.trim();
-      }
-
-      // We'll return dataDir/index/ if the properties file has an "index" property with
-      // no associated value or does not have an index property at all.
-      return dataDir + "index/";
-    } finally {
-      IOUtils.closeQuietly(is);
-    }
-  }
-
-  private String lastNewIndexDir; // for debugging purposes only... access not synchronized, but that's ok
-
-
-  public DirectoryFactory getDirectoryFactory() {
-    return directoryFactory;
-  }
-
-  public IndexReaderFactory getIndexReaderFactory() {
-    return indexReaderFactory;
-  }
-  
-  public long getIndexSize() {
-    Directory dir;
-    long size = 0;
-    try {
-      if (directoryFactory.exists(getIndexDir())) {
-        dir = directoryFactory.get(getIndexDir(), DirContext.DEFAULT, solrConfig.indexConfig.lockType);
-        try {
-          size = DirectoryFactory.sizeOfDirectory(dir);
-        } finally {
-          directoryFactory.release(dir);
-        }
-      }
-    } catch (IOException e) {
-      SolrException.log(log, "IO error while trying to get the size of the Directory", e);
-    }
-    return size;
-  }
-
-  @Override
-  public String getName() {
-    return name;
-  }
-
-  public void setName(String v) {
-    this.name = v;
-    this.logid = (v==null)?"":("["+v+"] ");
-    if (coreMetricManager != null) {
-      coreMetricManager.afterCoreSetName();
-    }
-  }
-
-  public String getLogId()
-  {
-    return this.logid;
-  }
-
-  /**
-   * Returns the {@link SolrCoreMetricManager} for this core.
-   *
-   * @return the {@link SolrCoreMetricManager} for this core
-   */
-  public SolrCoreMetricManager getCoreMetricManager() {
-    return coreMetricManager;
-  }
-
-  /**
-   * Returns a Map of name vs SolrInfoBean objects. The returned map is an instance of
-   * a ConcurrentHashMap and therefore no synchronization is needed for putting, removing
-   * or iterating over it.
-   *
-   * @return the Info Registry map which contains SolrInfoBean objects keyed by name
-   * @since solr 1.3
-   */
-  public Map<String, SolrInfoBean> getInfoRegistry() {
-    return infoRegistry;
-  }
-
-  private IndexDeletionPolicyWrapper initDeletionPolicy(IndexDeletionPolicyWrapper delPolicyWrapper) {
-    if (delPolicyWrapper != null) {
-      return delPolicyWrapper;
-    }
-    
-    final PluginInfo info = solrConfig.getPluginInfo(IndexDeletionPolicy.class.getName());
-    final IndexDeletionPolicy delPolicy;
-    if (info != null) {
-      delPolicy = createInstance(info.className, IndexDeletionPolicy.class, "Deletion Policy for SOLR", this, getResourceLoader());
-      if (delPolicy instanceof NamedListInitializedPlugin) {
-        ((NamedListInitializedPlugin) delPolicy).init(info.initArgs);
-      }
-    } else {
-      delPolicy = new SolrDeletionPolicy();
-    }
-
-    return new IndexDeletionPolicyWrapper(delPolicy, snapshotMgr);
-  }
-
-  private SolrSnapshotMetaDataManager initSnapshotMetaDataManager() {
-    try {
-      String dirName = getDataDir() + SolrSnapshotMetaDataManager.SNAPSHOT_METADATA_DIR + "/";
-      Directory snapshotDir = directoryFactory.get(dirName, DirContext.DEFAULT,
-           getSolrConfig().indexConfig.lockType);
-      return new SolrSnapshotMetaDataManager(this, snapshotDir);
-    } catch (IOException e) {
-      throw new IllegalStateException(e);
-    }
-  }
-
-  /**
-   * This method deletes the snapshot with the specified name. If the directory
-   * storing the snapshot is not the same as the *current* core index directory,
-   * then delete the files corresponding to this snapshot. Otherwise we leave the
-   * index files related to snapshot as is (assuming the underlying Solr IndexDeletionPolicy
-   * will clean them up appropriately).
-   *
-   * @param commitName The name of the snapshot to be deleted.
-   * @throws IOException in case of I/O error.
-   */
-  public void deleteNamedSnapshot(String commitName) throws IOException {
-    // Note this lock is required to prevent multiple snapshot deletions from
-    // opening multiple IndexWriter instances simultaneously.
-    this.snapshotDelLock.lock();
-    try {
-      Optional<SnapshotMetaData> metadata = snapshotMgr.release(commitName);
-      if (metadata.isPresent()) {
-        long gen = metadata.get().getGenerationNumber();
-        String indexDirPath = metadata.get().getIndexDirPath();
-
-        if (!indexDirPath.equals(getIndexDir())) {
-          Directory d = getDirectoryFactory().get(indexDirPath, DirContext.DEFAULT, "none");
-          try {
-            Collection<SnapshotMetaData> snapshots = snapshotMgr.listSnapshotsInIndexDir(indexDirPath);
-            log.info("Following snapshots exist in the index directory {} : {}", indexDirPath, snapshots);
-            if (snapshots.isEmpty()) {// No snapshots remain in this directory. Can be cleaned up!
-              log.info("Removing index directory {} since all named snapshots are deleted.", indexDirPath);
-              getDirectoryFactory().remove(d);
-            } else {
-              SolrSnapshotManager.deleteSnapshotIndexFiles(this, d, gen);
-            }
-          } finally {
-            getDirectoryFactory().release(d);
-          }
-        }
-      }
-    } finally {
-      snapshotDelLock.unlock();
-    }
-  }
-
-  /**
-   * This method deletes the index files not associated with any named snapshot only
-   * if the specified indexDirPath is not the *current* index directory.
-   *
-   * @param indexDirPath The path of the directory
-   * @throws IOException In case of I/O error.
-   */
-  public void deleteNonSnapshotIndexFiles(String indexDirPath) throws IOException {
-    // Skip if the specified indexDirPath is the *current* index directory.
-    if (getIndexDir().equals(indexDirPath)) {
-      return;
-    }
-
-    // Note this lock is required to prevent multiple snapshot deletions from
-    // opening multiple IndexWriter instances simultaneously.
-    this.snapshotDelLock.lock();
-    Directory dir = getDirectoryFactory().get(indexDirPath, DirContext.DEFAULT, "none");
-    try {
-      Collection<SnapshotMetaData> snapshots = snapshotMgr.listSnapshotsInIndexDir(indexDirPath);
-      log.info("Following snapshots exist in the index directory {} : {}", indexDirPath, snapshots);
-      // Delete the old index directory only if no snapshot exists in that directory.
-      if (snapshots.isEmpty()) {
-        log.info("Removing index directory {} since all named snapshots are deleted.", indexDirPath);
-        getDirectoryFactory().remove(dir);
-      } else {
-        SolrSnapshotManager.deleteNonSnapshotIndexFiles(this, dir, snapshots);
-      }
-    } finally {
-      snapshotDelLock.unlock();
-      if (dir != null) {
-        getDirectoryFactory().release(dir);
-      }
-    }
-  }
-
-
-  private void initListeners() {
-    final Class<SolrEventListener> clazz = SolrEventListener.class;
-    final String label = "Event Listener";
-    for (PluginInfo info : solrConfig.getPluginInfos(SolrEventListener.class.getName())) {
-      final String event = info.attributes.get("event");
-      if ("firstSearcher".equals(event)) {
-        SolrEventListener obj = createInitInstance(info, clazz, label, null);
-        firstSearcherListeners.add(obj);
-        log.debug("[{}] Added SolrEventListener for firstSearcher: [{}]", logid, obj);
-      } else if ("newSearcher".equals(event)) {
-        SolrEventListener obj = createInitInstance(info, clazz, label, null);
-        newSearcherListeners.add(obj);
-        log.debug("[{}] Added SolrEventListener for newSearcher: [{}]", logid, obj);
-      }
-    }
-  }
-
-  final List<SolrEventListener> firstSearcherListeners = new ArrayList<>();
-  final List<SolrEventListener> newSearcherListeners = new ArrayList<>();
-
-  /**
-   * NOTE: this function is not thread safe.  However, it is safe to call within the
-   * <code>inform( SolrCore core )</code> function for <code>SolrCoreAware</code> classes.
-   * Outside <code>inform</code>, this could potentially throw a ConcurrentModificationException
-   *
-   * @see SolrCoreAware
-   */
-  public void registerFirstSearcherListener( SolrEventListener listener )
-  {
-    firstSearcherListeners.add( listener );
-  }
-
-  /**
-   * NOTE: this function is not thread safe.  However, it is safe to call within the
-   * <code>inform( SolrCore core )</code> function for <code>SolrCoreAware</code> classes.
-   * Outside <code>inform</code>, this could potentially throw a ConcurrentModificationException
-   *
-   * @see SolrCoreAware
-   */
-  public void registerNewSearcherListener( SolrEventListener listener )
-  {
-    newSearcherListeners.add( listener );
-  }
-
-  /**
-   * NOTE: this function is not thread safe.  However, it is safe to call within the
-   * <code>inform( SolrCore core )</code> function for <code>SolrCoreAware</code> classes.
-   * Outside <code>inform</code>, this could potentially throw a ConcurrentModificationException
-   *
-   * @see SolrCoreAware
-   */
-  public QueryResponseWriter registerResponseWriter( String name, QueryResponseWriter responseWriter ){
-    return responseWriters.put(name, responseWriter);
-  }
-
-  public SolrCore reload(ConfigSet coreConfig) throws IOException {
-    // only one reload at a time
-    synchronized (getUpdateHandler().getSolrCoreState().getReloadLock()) {
-      solrCoreState.increfSolrCoreState();
-      final SolrCore currentCore;
-      if (!getNewIndexDir().equals(getIndexDir())) {
-        // the directory is changing, don't pass on state
-        currentCore = null;
-      } else {
-        currentCore = this;
-      }
-
-      boolean success = false;
-      SolrCore core = null;
-      try {
-        CoreDescriptor cd = new CoreDescriptor(name, getCoreDescriptor());
-        cd.loadExtraProperties(); //Reload the extra properties
-        core = new SolrCore(coreContainer, getName(), getDataDir(), coreConfig.getSolrConfig(),
-            coreConfig.getIndexSchema(), coreConfig.getProperties(),
-            cd, updateHandler, solrDelPolicy, currentCore, true);
-        
-        // we open a new IndexWriter to pick up the latest config
-        core.getUpdateHandler().getSolrCoreState().newIndexWriter(core, false);
-        
-        core.getSearcher(true, false, null, true);
-        success = true;
-        return core;
-      } finally {
-        // close the new core on any errors that have occurred.
-        if (!success) {
-          IOUtils.closeQuietly(core);
-        }
-      }
-    }
-  }
-
-  private DirectoryFactory initDirectoryFactory() {
-    return DirectoryFactory.loadDirectoryFactory(solrConfig, coreContainer, coreMetricManager.getRegistryName());
-  }
-
-  private RecoveryStrategy.Builder initRecoveryStrategyBuilder() {
-    final PluginInfo info = solrConfig.getPluginInfo(RecoveryStrategy.Builder.class.getName());
-    final RecoveryStrategy.Builder rsBuilder;
-    if (info != null && info.className != null) {
-      log.info(info.className);
-      rsBuilder = getResourceLoader().newInstance(info.className, RecoveryStrategy.Builder.class);
-    } else {
-      log.debug("solr.RecoveryStrategy.Builder");
-      rsBuilder = new RecoveryStrategy.Builder();
-    }
-    if (info != null) {
-      rsBuilder.init(info.initArgs);
-    }
-    return rsBuilder;
-  }
-
-  private void initIndexReaderFactory() {
-    IndexReaderFactory indexReaderFactory;
-    PluginInfo info = solrConfig.getPluginInfo(IndexReaderFactory.class.getName());
-    if (info != null) {
-      indexReaderFactory = resourceLoader.newInstance(info.className, IndexReaderFactory.class);
-      indexReaderFactory.init(info.initArgs);
-    } else {
-      indexReaderFactory = new StandardIndexReaderFactory();
-    }
-    this.indexReaderFactory = indexReaderFactory;
-  }
-
-  // protect via synchronized(SolrCore.class)
-  private static Set<String> dirs = new HashSet<>();
-
-  /**
-   * Returns <code>true</code> iff the index in the named directory is
-   * currently locked.
-   * @param directory the directory to check for a lock
-   * @throws IOException if there is a low-level IO error
-   * @deprecated Use of this method can only lead to race conditions. Try
-   *             to actually obtain a lock instead.
-   */
-  @Deprecated
-  private static boolean isWriterLocked(Directory directory) throws IOException {
-    try {
-      directory.obtainLock(IndexWriter.WRITE_LOCK_NAME).close();
-      return false;
-    } catch (LockObtainFailedException failed) {
-      return true;
-    }
-  }
-
  /**
   * Initializes this core's on-disk index: checks (once per index dir per JVM) that the
   * directory is not write-locked by another IndexWriter, creates a brand new index if
   * none exists, and finally cleans up old index directories.
   *
   * @param passOnPreviousState when true (reload carrying over a previous core's state)
   *        the write-lock check is skipped
   * @param reload forwarded to {@code cleanupOldIndexDirectories}
   * @throws LockObtainFailedException if an existing index dir is already locked
   */
  void initIndex(boolean passOnPreviousState, boolean reload) throws IOException {
    String indexDir = getNewIndexDir();
    boolean indexExists = getDirectoryFactory().exists(indexDir);
    boolean firstTime;
    // 'dirs' is JVM-wide; only the first core to register a given normalized index dir
    // performs the lock check below.
    synchronized (SolrCore.class) {
      firstTime = dirs.add(getDirectoryFactory().normalize(indexDir));
    }

    initIndexReaderFactory();

    if (indexExists && firstTime && !passOnPreviousState) {
      final String lockType = getSolrConfig().indexConfig.lockType;
      Directory dir = directoryFactory.get(indexDir, DirContext.DEFAULT, lockType);
      try {
        if (isWriterLocked(dir)) {
          log.error("{}Solr index directory '{}' is locked (lockType={}).  Throwing exception.", logid,
              indexDir, lockType);
          throw new LockObtainFailedException(
              "Index dir '" + indexDir + "' of core '" + name + "' is already locked. " +
                  "The most likely cause is another Solr server (or another solr core in this server) " +
                  "also configured to use this directory; other possible causes may be specific to lockType: " +
                  lockType);
        }
      } finally {
        // always return the Directory to the factory, even when throwing
        directoryFactory.release(dir);
      }
    }

    // Create the index if it doesn't exist.
    if (!indexExists) {
      log.debug("{}Solr index directory '{}' doesn't exist. Creating new index...", logid, indexDir);

      // opening and closing a SolrIndexWriter with create=true materializes an empty index
      SolrIndexWriter writer = SolrIndexWriter.create(this, "SolrCore.initIndex", indexDir, getDirectoryFactory(), true,
          getLatestSchema(), solrConfig.indexConfig, solrDelPolicy, codec);
      writer.close();
    }

    cleanupOldIndexDirectories(reload);
  }
-
-
  /**
   * Creates an instance by trying a constructor that accepts a SolrCore before
   * trying the default (no arg) constructor.
   *
   * @param className the instance class to create
   * @param cast      the class or interface that the instance should extend or implement
   * @param msg       a message helping compose the exception error if any occurs.
   * @param core      The SolrCore instance for which this object needs to be loaded
   * @param resourceLoader the loader used to resolve {@code className} and to invoke
   *                       the no-arg constructor fallback
   * @return the desired instance
   * @throws SolrException if the object could not be instantiated
   */
  public static <T> T createInstance(String className, Class<T> cast, String msg, SolrCore core, ResourceLoader resourceLoader) {
    Class<? extends T> clazz = null;
    if (msg == null) msg = "SolrCore Object";
    try {
      clazz = resourceLoader.findClass(className, cast);
      //most of the classes do not have constructors which takes SolrCore argument. It is recommended to obtain SolrCore by implementing SolrCoreAware.
      // So invariably always it will cause a  NoSuchMethodException. So iterate though the list of available constructors
      Constructor<?>[] cons = clazz.getConstructors();
      for (Constructor<?> con : cons) {
        Class<?>[] types = con.getParameterTypes();
        if (types.length == 1 && types[0] == SolrCore.class) {
          return cast.cast(con.newInstance(core));
        }
      }
      return resourceLoader.newInstance(className, cast);//use the empty constructor
    } catch (SolrException e) {
      throw e;
    } catch (Exception e) {
      // The JVM likes to wrap our helpful SolrExceptions in things like
      // "InvocationTargetException" that have no useful getMessage
      if (null != e.getCause() && e.getCause() instanceof SolrException) {
        SolrException inner = (SolrException) e.getCause();
        throw inner;
      }

      throw new SolrException(ErrorCode.SERVER_ERROR, "Error Instantiating " + msg + ", " + className + " failed to instantiate " + cast.getName(), e);
    }
  }
-
-  private UpdateHandler createReloadedUpdateHandler(String className, String msg, UpdateHandler updateHandler) {
-    Class<? extends UpdateHandler> clazz = null;
-    if (msg == null) msg = "SolrCore Object";
-    try {
-        clazz = getResourceLoader().findClass(className, UpdateHandler.class);
-        //most of the classes do not have constructors which takes SolrCore argument. It is recommended to obtain SolrCore by implementing SolrCoreAware.
-        // So invariably always it will cause a  NoSuchMethodException. So iterate though the list of available constructors
-        Constructor<?>[] cons =  clazz.getConstructors();
-        for (Constructor<?> con : cons) {
-          Class<?>[] types = con.getParameterTypes();
-          if(types.length == 2 && types[0] == SolrCore.class && types[1] == UpdateHandler.class){
-            return UpdateHandler.class.cast(con.newInstance(this, updateHandler));
-          }
-        }
-        throw new SolrException(ErrorCode.SERVER_ERROR,"Error Instantiating "+msg+", "+className+ " could not find proper constructor for " + UpdateHandler.class.getName());
-    } catch (SolrException e) {
-      throw e;
-    } catch (Exception e) {
-      // The JVM likes to wrap our helpful SolrExceptions in things like
-      // "InvocationTargetException" that have no useful getMessage
-      if (null != e.getCause() && e.getCause() instanceof SolrException) {
-        SolrException inner = (SolrException) e.getCause();
-        throw inner;
-      }
-
-      throw new SolrException(ErrorCode.SERVER_ERROR,"Error Instantiating "+msg+", "+className+ " failed to instantiate " + UpdateHandler.class.getName(), e);
-    }
-  }
-
-  public <T extends Object> T createInitInstance(PluginInfo info,Class<T> cast, String msg, String defClassName){
-    if(info == null) return null;
-    T o = createInstance(info.className == null ? defClassName : info.className ,cast, msg,this, getResourceLoader());
-    if (o instanceof PluginInfoInitialized) {
-      ((PluginInfoInitialized) o).init(info);
-    } else if (o instanceof NamedListInitializedPlugin) {
-      ((NamedListInitializedPlugin) o).init(info.initArgs);
-    }
-    if(o instanceof SearchComponent) {
-      ((SearchComponent) o).setName(info.name);
-    }
-    return o;
-  }
-
  /** Creates a fresh UpdateHandler via the (SolrCore) / no-arg constructor path of {@link #createInstance}. */
  private UpdateHandler createUpdateHandler(String className) {
    return createInstance(className, UpdateHandler.class, "Update Handler", this, getResourceLoader());
  }
-
  /** Creates an UpdateHandler for a core reload, carrying state over from the previous handler. */
  private UpdateHandler createUpdateHandler(String className, UpdateHandler updateHandler) {
    return createReloadedUpdateHandler(className, "Update Handler", updateHandler);
  }
-
  /** Convenience constructor: builds a core from a ConfigSet with no data dir override and no previous state. */
  public SolrCore(CoreContainer coreContainer, CoreDescriptor cd, ConfigSet coreConfig) {
    this(coreContainer, cd.getName(), null, coreConfig.getSolrConfig(), coreConfig.getIndexSchema(), coreConfig.getProperties(),
        cd, null, null, null, false);
  }
-
  /** @return the {@link CoreContainer} this core belongs to */
  public CoreContainer getCoreContainer() {
    return coreContainer;
  }
-
-
  /**
   * Creates a new core and register it in the list of cores. If a core with the
   * same name already exists, it will be stopped and replaced by this one.
   *
   * @param coreContainer the container this core is registered in
   * @param name the core name
   * @param dataDir
   *          the index directory (may be null; see {@code findDataDir} for resolution)
   * @param config
   *          a solr config instance
   * @param schema
   *          a solr schema instance (may be null; built from config if so)
   * @param configSetProperties properties of the config set this core uses
   * @param coreDescriptor descriptor for this core; must not be null
   * @param updateHandler previous core's update handler when reloading, else null
   * @param delPolicy optional deletion policy wrapper to reuse
   * @param prev the previous core when reloading, else null
   * @param reload whether this construction is part of a core reload
   *
   * @since solr 1.3
   */
  public SolrCore(CoreContainer coreContainer, String name, String dataDir, SolrConfig config,
                  IndexSchema schema, NamedList configSetProperties,
                  CoreDescriptor coreDescriptor, UpdateHandler updateHandler,
                  IndexDeletionPolicyWrapper delPolicy, SolrCore prev, boolean reload) {

    this.coreContainer = coreContainer;
    
    assert ObjectReleaseTracker.track(searcherExecutor); // ensure that in unclean shutdown tests we still close this

    CoreDescriptor cd = Objects.requireNonNull(coreDescriptor, "coreDescriptor cannot be null");
    coreContainer.solrCores.addCoreDescriptor(cd);

    setName(name);
    MDCLoggingContext.setCore(this);
    
    resourceLoader = config.getResourceLoader();
    this.solrConfig = config;
    this.configSetProperties = configSetProperties;
    // Initialize the metrics manager
    this.coreMetricManager = initCoreMetricManager(config);
    this.coreMetricManager.loadReporters();

    // On reload the directory factory / core state are shared with the previous
    // core via its update handler; otherwise they are created fresh here.
    if (updateHandler == null) {
      directoryFactory = initDirectoryFactory();
      recoveryStrategyBuilder = initRecoveryStrategyBuilder();
      solrCoreState = new DefaultSolrCoreState(directoryFactory, recoveryStrategyBuilder);
    } else {
      solrCoreState = updateHandler.getSolrCoreState();
      directoryFactory = solrCoreState.getDirectoryFactory();
      recoveryStrategyBuilder = solrCoreState.getRecoveryStrategyBuilder();
      isReloaded = true;
    }

    this.dataDir = initDataDir(dataDir, config, coreDescriptor);
    this.ulogDir = initUpdateLogDir(coreDescriptor);

    log.info("[{}] Opening new SolrCore at [{}], dataDir=[{}]", logid, resourceLoader.getInstancePath(), this.dataDir);

    checkVersionFieldExistsInSchema(schema, coreDescriptor);

    SolrMetricManager metricManager = coreContainer.getMetricManager();

    // initialize searcher-related metrics
    initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, null);

    SolrFieldCacheBean solrFieldCacheBean = new SolrFieldCacheBean();
    // this is registered at the CONTAINER level because it's not core-specific - for now we
    // also register it here for back-compat
    solrFieldCacheBean.initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, "core");
    infoRegistry.put("fieldCache", solrFieldCacheBean);


    initSchema(config, schema);

    this.maxWarmingSearchers = config.maxWarmingSearchers;
    this.slowQueryThresholdMillis = config.slowQueryThresholdMillis;

    // Latch that holds back firstSearcher events until all components are informed.
    final CountDownLatch latch = new CountDownLatch(1);

    try {

      initListeners();

      this.snapshotMgr = initSnapshotMetaDataManager();
      this.solrDelPolicy = initDeletionPolicy(delPolicy);

      this.codec = initCodec(solrConfig, this.schema);

      memClassLoader = new MemClassLoader(PluginBag.RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(PluginBag.RuntimeLib.class.getName())), getResourceLoader());
      initIndex(prev != null, reload);

      initWriters();
      qParserPlugins.init(QParserPlugin.standardPlugins, this);
      valueSourceParsers.init(ValueSourceParser.standardValueSourceParsers, this);
      transformerFactories.init(TransformerFactory.defaultFactories, this);
      loadSearchComponents();
      updateProcessors.init(Collections.emptyMap(), this);

      // Processors initialized before the handlers
      updateProcessorChains = loadUpdateProcessorChains();
      reqHandlers = new RequestHandlers(this);
      reqHandlers.initHandlersFromConfig(solrConfig);

      statsCache = initStatsCache();

      // cause the executor to stall so firstSearcher events won't fire
      // until after inform() has been called for all components.
      // searchExecutor must be single-threaded for this to work
      searcherExecutor.submit(() -> {
        latch.await();
        return null;
      });

      this.updateHandler = initUpdateHandler(updateHandler);
      
      initSearcher(prev);

      // Initialize the RestManager
      restManager = initRestManager();

      // Finally tell anyone who wants to know
      resourceLoader.inform(resourceLoader);
      resourceLoader.inform(this); // last call before the latch is released.
      this.updateHandler.informEventListeners(this);
    } catch (Throwable e) {
      // release the latch, otherwise we block trying to do the close. This
      // should be fine, since counting down on a latch of 0 is still fine
      latch.countDown();
      if (e instanceof OutOfMemoryError) {
        throw (OutOfMemoryError)e;
      }

      try {
        // close down the searcher and any other resources, if it exists, as this
        // is not recoverable
       close();
      } catch (Throwable t) {
        if (t instanceof OutOfMemoryError) {
          throw (OutOfMemoryError) t;
        }
        log.error("Error while closing", t);
      }

      throw new SolrException(ErrorCode.SERVER_ERROR, e.getMessage(), e);
    } finally {
      // allow firstSearcher events to fire and make sure it is released
      latch.countDown();
    }

    infoRegistry.put("core", this);

    // register any SolrInfoMBeans SolrResourceLoader initialized
    //
    // this must happen after the latch is released, because a JMX server impl may
    // choose to block on registering until properties can be fetched from an MBean,
    // and a SolrCoreAware MBean may have properties that depend on getting a Searcher
    // from the core.
    resourceLoader.inform(infoRegistry);

    // Allow the directory factory to report metrics
    if (directoryFactory instanceof SolrMetricProducer) {
      ((SolrMetricProducer)directoryFactory).initializeMetrics(metricManager, coreMetricManager.getRegistryName(), metricTag, "directoryFactory");
    }

    // seed version buckets with max from index during core initialization ... requires a searcher!
    seedVersionBuckets();

    bufferUpdatesIfConstructing(coreDescriptor);

    this.ruleExpiryLock = new ReentrantLock();
    this.snapshotDelLock = new ReentrantLock();

    registerConfListener();
    
    assert ObjectReleaseTracker.track(this);
  }
-
  /**
   * Seeds the update log's version buckets with the highest version found in the index,
   * using the realtime searcher. No-op when there is no update handler/log; logs a
   * warning when no realtime searcher is available.
   */
  public void seedVersionBuckets() {
    UpdateHandler uh = getUpdateHandler();
    if (uh != null && uh.getUpdateLog() != null) {
      RefCounted<SolrIndexSearcher> newestSearcher = getRealtimeSearcher();
      if (newestSearcher != null) {
        try {
          uh.getUpdateLog().seedBucketsWithHighestVersion(newestSearcher.get());
        } finally {
          // always release the ref-counted searcher
          newestSearcher.decref();
        }
      } else {
        log.warn("No searcher available! Cannot seed version buckets with max from index.");
      }
    }
  }
-
  /**
   * Set UpdateLog to buffer updates if the slice is in construction.
   * Only applies in SolrCloud mode; also warns when no /get handler is registered,
   * since PeerSync depends on it.
   */
  private void bufferUpdatesIfConstructing(CoreDescriptor coreDescriptor) {
    
    if (coreContainer != null && coreContainer.isZooKeeperAware()) {
      if (reqHandlers.get("/get") == null) {
        log.warn("WARNING: RealTimeGetHandler is not registered at /get. " +
            "SolrCloud will always use full index replication instead of the more efficient PeerSync method.");
      }

      // ZK pre-register would have already happened so we read slice properties now
      final ClusterState clusterState = coreContainer.getZkController().getClusterState();
      final DocCollection collection = clusterState.getCollection(coreDescriptor.getCloudDescriptor().getCollectionName());
      final Slice slice = collection.getSlice(coreDescriptor.getCloudDescriptor().getShardId());
      if (slice.getState() == Slice.State.CONSTRUCTION) {
        // set update log to buffer before publishing the core
        getUpdateHandler().getUpdateLog().bufferUpdates();
      }
    }
  }
-
  /**
   * Opens this core's first searcher. When reloading ({@code prev != null}), the
   * previous core's IndexWriter is borrowed (ref-counted) so the new reader can be
   * opened from it; the reference is released in the finally block.
   */
  private void initSearcher(SolrCore prev) throws IOException {
    // use the (old) writer to open the first searcher
    RefCounted<IndexWriter> iwRef = null;
    if (prev != null) {
      iwRef = prev.getUpdateHandler().getSolrCoreState().getIndexWriter(null);
      if (iwRef != null) {
        final IndexWriter iw = iwRef.get();
        final SolrCore core = this;
        // newReaderCreator is consulted by the searcher-opening path; cleared below
        newReaderCreator = () -> indexReaderFactory.newReader(iw, core);
      }
    }

    try {
      getSearcher(false, false, null, true);
    } finally {
      // only valid for this first open; must not leak past construction
      newReaderCreator = null;
      if (iwRef != null) {
        iwRef.decref();
      }
    }
  }
-
-  private UpdateHandler initUpdateHandler(UpdateHandler updateHandler) {
-    String updateHandlerClass = solrConfig.getUpdateHandlerInfo().className;
-    if (updateHandlerClass == null) {
-      updateHandlerClass = DirectUpdateHandler2.class.getName();
-    }
-
-    final UpdateHandler newUpdateHandler;
-    if (updateHandler == null) {
-      newUpdateHandler = createUpdateHandler(updateHandlerClass);
-    } else {
-      newUpdateHandler = createUpdateHandler(updateHandlerClass, updateHandler);
-    }
-    if (newUpdateHandler instanceof SolrMetricProducer) {
-      coreMetricManager.registerMetricProducer("updateHandler", (SolrMetricProducer)newUpdateHandler);
-    }
-    infoRegistry.put("updateHandler", newUpdateHandler);
-    return newUpdateHandler;
-  }
-  
-  /**
-   * Initializes the "Latest Schema" for this SolrCore using either the provided <code>schema</code> 
-   * if non-null, or a new instance build via the factory identified in the specified <code>config</code>
-   * @see IndexSchemaFactory
-   * @see #setLatestSchema
-   */
-  private void initSchema(SolrConfig config, IndexSchema schema) {
-    if (schema == null) {
-      schema = IndexSchemaFactory.buildIndexSchema(IndexSchema.DEFAULT_SCHEMA_FILE, config);
-    }
-    setLatestSchema(schema);
-  }
-
-  /**
-   * Initializes the core's {@link SolrCoreMetricManager} with a given configuration.
-   * If metric reporters are configured, they are also initialized for this core.
-   *
-   * @param config the given configuration
-   * @return an instance of {@link SolrCoreMetricManager}
-   */
-  private SolrCoreMetricManager initCoreMetricManager(SolrConfig config) {
-    SolrCoreMetricManager coreMetricManager = new SolrCoreMetricManager(this);
-    return coreMetricManager;
-  }
-
  /**
   * Registers this core's searcher, core-info, index-size and filesystem metrics with
   * the given metric manager/registry. Gauges that touch the index guard on
   * {@code isClosed()} so they stay safe after shutdown.
   */
  @Override
  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
    // searcher lifecycle counters/timers
    newSearcherCounter = manager.counter(this, registry, "new", Category.SEARCHER.toString());
    newSearcherTimer = manager.timer(this, registry, "time", Category.SEARCHER.toString(), "new");
    newSearcherWarmupTimer = manager.timer(this, registry, "warmup", Category.SEARCHER.toString(), "new");
    newSearcherMaxReachedCounter = manager.counter(this, registry, "maxReached", Category.SEARCHER.toString(), "new");
    newSearcherOtherErrorsCounter = manager.counter(this, registry, "errors", Category.SEARCHER.toString(), "new");

    manager.registerGauge(this, registry, () -> name == null ? "(null)" : name, getMetricTag(), true, "coreName", Category.CORE.toString());
    manager.registerGauge(this, registry, () -> startTime, getMetricTag(), true, "startTime", Category.CORE.toString());
    manager.registerGauge(this, registry, () -> getOpenCount(), getMetricTag(), true, "refCount", Category.CORE.toString());
    manager.registerGauge(this, registry, () -> resourceLoader.getInstancePath().toString(), getMetricTag(), true, "instanceDir", Category.CORE.toString());
    manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : getIndexDir(), getMetricTag(), true, "indexDir", Category.CORE.toString());
    manager.registerGauge(this, registry, () -> isClosed() ? 0 : getIndexSize(), getMetricTag(), true, "sizeInBytes", Category.INDEX.toString());
    manager.registerGauge(this, registry, () -> isClosed() ? "(closed)" : NumberUtils.readableSize(getIndexSize()), getMetricTag(), true, "size", Category.INDEX.toString());
    if (coreContainer != null) {
      manager.registerGauge(this, registry, () -> coreContainer.getNamesForCore(this), getMetricTag(), true, "aliases", Category.CORE.toString());
      // cloud-only gauges: collection and shard names (with placeholders when unset)
      final CloudDescriptor cd = getCoreDescriptor().getCloudDescriptor();
      if (cd != null) {
        manager.registerGauge(this, registry, () -> {
          if (cd.getCollectionName() != null) {
            return cd.getCollectionName();
          } else {
            return "_notset_";
          }
        }, getMetricTag(), true, "collection", Category.CORE.toString());

        manager.registerGauge(this, registry, () -> {
          if (cd.getShardId() != null) {
            return cd.getShardId();
          } else {
            return "_auto_";
          }
        }, getMetricTag(), true, "shard", Category.CORE.toString());
      }
    }
    // initialize disk total / free metrics
    Path dataDirPath = Paths.get(dataDir);
    File dataDirFile = dataDirPath.toFile();
    manager.registerGauge(this, registry, () -> dataDirFile.getTotalSpace(), getMetricTag(), true, "totalSpace", Category.CORE.toString(), "fs");
    manager.registerGauge(this, registry, () -> dataDirFile.getUsableSpace(), getMetricTag(), true, "usableSpace", Category.CORE.toString(), "fs");
    manager.registerGauge(this, registry, () -> dataDirPath.toAbsolutePath().toString(), getMetricTag(), true, "path", Category.CORE.toString(), "fs");
    manager.registerGauge(this, registry, () -> {
      try {
        return org.apache.lucene.util.IOUtils.spins(dataDirPath.toAbsolutePath());
      } catch (IOException e) {
        // default to spinning
        return true;
      }
    }, getMetricTag(), true, "spins", Category.CORE.toString(), "fs");
  }
-
  /** @return the tag under which this core's metrics are registered */
  public String getMetricTag() {
    return metricTag;
  }
-
-  private void checkVersionFieldExistsInSchema(IndexSchema schema, CoreDescriptor coreDescriptor) {
-    if (null != coreDescriptor.getCloudDescriptor()) {
-      // we are evidently running in cloud mode.  
-      //
-      // In cloud mode, version field is required for correct consistency
-      // ideally this check would be more fine grained, and individual features
-      // would assert it when they initialize, but DistributedUpdateProcessor
-      // is currently a big ball of wax that does more then just distributing
-      // updates (ie: partial document updates), so it needs to work in no cloud
-      // mode as well, and can't assert version field support on init.
-
-      try {
-        VersionInfo.getAndCheckVersionField(schema);
-      } catch (SolrException e) {
-        throw new SolrException(ErrorCode.SERVER_ERROR,
-                                "Schema will not work with SolrCloud mode: " +
-                                e.getMessage(), e);
-      }
-    }
-  }
-
  /** Resolves this core's data directory; see {@link #findDataDir} for the lookup order. */
  private String initDataDir(String dataDir, SolrConfig config, CoreDescriptor coreDescriptor) {
    return findDataDir(getDirectoryFactory(), dataDir, config, coreDescriptor);
  }
-
  /**
   * Locate the data directory for a given config and core descriptor.
   *
   * <p>Resolution order: the explicit {@code dataDir} hint; then the config's default
   * data dir (only when the descriptor is using its default); then the descriptor's
   * data dir, replaced by the factory's data home when not absolute.
   *
   * @param directoryFactory
   *          The directory factory to use if necessary to calculate an absolute path. Should be the same as what will
   *          be used to open the data directory later.
   * @param dataDir
   *          An optional hint to the data directory location. Will be normalized and used if not null.
   * @param config
   *          A solr config to retrieve the default data directory location, if used.
   * @param coreDescriptor
   *          descriptor to load the actual data dir from, if not using the default.
   * @return a normalized data directory name
   * @throws SolrException
   *           if the data directory cannot be loaded from the core descriptor
   */
  static String findDataDir(DirectoryFactory directoryFactory, String dataDir, SolrConfig config, CoreDescriptor coreDescriptor) {
    if (dataDir == null) {
      if (coreDescriptor.usingDefaultDataDir()) {
        dataDir = config.getDataDir();
      }
      if (dataDir == null) {
        try {
          dataDir = coreDescriptor.getDataDir();
          if (!directoryFactory.isAbsolute(dataDir)) {
            // NOTE(review): a relative descriptor dataDir is replaced wholesale by the
            // factory's data home rather than resolved against it — presumably intended;
            // confirm against DirectoryFactory.getDataHome semantics.
            dataDir = directoryFactory.getDataHome(coreDescriptor);
          }
        } catch (IOException e) {
          throw new SolrException(ErrorCode.SERVER_ERROR, e);
        }
      }
    }
    return SolrResourceLoader.normalizeDir(dataDir);
  }
-
-
  /** Updates this core's index.properties to point at the given index sub directory. */
  public boolean modifyIndexProps(String tmpIdxDirName) {
    return SolrCore.modifyIndexProps(getDirectoryFactory(), getDataDir(), getSolrConfig(), tmpIdxDirName);
  }
-  
  /**
   * Update the index.properties file with the new index sub directory name.
   * Writes to a temp file first, then atomically renames over index.properties.
   *
   * @return true on success
   * @throws RuntimeException wrapping any IOException from the write/rename
   */
  // package private
  static boolean modifyIndexProps(DirectoryFactory directoryFactory, String dataDir, SolrConfig solrConfig, String tmpIdxDirName) {
    log.info("Updating index properties... index={}", tmpIdxDirName);
    Directory dir = null;
    try {
      dir = directoryFactory.get(dataDir, DirContext.META_DATA, solrConfig.indexConfig.lockType);
      // nanoTime suffix makes the temp name unique within this JVM
      String tmpIdxPropName = IndexFetcher.INDEX_PROPERTIES + "." + System.nanoTime();
      writeNewIndexProps(dir, tmpIdxPropName, tmpIdxDirName);
      directoryFactory.renameWithOverwrite(dir, tmpIdxPropName, IndexFetcher.INDEX_PROPERTIES);
      return true;
    } catch (IOException e1) {
      throw new RuntimeException(e1);
    } finally {
      // release the Directory even on failure; release errors are only logged
      if (dir != null) {
        try {
          directoryFactory.release(dir);
        } catch (IOException e) {
          SolrException.log(log, "", e);
        }
      }
    }
  }
-  
-  /**
-   * Write the index.properties file with the new index sub directory name
-   * @param dir a data directory (containing an index.properties file)
-   * @param tmpFileName the file name to write the new index.properties to
-   * @param tmpIdxDirName new index directory name
-   */
-  private static void writeNewIndexProps(Directory dir, String tmpFileName, String tmpIdxDirName) {
-    if (tmpFileName == null) {
-      tmpFileName = IndexFetcher.INDEX_PROPERTIES;
-    }
-    final Properties p = new Properties();
-    
-    // Read existing properties
-    try {
-      final IndexInput input = dir.openInput(IndexFetcher.INDEX_PROPERTIES, DirectoryFactory.IOCONTEXT_NO_CACHE);
-      final InputStream is = new PropertiesInputStream(input);
-      try {
-        p.load(new InputStreamReader(is, StandardCharsets.UTF_8));
-      } catch (Exception e) {
-        log.error("Unable to load {}", IndexFetcher.INDEX_PROPERTIES, e);
-      } finally {
-        IOUtils.closeQuietly(is);
-      }
-    } catch (IOException e) {
-      // ignore; file does not exist
-    }
-    
-    p.put("index", tmpIdxDirName);
-
-    // Write new properties
-    Writer os = null;
-    try {
-      IndexOutput out = dir.createOutput(tmpFileName, DirectoryFactory.IOCONTEXT_NO_CACHE);
-      os = new OutputStreamWriter(new PropertiesOutputStream(out), StandardCharsets.UTF_8);
-      p.store(os, IndexFetcher.INDEX_PROPERTIES);
-      dir.sync(Collections.singleton(tmpFileName));
-    } catch (Exception e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to write " + IndexFetcher.INDEX_PROPERTIES, e);
-    } finally {
-      IOUtils.closeQuietly(os);
-    }
-  }
-
-  private String initUpdateLogDir(CoreDescriptor coreDescriptor) {
-    String updateLogDir = coreDescriptor.getUlogDir();
-    if (updateLogDir == null) {
-      updateLogDir = coreDescriptor.getInstanceDir().resolve(dataDir).normalize().toAbsolutePath().toString();
-    }
-    return updateLogDir;
-  }
-
  /**
   * Close the core, if it is still in use waits until is no longer in use.
   * Polls {@link #isClosed()} every 100ms; an interrupt while waiting is
   * re-asserted on the thread and rethrown as a SERVER_ERROR.
   * @see #close() 
   * @see #isClosed() 
   */
  public void closeAndWait() {
    close();
    while (!isClosed()) {
      final long milliSleep = 100;
      log.info("Core {} is not yet closed, waiting {} ms before checking again.", getName(), milliSleep);
      try {
        Thread.sleep(milliSleep);
      } catch (InterruptedException e) {
        // preserve the interrupt status for callers further up the stack
        Thread.currentThread().interrupt();
        throw new SolrException(ErrorCode.SERVER_ERROR,
            "Caught InterruptedException whilst waiting for core " + getName() + " to close: "
                + e.getMessage(), e);
      }
    }
  }
-  
  /**
   * Creates this core's Lucene {@link Codec} from the configured {@link CodecFactory}
   * (or the default codec when none is configured). When the factory is not
   * SolrCoreAware, validates that no field type configures a postings or docValues
   * format the default factory cannot honor.
   */
  private Codec initCodec(SolrConfig solrConfig, final IndexSchema schema) {
    final PluginInfo info = solrConfig.getPluginInfo(CodecFactory.class.getName());
    final CodecFactory factory;
    if (info != null) {
      factory = schema.getResourceLoader().newInstance(info.className, CodecFactory.class);
      factory.init(info.initArgs);
    } else {
      // no factory configured: fall back to Lucene's default codec
      factory = new CodecFactory() {
        @Override
        public Codec getCodec() {
          return Codec.getDefault();
        }
      };
    }
    if (factory instanceof SolrCoreAware) {
      // CodecFactory needs SolrCore before inform() is called on all registered
      // SolrCoreAware listeners, at the end of the SolrCore constructor
      ((SolrCoreAware)factory).inform(this);
    } else {
      // fail fast on per-field format configuration the codec cannot support
      for (FieldType ft : schema.getFieldTypes().values()) {
        if (null != ft.getPostingsFormat()) {
          String msg = "FieldType '" + ft.getTypeName() + "' is configured with a postings format, but the codec does not support it: " + factory.getClass();
          log.error(msg);
          throw new SolrException(ErrorCode.SERVER_ERROR, msg);
        }
        if (null != ft.getDocValuesFormat()) {
          String msg = "FieldType '" + ft.getTypeName() + "' is configured with a docValues format, but the codec does not support it: " + factory.getClass();
          log.error(msg);
          throw new SolrException(ErrorCode.SERVER_ERROR, msg);
        }
      }
    }
    return factory.getCodec();
  }
-
-  private StatsCache initStatsCache() {
-    final StatsCache cache;
-    PluginInfo pluginInfo = solrConfig.getPluginInfo(StatsCache.class.getName());
-    if (pluginInfo != null && pluginInfo.className != null && pluginInfo.className.length() > 0) {
-      cache = createInitInstance(pluginInfo, StatsCache.class, null,
-          LocalStatsCache.class.getName());
-      log.debug("Using statsCache impl: {}", cache.getClass().getName());
-    } else {
-      log.debug("Using default statsCache cache: {}", LocalStatsCache.class.getName());
-      cache = new LocalStatsCache();
-    }
-    return cache;
-  }
-
  /**
   * Get the StatsCache.
   * @return the cache created by {@code initStatsCache} during core construction
   */
  public StatsCache getStatsCache() {
    return statsCache;
  }
-
  /**
   * Load the request processors.
   * Loads all configured chains; when no default is configured, an implicit
   * Log → Distributed → Run chain is created. The default chain is registered
   * under both the null key and the empty-string key.
   */
   private Map<String,UpdateRequestProcessorChain> loadUpdateProcessorChains() {
    Map<String, UpdateRequestProcessorChain> map = new HashMap<>();
    UpdateRequestProcessorChain def = initPlugins(map,UpdateRequestProcessorChain.class, UpdateRequestProcessorChain.class.getName());
    if(def == null){
      def = map.get(null);
    }
    if (def == null) {
      log.debug("no updateRequestProcessorChain defined as default, creating implicit default");
      // construct the default chain
      UpdateRequestProcessorFactory[] factories = new UpdateRequestProcessorFactory[]{
              new LogUpdateProcessorFactory(),
              new DistributedUpdateProcessorFactory(),
              new RunUpdateProcessorFactory()
      };
      def = new UpdateRequestProcessorChain(Arrays.asList(factories), this);
    }
    // both lookup keys resolve to the default chain
    map.put(null, def);
    map.put("", def);
    return map;
  }
-
  /** @return the {@link SolrCoreState} shared with reloads of this core */
  public SolrCoreState getSolrCoreState() {
    return solrCoreState;
  }
-
-  /**
-   * @return an update processor registered to the given name.  Throw an exception if this chain is undefined
-   */
-  public UpdateRequestProcessorChain getUpdateProcessingChain( final String name )
-  {
-    UpdateRequestProcessorChain chain = updateProcessorChains.get( name );
-    if( chain == null ) {
-      throw new SolrException( ErrorCode.BAD_REQUEST,
-          "unknown UpdateRequestProcessorChain: "+name );
-    }
-    return chain;
-  }
-
  /**
   * Resolves the chain named by the request's update.chain param (null selects the
   * default chain), then augments it with any per-request processor params.
   */
  public UpdateRequestProcessorChain getUpdateProcessorChain(SolrParams params) {
    String chainName = params.get(UpdateParams.UPDATE_CHAIN);
    UpdateRequestProcessorChain defaultUrp = getUpdateProcessingChain(chainName);
    ProcessorInfo processorInfo = new ProcessorInfo(params);
    if (processorInfo.isEmpty()) return defaultUrp;
    return UpdateRequestProcessorChain.constructChain(defaultUrp, processorInfo, this);
  }
-
  /** @return the plugin bag of registered update request processor factories */
  public PluginBag<UpdateRequestProcessorFactory> getUpdateProcessors() {
    return updateProcessors;
  }
-
  // this core current usage count; starts at 1 for the creator's reference
  private final AtomicInteger refCount = new AtomicInteger(1);

  /** expert: increments the core reference count; every open() must be paired with a close() */
  public void open() {
    refCount.incrementAndGet();
  }
-
-  /**
-   * Close all resources allocated by the core if it is no longer in use...
-   * <ul>
-   *   <li>searcher</li>
-   *   <li>updateHandler</li>
-   *   <li>all CloseHooks will be notified</li>
-   *   <li>All MBeans will be unregistered from MBeanServer if JMX was enabled
-   *       </li>
-   * </ul>
-   * <p>
-   * The behavior of this method is determined by the result of decrementing
-   * the core's reference count (A core is created with a reference count of 1)...
-   * </p>
-   * <ul>
-   *   <li>If reference count is &gt; 0, the usage count is decreased by 1 and no
-   *       resources are released.
-   *   </li>
-   *   <li>If reference count is == 0, the resources are released.
-   *   </li>
-   *   <li>If reference count is &lt; 0, an error is logged and no further action
-   *       is taken.
-   *   </li>
-   * </ul>
-   * @see #isClosed()
-   */
-  @Override
-  public void close() {
-    int count = refCount.decrementAndGet();
-    if (count > 0) return; // close is called often, and only actually closes if nothing is using it.
-    if (count < 0) {
-      log.error("Too many close [count:{}] on {}. Please report this exception to solr-user@lucene.apache.org", count, this );
-      assert false : "Too many closes on SolrCore";
-      return;
-    }
-    log.info("{} CLOSING SolrCore {}", logid, this);
-
-    // NOTE: each shutdown step below logs Throwables and continues, but rethrows Errors
-    // (e.g. OutOfMemoryError) immediately — ordinary exceptions must not abort the teardown.
-
-    // stop reporting metrics
-    try {
-      coreMetricManager.close();
-    } catch (Throwable e) {
-      SolrException.log(log, e);
-      if (e instanceof  Error) {
-        throw (Error) e;
-      }
-    }
-
-    // notify pre-close hooks before any resources are torn down
-    if( closeHooks != null ) {
-       for( CloseHook hook : closeHooks ) {
-         try {
-           hook.preClose( this );
-         } catch (Throwable e) {
-           SolrException.log(log, e);
-           if (e instanceof Error) {
-             throw (Error) e;
-           }
-         }
-      }
-    }
-
-    // close the plugin registries (request handlers, response writers, search components,
-    // query parsers, value source parsers, transformer factories)
-    if(reqHandlers != null) reqHandlers.close();
-    responseWriters.close();
-    searchComponents.close();
-    qParserPlugins.close();
-    valueSourceParsers.close();
-    transformerFactories.close();
-
-    if (memClassLoader != null) {
-      try {
-        memClassLoader.close();
-      } catch (Exception e) {
-        // best-effort: errors closing the in-memory classloader during shutdown are intentionally ignored
-      }
-    }
-
-
-    try {
-      if (null != updateHandler) {
-        updateHandler.close();
-      }
-    } catch (Throwable e) {
-      SolrException.log(log,e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-
-    // Release the shared SolrCoreState; the returned flag presumably indicates the last
-    // reference was dropped (so index-level resources may now be cleaned) — confirm in SolrCoreState.
-    boolean coreStateClosed = false;
-    try {
-      if (solrCoreState != null) {
-        if (updateHandler instanceof IndexWriterCloser) {
-          coreStateClosed = solrCoreState.decrefSolrCoreState((IndexWriterCloser) updateHandler);
-        } else {
-          coreStateClosed = solrCoreState.decrefSolrCoreState(null);
-        }
-      }
-    } catch (Throwable e) {
-      SolrException.log(log, e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-
-    try {
-      ExecutorUtil.shutdownAndAwaitTermination(searcherExecutor);
-    } catch (Throwable e) {
-      SolrException.log(log, e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-    assert ObjectReleaseTracker.release(searcherExecutor);
-
-    try {
-      // Since we waited for the searcherExecutor to shut down,
-      // there should be no more searchers warming in the background
-      // that we need to take care of.
-      //
-      // For the case that a searcher was registered *before* warming
-      // then the searchExecutor will throw an exception when getSearcher()
-      // tries to use it, and the exception handling code should close it.
-      closeSearcher();
-    } catch (Throwable e) {
-      SolrException.log(log,e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-    
-    if (coreStateClosed) {
-      try {
-        cleanupOldIndexDirectories(false);
-      } catch (Exception e) {
-        SolrException.log(log, e);
-      }
-    }
-
-    try {
-      infoRegistry.clear();
-    } catch (Throwable e) {
-      SolrException.log(log, e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-
-    // Close the snapshots meta-data directory.
-    Directory snapshotsDir = snapshotMgr.getSnapshotsDir();
-    try {
-      this.directoryFactory.release(snapshotsDir);
-    }  catch (Throwable e) {
-      SolrException.log(log,e);
-      if (e instanceof Error) {
-        throw (Error) e;
-      }
-    }
-
-    // Only close the directory factory once the shared core state is fully released.
-    if (coreStateClosed) {
-      
-      try {
-        directoryFactory.close();
-      } catch (Throwable e) {
-        SolrException.log(log, e);
-        if (e instanceof Error) {
-          throw (Error) e;
-        }
-      }
-    }
-
-    // notify post-close hooks after everything above has been released
-    if( closeHooks != null ) {
-       for( CloseHook hook : closeHooks ) {
-         try {
-           hook.postClose( this );
-         } catch (Throwable e) {
-           SolrException.log(log, e);
-           if (e instanceof Error) {
-             throw (Error) e;
-           }
-         }
-      }
-    }
-    
-    assert ObjectReleaseTracker.release(this);
-  }
-
-  /** Current core usage count (reference count): 1 at creation, incremented by {@link #open()}, decremented by {@link #close()}. */
-  public int getOpenCount() {
-    return refCount.get();
-  }
-
-  /** Whether this core is closed, i.e. its reference count has dropped to zero (or below). */
-  public boolean isClosed() {
-      return refCount.get() <= 0;
-  }
-
-  /**
-   * Last-resort leak detector: if the GC collects a core whose reference count
-   * never reached zero, log it loudly so the missing close() can be tracked down.
-   */
-  @Override
-  protected void finalize() throws Throwable {
-    try {
-      if (getOpenCount() != 0) {
-        log.error("REFCOUNT ERROR: unreferenced {} ({}) has a reference count of {}", this, getName(), getOpenCount());
-      }
-    } finally {
-      super.finalize();
-    }
-  }
-
-  // Hooks notified in close(): preClose() before teardown, postClose() after. Lazily initialized.
-  private Collection<CloseHook> closeHooks = null;
-
-   /**
-    * Add a close callback hook
-    */
-   // NOTE(review): the lazy initialization below is unsynchronized — confirm hooks are
-   // only registered from a single thread (e.g. during core init) before relying on this.
-   public void addCloseHook( CloseHook hook )
-   {
-     if( closeHooks == null ) {
-       closeHooks = new ArrayList<>();
-     }
-     closeHooks.add( hook );
-   }
-
-  /** @lucene.internal
-   *  Debugging aid only.  No non-test code should be released with uncommented verbose() calls.  */
-  public static boolean VERBOSE = Boolean.parseBoolean(System.getProperty("tests.verbose","false"));
-
-  /** Logs the given arguments (space-separated, nulls rendered as "(null)") when {@link #VERBOSE} is enabled. */
-  public static void verbose(Object... args) {
-    if (!VERBOSE) return;
-    StringBuilder sb = new StringBuilder("VERBOSE:");
-//    sb.append(Thread.currentThread().getName());
-//    sb.append(':');
-    for (Object o : args) {
-      sb.append(' ');
-      sb.append(o==null ? "(null)" : o.toString());
-    }
-    // System.out.println(sb.toString());
-    log.info(sb.toString());
-  }
-
-
-  ////////////////////////////////////////////////////////////////////////////////
-  // Request Handler
-  ////////////////////////////////////////////////////////////////////////////////
-
-  /**
-   * Get the request handler registered to a given name.
-   *
-   * The name is normalized via {@code RequestHandlers.normalize} before lookup.
-   *
-   * This function is thread safe.
-   */
-  public SolrRequestHandler getRequestHandler(String handlerName) {
-    return RequestHandlerBase.getRequestHandler(RequestHandlers.normalize(handlerName), reqHandlers.handlers);
-  }
-
-  /**
-   * Returns an unmodifiable Map containing the registered handlers
-   * (NOTE(review): the return type is actually a {@link PluginBag}, not a Map —
-   * confirm whether mutation through it is intended to be disallowed.)
-   */
-  public PluginBag<SolrRequestHandler> getRequestHandlers() {
-    return reqHandlers.handlers;
-  }
-
-
-  /**
-   * Registers a handler at the specified location.  If one exists there, it will be replaced.
-   * To remove a handler, register <code>null</code> at its path
-   *
-   * Once registered the handler can be accessed through:
-   * <pre>
-   *   http://${host}:${port}/${context}/${handlerName}
-   * or:
-   *   http://${host}:${port}/${context}/select?qt=${handlerName}
-   * </pre>
-   *
-   * Handlers <em>must</em> be initialized before getting registered.  Registered
-   * handlers can immediately accept requests.
-   *
-   * This call is thread safe.
-   *
-   * @return the previous <code>SolrRequestHandler</code> registered to this name <code>null</code> if none.
-   */
-  public SolrRequestHandler registerRequestHandler(String handlerName, SolrRequestHandler handler) {
-    // delegates to the RequestHandlers registry, which performs the actual replace/remove
-    return reqHandlers.register(handlerName,handler);
-  }
-
-  /**
-   * Register the default search components
-   */
-  private void loadSearchComponents()
-  {
-    // instantiate the standard components and give each its registry name
-    Map<String, SearchComponent> instances = createInstances(SearchComponent.standard_components);
-    for (Map.Entry<String, SearchComponent> e : instances.entrySet()) e.getValue().setName(e.getKey());
-    searchComponents.init(instances, this);
-
-    // If a HighlightComponent was registered under a non-standard name, alias it under the
-    // standard name as well so lookups by HighlightComponent.COMPONENT_NAME find it.
-    // Only the first loaded HighlightComponent is considered (note the break).
-    for (String name : searchComponents.keySet()) {
-      if (searchComponents.isLoaded(name) && searchComponents.get(name) instanceof HighlightComponent) {
-        if (!HighlightComponent.COMPONENT_NAME.equals(name)) {
-          searchComponents.put(HighlightComponent.COMPONENT_NAME, searchComponents.getRegistry().get(name));
-        }
-        break;
-      }
-    }
-  }
-  /**
-   * @return a Search Component registered to a given name.  Throw an exception if the component is undefined
-   */
-  public SearchComponent getSearchComponent(String name) {
-    return searchComponents.get(name);
-  }
-
-  /**
-   * Accessor for all the Search Components
-   * @return An unmodifiable Map of Search Components
-   * (NOTE(review): the return type is a {@link PluginBag}, not a Map — confirm the "unmodifiable" claim.)
-   */
-  public PluginBag<SearchComponent> getSearchComponents() {
-    return searchComponents;
-  }
-
-  ////////////////////////////////////////////////////////////////////////////////
-  // Update Handler
-  ////////////////////////////////////////////////////////////////////////////////
-
-  /**
-   * RequestHandlers need access to the updateHandler so they can all talk to the
-   * same RAM indexer.
-   * @return the core's single shared {@link UpdateHandler}
-   */
-  public UpdateHandler getUpdateHandler() {
-    return updateHandler;
-  }
-
-  ////////////////////////////////////////////////////////////////////////////////
-  // Searcher Control
-  ////////////////////////////////////////////////////////////////////////////////
-
-  // The current searcher used to service queries.
-  // Don't access this directly!!!! use getSearcher() to
-  // get it (and it will increment the ref count at the same time).
-  // This reference is protected by searcherLock.
-  private RefCounted<SolrIndexSearcher> _searcher;
-
-  // All of the normal open searchers.  Don't access this directly.
-  // protected by synchronizing on searcherLock.
-  private final LinkedList<RefCounted<SolrIndexSearcher>> _searchers = new LinkedList<>();
-  private final LinkedList<RefCounted<SolrIndexSearcher>> _realtimeSearchers = new LinkedList<>();
-
-  // Single-threaded executor used to warm/register new searchers in order; shut down in close().
-  final ExecutorService searcherExecutor = ExecutorUtil.newMDCAwareSingleThreadExecutor(
-      new DefaultSolrThreadFactory("searcherExecutor"));
-  private int onDeckSearchers;  // number of searchers preparing (presumably guarded by searcherLock — confirm)
-  // Lock ordering: one can acquire the openSearcherLock and then the searcherLock, but not vice-versa.
-  private Object searcherLock = new Object();  // the sync object for the searcher
-  private ReentrantLock openSearcherLock = new ReentrantLock(true);     // used to serialize opens/reopens for absolute ordering
-  private final int maxWarmingSearchers;  // max number of on-deck searchers allowed
-  private final int slowQueryThresholdMillis;  // threshold above which a query is considered slow
-
-  // Latest opened searcher (realtime view); protected by searcherLock.
-  private RefCounted<SolrIndexSearcher> realtimeSearcher;
-  private Callable<DirectoryReader> newReaderCreator;
-
-  // For testing
-  /** Returns true iff no searcher references of any kind remain (lists, registered, realtime). */
-  boolean areAllSearcherReferencesEmpty() {
-    boolean isEmpty;
-    synchronized (searcherLock) {
-      isEmpty = _searchers.isEmpty();
-      isEmpty = isEmpty && _realtimeSearchers.isEmpty();
-      isEmpty = isEmpty && (_searcher == null);
-      isEmpty = isEmpty && (realtimeSearcher == null);
-    }
-    return isEmpty;
-  }
-
-  /**
-   * Return a registered {@link RefCounted}&lt;{@link SolrIndexSearcher}&gt; with
-   * the reference count incremented.  It <b>must</b> be decremented when no longer needed.
-   * This method should not be called from SolrCoreAware.inform() since it can result
-   * in a deadlock if useColdSearcher==false.
-   * If handling a normal request, the searcher should be obtained from
-   * {@link org.apache.solr.request.SolrQueryRequest#getSearcher()} instead.
-   * If you still think you need to call this, consider {@link #withSearcher(IOFunction)} instead which is easier to
-   * use.
-   * @see SolrQueryRequest#getSearcher()
-   * @see #withSearcher(IOFunction)
-   */
-  public RefCounted<SolrIndexSearcher> getSearcher() {
-    // delegate: forceNew=false, returnSearcher=true, no waitSearcher future
-    return getSearcher(false,true,null);
-  }
-
-  /**
-   * Executes the lambda with the {@link SolrIndexSearcher}.  This is more convenient than using
-   * {@link #getSearcher()} since there is no ref-counting business to worry about.
-   * Example:
-   * <pre class="prettyprint">
-   *   IndexReader reader = h.getCore().withSearcher(SolrIndexSearcher::getIndexReader);
-   * </pre>
-   * Warning: although a lambda is concise, it may be inappropriate to simply return the IndexReader because it might
-   * be closed soon after this method returns; it really depends.
-   */
-  // NOTE(review): @SuppressWarnings("unchecked") appears unnecessary — no unchecked cast
-  // is visible in the body; confirm before removing.
-  @SuppressWarnings("unchecked")
-  public <R> R withSearcher(IOFunction<SolrIndexSearcher,R> lambda) throws IOException {
-    final RefCounted<SolrIndexSearcher> refCounted = getSearcher();
-    try {
-      return lambda.apply(refCounted.get());
-    } finally {
-      // always release the reference taken by getSearcher(), even if the lambda throws
-      refCounted.decref();
-    }
-  }
-
-  /**
-   * Computes fingerprint of a segment and caches it only if all the version in segment are included in the fingerprint.
-   * We can't use computeIfAbsent as caching is conditional (as described above)
-   * There is chance that two threads may compute fingerprint on the same segment. It might be OK to do so rather than locking entire map.
-   *
-   * @param searcher   searcher that includes specified LeafReaderContext
-   * @param ctx        LeafReaderContext of a segment to compute fingerprint of
-   * @param maxVersion maximum version number to consider for fingerprint computation
-   * @return IndexFingerprint of the segment
-   * @throws IOException Can throw IOException
-   */
-  public IndexFingerprint getIndexFingerprint(SolrIndexSearcher searcher, LeafReaderContext ctx, long maxVersion)
-      throws IOException {
-    IndexReader.CacheHelper cacheHelper = ctx.reader().getReaderCacheHelper();
-    if (cacheHelper == null) {
-      // reader provides no cache key, so compute the fingerprint uncached
-      log.debug("Cannot cache IndexFingerprint as reader does not support caching. searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
-      return IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
-    }
-    
-    IndexFingerprint f = null;
-    f = perSegmentFingerprintCache.get(cacheHelper.getKey());
-    // fingerprint is either not cached or
-    // if we want fingerprint only up to a version less than maxVersionEncountered in the segment, or
-    // documents were deleted from segment for which fingerprint was cached
-    //
-    if (f == null || (f.getMaxInHash() > maxVersion) || (f.getNumDocs() != ctx.reader().numDocs())) {
-      log.debug("IndexFingerprint cache miss for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
-      f = IndexFingerprint.getFingerprint(searcher, ctx, maxVersion);
-      // cache fingerprint for the segment only if all the versions in the segment are included in the fingerprint
-      if (f.getMaxVersionEncountered() == f.getMaxInHash()) {
-        log.debug("Caching fingerprint for searcher:{} leafReaderContext:{} mavVersion:{}", searcher, ctx, maxVersion);
-        perSegmentFingerprintCache.put(cacheHelper.getKey(), f);
-      }
-
-    } else {
-      log.debug("IndexFingerprint cache hit for searcher:{} reader:{} readerHash:{} maxVersion:{}", searcher, ctx.reader(), ctx.reader().hashCode(), maxVersion);
-    }
-    log.debug("Cache Size: {}, Segments Size:{}", perSegmentFingerprintCache.size(), searcher.getTopReaderContext().leaves().size());
-    return f;
-  }
-
-  /**
-  * Returns the current registered searcher with its reference count incremented, or null if none are registered.
-  */
-  public RefCounted<SolrIndexSearcher> getRegisteredSearcher() {
-    synchronized (searcherLock) {
-      // incref inside the lock so the searcher cannot be closed between read and return
-      if (_searcher != null) {
-        _searcher.incref();
-      }
-      return _searcher;
-    }
-  }
-
-  /**
-   * Return the newest normal {@link RefCounted}&lt;{@link SolrIndexSearcher}&gt; with
-   * the reference count incremented.  It <b>must</b> be decremented when no longer needed.
-   * If no searcher is currently open, then if openNew==true a new searcher will be opened,
-   * or null is returned if openNew==false.
-   */
-  public RefCounted<SolrIndexSearcher> getNewestSearcher(boolean openNew) {
-    synchronized (searcherLock) {
-      if (!_searchers.isEmpty()) {
-        // last element of _searchers is the most recently opened normal searcher
-        RefCounted<SolrIndexSearcher> newest = _searchers.getLast();
-        newest.incref();
-        return newest;
-      }
-    }
-
-    // no normal searcher open: optionally fall back to (possibly opening) a realtime searcher
-    return openNew ? getRealtimeSearcher() : null;
-  }
-
-  /** Gets the latest real-time searcher w/o forcing open a new searcher if one already exists.
-   * The reference count will be incremented.
-   *
-   * Uses a check / lock / re-check pattern: the fast path only takes searcherLock; opening
-   * a new searcher is serialized under openSearcherLock (see the field's lock-ordering note).
-   */
-  public RefCounted<SolrIndexSearcher> getRealtimeSearcher() {
-    synchronized (searcherLock) {
-      if (realtimeSearcher != null) {
-        realtimeSearcher.incref();
-        return realtimeSearcher;
-      }
-    }
-
-    // use the searcher lock to prevent multiple people from trying to open at once
-    openSearcherLock.lock();
-    try {
-
-      // try again
-      synchronized (searcherLock) {
-        if (realtimeSearcher != null) {
-          realtimeSearcher.incref();
-          return realtimeSearcher;
-        }
-      }
-
-      // force a new searcher open
-      return openNewSearcher(true, true);
-    } finally {
-      openSearcherLock.unlock();
-    }
-  }
-
-
-  /** Convenience overload of {@link #getSearcher(boolean, boolean, Future[], boolean)} with updateHandlerReopens=false. */
-  public RefCounted<SolrIndexSearcher> getSearcher(boolean forceNew, boolean returnSearcher, final Future[] waitSearcher) {
-    return getSearcher(forceNew, returnSearcher, waitSearcher, false);
-  }
-
-
-  /** Opens a new searcher and returns a RefCounted&lt;SolrIndexSearcher&gt; with its reference incremented.
-   *
-   * "realtime" means that we need to open quickly for a realtime view of the index, hence don't do any
-   * autowarming and add to the _realtimeSearchers queue rather than the _searchers queue (so it won't
-   * be used for autowarming by a future normal searcher).  A "realtime" searcher will currently never
-   * become "registered" (since it currently lacks caching).
-   *
-   * realtimeSearcher is updated to the latest opened searcher, regardless of the value of "realtime".
-   *
-   * This method acquires openSearcherLock - do not call with searchLock held!
-   */
-  public RefCounted<SolrIndexSearcher>  openNewSearcher(boolean updateHandlerReopens, boolean realtime) {
-    if (isClosed()) { // catch some errors quicker
-      throw new SolrException(ErrorCode.SERVER_ERROR, "openNewSearcher called on closed core");
-    }
-
-    SolrIndexSearcher tmp;
-    RefCounted<SolrIndexSearcher> newestSearcher = null;
-
-    openSearcherLock.lock();
-    try {
-      String newIndexDir = getNewIndexDir();
-      String indexDirFile = null;
-      String newIndexDirFile = null;
-
-      // if it's not a normal near-realtime update, check that paths haven't changed.
-      if (!updateHandlerReopens) {
-        indexDirFile = getDirectoryFactory().normalize(getIndexDir());
-        newIndexDirFile = getDirectoryFactory().normalize(newIndexDir);
-      }
-
-      synchronized (searcherLock) {
-        newestSearcher = realtimeSearcher;
-        if (newestSearcher != null) {
-          newestSearcher.incref();      // the matching decref is in the finally block
-        }
-      }
-
-      // Reopen path: there is an existing searcher and the index dir hasn't moved,
-      // so reopen its reader rather than opening the index from scratch.
-      if (newestSearcher != null && (updateHandlerReopens || indexDirFile.equals(newIndexDirFile))) {
-
-        DirectoryReader newReader;
-        DirectoryReader currentReader = newestSearcher.get().getRawReader();
-
-        // SolrCore.verbose("start reopen from",previousSearcher,"writer=",writer);
-
-        RefCounted<IndexWriter> writer = getSolrCoreState().getIndexWriter(null);
-
-        try {
-          if (writer != null) {
-            // if in NRT mode, open from the writer
-            newReader = DirectoryReader.openIfChanged(currentReader, writer.get(), true);
-          } else {
-            // verbose("start reopen without writer, reader=", currentReader);
-            newReader = DirectoryReader.openIfChanged(currentReader);
-            // verbose("reopen result", newReader);
-          }
-        } finally {
-          if (writer != null) {
-            writer.decref();
-          }
-        }
-
-        if (newReader == null) { // the underlying index has not changed at all
-
-          if (realtime) {
-            // if this is a request for a realtime searcher, just return the same searcher
-            newestSearcher.incref();
-            return newestSearcher;
-
-          } else if (newestSearcher.get().isCachingEnabled() && newestSearcher.get().getSchema() == getLatestSchema()) {
-            // absolutely nothing has changed, can use the same searcher
-            // but log a message about it to minimize confusion
-
-            newestSearcher.incref();
-            log.debug("SolrIndexSearcher has not changed - not re-opening: {}", newestSearcher.get().getName());
-            return newestSearcher;
-
-          } // ELSE: open a new searcher against the old reader...
-          currentReader.incRef();
-          newReader = currentReader;
-        }
-
-        // for now, turn off caches if this is for a realtime reader 
-        // (caches take a little while to instantiate)
-        final boolean useCaches = !realtime;
-        final String newName = realtime ? "realtime" : "main";
-        tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(), newName,
-                                    newReader, true, useCaches, true, directoryFactory);
-
-      } else {
-        // newestSearcher == null at this point
-
-        if (newReaderCreator != null) {
-          // this is set in the constructor if there is a currently open index writer
-          // so that we pick up any uncommitted changes and so we don't go backwards
-          // in time on a core reload
-          DirectoryReader newReader = newReaderCreator.call();
-          tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(),
-              (realtime ? "realtime":"main"), newReader, true, !realtime, true, directoryFactory);
-        } else  {
-          // no creator available: open a fresh reader from the index writer
-          RefCounted<IndexWriter> writer = getSolrCoreState().getIndexWriter(this);
-          DirectoryReader newReader = null;
-          try {
-            newReader = indexReaderFactory.newReader(writer.get(), this);
-          } finally {
-            writer.decref();
-          }
-          tmp = new SolrIndexSearcher(this, newIndexDir, getLatestSchema(),
-              (realtime ? "realtime":"main"), newReader, true, !realtime, true, directoryFactory);
-        }
-      }
-
-      // register the new searcher in the appropriate tracking list
-      List<RefCounted<SolrIndexSearcher>> searcherList = realtime ? _realtimeSearchers : _searchers;
-      RefCounted<SolrIndexSearcher> newSearcher = newHolder(tmp, searcherList);    // refcount now at 1
-
-      // Increment reference again for "realtimeSearcher" variable.  It should be at 2 after.
-      // When it's decremented by both the caller of this method, and by realtimeSearcher being replaced,
-      // it will be closed.
-      newSearcher.incref();
-
-      synchronized (searcherLock) {
-        // Check if the core is closed again inside the lock in case this method is racing with a close. If the core is
-        // closed, clean up the new searcher and bail.
-        if (isClosed()) {
-          newSearcher.decref(); // once for caller since we're not returning it
-          newSearcher.decref(); // once for ourselves since it won't be "replaced"
-          throw new SolrException(ErrorCode.SERVER_ERROR, "openNewSearcher called on closed core");
-        }
-
-        if (realtimeSearcher != null) {
-          realtimeSearcher.decref();
-        }
-        realtimeSearcher = newSearcher;
-        searcherList.add(realtimeSearcher);
-      }
-
-      return newSearcher;
-
-    } catch (Exception e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Error opening new searcher", e);
-    }
-    finally {
-      // always release the open lock and the temporary reference taken on newestSearcher
-      openSearcherLock.unlock();
-      if (newestSearcher != null) {
-        newestSearcher.decref();
-      }
-    }
-  }
-
-  /**
-   * Get a {@link SolrIndexSearcher} or start the process of creating a new one.
-   * <p>
-   * The registered searcher is the default searcher used to service queries.
-   * A searcher will normally be registered after all of the warming
-   * and event handlers (newSearcher or firstSearcher events) have run.
-   * In the case where there is no registered searcher, the newly created searcher will
-   * be registered before running the event handlers (a slow searcher is better than no searcher).
-   *
-   * <p>
-   * These searchers contain read-only IndexReaders. To access a non read-only IndexReader,
-   * see newSearcher(String name, boolean readOnly).
-   *
-   * <p>
-   * If <tt>forceNew==true</tt> then
-   *  A new searcher will be opened and registered regardless of whether there is already
-   *    a registered searcher or other searchers in the process of being created.
-   * <p>
-   * If <tt>forceNew==false</tt> then:<ul>
-   *   <li>If a searcher is already registered, that searcher will be returned</li>
-   *   <li>If no searcher is currently registered, but at least one is in the process of being created, then
-   * this call will block until the first searcher is registered</li>
-   *   <li>If no searcher is currently registered, and no searchers in the process of being registered, a new
-   * searcher will be created.</li>
-   * </ul>
-   * <p>
-   * If <tt>returnSearcher==true</tt> then a {@link RefCounted}&lt;{@link SolrIndexSearcher}&gt; will be returned with
-   * the reference count incremented.  It <b>must</b> be decremented when no longer needed.
-   * <p>
-   * If <tt>waitSearcher!=null</tt> and a new {@link SolrIndexSearcher} was created,
-   * then it is filled in with a Future that will return after the searcher is registered.  The Future may be set to
-   * <tt>null</tt> in which case the SolrIndexSearcher created has already been registered at the time
-   * this method returned.
-   * <p>
-   * @param forceNew             if true, force the open of a new index searcher regardless if there is already one open.
-   * @param returnSearcher       if true, returns a {@link SolrIndexSearcher} holder with the refcount already incremented.
-   * @param waitSearcher         if non-null, will be filled in with a {@link Future} that will return after the new searcher is registered.
-   * @param updateHandlerReopens if true, the UpdateHandler will be used when reopening a {@link SolrIndexSearcher}.
-   */
-  public RefCounted<SolrIndexSearcher> getSearcher(boolean forceNew, boolean returnSearcher, final Future[] waitSearcher, boolean updateHandlerReopens) {
-    // it may take some time to open an index.... we may need to make
-    // sure that two threads aren't trying to open one at the same time
-    // if it isn't necessary.
-
-    synchronized (searcherLock) {
-      for(;;) { // this loop is so w can retry in the event that we exceed maxWarmingSearchers
-        // see if we can retu

<TRUNCATED>