You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by da...@apache.org on 2018/10/23 00:05:46 UTC
[25/52] [abbrv] [partial] lucene-solr:jira/gradle: Add gradle support
for Solr
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java b/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
deleted file mode 100644
index 6636a8f..0000000
--- a/solr/core/src/java/org/apache/solr/core/SolrXmlConfig.java
+++ /dev/null
@@ -1,553 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import javax.management.MBeanServer;
-import javax.xml.xpath.XPath;
-import javax.xml.xpath.XPathConstants;
-import javax.xml.xpath.XPathExpressionException;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-import java.util.Set;
-
-import com.google.common.base.Strings;
-import org.apache.commons.io.IOUtils;
-import org.apache.solr.client.solrj.impl.HttpClientUtil;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.logging.LogWatcherConfig;
-import org.apache.solr.metrics.reporters.SolrJmxReporter;
-import org.apache.solr.update.UpdateShardHandlerConfig;
-import org.apache.solr.util.DOMUtil;
-import org.apache.solr.util.JmxUtil;
-import org.apache.solr.util.PropertiesUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-import org.xml.sax.InputSource;
-
-import static org.apache.solr.common.params.CommonParams.NAME;
-
-
-/**
- *
- */
-public class SolrXmlConfig {
-
- public final static String SOLR_XML_FILE = "solr.xml";
- public final static String SOLR_DATA_HOME = "solr.data.home";
-
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- public static NodeConfig fromConfig(Config config) {
-
- checkForIllegalConfig(config);
-
- config.substituteProperties();
-
- CloudConfig cloudConfig = null;
- UpdateShardHandlerConfig deprecatedUpdateConfig = null;
-
- if (config.getNodeList("solr/solrcloud", false).getLength() > 0) {
- NamedList<Object> cloudSection = readNodeListAsNamedList(config, "solr/solrcloud/*[@name]", "<solrcloud>");
- deprecatedUpdateConfig = loadUpdateConfig(cloudSection, false);
- cloudConfig = fillSolrCloudSection(cloudSection);
- }
-
- NamedList<Object> entries = readNodeListAsNamedList(config, "solr/*[@name]", "<solr>");
- String nodeName = (String) entries.remove("nodeName");
- if (Strings.isNullOrEmpty(nodeName) && cloudConfig != null)
- nodeName = cloudConfig.getHost();
-
- UpdateShardHandlerConfig updateConfig;
- if (deprecatedUpdateConfig == null) {
- updateConfig = loadUpdateConfig(readNodeListAsNamedList(config, "solr/updateshardhandler/*[@name]", "<updateshardhandler>"), true);
- }
- else {
- updateConfig = loadUpdateConfig(readNodeListAsNamedList(config, "solr/updateshardhandler/*[@name]", "<updateshardhandler>"), false);
- if (updateConfig != null) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "UpdateShardHandler configuration defined twice in solr.xml");
- }
- updateConfig = deprecatedUpdateConfig;
- }
-
- NodeConfig.NodeConfigBuilder configBuilder = new NodeConfig.NodeConfigBuilder(nodeName, config.getResourceLoader());
- configBuilder.setUpdateShardHandlerConfig(updateConfig);
- configBuilder.setShardHandlerFactoryConfig(getShardHandlerFactoryPluginInfo(config));
- configBuilder.setSolrCoreCacheFactoryConfig(getTransientCoreCacheFactoryPluginInfo(config));
- configBuilder.setLogWatcherConfig(loadLogWatcherConfig(config, "solr/logging/*[@name]", "solr/logging/watcher/*[@name]"));
- configBuilder.setSolrProperties(loadProperties(config));
- if (cloudConfig != null)
- configBuilder.setCloudConfig(cloudConfig);
- configBuilder.setBackupRepositoryPlugins(getBackupRepositoryPluginInfos(config));
- configBuilder.setMetricsConfig(getMetricsConfig(config));
- return fillSolrSection(configBuilder, entries);
- }
-
- public static NodeConfig fromFile(SolrResourceLoader loader, Path configFile) {
-
- log.info("Loading container configuration from {}", configFile);
-
- if (!Files.exists(configFile)) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- "solr.xml does not exist in " + configFile.getParent() + " cannot start Solr");
- }
-
- try (InputStream inputStream = Files.newInputStream(configFile)) {
- return fromInputStream(loader, inputStream);
- } catch (SolrException exc) {
- throw exc;
- } catch (Exception exc) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- "Could not load SOLR configuration", exc);
- }
- }
-
- public static NodeConfig fromString(SolrResourceLoader loader, String xml) {
- return fromInputStream(loader, new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
- }
-
- public static NodeConfig fromInputStream(SolrResourceLoader loader, InputStream is) {
- try {
- byte[] buf = IOUtils.toByteArray(is);
- try (ByteArrayInputStream dup = new ByteArrayInputStream(buf)) {
- Config config = new Config(loader, null, new InputSource(dup), null, false);
- return fromConfig(config);
- }
- } catch (SolrException exc) {
- throw exc;
- } catch (Exception e) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
- }
- }
-
- public static NodeConfig fromSolrHome(SolrResourceLoader loader, Path solrHome) {
- return fromFile(loader, solrHome.resolve(SOLR_XML_FILE));
- }
-
- public static NodeConfig fromSolrHome(Path solrHome) {
- SolrResourceLoader loader = new SolrResourceLoader(solrHome);
- return fromSolrHome(loader, solrHome);
- }
-
- private static void checkForIllegalConfig(Config config) {
- failIfFound(config, "solr/@coreLoadThreads");
- failIfFound(config, "solr/@persistent");
- failIfFound(config, "solr/@sharedLib");
- failIfFound(config, "solr/@zkHost");
- failIfFound(config, "solr/cores");
-
- assertSingleInstance("solrcloud", config);
- assertSingleInstance("logging", config);
- assertSingleInstance("logging/watcher", config);
- assertSingleInstance("backup", config);
- }
-
- private static void assertSingleInstance(String section, Config config) {
- if (config.getNodeList("/solr/" + section, false).getLength() > 1)
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Multiple instances of " + section + " section found in solr.xml");
- }
-
- private static void failIfFound(Config config, String xPath) {
-
- if (config.getVal(xPath, false) != null) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Should not have found " + xPath +
- "\n. Please upgrade your solr.xml: https://lucene.apache.org/solr/guide/format-of-solr-xml.html");
- }
- }
-
- private static Properties loadProperties(Config config) {
- try {
- Node node = ((NodeList) config.evaluate("solr", XPathConstants.NODESET)).item(0);
- XPath xpath = config.getXPath();
- NodeList props = (NodeList) xpath.evaluate("property", node, XPathConstants.NODESET);
- Properties properties = new Properties();
- for (int i = 0; i < props.getLength(); i++) {
- Node prop = props.item(i);
- properties.setProperty(DOMUtil.getAttr(prop, NAME),
- PropertiesUtil.substituteProperty(DOMUtil.getAttr(prop, "value"), null));
- }
- return properties;
- }
- catch (XPathExpressionException e) {
- log.warn("Error parsing solr.xml: " + e.getMessage());
- return null;
- }
- }
-
- private static NamedList<Object> readNodeListAsNamedList(Config config, String path, String section) {
- NodeList nodes = config.getNodeList(path, false);
- if (nodes == null) {
- return null;
- }
- return checkForDuplicates(section, DOMUtil.nodesToNamedList(nodes));
- }
-
- private static NamedList<Object> checkForDuplicates(String section, NamedList<Object> nl) {
- Set<String> keys = new HashSet<>();
- for (Map.Entry<String, Object> entry : nl) {
- if (!keys.add(entry.getKey()))
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- section + " section of solr.xml contains duplicated '" + entry.getKey() + "'");
- }
- return nl;
- }
-
- private static int parseInt(String field, String value) {
- try {
- return Integer.parseInt(value);
- }
- catch (NumberFormatException e) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- "Error parsing '" + field + "', value '" + value + "' cannot be parsed as int");
- }
- }
-
- private static NodeConfig fillSolrSection(NodeConfig.NodeConfigBuilder builder, NamedList<Object> nl) {
-
- for (Map.Entry<String, Object> entry : nl) {
- String name = entry.getKey();
- if (entry.getValue() == null)
- continue;
- String value = entry.getValue().toString();
- switch (name) {
- case "adminHandler":
- builder.setCoreAdminHandlerClass(value);
- break;
- case "collectionsHandler":
- builder.setCollectionsAdminHandlerClass(value);
- break;
- case "healthCheckHandler":
- builder.setHealthCheckHandlerClass(value);
- break;
- case "infoHandler":
- builder.setInfoHandlerClass(value);
- break;
- case "configSetsHandler":
- builder.setConfigSetsHandlerClass(value);
- break;
- case "coreRootDirectory":
- builder.setCoreRootDirectory(value);
- break;
- case "solrDataHome":
- builder.setSolrDataHome(value);
- break;
- case "managementPath":
- builder.setManagementPath(value);
- break;
- case "sharedLib":
- builder.setSharedLibDirectory(value);
- break;
- case "configSetBaseDir":
- builder.setConfigSetBaseDirectory(value);
- break;
- case "shareSchema":
- builder.setUseSchemaCache(Boolean.parseBoolean(value));
- break;
- case "coreLoadThreads":
- builder.setCoreLoadThreads(parseInt(name, value));
- break;
- case "replayUpdatesThreads":
- builder.setReplayUpdatesThreads(parseInt(name, value));
- break;
- case "transientCacheSize":
- builder.setTransientCacheSize(parseInt(name, value));
- break;
- default:
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown configuration value in solr.xml: " + name);
- }
- }
-
- return builder.build();
- }
-
- private static UpdateShardHandlerConfig loadUpdateConfig(NamedList<Object> nl, boolean alwaysDefine) {
-
- if (nl == null && !alwaysDefine)
- return null;
-
- if (nl == null)
- return UpdateShardHandlerConfig.DEFAULT;
-
- boolean defined = false;
-
- int maxUpdateConnections = HttpClientUtil.DEFAULT_MAXCONNECTIONS;
- int maxUpdateConnectionsPerHost = HttpClientUtil.DEFAULT_MAXCONNECTIONSPERHOST;
- int distributedSocketTimeout = HttpClientUtil.DEFAULT_SO_TIMEOUT;
- int distributedConnectionTimeout = HttpClientUtil.DEFAULT_CONNECT_TIMEOUT;
- String metricNameStrategy = UpdateShardHandlerConfig.DEFAULT_METRICNAMESTRATEGY;
- int maxRecoveryThreads = UpdateShardHandlerConfig.DEFAULT_MAXRECOVERYTHREADS;
-
- Object muc = nl.remove("maxUpdateConnections");
- if (muc != null) {
- maxUpdateConnections = parseInt("maxUpdateConnections", muc.toString());
- defined = true;
- }
-
- Object mucph = nl.remove("maxUpdateConnectionsPerHost");
- if (mucph != null) {
- maxUpdateConnectionsPerHost = parseInt("maxUpdateConnectionsPerHost", mucph.toString());
- defined = true;
- }
-
- Object dst = nl.remove("distribUpdateSoTimeout");
- if (dst != null) {
- distributedSocketTimeout = parseInt("distribUpdateSoTimeout", dst.toString());
- defined = true;
- }
-
- Object dct = nl.remove("distribUpdateConnTimeout");
- if (dct != null) {
- distributedConnectionTimeout = parseInt("distribUpdateConnTimeout", dct.toString());
- defined = true;
- }
-
- Object mns = nl.remove("metricNameStrategy");
- if (mns != null) {
- metricNameStrategy = mns.toString();
- defined = true;
- }
-
- Object mrt = nl.remove("maxRecoveryThreads");
- if (mrt != null) {
- maxRecoveryThreads = parseInt("maxRecoveryThreads", mrt.toString());
- defined = true;
- }
-
- if (!defined && !alwaysDefine)
- return null;
-
- return new UpdateShardHandlerConfig(maxUpdateConnections, maxUpdateConnectionsPerHost, distributedSocketTimeout,
- distributedConnectionTimeout, metricNameStrategy, maxRecoveryThreads);
-
- }
-
- private static String removeValue(NamedList<Object> nl, String key) {
- Object value = nl.remove(key);
- if (value == null)
- return null;
- return value.toString();
- }
-
- private static String required(String section, String key, String value) {
- if (value != null)
- return value;
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, section + " section missing required entry '" + key + "'");
- }
-
- private static CloudConfig fillSolrCloudSection(NamedList<Object> nl) {
-
- String hostName = required("solrcloud", "host", removeValue(nl, "host"));
- int hostPort = parseInt("hostPort", required("solrcloud", "hostPort", removeValue(nl, "hostPort")));
- String hostContext = required("solrcloud", "hostContext", removeValue(nl, "hostContext"));
-
- CloudConfig.CloudConfigBuilder builder = new CloudConfig.CloudConfigBuilder(hostName, hostPort, hostContext);
-
- for (Map.Entry<String, Object> entry : nl) {
- String name = entry.getKey();
- if (entry.getValue() == null)
- continue;
- String value = entry.getValue().toString();
- switch (name) {
- case "leaderVoteWait":
- builder.setLeaderVoteWait(parseInt(name, value));
- break;
- case "leaderConflictResolveWait":
- builder.setLeaderConflictResolveWait(parseInt(name, value));
- break;
- case "zkClientTimeout":
- builder.setZkClientTimeout(parseInt(name, value));
- break;
- case "autoReplicaFailoverBadNodeExpiration": case "autoReplicaFailoverWorkLoopDelay":
- //TODO remove this in Solr 8.0
- log.info("Configuration parameter " + name + " is ignored");
- break;
- case "autoReplicaFailoverWaitAfterExpiration":
- builder.setAutoReplicaFailoverWaitAfterExpiration(parseInt(name, value));
- break;
- case "zkHost":
- builder.setZkHost(value);
- break;
- case "genericCoreNodeNames":
- builder.setUseGenericCoreNames(Boolean.parseBoolean(value));
- break;
- case "zkACLProvider":
- builder.setZkACLProviderClass(value);
- break;
- case "zkCredentialsProvider":
- builder.setZkCredentialsProviderClass(value);
- break;
- case "createCollectionWaitTimeTillActive":
- builder.setCreateCollectionWaitTimeTillActive(parseInt(name, value));
- break;
- case "createCollectionCheckLeaderActive":
- builder.setCreateCollectionCheckLeaderActive(Boolean.parseBoolean(value));
- break;
- default:
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown configuration parameter in <solrcloud> section of solr.xml: " + name);
- }
- }
-
- return builder.build();
- }
-
- private static LogWatcherConfig loadLogWatcherConfig(Config config, String loggingPath, String watcherPath) {
-
- String loggingClass = null;
- boolean enabled = true;
- int watcherQueueSize = 50;
- String watcherThreshold = null;
-
- for (Map.Entry<String, Object> entry : readNodeListAsNamedList(config, loggingPath, "<logging>")) {
- String name = entry.getKey();
- String value = entry.getValue().toString();
- switch (name) {
- case "class":
- loggingClass = value; break;
- case "enabled":
- enabled = Boolean.parseBoolean(value); break;
- default:
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown value in logwatcher config: " + name);
- }
- }
-
- for (Map.Entry<String, Object> entry : readNodeListAsNamedList(config, watcherPath, "<watcher>")) {
- String name = entry.getKey();
- String value = entry.getValue().toString();
- switch (name) {
- case "size":
- watcherQueueSize = parseInt(name, value); break;
- case "threshold":
- watcherThreshold = value; break;
- default:
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Unknown value in logwatcher config: " + name);
- }
- }
-
- return new LogWatcherConfig(enabled, loggingClass, watcherThreshold, watcherQueueSize);
-
- }
-
- private static PluginInfo getShardHandlerFactoryPluginInfo(Config config) {
- Node node = config.getNode("solr/shardHandlerFactory", false);
- return (node == null) ? null : new PluginInfo(node, "shardHandlerFactory", false, true);
- }
-
- private static PluginInfo[] getBackupRepositoryPluginInfos(Config config) {
- NodeList nodes = (NodeList) config.evaluate("solr/backup/repository", XPathConstants.NODESET);
- if (nodes == null || nodes.getLength() == 0)
- return new PluginInfo[0];
- PluginInfo[] configs = new PluginInfo[nodes.getLength()];
- for (int i = 0; i < nodes.getLength(); i++) {
- configs[i] = new PluginInfo(nodes.item(i), "BackupRepositoryFactory", true, true);
- }
- return configs;
- }
-
- private static MetricsConfig getMetricsConfig(Config config) {
- MetricsConfig.MetricsConfigBuilder builder = new MetricsConfig.MetricsConfigBuilder();
- Node node = config.getNode("solr/metrics/suppliers/counter", false);
- if (node != null) {
- builder = builder.setCounterSupplier(new PluginInfo(node, "counterSupplier", false, false));
- }
- node = config.getNode("solr/metrics/suppliers/meter", false);
- if (node != null) {
- builder = builder.setMeterSupplier(new PluginInfo(node, "meterSupplier", false, false));
- }
- node = config.getNode("solr/metrics/suppliers/timer", false);
- if (node != null) {
- builder = builder.setTimerSupplier(new PluginInfo(node, "timerSupplier", false, false));
- }
- node = config.getNode("solr/metrics/suppliers/histogram", false);
- if (node != null) {
- builder = builder.setHistogramSupplier(new PluginInfo(node, "histogramSupplier", false, false));
- }
- node = config.getNode("solr/metrics/history", false);
- if (node != null) {
- builder = builder.setHistoryHandler(new PluginInfo(node, "history", false, false));
- }
- PluginInfo[] reporterPlugins = getMetricReporterPluginInfos(config);
- Set<String> hiddenSysProps = getHiddenSysProps(config);
- return builder
- .setMetricReporterPlugins(reporterPlugins)
- .setHiddenSysProps(hiddenSysProps)
- .build();
- }
-
- private static PluginInfo[] getMetricReporterPluginInfos(Config config) {
- NodeList nodes = (NodeList) config.evaluate("solr/metrics/reporter", XPathConstants.NODESET);
- List<PluginInfo> configs = new ArrayList<>();
- boolean hasJmxReporter = false;
- if (nodes != null && nodes.getLength() > 0) {
- for (int i = 0; i < nodes.getLength(); i++) {
- // we don't require class in order to support predefined replica and node reporter classes
- PluginInfo info = new PluginInfo(nodes.item(i), "SolrMetricReporter", true, false);
- String clazz = info.className;
- if (clazz != null && clazz.equals(SolrJmxReporter.class.getName())) {
- hasJmxReporter = true;
- }
- configs.add(info);
- }
- }
- // if there's an MBean server running but there was no JMX reporter then add a default one
- MBeanServer mBeanServer = JmxUtil.findFirstMBeanServer();
- if (mBeanServer != null && !hasJmxReporter) {
- log.info("MBean server found: " + mBeanServer + ", but no JMX reporters were configured - adding default JMX reporter.");
- Map<String,Object> attributes = new HashMap<>();
- attributes.put("name", "default");
- attributes.put("class", SolrJmxReporter.class.getName());
- PluginInfo defaultPlugin = new PluginInfo("reporter", attributes);
- configs.add(defaultPlugin);
- }
- return configs.toArray(new PluginInfo[configs.size()]);
- }
-
- private static Set<String> getHiddenSysProps(Config config) {
- NodeList nodes = (NodeList) config.evaluate("solr/metrics/hiddenSysProps/str", XPathConstants.NODESET);
- if (nodes == null || nodes.getLength() == 0) {
- return NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS;
- }
- Set<String> props = new HashSet<>();
- for (int i = 0; i < nodes.getLength(); i++) {
- String prop = DOMUtil.getText(nodes.item(i));
- if (prop != null && !prop.trim().isEmpty()) {
- props.add(prop.trim());
- }
- }
- if (props.isEmpty()) {
- return NodeConfig.NodeConfigBuilder.DEFAULT_HIDDEN_SYS_PROPS;
- } else {
- return props;
- }
- }
-
- private static PluginInfo getTransientCoreCacheFactoryPluginInfo(Config config) {
- Node node = config.getNode("solr/transientCoreCacheFactory", false);
- return (node == null) ? null : new PluginInfo(node, "transientCoreCacheFactory", false, true);
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
deleted file mode 100644
index 1bc4914..0000000
--- a/solr/core/src/java/org/apache/solr/core/StandardDirectoryFactory.java
+++ /dev/null
@@ -1,165 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-import java.io.File;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.file.AtomicMoveNotSupportedException;
-import java.nio.file.FileSystems;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.StandardCopyOption;
-import java.util.Locale;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.LockFactory;
-import org.apache.lucene.store.NativeFSLockFactory;
-import org.apache.lucene.store.NoLockFactory;
-import org.apache.lucene.store.SimpleFSLockFactory;
-import org.apache.lucene.store.SingleInstanceLockFactory;
-import org.apache.solr.common.SolrException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Directory provider which mimics original Solr
- * {@link org.apache.lucene.store.FSDirectory} based behavior.
- *
- * File based DirectoryFactory implementations generally extend
- * this class.
- *
- */
-public class StandardDirectoryFactory extends CachingDirectoryFactory {
-
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- @Override
- protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
- // we pass NoLockFactory, because the real lock factory is set later by injectLockFactory:
- return FSDirectory.open(new File(path).toPath(), lockFactory);
- }
-
- @Override
- protected LockFactory createLockFactory(String rawLockType) throws IOException {
- if (null == rawLockType) {
- rawLockType = DirectoryFactory.LOCK_TYPE_NATIVE;
- log.warn("No lockType configured, assuming '"+rawLockType+"'.");
- }
- final String lockType = rawLockType.toLowerCase(Locale.ROOT).trim();
- switch (lockType) {
- case DirectoryFactory.LOCK_TYPE_SIMPLE:
- return SimpleFSLockFactory.INSTANCE;
- case DirectoryFactory.LOCK_TYPE_NATIVE:
- return NativeFSLockFactory.INSTANCE;
- case DirectoryFactory.LOCK_TYPE_SINGLE:
- return new SingleInstanceLockFactory();
- case DirectoryFactory.LOCK_TYPE_NONE:
- return NoLockFactory.INSTANCE;
- default:
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- "Unrecognized lockType: " + rawLockType);
- }
- }
-
- @Override
- public String normalize(String path) throws IOException {
- String cpath = new File(path).getCanonicalPath();
-
- return super.normalize(cpath);
- }
-
- @Override
- public boolean exists(String path) throws IOException {
- // we go by the persistent storage ...
- File dirFile = new File(path);
- return dirFile.canRead() && dirFile.list().length > 0;
- }
-
- public boolean isPersistent() {
- return true;
- }
-
- @Override
- public boolean isAbsolute(String path) {
- // back compat
- return new File(path).isAbsolute();
- }
-
- @Override
- protected void removeDirectory(CacheValue cacheValue) throws IOException {
- File dirFile = new File(cacheValue.path);
- FileUtils.deleteDirectory(dirFile);
- }
-
- /**
- * Override for more efficient moves.
- *
- * Intended for use with replication - use
- * carefully - some Directory wrappers will
- * cache files for example.
- *
- * You should first {@link Directory#sync(java.util.Collection)} any file that will be
- * moved or avoid cached files through settings.
- *
- * @throws IOException
- * If there is a low-level I/O error.
- */
- @Override
- public void move(Directory fromDir, Directory toDir, String fileName, IOContext ioContext)
- throws IOException {
-
- Directory baseFromDir = getBaseDir(fromDir);
- Directory baseToDir = getBaseDir(toDir);
-
- if (baseFromDir instanceof FSDirectory && baseToDir instanceof FSDirectory) {
-
- Path path1 = ((FSDirectory) baseFromDir).getDirectory().toAbsolutePath();
- Path path2 = ((FSDirectory) baseToDir).getDirectory().toAbsolutePath();
-
- try {
- Files.move(path1.resolve(fileName), path2.resolve(fileName), StandardCopyOption.ATOMIC_MOVE);
- } catch (AtomicMoveNotSupportedException e) {
- Files.move(path1.resolve(fileName), path2.resolve(fileName));
- }
- return;
- }
-
- super.move(fromDir, toDir, fileName, ioContext);
- }
-
- // perform an atomic rename if possible
- public void renameWithOverwrite(Directory dir, String fileName, String toName) throws IOException {
- Directory baseDir = getBaseDir(dir);
- if (baseDir instanceof FSDirectory) {
- Path path = ((FSDirectory) baseDir).getDirectory().toAbsolutePath();
- try {
- Files.move(path.resolve(fileName),
- path.resolve(toName), StandardCopyOption.ATOMIC_MOVE,
- StandardCopyOption.REPLACE_EXISTING);
- } catch (AtomicMoveNotSupportedException e) {
- Files.move(FileSystems.getDefault().getPath(path.toString(), fileName),
- FileSystems.getDefault().getPath(path.toString(), toName), StandardCopyOption.REPLACE_EXISTING);
- }
- } else {
- super.renameWithOverwrite(dir, fileName, toName);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/StandardIndexReaderFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/StandardIndexReaderFactory.java b/solr/core/src/java/org/apache/solr/core/StandardIndexReaderFactory.java
deleted file mode 100644
index 9ad4003..0000000
--- a/solr/core/src/java/org/apache/solr/core/StandardIndexReaderFactory.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-import java.io.IOException;
-
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.store.Directory;
-
-/**
- * Default IndexReaderFactory implementation. Returns a standard Lucene
- * {@link DirectoryReader}.
- *
- * @see DirectoryReader#open(Directory)
- */
-public class StandardIndexReaderFactory extends IndexReaderFactory {
-
- @Override
- public DirectoryReader newReader(Directory indexDir, SolrCore core) throws IOException {
- return DirectoryReader.open(indexDir);
- }
-
- @Override
- public DirectoryReader newReader(IndexWriter writer, SolrCore core) throws IOException {
- return DirectoryReader.open(writer);
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
deleted file mode 100644
index 63df02b..0000000
--- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCache.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-
-import java.util.Collection;
-import java.util.Collections;
-import java.util.List;
-import java.util.Observable;
-import java.util.Set;
-
-import org.apache.http.annotation.Experimental;
-
/**
 * The base class for custom transient core maintenance. Any custom plugin that wants to take control of transient
 * caches (i.e. any core defined with transient=true) should override this class.
 *
 * Register your plugin in solr.xml similarly to:
 *
 * &lt;transientCoreCacheFactory name="transientCoreCacheFactory" class="TransientSolrCoreCacheFactoryDefault"&gt;
 * &lt;int name="transientCacheSize"&gt;4&lt;/int&gt;
 * &lt;/transientCoreCacheFactory&gt;
 *
 *
 * WARNING: There is quite a bit of higher-level locking done by the CoreContainer to avoid various race conditions
 * etc. You should _only_ manipulate them within the method calls designed to change them. E.g.
 * only add to the transient core descriptors in addTransientDescriptor etc.
 *
 * Trust the higher-level code (mainly SolrCores and CoreContainer) to call the appropriate operations when
 * necessary and to coordinate shutting down cores, manipulating the internal structures and the like.
 *
 * The only real action you should _initiate_ is to close a core for whatever reason, and do that by
 * calling notifyObservers(coreToClose); The observer will call back to removeCore(name) at the appropriate
 * time. There is no need to directly remove the core _at that time_ from the transientCores list, a call
 * will come back to this class when CoreContainer is closing this core.
 *
 * CoreDescriptors are read-once. During "core discovery" all valid descriptors are enumerated and added to
 * the appropriate list. Thereafter, they are NOT re-read from disk. In those situations where you want
 * to re-define the coreDescriptor, maintain a "side list" of changed core descriptors. Then override
 * getTransientDescriptor to return your new core descriptor. NOTE: assuming you've already closed the
 * core, the _next_ time that core is required getTransientDescriptor will be called and if you return the
 * new core descriptor your re-definition should be honored. You'll have to maintain this list for the
 * duration of this Solr instance running. If you persist the coreDescriptor, then next time Solr starts
 * up the new definition will be read.
 *
 *
 * If you need to manipulate the return, for instance block a core from being loaded for some period of time, override
 * say getTransientDescriptor and return null.
 *
 * In particular, DO NOT reach into the transientCores structure from a method called to manipulate core descriptors
 * or vice-versa.
 */
public abstract class TransientSolrCoreCache extends Observable {

  /** Gets the core container that encloses this cache. */
  public abstract CoreContainer getContainer();

  /** Adds the newly-opened core to the list of open cores; returns any core previously mapped to this name. */
  public abstract SolrCore addCore(String name, SolrCore core);

  /** Returns the names of all possible cores, whether they are currently loaded or not. */
  public abstract Set<String> getAllCoreNames();

  /** Returns the names of all currently loaded cores. */
  public abstract Set<String> getLoadedCoreNames();

  /**
   * Removes a core from the internal structures, presumably because it is
   * being closed. If the core is re-opened, it will be re-added by CoreContainer.
   */
  public abstract SolrCore removeCore(String name);

  /** Gets the core associated with the name. Return null if you don't want this core to be used. */
  public abstract SolrCore getCore(String name);

  /** Returns true if the cache contains the named core. */
  public abstract boolean containsCore(String name);

  /**
   * Called when the container is to be shut down. Implementations should return all
   * transient solr cores and clear any internal structures that hold them.
   */
  public abstract Collection<SolrCore> prepareForShutdown();

  // The methods below allow the implementation to maintain control over the core descriptors.

  /** Only called during core discovery at startup; registers a descriptor for a transient core. */
  public abstract void addTransientDescriptor(String rawName, CoreDescriptor cd);

  /**
   * Used when opening cores and the like. If you want to change a core's descriptor, override this
   * method and return the current core descriptor.
   */
  public abstract CoreDescriptor getTransientDescriptor(String name);


  /** Removes the core descriptor from the implementation's list of transient descriptors. */
  public abstract CoreDescriptor removeTransientDescriptor(String name);

  /** Finds all the names a specific core is mapped to. Should not return null; returns an empty list instead. */
  @Experimental
  public List<String> getNamesForCore(SolrCore core) {
    return Collections.emptyList();
  }

  /**
   * Must be called in order to free resources!
   */
  public abstract void close();


  // These two methods allow custom implementations to communicate arbitrary information as necessary.

  /** Implementation-defined status for the named core; meaning is up to the custom implementation. */
  public abstract int getStatus(String coreName);

  /** Sets an implementation-defined status for the named core. */
  public abstract void setStatus(String coreName, int status);
}
-
-
-
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
deleted file mode 100644
index e1fd748..0000000
--- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheDefault.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Observer;
-import java.util.Set;
-
-import org.apache.solr.common.util.NamedList;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TransientSolrCoreCacheDefault extends TransientSolrCoreCache {
-
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private int cacheSize = NodeConfig.NodeConfigBuilder.DEFAULT_TRANSIENT_CACHE_SIZE;
-
- protected Observer observer;
- protected CoreContainer coreContainer;
-
- protected final Map<String, CoreDescriptor> transientDescriptors = new LinkedHashMap<>();
-
- //WARNING! The _only_ place you put anything into the list of transient cores is with the putTransientCore method!
- protected Map<String, SolrCore> transientCores = new LinkedHashMap<>(); // For "lazily loaded" cores
-
- /**
- * @param container The enclosing CoreContainer. It allows us to access everything we need.
- */
- public TransientSolrCoreCacheDefault(final CoreContainer container) {
- this.coreContainer = container;
- this.observer= coreContainer.solrCores;
-
- NodeConfig cfg = container.getNodeConfig();
- if (cfg.getTransientCachePluginInfo() == null) {
- // Still handle just having transientCacheSize defined in the body of solr.xml not in a transient handler clause.
- // deprecate this for 7.0?
- this.cacheSize = cfg.getTransientCacheSize();
- } else {
- NamedList args = cfg.getTransientCachePluginInfo().initArgs;
- Object obj = args.get("transientCacheSize");
- if (obj != null) {
- this.cacheSize = (int) obj;
- }
- }
- doInit();
- }
- // This just moves the
- private void doInit() {
- NodeConfig cfg = coreContainer.getNodeConfig();
- if (cfg.getTransientCachePluginInfo() == null) {
- // Still handle just having transientCacheSize defined in the body of solr.xml not in a transient handler clause.
- this.cacheSize = cfg.getTransientCacheSize();
- } else {
- NamedList args = cfg.getTransientCachePluginInfo().initArgs;
- Object obj = args.get("transientCacheSize");
- if (obj != null) {
- this.cacheSize = (int) obj;
- }
- }
-
- log.info("Allocating transient cache for {} transient cores", cacheSize);
- addObserver(this.observer);
- // it's possible for cache
- if (cacheSize < 0) { // Trap old flag
- cacheSize = Integer.MAX_VALUE;
- }
- // Now don't allow ridiculous allocations here, if the size is > 1,000, we'll just deal with
- // adding cores as they're opened. This blows up with the marker value of -1.
- transientCores = new LinkedHashMap<String, SolrCore>(Math.min(cacheSize, 1000), 0.75f, true) {
- @Override
- protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
- if (size() > cacheSize) {
- SolrCore coreToClose = eldest.getValue();
- setChanged();
- notifyObservers(coreToClose);
- log.info("Closing transient core [{}]", coreToClose.getName());
- return true;
- }
- return false;
- }
- };
- }
-
-
- @Override
- public Collection<SolrCore> prepareForShutdown() {
- // Returna copy of the values
- List<SolrCore> ret = new ArrayList(transientCores.values());
- transientCores.clear();
- return ret;
- }
-
- @Override
- public CoreContainer getContainer() { return this.coreContainer; }
-
- @Override
- public SolrCore addCore(String name, SolrCore core) {
- return transientCores.put(name, core);
- }
-
- @Override
- public Set<String> getAllCoreNames() {
- return transientDescriptors.keySet();
- }
-
- @Override
- public Set<String> getLoadedCoreNames() {
- return transientCores.keySet();
- }
-
- // Remove a core from the internal structures, presumably it
- // being closed. If the core is re-opened, it will be readded by CoreContainer.
- @Override
- public SolrCore removeCore(String name) {
- return transientCores.remove(name);
- }
-
- // Get the core associated with the name. Return null if you don't want this core to be used.
- @Override
- public SolrCore getCore(String name) {
- return transientCores.get(name);
- }
-
- @Override
- public boolean containsCore(String name) {
- return transientCores.containsKey(name);
- }
-
- // These methods allow the implementation to maintain control over the core descriptors.
-
-
- // This method will only be called during core discovery at startup.
- @Override
- public void addTransientDescriptor(String rawName, CoreDescriptor cd) {
- transientDescriptors.put(rawName, cd);
- }
-
- // This method is used when opening cores and the like. If you want to change a core's descriptor, override this
- // method and return the current core descriptor.
- @Override
- public CoreDescriptor getTransientDescriptor(String name) {
- return transientDescriptors.get(name);
- }
-
- @Override
- public CoreDescriptor removeTransientDescriptor(String name) {
- return transientDescriptors.remove(name);
- }
-
- @Override
- public List<String> getNamesForCore(SolrCore core) {
- List<String> ret = new ArrayList<>();
- for (Map.Entry<String, SolrCore> entry : transientCores.entrySet()) {
- if (core == entry.getValue()) {
- ret.add(entry.getKey());
- }
- }
- return ret;
- }
-
- /**
- * Must be called in order to free resources!
- */
- @Override
- public void close() {
- deleteObserver(this.observer);
- }
-
-
- // For custom implementations to communicate arbitrary information as necessary.
- @Override
- public int getStatus(String coreName) { return 0; } //no_op for default handler.
-
- @Override
- public void setStatus(String coreName, int status) {} //no_op for default handler.
-
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
deleted file mode 100644
index b3b8cf0..0000000
--- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactory.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
-import java.util.Locale;
-
-import com.google.common.collect.ImmutableMap;
-import org.apache.solr.util.plugin.PluginInfoInitialized;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An interface that allows custom transient caches to be maintained with different implementations
- */
-public abstract class TransientSolrCoreCacheFactory {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private CoreContainer coreContainer = null;
-
- public abstract TransientSolrCoreCache getTransientSolrCoreCache();
- /**
- * Create a new TransientSolrCoreCacheFactory instance
- *
- * @param loader a SolrResourceLoader used to find the TransientSolrCacheFactory classes
- * @param coreContainer CoreContainer that encloses all the Solr cores.
- * @return a new, initialized TransientSolrCoreCache instance
- */
-
- public static TransientSolrCoreCacheFactory newInstance(SolrResourceLoader loader, CoreContainer coreContainer) {
- PluginInfo info = coreContainer.getConfig().getTransientCachePluginInfo();
- if (info == null) { // definition not in our solr.xml file, use default
- info = DEFAULT_TRANSIENT_SOLR_CACHE_INFO;
- }
-
- try {
- // According to the docs, this returns a TransientSolrCoreCacheFactory with the default c'tor
- TransientSolrCoreCacheFactory tccf = loader.findClass(info.className, TransientSolrCoreCacheFactory.class).newInstance();
-
- // OK, now we call it's init method.
- if (PluginInfoInitialized.class.isAssignableFrom(tccf.getClass()))
- PluginInfoInitialized.class.cast(tccf).init(info);
- tccf.setCoreContainer(coreContainer);
- return tccf;
- } catch (Exception e) {
- // Many things could cuse this, bad solrconfig, mis-typed class name, whatever. However, this should not
- // keep the enclosing coreContainer from instantiating, so log an error and continue.
- log.error(String.format(Locale.ROOT, "Error instantiating TransientSolrCoreCacheFactory class [%s]: %s",
- info.className, e.getMessage()));
- return null;
- }
-
- }
- public static final PluginInfo DEFAULT_TRANSIENT_SOLR_CACHE_INFO =
- new PluginInfo("transientSolrCoreCacheFactory",
- ImmutableMap.of("class", TransientSolrCoreCacheFactoryDefault.class.getName(),
- "name", TransientSolrCoreCacheFactory.class.getName()),
- null, Collections.<PluginInfo>emptyList());
-
-
- // Need this because the plugin framework doesn't require a PluginINfo in the init method, don't see a way to
- // pass additional parameters and we need this when we create the transient core cache, it's _really_ important.
- public void setCoreContainer(CoreContainer coreContainer) {
- this.coreContainer = coreContainer;
- }
-
- public CoreContainer getCoreContainer() {
- return coreContainer;
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java b/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
deleted file mode 100644
index 722ab9c..0000000
--- a/solr/core/src/java/org/apache/solr/core/TransientSolrCoreCacheFactoryDefault.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-public class TransientSolrCoreCacheFactoryDefault extends TransientSolrCoreCacheFactory {
-
- TransientSolrCoreCache transientSolrCoreCache = null;
-
- @Override
- public TransientSolrCoreCache getTransientSolrCoreCache() {
- if (transientSolrCoreCache == null) {
- transientSolrCoreCache = new TransientSolrCoreCacheDefault(getCoreContainer());
- }
-
- return transientSolrCoreCache;
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/ZkContainer.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/ZkContainer.java b/solr/core/src/java/org/apache/solr/core/ZkContainer.java
deleted file mode 100644
index 34e5764..0000000
--- a/solr/core/src/java/org/apache/solr/core/ZkContainer.java
+++ /dev/null
@@ -1,247 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.TimeoutException;
-import java.util.function.Predicate;
-
-import org.apache.solr.cloud.CurrentCoreDescriptorProvider;
-import org.apache.solr.cloud.SolrZkServer;
-import org.apache.solr.cloud.ZkController;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.ZkConfigManager;
-import org.apache.solr.common.cloud.ZooKeeperException;
-import org.apache.solr.common.util.ExecutorUtil;
-import org.apache.solr.logging.MDCLoggingContext;
-import org.apache.solr.util.DefaultSolrThreadFactory;
-import org.apache.zookeeper.KeeperException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
/**
 * Owns the SolrCloud plumbing for a CoreContainer: the {@link ZkController}, an optional
 * embedded {@link SolrZkServer} (when running with -DzkRun), and the executor used to
 * register cores in ZooKeeper in the background.
 */
public class ZkContainer {
  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  protected ZkController zkController;
  private SolrZkServer zkServer;

  // MDC-aware cached pool so background registration log lines carry the core context.
  private ExecutorService coreZkRegister = ExecutorUtil.newMDCAwareCachedThreadPool(
      new DefaultSolrThreadFactory("coreZkRegister") );

  // see ZkController.zkRunOnly
  private boolean zkRunOnly = Boolean.getBoolean("zkRunOnly"); // expert

  public ZkContainer() {

  }

  /**
   * Starts ZooKeeper integration: optionally launches an embedded ZK server (-DzkRun),
   * connects a ZkController, optionally uploads a bootstrap config set
   * (-Dbootstrap_confdir / -Dbootstrap_conf), and stores the controller on this instance.
   * A no-op when no cloud config is provided and -DzkRun is not set.
   *
   * @param cc the enclosing CoreContainer (used to enumerate current core descriptors)
   * @param solrHome solr home directory; default location for embedded ZK data/conf dirs
   * @param config cloud configuration, or null when not running in cloud mode
   * @throws SolrException if -DzkRun is set but no cloud config was provided
   * @throws ZooKeeperException on connection failure, timeout, or a missing chroot znode
   */
  public void initZooKeeper(final CoreContainer cc, String solrHome, CloudConfig config) {

    ZkController zkController = null;

    String zkRun = System.getProperty("zkRun");

    if (zkRun != null && config == null)
      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Cannot start Solr in cloud mode - no cloud config provided");

    if (config == null)
      return; // not in zk mode

    String zookeeperHost = config.getZkHost();

    // zookeeper in quorum mode currently causes a failure when trying to
    // register log4j mbeans. See SOLR-2369
    // TODO: remove after updating to an slf4j based zookeeper
    System.setProperty("zookeeper.jmx.log4j.disable", "true");

    if (zkRun != null) {
      // Run an embedded ZooKeeper server alongside Solr.
      String zkDataHome = System.getProperty("zkServerDataDir", Paths.get(solrHome).resolve("zoo_data").toString());
      String zkConfHome = System.getProperty("zkServerConfDir", solrHome);
      zkServer = new SolrZkServer(stripChroot(zkRun), stripChroot(config.getZkHost()), zkDataHome, zkConfHome, config.getSolrHostPort());
      zkServer.parseConfig();
      zkServer.start();

      // set client from server config if not already set
      if (zookeeperHost == null) {
        zookeeperHost = zkServer.getClientString();
      }
    }

    int zkClientConnectTimeout = 30000;

    if (zookeeperHost != null) {

      // we are ZooKeeper enabled
      try {
        // If this is an ensemble, allow for a long connect time for other servers to come up
        if (zkRun != null && zkServer.getServers().size() > 1) {
          zkClientConnectTimeout = 24 * 60 * 60 * 1000; // 1 day for embedded ensemble
          log.info("Zookeeper client=" + zookeeperHost + " Waiting for a quorum.");
        } else {
          log.info("Zookeeper client=" + zookeeperHost);
        }
        String confDir = System.getProperty("bootstrap_confdir");
        boolean boostrapConf = Boolean.getBoolean("bootstrap_conf");

        // If a chroot path is given in zkHost it must already exist unless we are about
        // to bootstrap config (or are in zkRunOnly mode).
        if(!ZkController.checkChrootPath(zookeeperHost, (confDir!=null) || boostrapConf || zkRunOnly)) {
          throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
              "A chroot was specified in ZkHost but the znode doesn't exist. " + zookeeperHost);
        }
        zkController = new ZkController(cc, zookeeperHost, zkClientConnectTimeout, config,
            new CurrentCoreDescriptorProvider() {

              // Snapshot the descriptors of all currently loaded cores on demand.
              @Override
              public List<CoreDescriptor> getCurrentDescriptors() {
                List<CoreDescriptor> descriptors = new ArrayList<>(
                    cc.getLoadedCoreNames().size());
                Collection<SolrCore> cores = cc.getCores();
                for (SolrCore core : cores) {
                  descriptors.add(core.getCoreDescriptor());
                }
                return descriptors;
              }
            });


        if (zkRun != null && zkServer.getServers().size() > 1 && confDir == null && boostrapConf == false) {
          // we are part of an ensemble and we are not uploading the config - pause to give the config time
          // to get up
          Thread.sleep(10000);
        }

        if(confDir != null) {
          // Upload the bootstrap config directory as a named config set.
          Path configPath = Paths.get(confDir);
          if (!Files.isDirectory(configPath))
            throw new IllegalArgumentException("bootstrap_confdir must be a directory of configuration files");

          String confName = System.getProperty(ZkController.COLLECTION_PARAM_PREFIX+ZkController.CONFIGNAME_PROP, "configuration1");
          ZkConfigManager configManager = new ZkConfigManager(zkController.getZkClient());
          configManager.uploadConfigDir(configPath, confName);
        }



        if(boostrapConf) {
          ZkController.bootstrapConf(zkController.getZkClient(), cc, solrHome);
        }

      } catch (InterruptedException e) {
        // Restore the interrupted status
        Thread.currentThread().interrupt();
        log.error("", e);
        throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
            "", e);
      } catch (TimeoutException e) {
        log.error("Could not connect to ZooKeeper", e);
        throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
            "", e);
      } catch (IOException | KeeperException e) {
        log.error("", e);
        throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
            "", e);
      }


    }
    this.zkController = zkController;
  }

  // Strips any chroot suffix (the part from the last '/') from a zk connect string.
  private String stripChroot(String zkRun) {
    if (zkRun == null || zkRun.trim().length() == 0 || zkRun.lastIndexOf('/') < 0) return zkRun;
    return zkRun.substring(0, zkRun.lastIndexOf('/'));
  }

  // Test hook invoked just before a core is registered in ZK; null outside tests.
  public static Predicate<CoreDescriptor> testing_beforeRegisterInZk;

  /**
   * Registers a core in ZooKeeper, either on the calling thread or on the background
   * registration executor. On registration failure the core is published as DOWN
   * (best effort). A no-op when no ZkController has been initialized.
   *
   * @param core the core to register
   * @param background when true, run on the coreZkRegister executor instead of inline
   * @param skipRecovery passed through to {@link ZkController#register}
   */
  public void registerInZk(final SolrCore core, boolean background, boolean skipRecovery) {
    Runnable r = () -> {
      MDCLoggingContext.setCore(core);
      try {
        try {
          if (testing_beforeRegisterInZk != null) {
            testing_beforeRegisterInZk.test(core.getCoreDescriptor());
          }
          zkController.register(core.getName(), core.getCoreDescriptor(), skipRecovery);
        } catch (InterruptedException e) {
          // Restore the interrupted status
          Thread.currentThread().interrupt();
          SolrException.log(log, "", e);
        } catch (Exception e) {
          // Registration failed: try to publish the core as DOWN so the cluster state is accurate.
          try {
            zkController.publish(core.getCoreDescriptor(), Replica.State.DOWN);
          } catch (InterruptedException e1) {
            Thread.currentThread().interrupt();
            log.error("", e1);
          } catch (Exception e1) {
            log.error("", e1);
          }
          SolrException.log(log, "", e);
        }
      } finally {
        MDCLoggingContext.clear();
      }
    };

    if (zkController != null) {
      if (background) {
        coreZkRegister.execute(r);
      } else {
        MDCLoggingContext.setCore(core);
        try {
          r.run();
        } finally {
          MDCLoggingContext.clear();
        }
      }
    }
  }

  public ZkController getZkController() {
    return zkController;
  }

  /**
   * Shuts down cloud integration. The nested try/finally blocks guarantee that the
   * ZkController, the embedded ZK server, and the registration executor are each
   * stopped even if an earlier step throws.
   */
  public void close() {

    try {
      if (zkController != null) {
        zkController.close();
      }
    } finally {
      try {
        if (zkServer != null) {
          zkServer.stop();
        }
      } finally {
        ExecutorUtil.shutdownAndAwaitTermination(coreZkRegister);
      }
    }

  }

  public ExecutorService getCoreZkRegisterExecutorService() {
    return coreZkRegister;
  }
}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java b/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
deleted file mode 100644
index afba4b1..0000000
--- a/solr/core/src/java/org/apache/solr/core/backup/BackupManager.java
+++ /dev/null
@@ -1,292 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core.backup;
-
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Reader;
-import java.io.Writer;
-import java.lang.invoke.MethodHandles;
-import java.net.URI;
-import java.nio.charset.StandardCharsets;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-import java.util.Properties;
-
-import com.google.common.base.Preconditions;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.cloud.ZkConfigManager;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.backup.repository.BackupRepository;
-import org.apache.solr.core.backup.repository.BackupRepository.PathType;
-import org.apache.solr.util.PropertiesInputStream;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * This class implements functionality to create a backup with extension points provided to integrate with different
- * types of file-systems.
- */
-public class BackupManager {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
- public static final String COLLECTION_PROPS_FILE = "collection_state.json";
- public static final String BACKUP_PROPS_FILE = "backup.properties";
- public static final String ZK_STATE_DIR = "zk_backup";
- public static final String CONFIG_STATE_DIR = "configs";
-
- // Backup properties
- public static final String COLLECTION_NAME_PROP = "collection";
- public static final String BACKUP_NAME_PROP = "backupName";
- public static final String INDEX_VERSION_PROP = "index.version";
- public static final String START_TIME_PROP = "startTime";
-
- protected final ZkStateReader zkStateReader;
- protected final BackupRepository repository;
-
- public BackupManager(BackupRepository repository, ZkStateReader zkStateReader) {
- this.repository = Objects.requireNonNull(repository);
- this.zkStateReader = Objects.requireNonNull(zkStateReader);
- }
-
- /**
- * @return The version of this backup implementation.
- */
- public final String getVersion() {
- return "1.0";
- }
-
- /**
- * This method returns the configuration parameters for the specified backup.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup whose configuration params are required.
- * @return the configuration parameters for the specified backup.
- * @throws IOException In case of errors.
- */
- public Properties readBackupProperties(URI backupLoc, String backupId) throws IOException {
- Objects.requireNonNull(backupLoc);
- Objects.requireNonNull(backupId);
-
- // Backup location
- URI backupPath = repository.resolve(backupLoc, backupId);
- if (!repository.exists(backupPath)) {
- throw new SolrException(ErrorCode.SERVER_ERROR, "Couldn't restore since doesn't exist: " + backupPath);
- }
-
- Properties props = new Properties();
- try (Reader is = new InputStreamReader(new PropertiesInputStream(
- repository.openInput(backupPath, BACKUP_PROPS_FILE, IOContext.DEFAULT)), StandardCharsets.UTF_8)) {
- props.load(is);
- return props;
- }
- }
-
- /**
- * This method stores the backup properties at the specified location in the repository.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup whose configuration params are required.
- * @param props The backup properties
- * @throws IOException in case of I/O error
- */
- public void writeBackupProperties(URI backupLoc, String backupId, Properties props) throws IOException {
- URI dest = repository.resolve(backupLoc, backupId, BACKUP_PROPS_FILE);
- try (Writer propsWriter = new OutputStreamWriter(repository.createOutput(dest), StandardCharsets.UTF_8)) {
- props.store(propsWriter, "Backup properties file");
- }
- }
-
- /**
- * This method reads the meta-data information for the backed-up collection.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup.
- * @param collectionName The name of the collection whose meta-data is to be returned.
- * @return the meta-data information for the backed-up collection.
- * @throws IOException in case of errors.
- */
- public DocCollection readCollectionState(URI backupLoc, String backupId, String collectionName) throws IOException {
- Objects.requireNonNull(collectionName);
-
- URI zkStateDir = repository.resolve(backupLoc, backupId, ZK_STATE_DIR);
- try (IndexInput is = repository.openInput(zkStateDir, COLLECTION_PROPS_FILE, IOContext.DEFAULT)) {
- byte[] arr = new byte[(int) is.length()]; // probably ok since the json file should be small.
- is.readBytes(arr, 0, (int) is.length());
- ClusterState c_state = ClusterState.load(-1, arr, Collections.emptySet());
- return c_state.getCollection(collectionName);
- }
- }
-
- /**
- * This method writes the collection meta-data to the specified location in the repository.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup.
- * @param collectionName The name of the collection whose meta-data is being stored.
- * @param collectionState The collection meta-data to be stored.
- * @throws IOException in case of I/O errors.
- */
- public void writeCollectionState(URI backupLoc, String backupId, String collectionName,
- DocCollection collectionState) throws IOException {
- URI dest = repository.resolve(backupLoc, backupId, ZK_STATE_DIR, COLLECTION_PROPS_FILE);
- try (OutputStream collectionStateOs = repository.createOutput(dest)) {
- collectionStateOs.write(Utils.toJSON(Collections.singletonMap(collectionName, collectionState)));
- }
- }
-
- /**
- * This method uploads the Solr configuration files to the desired location in Zookeeper.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup.
- * @param sourceConfigName The name of the config to be copied
- * @param targetConfigName The name of the config to be created.
- * @throws IOException in case of I/O errors.
- */
- public void uploadConfigDir(URI backupLoc, String backupId, String sourceConfigName, String targetConfigName)
- throws IOException {
- URI source = repository.resolve(backupLoc, backupId, ZK_STATE_DIR, CONFIG_STATE_DIR, sourceConfigName);
- String zkPath = ZkConfigManager.CONFIGS_ZKNODE + "/" + targetConfigName;
- uploadToZk(zkStateReader.getZkClient(), source, zkPath);
- }
-
- /**
- * This method stores the contents of a specified Solr config at the specified location in repository.
- *
- * @param backupLoc The base path used to store the backup data.
- * @param backupId The unique name for the backup.
- * @param configName The name of the config to be saved.
- * @throws IOException in case of I/O errors.
- */
- public void downloadConfigDir(URI backupLoc, String backupId, String configName) throws IOException {
- URI dest = repository.resolve(backupLoc, backupId, ZK_STATE_DIR, CONFIG_STATE_DIR, configName);
- repository.createDirectory(repository.resolve(backupLoc, backupId, ZK_STATE_DIR));
- repository.createDirectory(repository.resolve(backupLoc, backupId, ZK_STATE_DIR, CONFIG_STATE_DIR));
- repository.createDirectory(dest);
-
- downloadFromZK(zkStateReader.getZkClient(), ZkConfigManager.CONFIGS_ZKNODE + "/" + configName, dest);
- }
-
- public void uploadCollectionProperties(URI backupLoc, String backupId, String collectionName) throws IOException {
- URI sourceDir = repository.resolve(backupLoc, backupId, ZK_STATE_DIR);
- URI source = repository.resolve(sourceDir, ZkStateReader.COLLECTION_PROPS_ZKNODE);
- if (!repository.exists(source)) {
- // No collection properties to restore
- return;
- }
- String zkPath = ZkStateReader.COLLECTIONS_ZKNODE + '/' + collectionName + '/' + ZkStateReader.COLLECTION_PROPS_ZKNODE;
-
- try (IndexInput is = repository.openInput(sourceDir, ZkStateReader.COLLECTION_PROPS_ZKNODE, IOContext.DEFAULT)) {
- byte[] arr = new byte[(int) is.length()];
- is.readBytes(arr, 0, (int) is.length());
- zkStateReader.getZkClient().create(zkPath, arr, CreateMode.PERSISTENT, true);
- } catch (KeeperException | InterruptedException e) {
- throw new IOException("Error uploading file to zookeeper path " + source.toString() + " to " + zkPath,
- SolrZkClient.checkInterrupted(e));
- }
- }
-
- public void downloadCollectionProperties(URI backupLoc, String backupId, String collectionName) throws IOException {
- URI dest = repository.resolve(backupLoc, backupId, ZK_STATE_DIR, ZkStateReader.COLLECTION_PROPS_ZKNODE);
- String zkPath = ZkStateReader.COLLECTIONS_ZKNODE + '/' + collectionName + '/' + ZkStateReader.COLLECTION_PROPS_ZKNODE;
-
-
- try {
- if (!zkStateReader.getZkClient().exists(zkPath, true)) {
- // Nothing to back up
- return;
- }
-
- try (OutputStream os = repository.createOutput(dest)) {
- byte[] data = zkStateReader.getZkClient().getData(zkPath, null, null, true);
- os.write(data);
- }
- } catch (KeeperException | InterruptedException e) {
- throw new IOException("Error downloading file from zookeeper path " + zkPath + " to " + dest.toString(),
- SolrZkClient.checkInterrupted(e));
- }
- }
-
- private void downloadFromZK(SolrZkClient zkClient, String zkPath, URI dir) throws IOException {
- try {
- if (!repository.exists(dir)) {
- repository.createDirectory(dir);
- }
- List<String> files = zkClient.getChildren(zkPath, null, true);
- for (String file : files) {
- List<String> children = zkClient.getChildren(zkPath + "/" + file, null, true);
- if (children.size() == 0) {
- log.debug("Writing file {}", file);
- byte[] data = zkClient.getData(zkPath + "/" + file, null, null, true);
- try (OutputStream os = repository.createOutput(repository.resolve(dir, file))) {
- os.write(data);
- }
- } else {
- downloadFromZK(zkClient, zkPath + "/" + file, repository.resolve(dir, file));
- }
- }
- } catch (KeeperException | InterruptedException e) {
- throw new IOException("Error downloading files from zookeeper path " + zkPath + " to " + dir.toString(),
- SolrZkClient.checkInterrupted(e));
- }
- }
-
- private void uploadToZk(SolrZkClient zkClient, URI sourceDir, String destZkPath) throws IOException {
- Preconditions.checkArgument(repository.exists(sourceDir), "Path {} does not exist", sourceDir);
- Preconditions.checkArgument(repository.getPathType(sourceDir) == PathType.DIRECTORY,
- "Path {} is not a directory", sourceDir);
-
- for (String file : repository.listAll(sourceDir)) {
- String zkNodePath = destZkPath + "/" + file;
- URI path = repository.resolve(sourceDir, file);
- PathType t = repository.getPathType(path);
- switch (t) {
- case FILE: {
- try (IndexInput is = repository.openInput(sourceDir, file, IOContext.DEFAULT)) {
- byte[] arr = new byte[(int) is.length()]; // probably ok since the config file should be small.
- is.readBytes(arr, 0, (int) is.length());
- zkClient.makePath(zkNodePath, arr, true);
- } catch (KeeperException | InterruptedException e) {
- throw new IOException(SolrZkClient.checkInterrupted(e));
- }
- break;
- }
-
- case DIRECTORY: {
- if (!file.startsWith(".")) {
- uploadToZk(zkClient, path, zkNodePath);
- }
- break;
- }
- default:
- throw new IllegalStateException("Unknown path type " + t);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/backup/package-info.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/package-info.java b/solr/core/src/java/org/apache/solr/core/backup/package-info.java
deleted file mode 100644
index defcad6..0000000
--- a/solr/core/src/java/org/apache/solr/core/backup/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements. See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License. You may obtain a copy of the License at
-*
-* http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-
/**
 * Core classes for Solr's backup/restore functionality.
 */
package org.apache.solr.core.backup;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
deleted file mode 100644
index 875be18..0000000
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepository.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core.backup.repository;
-
-import java.io.Closeable;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.net.URI;
-import java.util.Optional;
-
-import org.apache.lucene.store.Directory;
-import org.apache.lucene.store.IOContext;
-import org.apache.lucene.store.IndexInput;
-import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.util.plugin.NamedListInitializedPlugin;
-
-/**
- * This interface defines the functionality required to backup/restore Solr indexes to an arbitrary storage system.
- */
-public interface BackupRepository extends NamedListInitializedPlugin, Closeable {
-
- /**
- * This enumeration defines the type of a given path.
- */
- enum PathType {
- DIRECTORY, FILE
- }
-
- /**
- * This method returns the location where the backup should be stored (or restored from).
- *
- * @param override The location parameter supplied by the user.
- * @return If <code>override</code> is not null then return the same value
- * Otherwise return the default configuration value for the {@linkplain CoreAdminParams#BACKUP_LOCATION} parameter.
- */
- default String getBackupLocation(String override) {
- return Optional.ofNullable(override).orElse(getConfigProperty(CoreAdminParams.BACKUP_LOCATION));
- }
-
- /**
- * This method returns the value of the specified configuration property.
- */
- <T> T getConfigProperty(String name);
-
- /**
- * This method returns the URI representation for the specified path.
- * Note - the specified path could be a fully qualified URI OR a relative path for a file-system.
- *
- * @param path The path specified by the user.
- * @return the URI representation of the user supplied value
- */
- URI createURI(String path);
-
- /**
- * This method resolves a URI using the specified path components (as method arguments).
- *
- * @param baseUri The base URI to use for creating the path
- * @param pathComponents
- * The directory (or file-name) to be included in the URI.
- * @return A URI containing absolute path
- */
- URI resolve(URI baseUri, String... pathComponents);
-
- /**
- * This method checks if the specified path exists in this repository.
- *
- * @param path
- * The path whose existence needs to be checked.
- * @return if the specified path exists in this repository.
- * @throws IOException
- * in case of errors
- */
- boolean exists(URI path) throws IOException;
-
- /**
- * This method returns the type of a specified path
- *
- * @param path
- * The path whose type needs to be checked.
- * @return the {@linkplain PathType} for the specified path
- * @throws IOException
- * in case of errors
- */
- PathType getPathType(URI path) throws IOException;
-
- /**
- * This method returns all the entries (files and directories) in the specified directory.
- *
- * @param path
- * The directory path
- * @return an array of strings, one for each entry in the directory
- * @throws IOException
- * in case of errors
- */
- String[] listAll(URI path) throws IOException;
-
- /**
- * This method returns a Lucene input stream reading an existing file.
- *
- * @param dirPath
- * The parent directory of the file to be read
- * @param fileName
- * The name of the file to be read
- * @param ctx
- * the Lucene IO context
- * @return Lucene {@linkplain IndexInput} reference
- * @throws IOException
- * in case of errors
- */
- IndexInput openInput(URI dirPath, String fileName, IOContext ctx) throws IOException;
-
- /**
- * This method returns a {@linkplain OutputStream} instance for the specified <code>path</code>
- *
- * @param path
- * The path for which {@linkplain OutputStream} needs to be created
- * @return {@linkplain OutputStream} instance for the specified <code>path</code>
- * @throws IOException
- * in case of errors
- */
- OutputStream createOutput(URI path) throws IOException;
-
- /**
- * This method creates a directory at the specified path.
- *
- * @param path
- * The path where the directory needs to be created.
- * @throws IOException
- * in case of errors
- */
- void createDirectory(URI path) throws IOException;
-
- /**
- * This method deletes a directory at the specified path.
- *
- * @param path
- * The path referring to the directory to be deleted.
- * @throws IOException
- * in case of errors
- */
- void deleteDirectory(URI path) throws IOException;
-
- /**
- * Copy a file from specified <code>sourceDir</code> to the destination repository (i.e. backup).
- *
- * @param sourceDir
- * The source directory hosting the file to be copied.
- * @param fileName
- * The name of the file to by copied
- * @param dest
- * The destination backup location.
- * @throws IOException
- * in case of errors
- */
- void copyFileFrom(Directory sourceDir, String fileName, URI dest) throws IOException;
-
- /**
- * Copy a file from specified <code>sourceRepo</code> to the destination directory (i.e. restore).
- *
- * @param sourceRepo
- * The source URI hosting the file to be copied.
- * @param fileName
- * The name of the file to by copied
- * @param dest
- * The destination where the file should be copied.
- * @throws IOException
- * in case of errors.
- */
- void copyFileTo(URI sourceRepo, String fileName, Directory dest) throws IOException;
-}
http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/0ae21ad0/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
----------------------------------------------------------------------
diff --git a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java b/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
deleted file mode 100644
index 9e02b21..0000000
--- a/solr/core/src/java/org/apache/solr/core/backup/repository/BackupRepositoryFactory.java
+++ /dev/null
@@ -1,88 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core.backup.repository;
-
-import java.lang.invoke.MethodHandles;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Objects;
-
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.SolrResourceLoader;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class BackupRepositoryFactory {
- private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
- private final Map<String,PluginInfo> backupRepoPluginByName = new HashMap<>();
- private PluginInfo defaultBackupRepoPlugin = null;
-
- public BackupRepositoryFactory(PluginInfo[] backupRepoPlugins) {
- if (backupRepoPlugins != null) {
- for (int i = 0; i < backupRepoPlugins.length; i++) {
- String name = backupRepoPlugins[i].name;
- boolean isDefault = backupRepoPlugins[i].isDefault();
-
- if (backupRepoPluginByName.containsKey(name)) {
- throw new SolrException(ErrorCode.SERVER_ERROR, "Duplicate backup repository with name " + name);
- }
- if (isDefault) {
- if (this.defaultBackupRepoPlugin != null) {
- throw new SolrException(ErrorCode.SERVER_ERROR, "More than one backup repository is configured as default");
- }
- this.defaultBackupRepoPlugin = backupRepoPlugins[i];
- }
- backupRepoPluginByName.put(name, backupRepoPlugins[i]);
- log.info("Added backup repository with configuration params {}", backupRepoPlugins[i]);
- }
- if (backupRepoPlugins.length == 1) {
- this.defaultBackupRepoPlugin = backupRepoPlugins[0];
- }
-
- if (this.defaultBackupRepoPlugin != null) {
- log.info("Default configuration for backup repository is with configuration params {}",
- defaultBackupRepoPlugin);
- }
- }
- }
-
- public BackupRepository newInstance(SolrResourceLoader loader, String name) {
- Objects.requireNonNull(loader);
- Objects.requireNonNull(name);
- PluginInfo repo = Objects.requireNonNull(backupRepoPluginByName.get(name),
- "Could not find a backup repository with name " + name);
-
- BackupRepository result = loader.newInstance(repo.className, BackupRepository.class);
- result.init(repo.initArgs);
- return result;
- }
-
- public BackupRepository newInstance(SolrResourceLoader loader) {
- if (defaultBackupRepoPlugin != null) {
- return newInstance(loader, defaultBackupRepoPlugin.name);
- }
-
- LocalFileSystemRepository repo = new LocalFileSystemRepository();
- repo.init(new NamedList<>());
- return repo;
- }
-}