Posted to commits@lucene.apache.org by ro...@apache.org on 2012/11/23 13:01:26 UTC
svn commit: r1412849 [11/13] - in /lucene/dev/branches/LUCENE-2878: ./
dev-tools/ dev-tools/eclipse/ dev-tools/idea/.idea/libraries/
dev-tools/idea/solr/contrib/dataimporthandler/ dev-tools/maven/
dev-tools/maven/solr/contrib/dataimporthandler/ dev-too...
Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java Fri Nov 23 12:00:32 2012
@@ -1,3 +1,9 @@
+package org.apache.solr.handler.dataimport;
+
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.junit.Before;
+import org.junit.Test;
+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
@@ -14,330 +20,134 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.solr.handler.dataimport;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Ignore;
-import org.junit.Test;
-
-import java.io.File;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
/**
- * <p>
- * Test for SqlEntityProcessor which checks variations in primary key names and deleted ids
- * </p>
- *
- *
- *
- * @since solr 1.3
+ * Test with various combinations of parameters, child entities, and transformers.
*/
-@Ignore("FIXME: I fail so often it makes me ill!")
-public class TestSqlEntityProcessorDelta extends AbstractDataImportHandlerTestCase {
- private static final String FULLIMPORT_QUERY = "select * from x";
-
- private static final String DELTA_QUERY = "select id from x where last_modified > NOW";
-
- private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'";
-
- private static final String dataConfig_delta =
- "<dataConfig>" +
- " <dataSource type=\"MockDataSource\"/>\n" +
- " <document>\n" +
- " <entity name=\"x\" transformer=\"TemplateTransformer\"" +
- " query=\"" + FULLIMPORT_QUERY + "\"" +
- " deletedPkQuery=\"" + DELETED_PK_QUERY + "\"" +
- " deltaImportQuery=\"select * from x where id='${dih.delta.id}'\"" +
- " deltaQuery=\"" + DELTA_QUERY + "\">\n" +
- " <field column=\"id\" name=\"id\"/>\n" +
- " <entity name=\"y\" query=\"select * from y where y.A='${x.id}'\">\n" +
- " <field column=\"desc\" />\n" +
- " </entity>\n" +
- " </entity>\n" +
- " </document>\n" +
- "</dataConfig>\n";
-
- //TODO: fix this test to not require FSDirectory.
- static String savedFactory;
- @BeforeClass
- public static void beforeClass() throws Exception {
- savedFactory = System.getProperty("solr.DirectoryFactory");
- System.setProperty("solr.directoryFactory", "solr.MockFSDirectoryFactory");
- initCore("dataimport-solrconfig.xml", "dataimport-schema.xml");
+public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestCase {
+ private boolean delta = false;
+ private boolean useParentDeltaQueryParam = false;
+ private IntChanges personChanges = null;
+ private String[] countryChanges = null;
+
+ @Before
+ public void setupDeltaTest() {
+ delta = false;
+ personChanges = null;
+ countryChanges = null;
+ }
+ @Test
+ public void testSingleEntity() throws Exception {
+ singleEntity(1);
+ changeStuff();
+ int c = calculateDatabaseCalls();
+ singleEntity(c);
+ validateChanges();
+ }
+ @Test
+ public void testWithSimpleTransformer() throws Exception {
+ simpleTransform(1);
+ changeStuff();
+ simpleTransform(calculateDatabaseCalls());
+ validateChanges();
+ }
+ @Test
+ public void testWithComplexTransformer() throws Exception {
+ complexTransform(1, 0);
+ changeStuff();
+ complexTransform(calculateDatabaseCalls(), personChanges.deletedKeys.length);
+ validateChanges();
+ }
+ @Test
+ public void testChildEntities() throws Exception {
+ useParentDeltaQueryParam = random().nextBoolean();
+ withChildEntities(false, true);
+ changeStuff();
+ withChildEntities(false, false);
+ validateChanges();
}
- @AfterClass
- public static void afterClass() {
- if (savedFactory == null) {
- System.clearProperty("solr.directoryFactory");
- } else {
- System.setProperty("solr.directoryFactory", savedFactory);
+
+ private int calculateDatabaseCalls() {
+ //The main query generates 1
+ //Deletes generate 1
+ //Each add/mod generate 1
+ int c = 1;
+ if (countryChanges != null) {
+ c += countryChanges.length + 1;
}
- }
-
- @Before @Override
- public void setUp() throws Exception {
- super.setUp();
- clearIndex();
- assertU(commit());
- }
-
- @SuppressWarnings("unchecked")
- private void add1document() throws Exception {
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "1"));
- MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "hello"));
- MockDataSource.setIterator("select * from y where y.A='1'", childRow
- .iterator());
-
- runFullImport(dataConfig_delta);
-
- assertQ(req("*:* OR add1document"), "//*[@numFound='1']");
- assertQ(req("id:1"), "//*[@numFound='1']");
- assertQ(req("desc:hello"), "//*[@numFound='1']");
- }
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_FullImport() throws Exception {
- add1document();
- }
-
- @Test
- @SuppressWarnings("unchecked")
- public void testNonWritablePersistFile() throws Exception {
- // See SOLR-2551
- String configDir = h.getCore().getResourceLoader().getConfigDir();
- String filePath = configDir;
- if (configDir != null && !configDir.endsWith(File.separator))
- filePath += File.separator;
- filePath += "dataimport.properties";
- File f = new File(filePath);
-
- try {
- // execute the test only if we are able to set file to read only mode
- assumeTrue("No dataimport.properties file", f.exists() || f.createNewFile());
- assumeTrue("dataimport.proprties can't be set read only", f.setReadOnly());
- assumeFalse("dataimport.proprties is still writable even though " +
- "marked readonly - test running as superuser?", f.canWrite());
-
- ignoreException("Properties is not writable");
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "1"));
- MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "hello"));
- MockDataSource.setIterator("select * from y where y.A='1'",
- childRow.iterator());
-
- runFullImport(dataConfig_delta);
- assertQ(req("id:1"), "//*[@numFound='0']");
- } finally {
- f.delete();
+ if (personChanges != null) {
+ c += personChanges.addedKeys.length + personChanges.changedKeys.length + 1;
}
+ return c;
}
-
- // WORKS
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_delete() throws Exception {
- add1document();
- List deletedRow = new ArrayList();
- deletedRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELETED_PK_QUERY, deletedRow.iterator());
-
- MockDataSource.setIterator(DELTA_QUERY, Collections
- .EMPTY_LIST.iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "hello"));
- MockDataSource.setIterator("select * from y where y.A='1'", childRow
- .iterator());
-
- runDeltaImport(dataConfig_delta);
- assertQ(req("*:* OR testCompositePk_DeltaImport_delete"), "//*[@numFound='0']");
- }
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_empty() throws Exception {
- List deltaRow = new ArrayList();
- deltaRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELTA_QUERY, deltaRow.iterator());
-
- MockDataSource.setIterator(DELETED_PK_QUERY, Collections
- .EMPTY_LIST.iterator());
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "1"));
- MockDataSource.setIterator("select * from x where id='1'", parentRow
- .iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "hello"));
- MockDataSource.setIterator("select * from y where y.A='1'",
- childRow.iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR testCompositePk_DeltaImport_empty"), "//*[@numFound='1']");
- assertQ(req("id:1"), "//*[@numFound='1']");
- assertQ(req("desc:hello"), "//*[@numFound='1']");
- }
-
- // WORKS
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_replace_delete() throws Exception {
- add1document();
- MockDataSource.clearCache();
-
- List deltaRow = new ArrayList();
- deltaRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELTA_QUERY,
- deltaRow.iterator());
-
- List deletedRow = new ArrayList();
- deletedRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELETED_PK_QUERY,
- deletedRow.iterator());
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "1"));
- MockDataSource.setIterator("select * from x where id='1'", parentRow
- .iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "goodbye"));
- MockDataSource.setIterator("select * from y where y.A='1'", childRow
- .iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR testCompositePk_DeltaImport_replace_delete"), "//*[@numFound='0']");
- }
-
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_replace_nodelete() throws Exception {
- add1document();
- MockDataSource.clearCache();
-
- List deltaRow = new ArrayList();
- deltaRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELTA_QUERY,
- deltaRow.iterator());
-
- MockDataSource.setIterator(DELETED_PK_QUERY, Collections
- .EMPTY_LIST.iterator());
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "1"));
- MockDataSource.setIterator("select * from x where id='1'", parentRow
- .iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "goodbye"));
- MockDataSource.setIterator("select * from y where y.A='1'", childRow
- .iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='1']");
- assertQ(req("id:1"), "//*[@numFound='1']");
- assertQ(req("desc:hello OR XtestCompositePk_DeltaImport_replace_nodelete"), "//*[@numFound='0']");
- assertQ(req("desc:goodbye"), "//*[@numFound='1']");
- }
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_add() throws Exception {
- add1document();
- MockDataSource.clearCache();
-
- List deltaRow = new ArrayList();
- deltaRow.add(createMap("id", "2"));
- MockDataSource.setIterator(DELTA_QUERY,
- deltaRow.iterator());
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "2"));
- MockDataSource.setIterator("select * from x where id='2'", parentRow
- .iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "goodbye"));
- MockDataSource.setIterator("select * from y where y.A='2'", childRow
- .iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR testCompositePk_DeltaImport_add"), "//*[@numFound='2']");
- assertQ(req("id:1"), "//*[@numFound='1']");
- assertQ(req("id:2"), "//*[@numFound='1']");
- assertQ(req("desc:hello"), "//*[@numFound='1']");
- assertQ(req("desc:goodbye"), "//*[@numFound='1']");
+ private void validateChanges() throws Exception
+ {
+ if(personChanges!=null) {
+ for(int id : personChanges.addedKeys) {
+ assertQ(req("id:" + id), "//*[@numFound='1']");
+ }
+ for(int id : personChanges.deletedKeys) {
+ assertQ(req("id:" + id), "//*[@numFound='0']");
+ }
+ for(int id : personChanges.changedKeys) {
+ assertQ(req("id:" + id), "//*[@numFound='1']", "substring(//doc/arr[@name='NAME_mult_s']/str[1], 1, 8)='MODIFIED'");
+ }
+ }
+ if(countryChanges!=null) {
+ for(String code : countryChanges) {
+ assertQ(req("COUNTRY_CODE_s:" + code), "//*[@numFound='" + numberPeopleByCountryCode(code) + "']", "substring(//doc/str[@name='COUNTRY_NAME_s'], 1, 8)='MODIFIED'");
+ }
+ }
}
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_nodelta() throws Exception {
- add1document();
- MockDataSource.clearCache();
-
- MockDataSource.setIterator(DELTA_QUERY,
- Collections.EMPTY_LIST.iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
- assertQ(req("id:1 OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
- assertQ(req("desc:hello OR testCompositePk_DeltaImport_nodelta"), "//*[@numFound='1']");
+ private void changeStuff() throws Exception {
+ if(countryEntity)
+ {
int n = random().nextInt(3);
+ switch(n) {
+ case 0:
+ personChanges = modifySomePeople();
+ break;
+ case 1:
+ countryChanges = modifySomeCountries();
+ break;
+ case 2:
+ personChanges = modifySomePeople();
+ countryChanges = modifySomeCountries();
+ break;
+ }
+ } else {
+ personChanges = modifySomePeople();
+ }
+ delta = true;
}
-
- @Test
- @SuppressWarnings("unchecked")
- public void testCompositePk_DeltaImport_add_delete() throws Exception {
- add1document();
- MockDataSource.clearCache();
-
- List deltaRow = new ArrayList();
- deltaRow.add(createMap("id", "2"));
- MockDataSource.setIterator(DELTA_QUERY,
- deltaRow.iterator());
-
- List deletedRow = new ArrayList();
- deletedRow.add(createMap("id", "1"));
- MockDataSource.setIterator(DELETED_PK_QUERY,
- deletedRow.iterator());
-
- List parentRow = new ArrayList();
- parentRow.add(createMap("id", "2"));
- MockDataSource.setIterator("select * from x where id='2'", parentRow
- .iterator());
-
- List childRow = new ArrayList();
- childRow.add(createMap("desc", "goodbye"));
- MockDataSource.setIterator("select * from y where y.A='2'", childRow
- .iterator());
-
- runDeltaImport(dataConfig_delta);
-
- assertQ(req("*:* OR XtestCompositePk_DeltaImport_add_delete"), "//*[@numFound='1']");
- assertQ(req("id:2"), "//*[@numFound='1']");
- assertQ(req("desc:hello"), "//*[@numFound='0']");
- assertQ(req("desc:goodbye"), "//*[@numFound='1']");
+ @Override
+ protected LocalSolrQueryRequest generateRequest() {
+ return lrf.makeRequest("command", (delta ? "delta-import" : "full-import"), "dataConfig", generateConfig(),
+ "clean", (delta ? "false" : "true"), "commit", "true", "synchronous", "true", "indent", "true");
+ }
+ @Override
+ protected String deltaQueriesPersonTable() {
+ return
+ "deletedPkQuery=''SELECT ID FROM PEOPLE WHERE DELETED='Y' AND last_modified >='${dih.last_index_time}' '' " +
+ "deltaImportQuery=''SELECT ID, NAME, COUNTRY_CODE FROM PEOPLE where ID=${dih.delta.ID} '' " +
+ "deltaQuery=''" +
+ "SELECT ID FROM PEOPLE WHERE DELETED!='Y' AND last_modified >='${dih.last_index_time}' " +
+ (useParentDeltaQueryParam ? "" :
+ "UNION DISTINCT " +
+ "SELECT ID FROM PEOPLE WHERE DELETED!='Y' AND COUNTRY_CODE IN (SELECT CODE FROM COUNTRIES WHERE last_modified >='${dih.last_index_time}') "
+ ) + "'' "
+ ;
+ }
+ @Override
+ protected String deltaQueriesCountryTable() {
+ if(useParentDeltaQueryParam) {
+ return
+ "deltaQuery=''SELECT CODE FROM COUNTRIES WHERE DELETED != 'Y' AND last_modified >='${dih.last_index_time}' '' " +
+ "parentDeltaQuery=''SELECT ID FROM PEOPLE WHERE DELETED != 'Y' AND COUNTRY_CODE='${Countries.CODE}' '' "
+ ;
+
+ }
+ return "";
}
}
Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestTemplateTransformer.java Fri Nov 23 12:00:32 2012
@@ -58,7 +58,7 @@ public class TestTemplateTransformer ext
"lastName", "Mangar",
"mail", mails);
- VariableResolverImpl resolver = new VariableResolverImpl();
+ VariableResolver resolver = new VariableResolver();
resolver.addNamespace("e", row);
Map<String, String> entityAttrs = createMap("name", "e");
Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestURLDataSource.java Fri Nov 23 12:00:32 2012
@@ -27,7 +27,7 @@ import org.junit.Test;
public class TestURLDataSource extends AbstractDataImportHandlerTestCase {
private List<Map<String, String>> fields = new ArrayList<Map<String, String>>();
private URLDataSource dataSource = new URLDataSource();
- private VariableResolverImpl variableResolver = new VariableResolverImpl();
+ private VariableResolver variableResolver = new VariableResolver();
private Context context = AbstractDataImportHandlerTestCase.getContext(null, variableResolver,
dataSource, Context.FULL_DUMP, fields, null);
private Properties initProps = new Properties();
Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestVariableResolver.java Fri Nov 23 12:00:32 2012
@@ -16,6 +16,7 @@
*/
package org.apache.solr.handler.dataimport;
+import org.junit.Ignore;
import org.junit.Test;
import org.apache.solr.util.DateMathParser;
@@ -26,144 +27,139 @@ import java.util.*;
* <p>
* Test for VariableResolver
* </p>
- *
- *
+ *
+ *
* @since solr 1.3
*/
public class TestVariableResolver extends AbstractDataImportHandlerTestCase {
-
+
@Test
public void testSimpleNamespace() {
- VariableResolverImpl vri = new VariableResolverImpl();
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ Map<String,Object> ns = new HashMap<String,Object>();
ns.put("world", "WORLD");
vri.addNamespace("hello", ns);
assertEquals("WORLD", vri.resolve("hello.world"));
}
-
+
@Test
- public void testDefaults(){
+ public void testDefaults() {
// System.out.println(System.setProperty(TestVariableResolver.class.getName(),"hello"));
- System.setProperty(TestVariableResolver.class.getName(),"hello");
- // System.out.println("s.gP()"+ System.getProperty(TestVariableResolver.class.getName()));
-
- HashMap m = new HashMap();
- m.put("hello","world");
- VariableResolverImpl vri = new VariableResolverImpl(m);
+ System.setProperty(TestVariableResolver.class.getName(), "hello");
+ // System.out.println("s.gP()"+
+ // System.getProperty(TestVariableResolver.class.getName()));
+
+ Properties p = new Properties();
+ p.put("hello", "world");
+ VariableResolver vri = new VariableResolver(p);
Object val = vri.resolve(TestVariableResolver.class.getName());
// System.out.println("val = " + val);
assertEquals("hello", val);
- assertEquals("world",vri.resolve("hello"));
+ assertEquals("world", vri.resolve("hello"));
}
-
+
@Test
public void testNestedNamespace() {
- VariableResolverImpl vri = new VariableResolverImpl();
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ Map<String,Object> ns = new HashMap<String,Object>();
ns.put("world", "WORLD");
vri.addNamespace("hello", ns);
- ns = new HashMap<String, Object>();
+ ns = new HashMap<String,Object>();
ns.put("world1", "WORLD1");
vri.addNamespace("hello.my", ns);
assertEquals("WORLD1", vri.resolve("hello.my.world1"));
}
-
+
@Test
public void test3LevelNestedNamespace() {
- VariableResolverImpl vri = new VariableResolverImpl();
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ Map<String,Object> ns = new HashMap<String,Object>();
ns.put("world", "WORLD");
vri.addNamespace("hello", ns);
- ns = new HashMap<String, Object>();
+ ns = new HashMap<String,Object>();
ns.put("world1", "WORLD1");
vri.addNamespace("hello.my.new", ns);
assertEquals("WORLD1", vri.resolve("hello.my.new.world1"));
}
-
+
@Test
public void dateNamespaceWithValue() {
- VariableResolverImpl vri = new VariableResolverImpl();
- vri.addNamespace("dataimporter.functions", EvaluatorBag
- .getFunctionsNamespace(Collections.EMPTY_LIST, null, vri));
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ vri.setEvaluators(new DataImporter().getEvaluators(Collections
+ .<Map<String,String>> emptyList()));
+ Map<String,Object> ns = new HashMap<String,Object>();
Date d = new Date();
ns.put("dt", d);
vri.addNamespace("A", ns);
- assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(d),
- vri.replaceTokens("${dataimporter.functions.formatDate(A.dt,'yyyy-MM-dd HH:mm:ss')}"));
+ assertEquals(
+ new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ROOT).format(d),
+ vri.replaceTokens("${dataimporter.functions.formatDate(A.dt,'yyyy-MM-dd HH:mm:ss')}"));
}
-
+
@Test
public void dateNamespaceWithExpr() throws Exception {
- VariableResolverImpl vri = new VariableResolverImpl();
- vri.addNamespace("dataimporter.functions", EvaluatorBag
- .getFunctionsNamespace(Collections.EMPTY_LIST,null, vri));
- SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+ VariableResolver vri = new VariableResolver();
+ vri.setEvaluators(new DataImporter().getEvaluators(Collections
+ .<Map<String,String>> emptyList()));
+ SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT);
format.setTimeZone(TimeZone.getTimeZone("UTC"));
- resetEvaluatorBagDateMathParser();
- DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.getDefault());
-
- String s = vri.replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}");
- assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm").format(dmp.parseMath("/DAY")), s);
+ DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.ROOT);
+
+ String s = vri
+ .replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}");
+ assertEquals(
+ new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.ROOT).format(dmp.parseMath("/DAY")),
+ s);
}
-
+
@Test
public void testDefaultNamespace() {
- VariableResolverImpl vri = new VariableResolverImpl();
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ Map<String,Object> ns = new HashMap<String,Object>();
ns.put("world", "WORLD");
vri.addNamespace(null, ns);
assertEquals("WORLD", vri.resolve("world"));
}
-
+
@Test
public void testDefaultNamespace1() {
- VariableResolverImpl vri = new VariableResolverImpl();
- Map<String, Object> ns = new HashMap<String, Object>();
+ VariableResolver vri = new VariableResolver();
+ Map<String,Object> ns = new HashMap<String,Object>();
ns.put("world", "WORLD");
vri.addNamespace(null, ns);
assertEquals("WORLD", vri.resolve("world"));
}
-
+
@Test
public void testFunctionNamespace1() throws Exception {
- VariableResolverImpl resolver = new VariableResolverImpl();
- ContextImpl context = new ContextImpl(null, resolver, null, Context.FULL_DUMP, Collections.EMPTY_MAP, null, null);
- final List<Map<String ,String >> l = new ArrayList<Map<String, String>>();
- Map<String ,String > m = new HashMap<String, String>();
- m.put("name","test");
- m.put("class",E.class.getName());
+ VariableResolver resolver = new VariableResolver();
+ final List<Map<String,String>> l = new ArrayList<Map<String,String>>();
+ Map<String,String> m = new HashMap<String,String>();
+ m.put("name", "test");
+ m.put("class", E.class.getName());
l.add(m);
-
- SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+ resolver.setEvaluators(new DataImporter().getEvaluators(l));
+ ContextImpl context = new ContextImpl(null, resolver, null,
+ Context.FULL_DUMP, Collections.EMPTY_MAP, null, null);
+
+ SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'", Locale.ROOT);
format.setTimeZone(TimeZone.getTimeZone("UTC"));
- resetEvaluatorBagDateMathParser();
- DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.getDefault());
-
- resolver.addNamespace("dataimporter.functions", EvaluatorBag
- .getFunctionsNamespace(l,null, resolver));
+ DateMathParser dmp = new DateMathParser(TimeZone.getDefault(), Locale.ROOT);
+
String s = resolver
- .replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}");
- assertEquals(new SimpleDateFormat("yyyy-MM-dd HH:mm")
- .format(dmp.parseMath("/DAY")), s);
- assertEquals("Hello World", resolver
- .replaceTokens("${dataimporter.functions.test('TEST')}"));
- }
-
- public static class E extends Evaluator{
- @Override
- public String evaluate(String expression, Context context) {
- return "Hello World";
- }
- }
-
- private void resetEvaluatorBagDateMathParser() {
- EvaluatorBag.dateMathParser = new DateMathParser(TimeZone
- .getDefault(), Locale.getDefault()){
- @Override
- public Date getNow() {
- return new Date();
- }
- };
+ .replaceTokens("${dataimporter.functions.formatDate('NOW/DAY','yyyy-MM-dd HH:mm')}");
+ assertEquals(
+ new SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.ROOT).format(dmp.parseMath("/DAY")),
+ s);
+ assertEquals("Hello World",
+ resolver.replaceTokens("${dataimporter.functions.test('TEST')}"));
+ }
+
+ public static class E extends Evaluator {
+ @Override
+ public String evaluate(String expression, Context context) {
+ return "Hello World";
+ }
}
+
}
Modified: lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/contrib/dataimporthandler/src/test/org/apache/solr/handler/dataimport/TestXPathEntityProcessor.java Fri Nov 23 12:00:32 2012
@@ -54,7 +54,7 @@ public class TestXPathEntityProcessor ex
fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist"));
fields.add(createMap("column", "year", "xpath", "/catalog/cd/year"));
Context c = getContext(null,
- new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
+ new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
xPathEntityProcessor.init(c);
List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
@@ -77,7 +77,7 @@ public class TestXPathEntityProcessor ex
List fields = new ArrayList();
fields.add(createMap("column", "a", "xpath", "/root/a", DataImporter.MULTI_VALUED, "true"));
Context c = getContext(null,
- new VariableResolverImpl(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
+ new VariableResolver(), getDataSource(testXml), Context.FULL_DUMP, fields, entityAttrs);
XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
xPathEntityProcessor.init(c);
List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
@@ -101,7 +101,7 @@ public class TestXPathEntityProcessor ex
List fields = new ArrayList();
fields.add(createMap("column", "a", "xpath", "/root/a" ,"flatten","true"));
Context c = getContext(null,
- new VariableResolverImpl(), getDataSource(testXmlFlatten), Context.FULL_DUMP, fields, entityAttrs);
+ new VariableResolver(), getDataSource(testXmlFlatten), Context.FULL_DUMP, fields, entityAttrs);
XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
xPathEntityProcessor.init(c);
Map<String, Object> result = null;
@@ -126,7 +126,7 @@ public class TestXPathEntityProcessor ex
fields.add(createMap("column", "artist", "xpath", "/catalog/cd/artist"));
fields.add(createMap("column", "year", "xpath", "/catalog/cd/year"));
Context c = getContext(null,
- new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
+ new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, fields, entityAttrs);
XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor() {
private int count;
@@ -221,7 +221,7 @@ public class TestXPathEntityProcessor ex
XPathEntityProcessor.USE_SOLR_ADD_SCHEMA, "true", "xsl", ""
+ new File(tmpdir, "x.xsl").toURI(), "url", "cd.xml");
Context c = getContext(null,
- new VariableResolverImpl(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs);
+ new VariableResolver(), getDataSource(cdData), Context.FULL_DUMP, null, entityAttrs);
XPathEntityProcessor xPathEntityProcessor = new XPathEntityProcessor();
xPathEntityProcessor.init(c);
List<Map<String, Object>> result = new ArrayList<Map<String, Object>>();
Modified: lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/ivy.xml Fri Nov 23 12:00:32 2012
@@ -28,6 +28,7 @@
<dependency org="org.easymock" name="easymock" rev="2.2" transitive="false"/>
<dependency org="com.spatial4j" name="spatial4j" rev="0.3" transitive="false"/>
<dependency org="javax.servlet" name="javax.servlet-api" rev="3.0.1" transitive="false"/>
+ <dependency org="com.yammer.metrics" name="metrics-core" rev="2.1.2" transitive="false"/>
<exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
</dependencies>
</ivy-module>
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java Fri Nov 23 12:00:32 2012
@@ -46,8 +46,8 @@ import org.apache.solr.response.SolrQuer
import org.apache.solr.servlet.SolrRequestParsers;
/**
- * SolrServer that connects directly to SolrCore
- *
+ * SolrServer that connects directly to SolrCore.
+ * <p>
* TODO -- this implementation sends the response to XML and then parses it.
* It *should* be able to convert the response directly into a named list.
*
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/DistributedQueue.java Fri Nov 23 12:00:32 2012
@@ -21,7 +21,6 @@ package org.apache.solr.cloud;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.TreeMap;
-import java.util.concurrent.CountDownLatch;
import org.apache.solr.common.cloud.SolrZkClient;
import org.apache.zookeeper.CreateMode;
@@ -40,6 +39,8 @@ public class DistributedQueue {
private static final Logger LOG = LoggerFactory
.getLogger(DistributedQueue.class);
+ private static long DEFAULT_TIMEOUT = 5*60*1000;
+
private final String dir;
private SolrZkClient zookeeper;
@@ -163,20 +164,22 @@ public class DistributedQueue {
private class LatchChildWatcher implements Watcher {
- CountDownLatch latch;
+ Object lock = new Object();
- public LatchChildWatcher() {
- latch = new CountDownLatch(1);
- }
+ public LatchChildWatcher() {}
public void process(WatchedEvent event) {
- LOG.debug("Watcher fired on path: " + event.getPath() + " state: "
+ LOG.info("Watcher fired on path: " + event.getPath() + " state: "
+ event.getState() + " type " + event.getType());
- latch.countDown();
+ synchronized (lock) {
+ lock.notifyAll();
+ }
}
- public void await() throws InterruptedException {
- latch.await();
+ public void await(long timeout) throws InterruptedException {
+ synchronized (lock) {
+ lock.wait(timeout);
+ }
}
}
@@ -197,7 +200,7 @@ public class DistributedQueue {
continue;
}
if (orderedChildren.size() == 0) {
- childWatcher.await();
+ childWatcher.await(DEFAULT_TIMEOUT);
continue;
}
@@ -274,7 +277,7 @@ public class DistributedQueue {
continue;
}
if (orderedChildren.size() == 0) {
- childWatcher.await();
+ childWatcher.await(DEFAULT_TIMEOUT);
continue;
}
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java Fri Nov 23 12:00:32 2012
@@ -249,7 +249,7 @@ final class ShardLeaderElectionContext e
core.getCoreDescriptor().getCloudDescriptor().isLeader = false;
// we could not publish ourselves as leader - rejoin election
- rejoinLeaderElection(coreName, core);
+ rejoinLeaderElection(leaderSeqPath, core);
} finally {
if (core != null) {
core.close();
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/Overseer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/Overseer.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/Overseer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/Overseer.java Fri Nov 23 12:00:32 2012
@@ -221,14 +221,19 @@ public class Overseer {
String nodeName = message.getStr(ZkStateReader.NODE_NAME_PROP);
//get shardId from ClusterState
sliceName = getAssignedId(state, nodeName, message);
+ if (sliceName != null) {
+ log.info("shard=" + sliceName + " is already registered");
+ }
}
if(sliceName == null) {
//request new shardId
if (collectionExists) {
// use existing numShards
numShards = state.getCollectionStates().get(collection).size();
+ log.info("Collection already exists with " + ZkStateReader.NUM_SHARDS_PROP + "=" + numShards);
}
sliceName = AssignShard.assignShard(collection, state, numShards);
+ log.info("Assigning new node to shard shard=" + sliceName);
}
Slice slice = state.getSlice(collection, sliceName);
@@ -398,8 +403,11 @@ public class Overseer {
if(slice.getReplicasMap().containsKey(coreNodeName)) {
Map<String, Replica> newReplicas = slice.getReplicasCopy();
newReplicas.remove(coreNodeName);
- Slice newSlice = new Slice(slice.getName(), newReplicas, slice.getProperties());
- newSlices.put(slice.getName(), newSlice);
+ if (newReplicas.size() != 0) {
+ Slice newSlice = new Slice(slice.getName(), newReplicas,
+ slice.getProperties());
+ newSlices.put(slice.getName(), newSlice);
+ }
} else {
newSlices.put(slice.getName(), slice);
}
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java Fri Nov 23 12:00:32 2012
@@ -80,7 +80,7 @@ public class OverseerCollectionProcessor
@Override
public void run() {
- log.info("Process current queue of collection creations");
+ log.info("Process current queue of collection messages");
while (amILeader() && !isClosed) {
try {
byte[] head = workQueue.peek(true);
@@ -88,13 +88,20 @@ public class OverseerCollectionProcessor
//if (head != null) { // should not happen since we block above
final ZkNodeProps message = ZkNodeProps.load(head);
final String operation = message.getStr(QUEUE_OPERATION);
-
+ try {
boolean success = processMessage(message, operation);
if (!success) {
// TODO: what to do on failure / partial failure
// if we fail, do we clean up then ?
- SolrException.log(log, "Collection creation of " + message.getStr("name") + " failed");
+ SolrException.log(log,
+ "Collection " + operation + " of " + message.getStr("name")
+ + " failed");
}
+ } catch(Throwable t) {
+ SolrException.log(log,
+ "Collection " + operation + " of " + message.getStr("name")
+ + " failed", t);
+ }
//}
workQueue.remove();
} catch (KeeperException e) {
@@ -151,7 +158,12 @@ public class OverseerCollectionProcessor
}
private boolean createCollection(ClusterState clusterState, ZkNodeProps message) {
-
+ String collectionName = message.getStr("name");
+ if (clusterState.getCollections().contains(collectionName)) {
+ SolrException.log(log, "collection already exists: " + collectionName);
+ return false;
+ }
+
// look at the replication factor and see if it matches reality
// if it does not, find best nodes to create more cores
@@ -172,6 +184,17 @@ public class OverseerCollectionProcessor
return false;
}
+ if (numReplicas < 0) {
+ SolrException.log(log, REPLICATION_FACTOR + " must be > 0");
+ return false;
+ }
+
+ if (numShards < 0) {
+ SolrException.log(log, "numShards must be > 0");
+ return false;
+ }
+
+
String name = message.getStr("name");
String configName = message.getStr("collection.configName");
@@ -191,6 +214,15 @@ public class OverseerCollectionProcessor
Collections.shuffle(nodeList);
int numNodes = numShards * (numReplicas + 1);
+ if (nodeList.size() < numNodes) {
+ log.warn("Not enough nodes available to satisfy create collection request for collection:"
+ + collectionName
+ + " nodes needed:"
+ + numNodes
+ + " nodes available:"
+ + nodeList.size() + " - using nodes available");
+ }
+
List<String> createOnNodes = nodeList.subList(0, Math.min(nodeList.size(), numNodes));
log.info("Create collection " + name + " on " + createOnNodes);
@@ -255,20 +287,25 @@ public class OverseerCollectionProcessor
for (Map.Entry<String,Replica> shardEntry : shardEntries) {
final ZkNodeProps node = shardEntry.getValue();
if (clusterState.liveNodesContain(node.getStr(ZkStateReader.NODE_NAME_PROP))) {
- params.set(CoreAdminParams.CORE, node.getStr(ZkStateReader.CORE_NAME_PROP));
-
+ // For thread safety, only simple clone the ModifiableSolrParams
+ ModifiableSolrParams cloneParams = new ModifiableSolrParams();
+ cloneParams.add(params);
+ cloneParams.set(CoreAdminParams.CORE,
+ node.getStr(ZkStateReader.CORE_NAME_PROP));
+
String replica = node.getStr(ZkStateReader.BASE_URL_PROP);
ShardRequest sreq = new ShardRequest();
+
// yes, they must use same admin handler path everywhere...
- params.set("qt", adminPath);
-
+ cloneParams.set("qt", adminPath);
sreq.purpose = 1;
// TODO: this sucks
if (replica.startsWith("http://")) replica = replica.substring(7);
sreq.shards = new String[] {replica};
sreq.actualShards = sreq.shards;
- sreq.params = params;
- log.info("Collection Admin sending CoreAdmin cmd to " + replica);
+ sreq.params = cloneParams;
+ log.info("Collection Admin sending CoreAdmin cmd to " + replica
+ + " params:" + sreq.params);
shardHandler.submit(sreq, replica, sreq.params);
}
}
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/cloud/ZkController.java Fri Nov 23 12:00:32 2012
@@ -20,7 +20,9 @@ package org.apache.solr.cloud;
import java.io.File;
import java.io.IOException;
import java.net.InetAddress;
+import java.net.NetworkInterface;
import java.util.Collections;
+import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
@@ -358,7 +360,29 @@ public final class ZkController {
private String getHostAddress(String host) throws IOException {
if (host == null) {
- host = "http://" + InetAddress.getLocalHost().getHostName();
+ String hostaddress = InetAddress.getLocalHost().getHostAddress();
+ // Re-get the IP again for "127.0.0.1", the other case we trust the hosts
+ // file is right.
+ if ("127.0.0.1".equals(hostaddress)) {
+ Enumeration<NetworkInterface> netInterfaces = null;
+ try {
+ netInterfaces = NetworkInterface.getNetworkInterfaces();
+ while (netInterfaces.hasMoreElements()) {
+ NetworkInterface ni = netInterfaces.nextElement();
+ Enumeration<InetAddress> ips = ni.getInetAddresses();
+ while (ips.hasMoreElements()) {
+ InetAddress ip = ips.nextElement();
+ if (ip.isSiteLocalAddress()) {
+ hostaddress = ip.getHostAddress();
+ }
+ }
+ }
+ } catch (Throwable e) {
+ SolrException.log(log,
+ "Error while looking for a better host name than 127.0.0.1", e);
+ }
+ }
+ host = "http://" + hostaddress;
} else {
Matcher m = URL_PREFIX.matcher(host);
if (m.matches()) {
@@ -996,7 +1020,7 @@ public final class ZkController {
}
throw new SolrException(ErrorCode.SERVER_ERROR,
- "Could not get shard_id for core: " + coreName);
+ "Could not get shard_id for core: " + coreName + " coreNodeName:" + shardZkNodeName);
}
public static void uploadToZK(SolrZkClient zkClient, File dir, String zkPath) throws IOException, KeeperException, InterruptedException {
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CachingDirectoryFactory.java Fri Nov 23 12:00:32 2012
@@ -116,7 +116,7 @@ public abstract class CachingDirectoryFa
while(val.refCnt != 0) {
wait(100);
- if (cnt++ >= 300) {
+ if (cnt++ >= 600) {
log.error("Timeout waiting for all directory ref counts to be released");
break;
}
Modified: lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java?rev=1412849&r1=1412848&r2=1412849&view=diff
==============================================================================
--- lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java (original)
+++ lucene/dev/branches/LUCENE-2878/solr/core/src/java/org/apache/solr/core/CoreContainer.java Fri Nov 23 12:00:32 2012
@@ -28,15 +28,25 @@ import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.concurrent.CompletionService;
import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorCompletionService;
+import java.util.concurrent.Future;
+import java.util.concurrent.RejectedExecutionException;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
-import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
@@ -56,6 +66,7 @@ import org.apache.solr.common.SolrExcept
import org.apache.solr.common.SolrException.ErrorCode;
import org.apache.solr.common.cloud.ZkStateReader;
import org.apache.solr.common.cloud.ZooKeeperException;
+import org.apache.solr.common.util.ExecutorUtil;
import org.apache.solr.core.SolrXMLSerializer.SolrCoreXMLDef;
import org.apache.solr.core.SolrXMLSerializer.SolrXMLDef;
import org.apache.solr.handler.admin.CollectionsHandler;
@@ -67,7 +78,9 @@ import org.apache.solr.logging.LogWatche
import org.apache.solr.logging.jul.JulWatcher;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.update.SolrCoreState;
+import org.apache.solr.util.AdjustableSemaphore;
import org.apache.solr.util.DOMUtil;
+import org.apache.solr.util.DefaultSolrThreadFactory;
import org.apache.solr.util.FileUtils;
import org.apache.solr.util.SystemIdResolver;
import org.apache.zookeeper.KeeperException;
@@ -78,7 +91,6 @@ import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
-import org.xml.sax.SAXException;
/**
@@ -88,6 +100,7 @@ import org.xml.sax.SAXException;
public class CoreContainer
{
private static final String LEADER_VOTE_WAIT = "180000"; // 3 minutes
+ private static final int CORE_LOAD_THREADS = 3;
private static final String DEFAULT_HOST_CONTEXT = "solr";
private static final String DEFAULT_HOST_PORT = "8983";
private static final int DEFAULT_ZK_CLIENT_TIMEOUT = 15000;
@@ -106,9 +119,15 @@ public class CoreContainer
private static final String CORE_COLLECTION = "collection";
private static final String CORE_ROLES = "roles";
private static final String CORE_PROPERTIES = "properties";
+ private static final String CORE_LOADONSTARTUP = "loadOnStartup";
+ private static final String CORE_SWAPPABLE = "swappable";
- protected final Map<String, SolrCore> cores = new LinkedHashMap<String, SolrCore>();
+ protected final Map<String, SolrCore> cores = new LinkedHashMap<String, SolrCore>(); // For "permanent" cores
+
+ protected Map<String, SolrCore> swappableCores = new LinkedHashMap<String, SolrCore>(); // For "lazily loaded" cores
+
+ protected final Map<String, CoreDescriptor> dynamicDescriptors = new LinkedHashMap<String, CoreDescriptor>();
protected final Map<String,Exception> coreInitFailures =
Collections.synchronizedMap(new LinkedHashMap<String,Exception>());
@@ -140,6 +159,8 @@ public class CoreContainer
private String zkHost;
private Map<SolrCore,String> coreToOrigName = new ConcurrentHashMap<SolrCore,String>();
private String leaderVoteWait;
+ protected int swappableCacheSize = Integer.MAX_VALUE; // Use as a flag too, if swappableCacheSize set in solr.xml this will be changed
+ private int coreLoadThreads;
{
log.info("New CoreContainer " + System.identityHashCode(this));
@@ -158,7 +179,7 @@ public class CoreContainer
/**
* Initalize CoreContainer directly from the constructor
*/
- public CoreContainer(String dir, File configFile) throws ParserConfigurationException, IOException, SAXException
+ public CoreContainer(String dir, File configFile)
{
this(dir);
this.load(dir, configFile);
@@ -290,8 +311,7 @@ public class CoreContainer
protected String dataDir = null; // override datadir for single core mode
// core container instantiation
- public CoreContainer initialize() throws IOException,
- ParserConfigurationException, SAXException {
+ public CoreContainer initialize() {
CoreContainer cores = null;
String solrHome = SolrResourceLoader.locateSolrHome();
File fconf = new File(solrHome, containerConfigFilename == null ? "solr.xml"
@@ -303,7 +323,12 @@ public class CoreContainer
cores.load(solrHome, fconf);
} else {
log.info("no solr.xml file found - using default");
- cores.load(solrHome, new InputSource(new ByteArrayInputStream(DEF_SOLR_XML.getBytes("UTF-8"))));
+ try {
+ cores.load(solrHome, new InputSource(new ByteArrayInputStream(DEF_SOLR_XML.getBytes("UTF-8"))));
+ } catch (Exception e) {
+ throw new SolrException(ErrorCode.SERVER_ERROR,
+ "CoreContainer.Initialize failed when trying to load default solr.xml file", e);
+ }
cores.configFile = fconf;
}
@@ -335,7 +360,22 @@ public class CoreContainer
return p;
}
-
+ // Trivial helper method for load, note it implements LRU on swappable cores
+ private void allocateLazyCores(Config cfg) {
+ swappableCacheSize = cfg.getInt("solr/cores/@swappableCacheSize", Integer.MAX_VALUE);
+ if (swappableCacheSize != Integer.MAX_VALUE) {
+ swappableCores = new LinkedHashMap<String, SolrCore>(swappableCacheSize, 0.75f, true) {
+ @Override
+ protected boolean removeEldestEntry(Map.Entry<String, SolrCore> eldest) {
+ if (size() > swappableCacheSize) {
+ eldest.getValue().close();
+ return true;
+ }
+ return false;
+ }
+ };
+ }
+ }
//-------------------------------------------------------------------
// Initialization / Cleanup
@@ -346,7 +386,7 @@ public class CoreContainer
* @param dir the home directory of all resources.
* @param configFile the configuration file
*/
- public void load(String dir, File configFile ) throws ParserConfigurationException, IOException, SAXException {
+ public void load(String dir, File configFile ) {
this.configFile = configFile;
this.load(dir, new InputSource(configFile.toURI().toASCIIString()));
}
@@ -357,70 +397,68 @@ public class CoreContainer
* @param dir the home directory of all resources.
* @param cfgis the configuration file InputStream
*/
- public void load(String dir, InputSource cfgis)
- throws ParserConfigurationException, IOException, SAXException {
-
+ public void load(String dir, InputSource cfgis) {
+ ThreadPoolExecutor coreLoadExecutor = null;
if (null == dir) {
// don't rely on SolrResourceLoader(), determine explicitly first
dir = SolrResourceLoader.locateSolrHome();
}
log.info("Loading CoreContainer using Solr Home: '{}'", dir);
-
+
this.loader = new SolrResourceLoader(dir);
solrHome = loader.getInstanceDir();
- Config cfg = new Config(loader, null, cfgis, null, false);
+ Config cfg;
// keep orig config for persist to consult
try {
+ cfg = new Config(loader, null, cfgis, null, false);
this.cfg = new Config(loader, null, copyDoc(cfg.getDocument()));
- } catch (TransformerException e) {
+ } catch (Exception e) {
throw new SolrException(ErrorCode.SERVER_ERROR, "", e);
}
-
+ // Since the cores var is now initialized to null, let's set it up right
+ // now.
cfg.substituteProperties();
+ allocateLazyCores(cfg);
+
// Initialize Logging
- if(cfg.getBool("solr/logging/@enabled",true)) {
+ if (cfg.getBool("solr/logging/@enabled", true)) {
String slf4jImpl = null;
String fname = cfg.get("solr/logging/watcher/@class", null);
try {
- slf4jImpl = StaticLoggerBinder.getSingleton().getLoggerFactoryClassStr();
- if(fname==null) {
- if( slf4jImpl.indexOf("Log4j") > 0) {
- log.warn("Log watching is not yet implemented for log4j" );
- }
- else if( slf4jImpl.indexOf("JDK") > 0) {
+ slf4jImpl = StaticLoggerBinder.getSingleton()
+ .getLoggerFactoryClassStr();
+ if (fname == null) {
+ if (slf4jImpl.indexOf("Log4j") > 0) {
+ log.warn("Log watching is not yet implemented for log4j");
+ } else if (slf4jImpl.indexOf("JDK") > 0) {
fname = "JUL";
}
}
- }
- catch(Throwable ex) {
- log.warn("Unable to read SLF4J version. LogWatcher will be disabled: "+ex);
+ } catch (Throwable ex) {
+ log.warn("Unable to read SLF4J version. LogWatcher will be disabled: "
+ + ex);
}
// Now load the framework
- if(fname!=null) {
- if("JUL".equalsIgnoreCase(fname)) {
+ if (fname != null) {
+ if ("JUL".equalsIgnoreCase(fname)) {
logging = new JulWatcher(slf4jImpl);
- }
-// else if( "Log4j".equals(fname) ) {
-// logging = new Log4jWatcher(slf4jImpl);
-// }
- else {
+ } else {
try {
logging = loader.newInstance(fname, LogWatcher.class);
- }
- catch (Throwable e) {
+ } catch (Throwable e) {
log.warn("Unable to load LogWatcher", e);
}
}
- if( logging != null ) {
+ if (logging != null) {
ListenerConfig v = new ListenerConfig();
- v.size = cfg.getInt("solr/logging/watcher/@size",50);
- v.threshold = cfg.get("solr/logging/watcher/@threshold",null);
- if(v.size>0) {
+ v.size = cfg.getInt("solr/logging/watcher/@size", 50);
+ v.threshold = cfg.get("solr/logging/watcher/@threshold", null);
+ if (v.size > 0) {
log.info("Registering Log Listener");
logging.registerListener(v, this);
}
@@ -428,40 +466,48 @@ public class CoreContainer
}
}
-
String dcoreName = cfg.get("solr/cores/@defaultCoreName", null);
- if(dcoreName != null && !dcoreName.isEmpty()) {
+ if (dcoreName != null && !dcoreName.isEmpty()) {
defaultCoreName = dcoreName;
}
persistent = cfg.getBool("solr/@persistent", false);
libDir = cfg.get("solr/@sharedLib", null);
- zkHost = cfg.get("solr/@zkHost" , null);
+ zkHost = cfg.get("solr/@zkHost", null);
+ coreLoadThreads = cfg.getInt("solr/@coreLoadThreads", CORE_LOAD_THREADS);
+
adminPath = cfg.get("solr/cores/@adminPath", null);
shareSchema = cfg.getBool("solr/cores/@shareSchema", DEFAULT_SHARE_SCHEMA);
- zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout", DEFAULT_ZK_CLIENT_TIMEOUT);
-
+ zkClientTimeout = cfg.getInt("solr/cores/@zkClientTimeout",
+ DEFAULT_ZK_CLIENT_TIMEOUT);
+
hostPort = cfg.get("solr/cores/@hostPort", DEFAULT_HOST_PORT);
-
+
hostContext = cfg.get("solr/cores/@hostContext", DEFAULT_HOST_CONTEXT);
host = cfg.get("solr/cores/@host", null);
leaderVoteWait = cfg.get("solr/cores/@leaderVoteWait", LEADER_VOTE_WAIT);
-
- if(shareSchema){
- indexSchemaCache = new ConcurrentHashMap<String ,IndexSchema>();
+
+ if (shareSchema) {
+ indexSchemaCache = new ConcurrentHashMap<String,IndexSchema>();
}
- adminHandler = cfg.get("solr/cores/@adminHandler", null );
- managementPath = cfg.get("solr/cores/@managementPath", null );
+ adminHandler = cfg.get("solr/cores/@adminHandler", null);
+ managementPath = cfg.get("solr/cores/@managementPath", null);
- zkClientTimeout = Integer.parseInt(System.getProperty("zkClientTimeout", Integer.toString(zkClientTimeout)));
+ zkClientTimeout = Integer.parseInt(System.getProperty("zkClientTimeout",
+ Integer.toString(zkClientTimeout)));
initZooKeeper(zkHost, zkClientTimeout);
-
+
+ if (isZooKeeperAware() && coreLoadThreads <= 1) {
+ throw new SolrException(ErrorCode.SERVER_ERROR,
+ "SolrCloud requires a value of at least 2 in solr.xml for coreLoadThreads");
+ }
+
if (libDir != null) {
File f = FileUtils.resolvePath(new File(dir), libDir);
- log.info( "loading shared library: "+f.getAbsolutePath() );
+ log.info("loading shared library: " + f.getAbsolutePath());
libLoader = SolrResourceLoader.createClassLoader(f, null);
}
-
+
if (adminPath != null) {
if (adminHandler == null) {
coreAdminHandler = new CoreAdminHandler(this);
@@ -469,71 +515,163 @@ public class CoreContainer
coreAdminHandler = this.createMultiCoreHandler(adminHandler);
}
}
-
+
collectionsHandler = new CollectionsHandler(this);
try {
- containerProperties = readProperties(cfg, ((NodeList) cfg.evaluate(DEFAULT_HOST_CONTEXT, XPathConstants.NODESET)).item(0));
+ containerProperties = readProperties(cfg, ((NodeList) cfg.evaluate(
+ DEFAULT_HOST_CONTEXT, XPathConstants.NODESET)).item(0));
} catch (Throwable e) {
- SolrException.log(log,null,e);
+ SolrException.log(log, null, e);
}
-
- NodeList nodes = (NodeList)cfg.evaluate("solr/cores/core", XPathConstants.NODESET);
-
- for (int i=0; i<nodes.getLength(); i++) {
- Node node = nodes.item(i);
- try {
- String rawName = DOMUtil.getAttr(node, CORE_NAME, null);
- if (null == rawName) {
- throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
- "Each core in solr.xml must have a 'name'");
- }
- String name = rawName;
- CoreDescriptor p = new CoreDescriptor(this, name, DOMUtil.getAttr(node, CORE_INSTDIR, null));
-
- // deal with optional settings
- String opt = DOMUtil.getAttr(node, CORE_CONFIG, null);
-
- if (opt != null) {
- p.setConfigName(opt);
- }
- opt = DOMUtil.getAttr(node, CORE_SCHEMA, null);
- if (opt != null) {
- p.setSchemaName(opt);
- }
- if (zkController != null) {
- opt = DOMUtil.getAttr(node, CORE_SHARD, null);
- if (opt != null && opt.length() > 0) {
- p.getCloudDescriptor().setShardId(opt);
+
+ NodeList nodes = (NodeList) cfg.evaluate("solr/cores/core",
+ XPathConstants.NODESET);
+
+ // setup executor to load cores in parallel
+ coreLoadExecutor = new ThreadPoolExecutor(0, Integer.MAX_VALUE, 5,
+ TimeUnit.SECONDS, new SynchronousQueue<Runnable>(),
+ new DefaultSolrThreadFactory("coreLoadExecutor"));
+ try {
+      // limit how many cores load at once to coreLoadThreads
+ final AdjustableSemaphore semaphore = new AdjustableSemaphore(
+ coreLoadThreads);
+
+ CompletionService<SolrCore> completionService = new ExecutorCompletionService<SolrCore>(
+ coreLoadExecutor);
+ Set<Future<SolrCore>> pending = new HashSet<Future<SolrCore>>();
+
+ for (int i = 0; i < nodes.getLength(); i++) {
+ Node node = nodes.item(i);
+ try {
+ String rawName = DOMUtil.getAttr(node, CORE_NAME, null);
+ if (null == rawName) {
+ throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+ "Each core in solr.xml must have a 'name'");
}
- opt = DOMUtil.getAttr(node, CORE_COLLECTION, null);
+ final String name = rawName;
+ final CoreDescriptor p = new CoreDescriptor(this, name,
+ DOMUtil.getAttr(node, CORE_INSTDIR, null));
+
+ // deal with optional settings
+ String opt = DOMUtil.getAttr(node, CORE_CONFIG, null);
+
if (opt != null) {
- p.getCloudDescriptor().setCollectionName(opt);
+ p.setConfigName(opt);
}
- opt = DOMUtil.getAttr(node, CORE_ROLES, null);
- if(opt != null){
- p.getCloudDescriptor().setRoles(opt);
+ opt = DOMUtil.getAttr(node, CORE_SCHEMA, null);
+ if (opt != null) {
+ p.setSchemaName(opt);
}
+
+ if (zkController != null) {
+ opt = DOMUtil.getAttr(node, CORE_SHARD, null);
+ if (opt != null && opt.length() > 0) {
+ p.getCloudDescriptor().setShardId(opt);
+ }
+ opt = DOMUtil.getAttr(node, CORE_COLLECTION, null);
+ if (opt != null) {
+ p.getCloudDescriptor().setCollectionName(opt);
+ }
+ opt = DOMUtil.getAttr(node, CORE_ROLES, null);
+ if (opt != null) {
+ p.getCloudDescriptor().setRoles(opt);
+ }
+ }
+ opt = DOMUtil.getAttr(node, CORE_PROPERTIES, null);
+ if (opt != null) {
+ p.setPropertiesName(opt);
+ }
+ opt = DOMUtil.getAttr(node, CORE_DATADIR, null);
+ if (opt != null) {
+ p.setDataDir(opt);
+ }
+
+ p.setCoreProperties(readProperties(cfg, node));
+
+ opt = DOMUtil.getAttr(node, CORE_LOADONSTARTUP, null);
+ if (opt != null) {
+ p.setLoadOnStartup(("true".equalsIgnoreCase(opt) || "on"
+ .equalsIgnoreCase(opt)) ? true : false);
+ }
+
+ opt = DOMUtil.getAttr(node, CORE_SWAPPABLE, null);
+ if (opt != null) {
+ p.setSwappable(("true".equalsIgnoreCase(opt) || "on"
+ .equalsIgnoreCase(opt)) ? true : false);
+ }
+
+          if (!p.isSwappable() && p.isLoadOnStartup()) { // just like the current case
+ Callable<SolrCore> task = new Callable<SolrCore>() {
+ public SolrCore call() {
+ SolrCore c = null;
+ try {
+ c = create(p);
+ register(name, c, false);
+ } catch (Throwable t) {
+ SolrException.log(log, null, t);
+ if (c != null) {
+ c.close();
+ }
+ }
+ semaphore.release();
+
+ return c;
+ }
+ };
+
+ try {
+ semaphore.acquire();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ throw new SolrException(ErrorCode.SERVER_ERROR,
+ "Interrupted while loading SolrCore(s)", e);
+ }
+
+ try {
+ pending.add(completionService.submit(task));
+ } catch (RejectedExecutionException e) {
+ semaphore.release();
+ throw e;
+ }
+
+ } else {
+            // Store the descriptor for later use; this covers cores that are
+            // not swappable but are also not loaded at startup.
+ dynamicDescriptors.put(rawName, p);
+ }
+ } catch (Throwable ex) {
+ SolrException.log(log, null, ex);
}
- opt = DOMUtil.getAttr(node, CORE_PROPERTIES, null);
- if (opt != null) {
- p.setPropertiesName(opt);
- }
- opt = DOMUtil.getAttr(node, CORE_DATADIR, null);
- if (opt != null) {
- p.setDataDir(opt);
+ }
+
+ while (pending != null && pending.size() > 0) {
+ try {
+ Future<SolrCore> future = completionService.take();
+ if (future == null) return;
+ pending.remove(future);
+
+ try {
+ SolrCore c = future.get();
+ // track original names
+ if (c != null) {
+ coreToOrigName.put(c, c.getName());
+ }
+ } catch (ExecutionException e) {
+ // shouldn't happen since we catch exceptions ourselves
+ SolrException.log(SolrCore.log,
+              "error while loading a SolrCore", e);
+ }
+
+ } catch (InterruptedException e) {
+ throw new SolrException(SolrException.ErrorCode.SERVICE_UNAVAILABLE,
+            "interrupted while waiting for SolrCore loads to finish", e);
}
-
- p.setCoreProperties(readProperties(cfg, node));
-
- SolrCore core = create(p);
- register(name, core, false);
-
- // track original names
- coreToOrigName.put(core, rawName);
}
- catch (Throwable ex) {
- SolrException.log(log,null,ex);
+ } finally {
+ if (coreLoadExecutor != null) {
+ ExecutorUtil.shutdownNowAndAwaitTermination(coreLoadExecutor);
}
}
}
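
The loading hunk above caps parallelism with a semaphore sized from coreLoadThreads, submits each core load to an ExecutorCompletionService, and then drains the pending futures. A self-contained sketch of that acquire/submit/drain pattern, with hypothetical names and a plain java.util.concurrent.Semaphore standing in for AdjustableSemaphore:

    import java.util.HashSet;
    import java.util.Set;
    import java.util.concurrent.Callable;
    import java.util.concurrent.CompletionService;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorCompletionService;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.Semaphore;

    public class BoundedLoader {
      public static void main(String[] args) throws InterruptedException {
        ExecutorService executor = Executors.newCachedThreadPool();
        CompletionService<String> completionService =
            new ExecutorCompletionService<String>(executor);
        final Semaphore semaphore = new Semaphore(3);  // at most 3 loads in flight
        Set<Future<String>> pending = new HashSet<Future<String>>();

        for (int i = 0; i < 10; i++) {
          final int id = i;
          semaphore.acquire();                         // blocks once 3 tasks are running
          pending.add(completionService.submit(new Callable<String>() {
            public String call() {
              try {
                return "core" + id;                    // stand-in for create() and register()
              } finally {
                semaphore.release();                   // free a slot for the next submit
              }
            }
          }));
        }

        while (!pending.isEmpty()) {                   // drain results as they complete
          Future<String> done = completionService.take();
          pending.remove(done);
          try {
            System.out.println("loaded " + done.get());
          } catch (ExecutionException e) {
            e.printStackTrace();                       // a real loader would record this
          }
        }
        executor.shutdown();
      }
    }
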
@@ -579,7 +717,7 @@ public class CoreContainer
}
try {
synchronized (cores) {
-
+
for (SolrCore core : cores.values()) {
try {
core.close();
@@ -589,6 +727,16 @@ public class CoreContainer
}
cores.clear();
}
+ synchronized (swappableCores) {
+ for (SolrCore core : swappableCores.values()) {
+ try {
+ core.close();
+ } catch (Throwable t) {
+ SolrException.log(log, "Error shutting down core", t);
+ }
+ }
+ swappableCores.clear();
+ }
} finally {
if (shardHandlerFactory != null) {
shardHandlerFactory.close();
@@ -642,6 +790,14 @@ public class CoreContainer
* @return a previous core having the same name if it existed
*/
public SolrCore register(String name, SolrCore core, boolean returnPrevNotClosed) {
+ return registerCore(cores, name, core, returnPrevNotClosed);
+ }
+
+ protected SolrCore registerLazyCore(String name, SolrCore core, boolean returnPrevNotClosed) {
+ return registerCore(swappableCores, name, core, returnPrevNotClosed);
+ }
+
+ protected SolrCore registerCore(Map<String,SolrCore> whichCores, String name, SolrCore core, boolean returnPrevNotClosed) {
if( core == null ) {
throw new RuntimeException( "Can not register a null core." );
}
@@ -668,12 +824,12 @@ public class CoreContainer
}
SolrCore old = null;
- synchronized (cores) {
+ synchronized (whichCores) {
if (isShutDown) {
core.close();
throw new IllegalStateException("This CoreContainer has been shutdown");
}
- old = cores.put(name, core);
+ old = whichCores.put(name, core);
coreInitFailures.remove(name);
/*
* set both the name of the descriptor and the name of the
@@ -739,18 +895,99 @@ public class CoreContainer
return register(core.getName(), core, returnPrev);
}
+ // Helper method to separate out creating a core from ZK as opposed to the "usual" way. See create()
+ private SolrCore createFromZk(String instanceDir, CoreDescriptor dcore)
+ {
+ try {
+ SolrResourceLoader solrLoader = null;
+ SolrConfig config = null;
+ String zkConfigName = null;
+ IndexSchema schema;
+ String collection = dcore.getCloudDescriptor().getCollectionName();
+ zkController.createCollectionZkNode(dcore.getCloudDescriptor());
+
+ zkConfigName = zkController.readConfigName(collection);
+ if (zkConfigName == null) {
+ log.error("Could not find config name for collection:" + collection);
+ throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
+ "Could not find config name for collection:" + collection);
+ }
+ solrLoader = new ZkSolrResourceLoader(instanceDir, zkConfigName, libLoader, getCoreProps(instanceDir,
+ dcore.getPropertiesName(), dcore.getCoreProperties()), zkController);
+ config = getSolrConfigFromZk(zkConfigName, dcore.getConfigName(), solrLoader);
+ schema = getSchemaFromZk(zkConfigName, dcore.getSchemaName(), config, solrLoader);
+ return new SolrCore(dcore.getName(), null, config, schema, dcore);
+
+ } catch (KeeperException e) {
+ log.error("", e);
+ throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
+ "", e);
+ } catch (InterruptedException e) {
+ // Restore the interrupted status
+ Thread.currentThread().interrupt();
+ log.error("", e);
+ throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
+ "", e);
+ }
+ }
+
+ // Helper method to separate out creating a core from local configuration files. See create()
+ private SolrCore createFromLocal(String instanceDir, CoreDescriptor dcore) {
+ SolrResourceLoader solrLoader = null;
+
+ SolrConfig config = null;
+ solrLoader = new SolrResourceLoader(instanceDir, libLoader, getCoreProps(instanceDir, dcore.getPropertiesName(), dcore.getCoreProperties()));
+ try {
+ config = new SolrConfig(solrLoader, dcore.getConfigName(), null);
+ } catch (Exception e) {
+ throw new SolrException(ErrorCode.SERVER_ERROR, "Could not load config for " + dcore.getConfigName(), e);
+ }
+
+ IndexSchema schema = null;
+ if (indexSchemaCache != null) {
+ File schemaFile = new File(dcore.getSchemaName());
+ if (!schemaFile.isAbsolute()) {
+ schemaFile = new File(solrLoader.getInstanceDir() + "conf"
+ + File.separator + dcore.getSchemaName());
+ }
+ if (schemaFile.exists()) {
+ String key = schemaFile.getAbsolutePath()
+ + ":"
+ + new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT).format(new Date(
+ schemaFile.lastModified()));
+ schema = indexSchemaCache.get(key);
+ if (schema == null) {
+ log.info("creating new schema object for core: " + dcore.name);
+ schema = new IndexSchema(config, dcore.getSchemaName(), null);
+ indexSchemaCache.put(key, schema);
+ } else {
+ log.info("re-using schema object for core: " + dcore.name);
+ }
+ }
+ }
+
+ if (schema == null) {
+ schema = new IndexSchema(config, dcore.getSchemaName(), null);
+ }
+
+ SolrCore core = new SolrCore(dcore.getName(), null, config, schema, dcore);
+
+ if (core.getUpdateHandler().getUpdateLog() != null) {
+ // always kick off recovery if we are in standalone mode.
+ core.getUpdateHandler().getUpdateLog().recoverFromLog();
+ }
+ return core;
+ }
+
/**
* Creates a new core based on a descriptor but does not register it.
*
* @param dcore a core descriptor
* @return the newly created core
*/
- public SolrCore create(CoreDescriptor dcore) throws ParserConfigurationException, IOException, SAXException {
-
- // :TODO: would be really nice if this method wrapped any underlying errors and only threw SolrException
+ public SolrCore create(CoreDescriptor dcore) {
final String name = dcore.getName();
- Exception failure = null;
try {
// Make the instanceDir relative to the cores instanceDir if not absolute
@@ -758,121 +995,18 @@ public class CoreContainer
String instanceDir = idir.getPath();
log.info("Creating SolrCore '{}' using instanceDir: {}",
dcore.getName(), instanceDir);
+
// Initialize the solr config
- SolrResourceLoader solrLoader = null;
-
- SolrConfig config = null;
- String zkConfigName = null;
- if(zkController == null) {
- solrLoader = new SolrResourceLoader(instanceDir, libLoader, getCoreProps(instanceDir, dcore.getPropertiesName(),dcore.getCoreProperties()));
- config = new SolrConfig(solrLoader, dcore.getConfigName(), null);
+ if (zkController != null) {
+ return createFromZk(instanceDir, dcore);
} else {
- try {
- String collection = dcore.getCloudDescriptor().getCollectionName();
- zkController.createCollectionZkNode(dcore.getCloudDescriptor());
-
- zkConfigName = zkController.readConfigName(collection);
- if (zkConfigName == null) {
- log.error("Could not find config name for collection:" + collection);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- "Could not find config name for collection:" + collection);
- }
- solrLoader = new ZkSolrResourceLoader(instanceDir, zkConfigName, libLoader, getCoreProps(instanceDir, dcore.getPropertiesName(),dcore.getCoreProperties()), zkController);
- config = getSolrConfigFromZk(zkConfigName, dcore.getConfigName(), solrLoader);
- } catch (KeeperException e) {
- log.error("", e);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- "", e);
- } catch (InterruptedException e) {
- // Restore the interrupted status
- Thread.currentThread().interrupt();
- log.error("", e);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- "", e);
- }
- }
-
- IndexSchema schema = null;
- if (indexSchemaCache != null) {
- if (zkController == null) {
- File schemaFile = new File(dcore.getSchemaName());
- if (!schemaFile.isAbsolute()) {
- schemaFile = new File(solrLoader.getInstanceDir() + "conf"
- + File.separator + dcore.getSchemaName());
- }
- if (schemaFile.exists()) {
- String key = schemaFile.getAbsolutePath()
- + ":"
- + new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT).format(new Date(
- schemaFile.lastModified()));
- schema = indexSchemaCache.get(key);
- if (schema == null) {
- log.info("creating new schema object for core: " + dcore.name);
- schema = new IndexSchema(config, dcore.getSchemaName(), null);
- indexSchemaCache.put(key, schema);
- } else {
- log.info("re-using schema object for core: " + dcore.name);
- }
- }
- } else {
- // TODO: handle caching from ZooKeeper - perhaps using ZooKeepers versioning
- // Don't like this cache though - how does it empty as last modified changes?
- }
+ return createFromLocal(instanceDir, dcore);
}
- if(schema == null){
- if(zkController != null) {
- try {
- schema = getSchemaFromZk(zkConfigName, dcore.getSchemaName(), config, solrLoader);
- } catch (KeeperException e) {
- log.error("", e);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- "", e);
- } catch (InterruptedException e) {
- // Restore the interrupted status
- Thread.currentThread().interrupt();
- log.error("", e);
- throw new ZooKeeperException(SolrException.ErrorCode.SERVER_ERROR,
- "", e);
- }
- } else {
- schema = new IndexSchema(config, dcore.getSchemaName(), null);
- }
- }
-
- SolrCore core = new SolrCore(dcore.getName(), null, config, schema, dcore);
-
- if (zkController == null && core.getUpdateHandler().getUpdateLog() != null) {
- // always kick off recovery if we are in standalone mode.
- core.getUpdateHandler().getUpdateLog().recoverFromLog();
- }
-
- return core;
// :TODO: Java7...
// http://docs.oracle.com/javase/7/docs/technotes/guides/language/catch-multiple.html
- } catch (ParserConfigurationException e1) {
- failure = e1;
- throw e1;
- } catch (IOException e2) {
- failure = e2;
- throw e2;
- } catch (SAXException e3) {
- failure = e3;
- throw e3;
- } catch (RuntimeException e4) {
- failure = e4;
- throw e4;
- } finally {
- if (null != failure) {
- log.error("Unable to create core: " + name, failure);
- }
- synchronized (coreInitFailures) {
- // remove first so insertion order is updated and newest is last
- coreInitFailures.remove(name);
- if (null != failure) {
- coreInitFailures.put(name, failure);
- }
- }
+ } catch (Exception ex) {
+ throw recordAndThrow(name, "Unable to create core: " + name, ex);
}
}
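
With this refactor create() no longer needs four catch blocks plus a finally; any failure is funnelled through recordAndThrow() (added further down in this patch), which files the exception under the core's name and hands back a wrapped exception to throw. A rough sketch of that record-and-rethrow idiom, using hypothetical names and a plain RuntimeException in place of SolrException:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class CoreFailures {
      // Insertion-ordered, so the most recently failed name is always last.
      private final Map<String,Exception> initFailures =
          new LinkedHashMap<String,Exception>();

      // Record the failure under the given name, then give the caller something to throw.
      private RuntimeException recordAndThrow(String name, String msg, Exception ex) {
        synchronized (initFailures) {
          initFailures.remove(name);  // remove first so re-insertion refreshes the order
          initFailures.put(name, ex);
        }
        return new RuntimeException(msg, ex);
      }

      public Object create(String name) {
        try {
          // Stand-in for the real work of building a core from its descriptor.
          throw new IllegalStateException("config not found");
        } catch (Exception ex) {
          throw recordAndThrow(name, "Unable to create core: " + name, ex);
        }
      }

      public static void main(String[] args) {
        CoreFailures cc = new CoreFailures();
        try {
          cc.create("collection1");
        } catch (RuntimeException e) {
          System.out.println("caught: " + e.getMessage() + ", cause: " + e.getCause());
        }
      }
    }
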
@@ -895,6 +1029,9 @@ public class CoreContainer
synchronized (cores) {
lst.addAll(this.cores.keySet());
}
+ synchronized (swappableCores) {
+ lst.addAll(this.swappableCores.keySet());
+ }
return lst;
}
@@ -910,6 +1047,14 @@ public class CoreContainer
}
}
}
+ synchronized (swappableCores) {
+ for (Map.Entry<String,SolrCore> entry : swappableCores.entrySet()) {
+ if (core == entry.getValue()) {
+ lst.add(entry.getKey());
+ }
+ }
+ }
+
return lst;
}
@@ -947,11 +1092,7 @@ public class CoreContainer
*
* @param name the name of the SolrCore to reload
*/
- public void reload(String name) throws ParserConfigurationException, IOException, SAXException {
-
- // :TODO: would be really nice if this method wrapped any underlying errors and only threw SolrException
-
- Exception failure = null;
+ public void reload(String name) {
try {
name= checkDefault(name);
@@ -1007,29 +1148,8 @@ public class CoreContainer
// :TODO: Java7...
// http://docs.oracle.com/javase/7/docs/technotes/guides/language/catch-multiple.html
- } catch (ParserConfigurationException e1) {
- failure = e1;
- throw e1;
- } catch (IOException e2) {
- failure = e2;
- throw e2;
- } catch (SAXException e3) {
- failure = e3;
- throw e3;
- } catch (RuntimeException e4) {
- failure = e4;
- throw e4;
- } finally {
- if (null != failure) {
- log.error("Unable to reload core: " + name, failure);
- }
- synchronized (coreInitFailures) {
- // remove first so insertion order is updated and newest is last
- coreInitFailures.remove(name);
- if (null != failure) {
- coreInitFailures.put(name, failure);
- }
- }
+ } catch (Exception ex) {
+ throw recordAndThrow(name, "Unable to reload core: " + name, ex);
}
}
@@ -1104,13 +1224,44 @@ public class CoreContainer
* @return the core if found
*/
public SolrCore getCore(String name) {
- name= checkDefault(name);
- synchronized(cores) {
- SolrCore core = cores.get(name);
- if (core != null)
- core.open(); // increment the ref count while still synchronized
- return core;
+ name = checkDefault(name);
+ // Do this in two phases since we don't want to lock access to the cores over a load.
+ SolrCore core;
+
+ synchronized (cores) {
+ core = cores.get(name);
+ if (core != null) {
+ core.open();
+ return core;
+ }
+ }
+
+ if (dynamicDescriptors.size() == 0) return null; // Nobody even tried to define any swappable cores, so we're done.
+
+ // Now look for already loaded swappable cores.
+ synchronized (swappableCores) {
+ core = swappableCores.get(name);
+ if (core != null) {
+ core.open();
+ return core;
+ }
+ }
+ CoreDescriptor desc = dynamicDescriptors.get(name);
+ if (desc == null) { //Nope, no swappable core with this name
+ return null;
+ }
+ try {
+ core = create(desc); // This should throw an error if it fails.
+ core.open();
+ if (desc.isSwappable()) {
+ registerLazyCore(name, core, false); // This is a swappable core
+ } else {
+ register(name, core, false); // This is a "permanent", although deferred-load core
+ }
+ } catch (Exception ex) {
+      throw recordAndThrow(name, "Unable to create core: " + name, ex);
}
+ return core;
}
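
getCore() above now resolves a name in two phases: a quick check of the already-loaded maps under their locks, then, only if a stored descriptor exists, a create() performed outside those locks so a slow load cannot stall every other lookup. A compact sketch of that check-then-create-outside-the-lock shape, with plain Strings standing in for SolrCore and CoreDescriptor:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class LazyRegistry {
      private final Map<String,String> loaded = new HashMap<String,String>();
      private final Map<String,String> descriptors =
          new ConcurrentHashMap<String,String>();       // known cores not yet loaded

      public LazyRegistry() {
        descriptors.put("archive", "conf-for-archive"); // defined, but not loaded at startup
      }

      public String get(String name) {
        // Phase one: return it if already loaded, holding the lock only briefly.
        synchronized (loaded) {
          String hit = loaded.get(name);
          if (hit != null) return hit;
        }
        // Phase two: build it outside the lock so a slow load does not block other callers.
        String desc = descriptors.get(name);
        if (desc == null) return null;                  // nothing registered under that name
        String created = "core built from " + desc;     // stand-in for create(descriptor)
        synchronized (loaded) {
          loaded.put(name, created);                    // register the freshly built core
        }
        return created;
      }

      public static void main(String[] args) {
        LazyRegistry r = new LazyRegistry();
        System.out.println(r.get("archive"));           // built on first use
        System.out.println(r.get("archive"));           // served from the loaded map
        System.out.println(r.get("missing"));           // null: no descriptor for it
      }
    }
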
// ---------------- Multicore self related methods ---------------
@@ -1211,6 +1362,10 @@ public class CoreContainer
if (! (null == defaultCoreName || defaultCoreName.equals("")) ) {
coresAttribs.put("defaultCoreName", defaultCoreName);
}
+
+ if (swappableCacheSize != Integer.MAX_VALUE) {
+ coresAttribs.put("swappableCacheSize", Integer.toString(swappableCacheSize));
+ }
addCoresAttrib(coresAttribs, "hostPort", this.hostPort, DEFAULT_HOST_PORT);
addCoresAttrib(coresAttribs, "zkClientTimeout",
@@ -1218,7 +1373,8 @@ public class CoreContainer
Integer.toString(DEFAULT_ZK_CLIENT_TIMEOUT));
addCoresAttrib(coresAttribs, "hostContext", this.hostContext, DEFAULT_HOST_CONTEXT);
addCoresAttrib(coresAttribs, "leaderVoteWait", this.leaderVoteWait, LEADER_VOTE_WAIT);
-
+ addCoresAttrib(coresAttribs, "coreLoadThreads", Integer.toString(this.coreLoadThreads), Integer.toString(CORE_LOAD_THREADS));
+
List<SolrCoreXMLDef> solrCoreXMLDefs = new ArrayList<SolrCoreXMLDef>();
synchronized (cores) {
@@ -1287,7 +1443,9 @@ public class CoreContainer
String dataDir = dcore.dataDir;
addCoreProperty(coreAttribs, coreNode, CORE_DATADIR, dataDir, null);
-
+ addCoreProperty(coreAttribs, coreNode, CORE_SWAPPABLE, Boolean.toString(dcore.isSwappable()), null);
+ addCoreProperty(coreAttribs, coreNode, CORE_LOADONSTARTUP, Boolean.toString(dcore.isLoadOnStartup()), null);
+
CloudDescriptor cd = dcore.getCloudDescriptor();
String shard = null;
String roles = null;
@@ -1412,19 +1570,33 @@ public class CoreContainer
}
private SolrConfig getSolrConfigFromZk(String zkConfigName, String solrConfigFileName,
- SolrResourceLoader resourceLoader) throws IOException,
- ParserConfigurationException, SAXException, KeeperException,
- InterruptedException {
- byte[] config = zkController.getConfigFileData(zkConfigName, solrConfigFileName);
- InputSource is = new InputSource(new ByteArrayInputStream(config));
- is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(solrConfigFileName));
- SolrConfig cfg = solrConfigFileName == null ? new SolrConfig(
- resourceLoader, SolrConfig.DEFAULT_CONF_FILE, is) : new SolrConfig(
- resourceLoader, solrConfigFileName, is);
+ SolrResourceLoader resourceLoader)
+ {
+ SolrConfig cfg = null;
+ try {
+ byte[] config = zkController.getConfigFileData(zkConfigName, solrConfigFileName);
+ InputSource is = new InputSource(new ByteArrayInputStream(config));
+ is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(solrConfigFileName));
+ cfg = solrConfigFileName == null ? new SolrConfig(
+ resourceLoader, SolrConfig.DEFAULT_CONF_FILE, is) : new SolrConfig(
+ resourceLoader, solrConfigFileName, is);
+ } catch (Exception e) {
+ throw new SolrException(ErrorCode.SERVER_ERROR,
+ "getSolrConfigFromZK failed for " + zkConfigName + " " + solrConfigFileName, e);
+ }
return cfg;
}
-
+
+ // Just to tidy up the code where it did this in-line.
+ private SolrException recordAndThrow(String name, String msg, Exception ex) {
+ synchronized (coreInitFailures) {
+ coreInitFailures.remove(name);
+ coreInitFailures.put(name, ex);
+ }
+ log.error(msg, ex);
+ return new SolrException(ErrorCode.SERVER_ERROR, msg, ex);
+ }
private IndexSchema getSchemaFromZk(String zkConfigName, String schemaName,
SolrConfig config, SolrResourceLoader resourceLoader)
throws KeeperException, InterruptedException {