Posted to commits@lucene.apache.org by da...@apache.org on 2018/11/02 11:33:30 UTC

[09/25] lucene-solr:jira/gradle: Adding dataimporthandler-extras module

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java
new file mode 100644
index 0000000..347a40b
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFieldReader.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test for FieldReaderDataSource
+ *
+ * @see org.apache.solr.handler.dataimport.FieldReaderDataSource
+ * @since 1.4
+ */
+public class TestFieldReader extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  public void simple() {
+    DataImporter di = new DataImporter();
+    di.loadAndInit(config);
+    redirectTempProperties(di);
+
+    TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl();
+    RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null);
+    List<Map<String, Object>> l = new ArrayList<>();
+    l.add(createMap("xml", xml));
+    MockDataSource.setIterator("select * from a", l.iterator());
+    di.runCmd(rp, sw);
+    assertEquals("Hello", sw.docs.get(0).getFieldValue("y"));
+    MockDataSource.clearCache();
+  }
+
+  String config = "<dataConfig>\n" +
+          "  <dataSource type=\"FieldReaderDataSource\" name=\"f\"/>\n" +
+          "  <dataSource type=\"MockDataSource\"/>\n" +
+          "  <document>\n" +
+          "    <entity name=\"a\" query=\"select * from a\" >\n" +
+          "      <entity name=\"b\" dataSource=\"f\" processor=\"XPathEntityProcessor\" forEach=\"/x\" dataField=\"a.xml\">\n" +
+          "        <field column=\"y\" xpath=\"/x/y\"/>\n" +
+          "      </entity>\n" +
+          "    </entity>\n" +
+          "  </document>\n" +
+          "</dataConfig>";
+
+  String xml = "<x>\n" +
+          " <y>Hello</y>\n" +
+          "</x>";
+}
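
The test above drives a full import end to end, relying on MockDataSource having been primed with canned rows. As a minimal standalone sketch of that priming step (hedged: it assumes MockDataSource.getData() simply hands back the iterator registered through the static setIterator(), which is the behavior the test depends on):

    import java.util.ArrayList;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;

    static void primeAndReadBack() {
      // Register one row for the query the root entity will issue.
      List<Map<String, Object>> rows = new ArrayList<>();
      Map<String, Object> row = new HashMap<>();
      row.put("xml", "<x>\n <y>Hello</y>\n</x>");
      rows.add(row);
      MockDataSource.setIterator("select * from a", rows.iterator());

      // Read the row back the way DocBuilder would during runCmd().
      Iterator<Map<String, Object>> it = new MockDataSource().getData("select * from a");
      System.out.println(it.next().get("xml")); // prints the canned XML
      MockDataSource.clearCache();              // always clear between tests
    }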

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java
new file mode 100644
index 0000000..dd2cf72
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListEntityProcessor.java
@@ -0,0 +1,188 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.solr.common.util.SuppressForbidden;
+import org.junit.Test;
+
+/**
+ * <p>
+ * Test for FileListEntityProcessor
+ * </p>
+ *
+ * @since solr 1.3
+ */
+public class TestFileListEntityProcessor extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testSimple() throws IOException {
+    File tmpdir = createTempDir().toFile();
+
+    createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), false);
+    createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), false);
+    createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), false);
+    Map attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, "xml$",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath());
+    Context c = getContext(null,
+            new VariableResolver(), null, Context.FULL_DUMP, Collections.EMPTY_LIST, attrs);
+    FileListEntityProcessor fileListEntityProcessor = new FileListEntityProcessor();
+    fileListEntityProcessor.init(c);
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = fileListEntityProcessor.nextRow();
+      if (f == null)
+        break;
+      fList.add((String) f.get(FileListEntityProcessor.ABSOLUTE_FILE));
+    }
+    assertEquals(2, fList.size());
+  }
+  
+  @Test
+  public void testBiggerSmallerFiles() throws IOException {
+    File tmpdir = createTempDir().toFile();
+
+    long minLength = Long.MAX_VALUE;
+    String smallestFile = "";
+    byte[] content = "abcdefgij".getBytes(StandardCharsets.UTF_8);
+    createFile(tmpdir, "a.xml", content, false);
+    if (minLength > content.length) {
+      minLength = content.length;
+      smallestFile = "a.xml";
+    }
+    content = "abcdefgij".getBytes(StandardCharsets.UTF_8);
+    createFile(tmpdir, "b.xml", content, false);
+    if (minLength > content.length) {
+      minLength = content.length;
+      smallestFile = "b.xml";
+    }
+    content = "abc".getBytes(StandardCharsets.UTF_8);
+    createFile(tmpdir, "c.props", content, false);
+    if (minLength > content.length) {
+      minLength = content.length;
+      smallestFile = "c.props";
+    }
+    Map attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, ".*",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.BIGGER_THAN, String.valueOf(minLength));
+    List<String> fList = getFiles(null, attrs);
+    assertEquals(2, fList.size());
+    Set<String> l = new HashSet<>();
+    l.add(new File(tmpdir, "a.xml").getAbsolutePath());
+    l.add(new File(tmpdir, "b.xml").getAbsolutePath());
+    assertEquals(l, new HashSet<>(fList));
+    attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, ".*",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.SMALLER_THAN, String.valueOf(minLength+1));
+    fList = getFiles(null, attrs);
+    l.clear();
+    l.add(new File(tmpdir, smallestFile).getAbsolutePath());
+    assertEquals(l, new HashSet<>(fList));
+    attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, ".*",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.SMALLER_THAN, "${a.x}");
+    VariableResolver resolver = new VariableResolver();
+    resolver.addNamespace("a", createMap("x", "4"));
+    fList = getFiles(resolver, attrs);
+    assertEquals(l, new HashSet<>(fList));
+  }
+
+  @SuppressWarnings("unchecked")
+  static List<String> getFiles(VariableResolver resolver, Map attrs) {
+    Context c = getContext(null,
+            resolver, null, Context.FULL_DUMP, Collections.EMPTY_LIST, attrs);
+    FileListEntityProcessor fileListEntityProcessor = new FileListEntityProcessor();
+    fileListEntityProcessor.init(c);
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = fileListEntityProcessor.nextRow();
+      if (f == null)
+        break;
+      fList.add((String) f.get(FileListEntityProcessor.ABSOLUTE_FILE));
+    }
+    return fList;
+  }
+
+  @SuppressForbidden(reason = "Needs currentTimeMillis to set last modified time")
+  @Test
+  public void testNTOT() throws IOException {
+    File tmpdir = createTempDir().toFile();
+
+    createFile(tmpdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
+    createFile(tmpdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
+    createFile(tmpdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
+    Map attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, "xml$",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.OLDER_THAN, "'NOW'");
+    List<String> fList = getFiles(null, attrs);
+    assertEquals(2, fList.size());
+    attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, ".xml$",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.NEWER_THAN, "'NOW-2HOURS'");
+    fList = getFiles(null, attrs);
+    assertEquals(2, fList.size());
+
+    // Use a variable for newerThan
+    attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, ".xml$",
+            FileListEntityProcessor.BASE_DIR, tmpdir.getAbsolutePath(),
+            FileListEntityProcessor.NEWER_THAN, "${a.x}");
+    VariableResolver resolver = new VariableResolver();
+    String lastMod = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss", Locale.ROOT).format(new Date(System.currentTimeMillis() - 50000));
+    resolver.addNamespace("a", createMap("x", lastMod));
+    createFile(tmpdir, "t.xml", "t.xml".getBytes(StandardCharsets.UTF_8), false);
+    fList = getFiles(resolver, attrs);
+    assertEquals(1, fList.size());
+    assertEquals("File name must be t.xml", new File(tmpdir, "t.xml").getAbsolutePath(), fList.get(0));
+  }
+
+  @Test
+  public void testRECURSION() throws IOException {
+    File tmpdir = createTempDir().toFile();
+    File childdir = new File(tmpdir, "child");
+    childdir.mkdir();
+    createFile(childdir, "a.xml", "a.xml".getBytes(StandardCharsets.UTF_8), true);
+    createFile(childdir, "b.xml", "b.xml".getBytes(StandardCharsets.UTF_8), true);
+    createFile(childdir, "c.props", "c.props".getBytes(StandardCharsets.UTF_8), true);
+    Map attrs = createMap(
+            FileListEntityProcessor.FILE_NAME, "^.*\\.xml$",
+            FileListEntityProcessor.BASE_DIR, childdir.getAbsolutePath(),
+            FileListEntityProcessor.RECURSIVE, "true");
+    List<String> fList = getFiles(null, attrs);
+    assertEquals(2, fList.size());
+  }
+}
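
Since getFiles() above only feeds an attribute map to FileListEntityProcessor, the individual filters can be combined in a single entity. A hedged sketch reusing the test's own helper (the baseDir value is a hypothetical placeholder):

    Map attrs = createMap(
        FileListEntityProcessor.FILE_NAME, "\\.xml$",
        FileListEntityProcessor.BASE_DIR, "/some/dir",
        FileListEntityProcessor.BIGGER_THAN, "3",
        FileListEntityProcessor.NEWER_THAN, "'NOW-1DAY'");
    List<String> recentBigXmlFiles = getFiles(null, attrs);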

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListWithLineEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListWithLineEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListWithLineEntityProcessor.java
new file mode 100644
index 0000000..aad8e30
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestFileListWithLineEntityProcessor.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.File;
+import java.nio.charset.StandardCharsets;
+
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.junit.BeforeClass;
+
+public class TestFileListWithLineEntityProcessor extends AbstractDataImportHandlerTestCase {
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("dataimport-solrconfig.xml", "dataimport-schema.xml");
+  }
+  
+  public void test() throws Exception {
+    File tmpdir = createTempDir(LuceneTestCase.getTestClass().getSimpleName()).toFile();
+    createFile(tmpdir, "a.txt", "a line one\na line two\na line three".getBytes(StandardCharsets.UTF_8), false);
+    createFile(tmpdir, "b.txt", "b line one\nb line two".getBytes(StandardCharsets.UTF_8), false);
+    createFile(tmpdir, "c.txt", "c line one\nc line two\nc line three\nc line four".getBytes(StandardCharsets.UTF_8), false);
+    
+    String config = generateConfig(tmpdir);
+    LocalSolrQueryRequest request = lrf.makeRequest(
+        "command", "full-import", "dataConfig", config,
+        "clean", "true", "commit", "true", "synchronous", "true", "indent", "true");
+    h.query("/dataimport", request);
+    
+    assertQ(req("*:*"), "//*[@numFound='9']");
+    assertQ(req("id:?\\ line\\ one"), "//*[@numFound='3']");
+    assertQ(req("id:a\\ line*"), "//*[@numFound='3']");
+    assertQ(req("id:b\\ line*"), "//*[@numFound='2']");
+    assertQ(req("id:c\\ line*"), "//*[@numFound='4']");    
+  }
+  
+  private String generateConfig(File dir) {
+    return
+    "<dataConfig> \n"+
+    "<dataSource type=\"FileDataSource\" encoding=\"UTF-8\" name=\"fds\"/> \n"+
+    "    <document> \n"+
+    "       <entity name=\"f\" processor=\"FileListEntityProcessor\" fileName=\".*[.]txt\" baseDir=\"" + dir.getAbsolutePath() + "\" recursive=\"false\" rootEntity=\"false\"  transformer=\"TemplateTransformer\"> \n" +
+    "             <entity name=\"jc\" processor=\"LineEntityProcessor\" url=\"${f.fileAbsolutePath}\" dataSource=\"fds\"  rootEntity=\"true\" transformer=\"TemplateTransformer\"> \n" +
+    "              <field column=\"rawLine\" name=\"id\" /> \n" +
+    "             </entity> \n"+              
+    "        </entity> \n"+
+    "    </document> \n"+
+    "</dataConfig> \n";
+  }  
+}
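
The ids indexed by this test are whole raw lines, so they contain spaces and the queries above escape them by hand ("id:a\\ line*"). The same escaping can be done programmatically with SolrJ's ClientUtils, sketched here for illustration:

    import org.apache.solr.client.solrj.util.ClientUtils;

    String id = "a line one";
    String escaped = ClientUtils.escapeQueryChars(id); // -> "a\ line\ one"
    // equivalent to the hand-escaped req("id:a\\ line\\ one") used in the test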

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
new file mode 100644
index 0000000..4a3ba72
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestHierarchicalDocBuilder.java
@@ -0,0 +1,479 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.BooleanClause.Occur;
+import org.apache.lucene.search.BooleanQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.search.join.BitSetProducer;
+import org.apache.lucene.search.join.QueryBitSetProducer;
+import org.apache.lucene.search.join.ScoreMode;
+import org.apache.lucene.search.join.ToParentBlockJoinQuery;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.handler.dataimport.config.ConfigNameConstants;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.search.SolrIndexSearcher;
+import org.apache.solr.util.TestHarness;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Test for DocBuilder using the test harness. 
+ * <b>Documents are hierarchical in this test, i.e. each document has nested child documents.</b>
+ */
+public class TestHierarchicalDocBuilder extends AbstractDataImportHandlerTestCase {
+
+  private static final String FIELD_ID = "id";
+  private int id = 0; //unique id
+  private SolrQueryRequest req;
+  
+  /**
+   * Holds the data related to the randomly created index;
+   * it is used for making assertions.
+   */
+  private static class ContextHolder {
+    /** Total number of documents **/
+    int counter = 0;
+    
+    /**
+     * Each Hierarchy object represents nested documents with a parent at the root of the hierarchy
+     */
+    List<Hierarchy> hierarchies = new ArrayList<Hierarchy>();
+  }
+  
+  /**
+   * Represents a hierarchical document structure
+   */
+  private static class Hierarchy {
+    
+    /**
+     * Type of the element, i.e. parent, child, grandchild, etc.
+     */
+    String elementType;
+    
+    /**
+     * Fields of the current element
+     */
+    Map<String, Object> elementData = new HashMap<String,Object>();
+    
+    /**
+     * Hierarchies of nested elements/documents.
+     */
+    List<Hierarchy> elements = new ArrayList<Hierarchy>();
+  }
+  
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    initCore("dataimport-solrconfig.xml", "dataimport-schema.xml");    
+  }
+  
+  @Before
+  public void before() {
+    req = req("*:*"); // don't really care about query
+    MockDataSource.clearCache();
+  }
+  
+  @After
+  public void after() {
+    req.close();
+    MockDataSource.clearCache();
+  }
+
+  @Test
+  public void testThreeLevelHierarchy() throws Exception {
+    int parentsNum = 3; //fixed for simplicity of test
+    int childrenNum = 0;
+    int grandChildrenNum = 0;
+    
+    final String parentType = "parent";
+    final String childType = "child";
+    final String grandChildType = "grand_child";
+
+    List<String> parentIds = createDataIterator("select * from PARENT", parentType, parentType, parentsNum);
+    Collections.shuffle(parentIds, random());
+    final String parentId1 = parentIds.get(0);
+    String parentId2 = parentIds.get(1);
+    
+    //parent 1 children
+    int firstParentChildrenNum = 3; //fixed for simplicity of test
+    String select = "select * from CHILD where parent_id='" + parentId1 + "'";
+    List<String> childrenIds = createDataIterator(select, childType, "child of first parent", firstParentChildrenNum);
+    List<String> firstParentChildrenIds = new ArrayList<String>(childrenIds);
+    childrenNum += childrenIds.size();
+    
+    // grand children of first parent first child
+    final String childId = childrenIds.get(0);
+    String description = "grandchild of first parent, child of " + childId + " child";
+    select = "select * from GRANDCHILD where parent_id='" + childId + "'";
+    List<String> grandChildrenIds = createDataIterator(select, grandChildType, description, atLeast(2));
+    grandChildrenNum += grandChildrenIds.size();
+    
+    // grand children of first parent second child
+    {
+      String childId2 = childrenIds.get(1);
+      description = "grandchild of first parent, child of " + childId2 + " child";
+      select = "select * from GRANDCHILD where parent_id='" + childId2 + "'";
+    }
+    final List<String> grandChildrenIds2 = createDataIterator(select, grandChildType, description, atLeast(2));
+    grandChildrenNum += grandChildrenIds2.size();
+    
+    List<String> allGrandChildrenIds = new ArrayList<>(grandChildrenIds);
+    allGrandChildrenIds.addAll(grandChildrenIds2);
+        
+    // the third child of the first parent has no grandchildren
+    
+    // parent 2 children (no grandchildren)
+    select = "select * from CHILD where parent_id='" + parentId2 + "'";
+    childrenIds = createDataIterator(select, childType, "child of second parent", atLeast(2));
+    childrenNum += childrenIds.size();
+    
+    // parent 3 has no children or grandchildren
+    
+    int totalDocsNum = parentsNum + childrenNum + grandChildrenNum;
+    
+    String resp = runFullImport(THREE_LEVEL_HIERARCHY_CONFIG);
+    String xpath = "//arr[@name='documents']/lst[arr[@name='id']/str='"+parentId1+"']/"+
+      "arr[@name='_childDocuments_']/lst[arr[@name='id']/str='"+childId+"']/"+
+      "arr[@name='_childDocuments_']/lst[arr[@name='id']/str='"+grandChildrenIds.get(0)+"']";
+    String results = TestHarness.validateXPath(resp, xpath);
+    assertTrue("Debug response does not contain expected child documents\n" + resp + "\n" + xpath +
+        "\n" + results, results == null);
+    
+    assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
+    assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
+    assertTrue("Update request processor finish was not called", TestUpdateRequestProcessor.finishCalled);
+    
+    // very simple asserts to check that we at least have correct num of docs indexed
+    assertQ(req("*:*"), "//*[@numFound='" + totalDocsNum + "']");
+    assertQ(req("type_s:parent"), "//*[@numFound='" + parentsNum + "']");
+    assertQ(req("type_s:child"), "//*[@numFound='" + childrenNum + "']");
+    assertQ(req("type_s:grand_child"), "//*[@numFound='" + grandChildrenNum + "']");
+
+    // let's check BlockJoin
+    // get first parent by any grand children
+    String randomGrandChildId = allGrandChildrenIds.get(random().nextInt(allGrandChildrenIds.size()));
+    Query query = createToParentQuery(parentType, FIELD_ID, randomGrandChildId);
+    assertSearch(query, FIELD_ID, parentId1);
+
+    // get first parent by any children 
+    String randomChildId = firstParentChildrenIds.get(random().nextInt(firstParentChildrenIds.size()));
+    query = createToParentQuery(parentType, FIELD_ID, randomChildId);
+    assertSearch(query, FIELD_ID, parentId1);
+    
+    // get parent by children by grand children
+    randomGrandChildId = grandChildrenIds.get(random().nextInt(grandChildrenIds.size()));
+    ToParentBlockJoinQuery childBlockJoinQuery = createToParentQuery(childType, FIELD_ID, randomGrandChildId);
+    ToParentBlockJoinQuery blockJoinQuery = new ToParentBlockJoinQuery(childBlockJoinQuery, createParentFilter(parentType), ScoreMode.Avg);
+    assertSearch(blockJoinQuery, FIELD_ID, parentId1);
+  }
+
+  @Test
+  public void testRandomDepthHierarchy() throws Exception {
+    final String parentType = "parent";
+    
+    // Be aware that hierarchies grow exponentially, so
+    // numbers bigger than 6 may lead to significant memory usage
+    // and cause an OOME
+    int parentsNum = 2 + random().nextInt(3);
+    int depth = 2 + random().nextInt(3);
+    
+    ContextHolder holder = new ContextHolder();
+    
+    String config = createRandomizedConfig(depth, parentType, parentsNum, holder);
+    runFullImport(config);
+    
+    assertTrue("Update request processor processAdd was not called", TestUpdateRequestProcessor.processAddCalled);
+    assertTrue("Update request processor processCommit was not callled", TestUpdateRequestProcessor.processCommitCalled);
+    assertTrue("Update request processor finish was not called", TestUpdateRequestProcessor.finishCalled);
+    
+    assertQ(req("type_s:" + parentType), "//*[@numFound='" + parentsNum + "']");
+    assertQ(req("-type_s:"+ parentType), "//*[@numFound='" + (holder.counter - parentsNum) + "']");
+    
+    // let's check BlockJoin
+    Hierarchy randomHierarchy = holder.hierarchies.get(random().nextInt(holder.hierarchies.size()));
+       
+    Query deepestQuery = createBlockJoinQuery(randomHierarchy);
+    assertSearch(deepestQuery, FIELD_ID, (String) randomHierarchy.elementData.get(FIELD_ID));
+  }
+  
+  private Query createBlockJoinQuery(Hierarchy hierarchy) {
+    List<Hierarchy> elements = hierarchy.elements;
+    if (elements.isEmpty()) {
+      BooleanQuery.Builder childQuery = new BooleanQuery.Builder();
+      childQuery.add(new TermQuery(new Term(FIELD_ID, (String) hierarchy.elementData.get(FIELD_ID))), Occur.MUST);
+      return childQuery.build();
+    }
+    
+    Query childQuery = createBlockJoinQuery(elements.get(random().nextInt(elements.size())));
+    return createToParentQuery(hierarchy.elementType, childQuery);
+  }
+
+  private ToParentBlockJoinQuery createToParentQuery(String parentType, String childField, String childFieldValue) {
+    BooleanQuery.Builder childQuery = new BooleanQuery.Builder();
+    childQuery.add(new TermQuery(new Term(childField, childFieldValue)), Occur.MUST);
+    ToParentBlockJoinQuery result = createToParentQuery(parentType, childQuery.build());
+    
+    return result;
+  }
+  
+  private ToParentBlockJoinQuery createToParentQuery(String parentType, Query childQuery) {
+    ToParentBlockJoinQuery blockJoinQuery = new ToParentBlockJoinQuery(childQuery, createParentFilter(parentType), ScoreMode.Avg);
+    
+    return blockJoinQuery;
+  }
+  
+  private void assertSearch(Query query, String field, String... values) throws IOException {
+    /* The search limit is doubled to catch the error case where, for some reason, there are more matching docs than expected */
+    SolrIndexSearcher searcher = req.getSearcher();
+    TopDocs result = searcher.search(query, values.length * 2);
+    assertEquals(values.length, result.totalHits.value);
+    List<String> actualValues = new ArrayList<String>();
+    for (int index = 0; index < values.length; ++index) {
+      Document doc = searcher.doc(result.scoreDocs[index].doc);
+      actualValues.add(doc.get(field));
+    }
+    
+    for (String expectedValue: values) {
+      boolean removed = actualValues.remove(expectedValue);
+      if (!removed) {
+        fail("Search result does not contain expected values");
+      }
+    }
+  }
+  
+  @SuppressWarnings("unchecked")
+  private List<String> createDataIterator(String query, String type, String description, int count) {
+    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
+    List<String> ids = new ArrayList<String>(count);
+    for (int index = 0; index < count; ++index) {
+      String docId = nextId();
+      ids.add(docId);
+      Map<String, Object> doc = createMap(FIELD_ID, docId, "desc", docId + " " + description, "type_s", type);
+      data.add(doc);
+    }
+    Collections.shuffle(data, random());
+    MockDataSource.setIterator(query, data.iterator());
+    
+    return ids;
+  }
+  
+  /**
+   * Creates randomized configuration of a specified depth. Simple configuration example:
+   * 
+   * <pre>
+   * 
+   * &lt;dataConfig>
+   *   &lt;dataSource type="MockDataSource" />
+   *   &lt;document>
+   *     &lt;entity name="parent" query="SELECT * FROM parent">
+   *       &lt;field column="id" />
+   *       &lt;field column="desc" />
+   *       &lt;field column="type_s" />
+   *       &lt;entity child="true" name="parentChild0" query="select * from parentChild0 where parentChild0_parent_id='${parent.id}'">
+   *         &lt;field column="id" />
+   *         &lt;field column="desc" />
+   *         &lt;field column="type_s" />
+   *         &lt;entity child="true" name="parentChild0Child0" query="select * from parentChild0Child0 where parentChild0Child0_parent_id='${parentChild0.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *         &lt;entity child="true" name="parentChild0Child1" query="select * from parentChild0Child1 where parentChild0Child1_parent_id='${parentChild0.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *       &lt;/entity>
+   *       &lt;entity child="true" name="parentChild1" query="select * from parentChild1 where parentChild1_parent_id='${parent.id}'">
+   *         &lt;field column="id" />
+   *         &lt;field column="desc" />
+   *         &lt;field column="type_s" />
+   *         &lt;entity child="true" name="parentChild1Child0" query="select * from parentChild1Child0 where parentChild1Child0_parent_id='${parentChild1.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *         &lt;entity child="true" name="parentChild1Child1" query="select * from parentChild1Child1 where parentChild1Child1_parent_id='${parentChild1.id}'">
+   *           &lt;field column="id" />
+   *           &lt;field column="desc" />
+   *           &lt;field column="type_s" />
+   *         &lt;/entity>
+   *       &lt;/entity>
+   *     &lt;/entity>
+   *   &lt;/document>
+   * &lt;/dataConfig>
+   * 
+   * </pre>
+   * 
+   * Internally configures MockDataSource.
+   **/
+  private String createRandomizedConfig(int depth, String parentType, int parentsNum, ContextHolder holder) {
+    List<Hierarchy> parentData = createMockedIterator(parentType, "SELECT * FROM " + parentType, parentsNum, holder);
+    
+    holder.hierarchies = parentData;
+    
+    String children = createChildren(parentType, 0, depth, parentData, holder);
+    
+    String rootFields = createFieldsList(FIELD_ID, "desc", "type_s");
+    String rootEntity = StrUtils.formatString(ROOT_ENTITY_TEMPLATE, parentType, "SELECT * FROM " + parentType, rootFields, children);
+
+    String config = StrUtils.formatString(DATA_CONFIG_TEMPLATE, rootEntity);
+    return config;
+  }
+  
+  @SuppressWarnings("unchecked")
+  private List<Hierarchy> createMockedIterator(String type, String query, int amount, ContextHolder holder) {
+    List<Hierarchy> hierarchies = new ArrayList<Hierarchy>();
+    List<Map<String, Object>> data = new ArrayList<Map<String, Object>>();
+    for (int index = 0; index < amount; ++index) {
+      holder.counter++;      
+      String idStr = String.valueOf(holder.counter);
+      Map<String, Object> element = createMap(FIELD_ID, idStr, "desc", type + "_" + holder.counter, "type_s", type);
+      data.add(element);
+      
+      Hierarchy hierarchy = new Hierarchy();
+      hierarchy.elementType = type;
+      hierarchy.elementData = element;
+      hierarchies.add(hierarchy);
+    }
+    
+    MockDataSource.setIterator(query, data.iterator());
+    
+    return hierarchies;
+  }
+  
+  private List<Hierarchy> createMockedIterator(String type, List<Hierarchy> parentData, ContextHolder holder) {
+    List<Hierarchy> result = new ArrayList<Hierarchy>();
+    for (Hierarchy parentHierarchy: parentData) {
+      Map<String, Object> data = parentHierarchy.elementData;
+      String id = (String) data.get(FIELD_ID);
+      String select = String.format(Locale.ROOT, "select * from %s where %s='%s'", type, type + "_parent_id", id);
+      
+      // Number of actual children documents
+      int childrenNum = 1 + random().nextInt(3);
+      List<Hierarchy> childHierarchies = createMockedIterator(type, select, childrenNum, holder);
+      parentHierarchy.elements.addAll(childHierarchies);
+      result.addAll(childHierarchies);
+    }
+    return result;
+  }
+
+  private String createChildren(String parentName, int currentLevel, int maxLevel,
+      List<Hierarchy> parentData, ContextHolder holder) {
+    
+    if (currentLevel == maxLevel) { //recursion base
+      return "";
+    }
+    
+    // number of different child entity types per parent, e.g. parentChild0, parentChild1
+    // @see #createMockedIterator for the actual number of children of each type
+    int childrenNumber = 2 + random().nextInt(3);
+    StringBuilder builder = new StringBuilder();
+    for (int childIndex = 0; childIndex < childrenNumber; ++childIndex) {
+      String childName = parentName + "Child" + childIndex;
+      String fields = createFieldsList(FIELD_ID, "desc", "type_s");
+      String select = String.format(Locale.ROOT, "select * from %s where %s='%s'", childName, childName + "_parent_id", "${" + parentName + ".id}");
+      
+      //for each child entity create several iterators
+      List<Hierarchy> childData = createMockedIterator(childName, parentData, holder);
+      
+      String subChildren = createChildren(childName, currentLevel + 1, maxLevel, childData, holder);
+      String child = StrUtils.formatString(CHILD_ENTITY_TEMPLATE, childName, select, fields, subChildren);
+      builder.append(child);
+      builder.append('\n');
+    }
+    
+    return builder.toString();
+  }
+  
+  private String createFieldsList(String... fields) {
+    StringBuilder builder = new StringBuilder();
+    for (String field: fields) {
+      String text = String.format(Locale.ROOT, "<field column='%s' />", field);
+      builder.append(text);
+      builder.append('\n');
+    }
+    return builder.toString();
+  }
+
+  private static final String THREE_LEVEL_HIERARCHY_CONFIG = "<dataConfig>\n" +
+      "  <dataSource type='MockDataSource' />\n" +
+      "  <document>\n" +
+      "    <entity name='PARENT' query='select * from PARENT'>\n" +
+      "      <field column='id' />\n" +
+      "      <field column='desc' />\n" +
+      "      <field column='type_s' />\n" +
+      "      <entity child='true' name='CHILD' query=\"select * from CHILD where parent_id='${PARENT.id}'\">\n" +
+      "        <field column='id' />\n" +
+      "        <field column='desc' />\n" +
+      "        <field column='type_s' />\n" +
+      "          <entity child='true' name='GRANDCHILD' query=\"select * from GRANDCHILD where parent_id='${CHILD.id}'\">\n" +
+      "            <field column='id' />\n" +
+      "            <field column='desc' />\n" +
+      "            <field column='type_s' />\n" +
+      "          </entity>\n" +
+      "      </entity>\n" +
+      "    </entity>\n" +
+      "  </document>\n" +
+      "</dataConfig>";
+  
+  /** {0} is rootEntity block **/
+  private static final String DATA_CONFIG_TEMPLATE = "<dataConfig><dataSource type=\"MockDataSource\" />\n<document>\n {0}</document></dataConfig>";
+  
+  /** 
+   * {0} - entityName, 
+   * {1} - select query
+   * {2} - fieldsList
+   * {3} - childEntitiesList 
+   **/
+  private static final String ROOT_ENTITY_TEMPLATE = "<entity name=\"{0}\" query=\"{1}\">\n{2} {3}\n</entity>\n";
+  
+  /** 
+   * {0} - entityName, 
+   * {1} - select query
+   * {2} - fieldsList
+   * {3} - childEntitiesList 
+   **/
+  private static final String CHILD_ENTITY_TEMPLATE = "<entity " + ConfigNameConstants.CHILD + "=\"true\" name=\"{0}\" query=\"{1}\">\n {2} {3} </entity>\n";
+  
+  private BitSetProducer createParentFilter(String type) {
+    BooleanQuery.Builder parentQuery = new BooleanQuery.Builder();
+    parentQuery.add(new TermQuery(new Term("type_s", type)), Occur.MUST);
+    return new QueryBitSetProducer(parentQuery.build());
+  }
+  
+  private String nextId() {
+    ++id;
+    return String.valueOf(id);
+  }
+  
+}
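
The block-join assertions above build ToParentBlockJoinQuery objects directly against the Lucene searcher. At the Solr request level the equivalent check would go through the {!parent} query parser; a hedged sketch under the same schema assumptions (type_s marks the parent level, exactly as in createParentFilter(); the id value is hypothetical):

    q = {!parent which='type_s:parent'}id:42

As with the BitSetProducer built in createParentFilter(), the "which" filter must match every parent document and nothing below it, otherwise the join returns wrong results.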

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
new file mode 100644
index 0000000..e44e7f9
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSource.java
@@ -0,0 +1,662 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.sql.Connection;
+import java.sql.Driver;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import javax.sql.DataSource;
+
+import org.apache.solr.common.util.SuppressForbidden;
+import org.apache.solr.handler.dataimport.JdbcDataSource.ResultSetIterator;
+import static org.mockito.Mockito.*;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * <p>
+ * Test for JdbcDataSource
+ * </p>
+ * <p>
+ * Note: Tests that need a real database are ignored due to the lack of DB support for testing
+ * </p>
+ *
+ * @since solr 1.3
+ */
+public class TestJdbcDataSource extends AbstractDataImportHandlerTestCase {
+  private Driver driver;
+  private DataSource dataSource;
+  private Connection connection;
+  private JdbcDataSource jdbcDataSource = new JdbcDataSource();
+  List<Map<String, String>> fields = new ArrayList<>();
+
+  Context context = AbstractDataImportHandlerTestCase.getContext(null, null,
+          jdbcDataSource, Context.FULL_DUMP, fields, null);
+
+  Properties props = new Properties();
+
+  String sysProp = System.getProperty("java.naming.factory.initial");
+
+  @BeforeClass
+  public static void beforeClass() {
+    assumeWorkingMockito();
+  }
+  
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    System.setProperty("java.naming.factory.initial",
+            MockInitialContextFactory.class.getName());
+    
+    driver = mock(Driver.class);
+    dataSource = mock(DataSource.class);
+    connection = mock(Connection.class);
+    props.clear();
+  }
+
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    if (sysProp == null) {
+      System.getProperties().remove("java.naming.factory.initial");
+    } else {
+      System.setProperty("java.naming.factory.initial", sysProp);
+    }
+    super.tearDown();
+    reset(driver, dataSource, connection);
+  }
+
+  @Test
+  public void testRetrieveFromJndi() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    Connection conn = jdbcDataSource.createConnectionFactory(context, props)
+            .call();
+
+    verify(connection).setAutoCommit(false);
+    verify(dataSource).getConnection();
+
+    assertSame("connection", conn, connection);
+  }
+
+  @Test
+  public void testRetrieveFromJndiWithCredentials() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    props.put("user", "Fred");
+    props.put("password", "4r3d");
+    props.put("holdability", "HOLD_CURSORS_OVER_COMMIT");
+
+    when(dataSource.getConnection("Fred", "4r3d")).thenReturn(
+            connection);
+
+    Connection conn = jdbcDataSource.createConnectionFactory(context, props)
+            .call();
+
+    verify(connection).setAutoCommit(false);
+    verify(connection).setHoldability(1);
+    verify(dataSource).getConnection("Fred", "4r3d");
+
+    assertSame("connection", conn, connection);
+  }
+
+  @Test
+  public void testRetrieveFromJndiWithCredentialsEncryptedAndResolved() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    String user = "Fred";
+    String plainPassword = "MyPassword";
+    String encryptedPassword = "U2FsdGVkX18QMjY0yfCqlfBMvAB4d3XkwY96L7gfO2o=";
+    String propsNamespace = "exampleNamespace";
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+
+    props.put("user", "${" +propsNamespace +".user}");
+    props.put("encryptKeyFile", "${" +propsNamespace +".encryptKeyFile}");
+    props.put("password", "${" +propsNamespace +".password}");
+
+    when(dataSource.getConnection(user, plainPassword)).thenReturn(
+             connection);
+
+    Map<String,Object> values = new HashMap<>();
+    values.put("user", user);
+    values.put("encryptKeyFile", createEncryptionKeyFile());
+    values.put("password", encryptedPassword);
+    context.getVariableResolver().addNamespace(propsNamespace, values);
+
+    jdbcDataSource.init(context, props);
+    Connection conn = jdbcDataSource.getConnection();
+
+    verify(connection).setAutoCommit(false);
+    verify(dataSource).getConnection(user, plainPassword);
+
+    assertSame("connection", conn, connection);
+  }
+
+  @Test
+  public void testRetrieveFromJndiWithCredentialsWithEncryptedAndResolvedPwd() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    Properties properties = new Properties();
+    properties.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    properties.put("user", "Fred");
+    properties.put("encryptKeyFile", "${foo.bar}");
+    properties.put("password", "U2FsdGVkX18QMjY0yfCqlfBMvAB4d3XkwY96L7gfO2o=");
+    when(dataSource.getConnection("Fred", "MyPassword")).thenReturn(
+        connection);
+
+    Map<String,Object> values = new HashMap<>();
+    values.put("bar", createEncryptionKeyFile());
+    context.getVariableResolver().addNamespace("foo", values);
+
+    jdbcDataSource.init(context, properties);
+    jdbcDataSource.getConnection();
+
+    verify(connection).setAutoCommit(false);
+    verify(dataSource).getConnection("Fred", "MyPassword");
+  }
+
+  @Test
+  public void testRetrieveFromJndiFailureNotHidden() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+
+    SQLException sqlException = new SQLException("fake");
+    when(dataSource.getConnection()).thenThrow(sqlException);
+
+    try {
+      jdbcDataSource.createConnectionFactory(context, props).call();
+      fail("A SQLException was expected");
+    } catch (SQLException ex) {
+      assertSame(sqlException, ex);
+    }
+
+    verify(dataSource).getConnection();
+  }
+
+  @Test
+  public void testClosesConnectionWhenExceptionThrownOnSetAutocommit() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+
+    SQLException sqlException = new SQLException("fake");
+    when(dataSource.getConnection()).thenReturn(connection);
+    doThrow(sqlException).when(connection).setAutoCommit(false);
+
+    try {
+      jdbcDataSource.createConnectionFactory(context, props).call();
+      fail("A DataImportHandlerException was expected");
+    } catch (DataImportHandlerException ex) {
+      assertSame(sqlException, ex.getCause());
+    }
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection).close();
+  }
+
+  @Test
+  public void testClosesStatementWhenExceptionThrownOnExecuteQuery() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    jdbcDataSource.init(context, props);
+
+    SQLException sqlException = new SQLException("fake");
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenThrow(sqlException);
+
+    try {
+      jdbcDataSource.getData("query");
+      fail("exception expected");
+    } catch (DataImportHandlerException ex) {
+      assertSame(sqlException, ex.getCause());
+    }
+
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement).close();
+  }
+
+  @Test
+  public void testClosesStatementWhenResultSetNull() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    jdbcDataSource.init(context, props);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(false);
+    when(statement.getUpdateCount()).thenReturn(-1);
+
+    jdbcDataSource.getData("query");
+
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement).getUpdateCount();
+    verify(statement).close();
+  }
+
+  @Test
+  public void testClosesStatementWhenHasNextCalledAndResultSetNull() throws Exception {
+
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    jdbcDataSource.init(context, props);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(true);
+    ResultSet resultSet = mock(ResultSet.class);
+    when(statement.getResultSet()).thenReturn(resultSet);
+    ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+    when(resultSet.getMetaData()).thenReturn(metaData);
+    when(metaData.getColumnCount()).thenReturn(0);
+
+    Iterator<Map<String,Object>> data = jdbcDataSource.getData("query");
+
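+    // "this$1" is the synthetic field javac generates on the anonymous Iterator to
+    // reference its enclosing ResultSetIterator; reflection pulls it out so the
+    // test can null the wrapped ResultSet and exercise the cleanup path in hasNext().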
+    ResultSetIterator resultSetIterator = (ResultSetIterator) data.getClass().getDeclaredField("this$1").get(data);
+    resultSetIterator.setResultSet(null);
+
+    data.hasNext();
+
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement).getResultSet();
+    verify(statement).close();
+    verify(resultSet).getMetaData();
+    verify(metaData).getColumnCount();
+  }
+
+  @Test
+  public void testClosesResultSetAndStatementWhenDataSourceIsClosed() throws Exception {
+
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    jdbcDataSource.init(context, props);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(true);
+    ResultSet resultSet = mock(ResultSet.class);
+    when(statement.getResultSet()).thenReturn(resultSet);
+    ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+    when(resultSet.getMetaData()).thenReturn(metaData);
+    when(metaData.getColumnCount()).thenReturn(0);
+
+    jdbcDataSource.getData("query");
+    jdbcDataSource.close();
+
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement).getResultSet();
+    verify(resultSet).getMetaData();
+    verify(metaData).getColumnCount();
+    verify(resultSet).close();
+    verify(statement).close();
+    verify(connection).commit();
+    verify(connection).close();
+  }
+
+  @Test
+  public void testClosesCurrentResultSetIteratorWhenNewOneIsCreated() throws Exception {
+
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+
+    jdbcDataSource.init(context, props);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(true);
+    ResultSet resultSet = mock(ResultSet.class);
+    when(statement.getResultSet()).thenReturn(resultSet);
+    ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+    when(resultSet.getMetaData()).thenReturn(metaData);
+    when(metaData.getColumnCount()).thenReturn(0);
+    when(statement.execute("other query")).thenReturn(false);
+    when(statement.getUpdateCount()).thenReturn(-1);
+
+    jdbcDataSource.getData("query");
+    jdbcDataSource.getData("other query");
+
+    verify(dataSource).getConnection();
+    verify(connection).setAutoCommit(false);
+    verify(connection, times(2)).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement, times(2)).setFetchSize(500);
+    verify(statement, times(2)).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement).getResultSet();
+    verify(resultSet).getMetaData();
+    verify(metaData).getColumnCount();
+    verify(resultSet).close();
+    verify(statement, times(2)).close();
+    verify(statement).execute("other query");
+  }
+  
+  @Test
+  public void testMultipleResultsSets_UpdateCountUpdateCountResultSet() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+    jdbcDataSource.init(context, props);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(false);
+    when(statement.getUpdateCount()).thenReturn(1);
+    when(statement.getMoreResults()).thenReturn(false).thenReturn(true);
+    ResultSet resultSet = mock(ResultSet.class);
+    when(statement.getResultSet()).thenReturn(resultSet);
+    ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+    when(resultSet.getMetaData()).thenReturn(metaData);
+    when(metaData.getColumnCount()).thenReturn(0);
+
+    final ResultSetIterator resultSetIterator = jdbcDataSource.new ResultSetIterator("query");
+    assertSame(resultSet, resultSetIterator.getResultSet());
+
+    verify(connection).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement, times(2)).getUpdateCount();
+    verify(statement, times(2)).getMoreResults();
+  }
+
+  @Test
+  public void testMultipleResultsSets_ResultSetResultSet() throws Exception {
+    MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+    props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+    when(dataSource.getConnection()).thenReturn(connection);
+    jdbcDataSource.init(context, props);
+    connection.setAutoCommit(false);
+
+    Statement statement = mock(Statement.class);
+    when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+        .thenReturn(statement);
+    when(statement.execute("query")).thenReturn(true);
+    ResultSet resultSet1 = mock(ResultSet.class);
+    ResultSet resultSet2 = mock(ResultSet.class);
+    when(statement.getResultSet()).thenReturn(resultSet1).thenReturn(resultSet2).thenReturn(null);
+    when(statement.getMoreResults()).thenReturn(true).thenReturn(false);
+    ResultSetMetaData metaData1 = mock(ResultSetMetaData.class);
+    when(resultSet1.getMetaData()).thenReturn(metaData1);
+    when(metaData1.getColumnCount()).thenReturn(0);
+    when(resultSet1.next()).thenReturn(false);
+    ResultSetMetaData metaData2 = mock(ResultSetMetaData.class);
+    when(resultSet2.getMetaData()).thenReturn(metaData2);
+    when(metaData2.getColumnCount()).thenReturn(0);
+    when(resultSet2.next()).thenReturn(true).thenReturn(false);
+    when(statement.getUpdateCount()).thenReturn(-1);
+
+    final ResultSetIterator resultSetIterator = jdbcDataSource.new ResultSetIterator("query");
+    assertSame(resultSet1, resultSetIterator.getResultSet());
+    assertTrue(resultSetIterator.hasnext());
+    assertSame(resultSet2, resultSetIterator.getResultSet());
+    assertFalse(resultSetIterator.hasnext());
+
+    verify(dataSource).getConnection();
+    verify(connection, times(2)).setAutoCommit(false);
+    verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    verify(statement).setFetchSize(500);
+    verify(statement).setMaxRows(0);
+    verify(statement).execute("query");
+    verify(statement, times(2)).getResultSet();
+    verify(resultSet1).getMetaData();
+    verify(metaData1).getColumnCount();
+    verify(resultSet1).next();
+    verify(resultSet1).close();
+    verify(resultSet2).getMetaData();
+    verify(metaData2).getColumnCount();
+    verify(resultSet2, times(2)).next();
+    verify(resultSet2).close();
+    verify(statement, times(2)).getMoreResults();
+    verify(statement).getUpdateCount();
+    verify(statement).close();
+  }
+
+  @Test
+  public void testRetrieveFromDriverManager() throws Exception {
+    // we're not (directly) using a Mockito-based mock class here because it won't have a
+    // consistent class name that will work with DriverManager's class bindings
+    MockDriver mockDriver = new MockDriver(connection);
+    DriverManager.registerDriver(mockDriver);
+    try {
+      props.put(JdbcDataSource.DRIVER, MockDriver.class.getName());
+      props.put(JdbcDataSource.URL, MockDriver.MY_JDBC_URL);
+      props.put("holdability", "HOLD_CURSORS_OVER_COMMIT");
+
+      Connection conn = jdbcDataSource.createConnectionFactory(context, props).call();
+
+      verify(connection).setAutoCommit(false);
+      verify(connection).setHoldability(1);
+
+      assertSame("connection", conn, connection);
+    } finally {
+      DriverManager.deregisterDriver(mockDriver);
+    }
+  }
+
+
+  @Test
+  public void testEmptyResultSet() throws Exception {
+      MockInitialContextFactory.bind("java:comp/env/jdbc/JndiDB", dataSource);
+
+      props.put(JdbcDataSource.JNDI_NAME, "java:comp/env/jdbc/JndiDB");
+      when(dataSource.getConnection()).thenReturn(connection);
+
+      jdbcDataSource.init(context, props);
+
+      Statement statement = mock(Statement.class);
+      when(connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY))
+          .thenReturn(statement);
+      when(statement.execute("query")).thenReturn(true);
+      ResultSet resultSet = mock(ResultSet.class);
+      when(statement.getResultSet()).thenReturn(resultSet);
+      ResultSetMetaData metaData = mock(ResultSetMetaData.class);
+      when(resultSet.getMetaData()).thenReturn(metaData);
+      when(metaData.getColumnCount()).thenReturn(0);
+      when(resultSet.next()).thenReturn(false);
+      when(statement.getMoreResults()).thenReturn(false);
+      when(statement.getUpdateCount()).thenReturn(-1);
+
+      Iterator<Map<String,Object>> resultSetIterator = jdbcDataSource.getData("query");
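+      // call hasNext() twice: an exhausted iterator must stay exhausted and must close
+      // the statement and result set exactly once (see the single close() verifications)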
+      resultSetIterator.hasNext();
+      resultSetIterator.hasNext();
+
+      verify(connection).setAutoCommit(false);
+      verify(connection).createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+      verify(statement).setFetchSize(500);
+      verify(statement).setMaxRows(0);
+      verify(statement).execute("query");
+      verify(statement).getResultSet();
+      verify(resultSet).getMetaData();
+      verify(metaData).getColumnCount();
+      verify(resultSet).next();
+      verify(resultSet).close();
+      verify(statement).getMoreResults();
+      verify(statement).getUpdateCount();
+      verify(statement).close();
+  }
+
+  @Test
+  @Ignore("Needs a Mock database server to work")
+  public void testBasic() throws Exception {
+    JdbcDataSource dataSource = new JdbcDataSource();
+    Properties p = new Properties();
+    p.put("driver", "com.mysql.jdbc.Driver");
+    p.put("url", "jdbc:mysql://127.0.0.1/autos");
+    p.put("user", "root");
+    p.put("password", "");
+
+    List<Map<String, String>> flds = new ArrayList<>();
+    Map<String, String> f = new HashMap<>();
+    f.put("column", "trim_id");
+    f.put("type", "long");
+    flds.add(f);
+    f = new HashMap<>();
+    f.put("column", "msrp");
+    f.put("type", "float");
+    flds.add(f);
+
+    Context c = getContext(null, null,
+            dataSource, Context.FULL_DUMP, flds, null);
+    dataSource.init(c, p);
+    Iterator<Map<String, Object>> i = dataSource
+            .getData("select make,model,year,msrp,trim_id from atrimlisting where make='Acura'");
+    int count = 0;
+    Object msrp = null;
+    Object trim_id = null;
+    while (i.hasNext()) {
+      Map<String, Object> map = i.next();
+      msrp = map.get("msrp");
+      trim_id = map.get("trim_id");
+      count++;
+    }
+    assertEquals(5, count);
+    assertEquals(Float.class, msrp.getClass());
+    assertEquals(Long.class, trim_id.getClass());
+  }
+  
+  private String createEncryptionKeyFile() throws IOException {
+    File tmpdir = createTempDir().toFile();
+    byte[] content = "secret".getBytes(StandardCharsets.UTF_8);
+    createFile(tmpdir, "enckeyfile.txt", content, false);
+    return new File(tmpdir, "enckeyfile.txt").getAbsolutePath();
+  }
+
+  /**
+   * A stub driver that returns our mocked connection for connection URL {@link #MY_JDBC_URL}.
+   * <p>
+   * This class is used instead of a Mockito mock because {@link DriverManager} looks the driver
+   * up by class name and also requires the driver to behave sanely when other drivers are
+   * registered in the runtime. A simple Mockito mock is likely to break depending on the JVM
+   * runtime version, so this class implements a full {@link Driver} that {@code DriverManager}
+   * can interrogate freely to find the correct driver for a URL.
+   */
+  public static final class MockDriver implements Driver {
+    public static final String MY_JDBC_URL = "jdbc:fakedb";
+    private final Connection conn;
+    
+    public MockDriver() throws SQLException {
+      throw new AssertionError("The driver should never be directly instantiated by DIH's JdbcDataSource");
+    }
+    
+    MockDriver(Connection conn) throws SQLException {
+      this.conn = conn;
+    }
+    
+    @Override
+    public boolean acceptsURL(String url) throws java.sql.SQLException {
+      return MY_JDBC_URL.equals(url);
+    }
+    
+    @Override
+    public Connection connect(String url, Properties info) throws java.sql.SQLException {
+      return acceptsURL(url) ? conn : null;
+    }
+    
+    @Override
+    public int getMajorVersion() {
+      return 1;
+    }
+    
+    @Override
+    public int getMinorVersion() {
+      return 0;
+    }
+    
+    @SuppressForbidden(reason="Required by JDBC")
+    @Override
+    public java.util.logging.Logger getParentLogger() throws java.sql.SQLFeatureNotSupportedException {
+      throw new java.sql.SQLFeatureNotSupportedException();
+    }
+    
+    @Override
+    public java.sql.DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
+      return new java.sql.DriverPropertyInfo[0];
+    }
+    
+    @Override
+    public boolean jdbcCompliant() {
+      // we are not fully compliant:
+      return false;
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java
new file mode 100644
index 0000000..94f437c
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestJdbcDataSourceConvertType.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakAction;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
+import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Properties;
+
+@ThreadLeakAction({ThreadLeakAction.Action.WARN})
+@ThreadLeakLingering(linger = 0)
+@ThreadLeakZombies(ThreadLeakZombies.Consequence.CONTINUE)
+@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+public class TestJdbcDataSourceConvertType extends AbstractDataImportHandlerTestCase {
+  public void testConvertType() throws Throwable {
+    final Locale loc = Locale.getDefault();
+    assumeFalse("Derby is not happy with locale sr-Latn-*",
+        Objects.equals(new Locale("sr").getLanguage(), loc.getLanguage()) &&
+        Objects.equals("Latn", loc.getScript()));
+
+    // ironically, convertType=false causes a BigDecimal-to-String conversion
+    convertTypeTest("false", String.class);
+
+    // convertType=true uses the "long" conversion (see mapping of some_i to "long")
+    convertTypeTest("true", Long.class);
+  }
+
+  private void convertTypeTest(String convertType, Class<?> resultClass) throws Throwable {
+    JdbcDataSource dataSource = new JdbcDataSource();
+    Properties p = new Properties();
+    p.put("driver", "org.apache.derby.jdbc.EmbeddedDriver");
+    p.put("url", "jdbc:derby:memory:tempDB;create=true;territory=en_US");
+    p.put("convertType", convertType);
+
+    List<Map<String, String>> flds = new ArrayList<>();
+    Map<String, String> f = new HashMap<>();
+    f.put("column", "some_i");
+    f.put("type", "long");
+    flds.add(f);
+
+    Context c = getContext(null, null,
+        dataSource, Context.FULL_DUMP, flds, null);
+    dataSource.init(c, p);
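+    // sysibm.sysdummy1 is Derby's built-in one-row dummy table; CAST(9999 AS DECIMAL)
+    // comes back from JDBC as a BigDecimal, which the data source then converts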
+    Iterator<Map<String, Object>> i = dataSource
+        .getData("select 1 as id, CAST(9999 AS DECIMAL) as \"some_i\" from sysibm.sysdummy1");
+    assertTrue(i.hasNext());
+    Map<String, Object> map = i.next();
+    Object val = map.get("some_i");
+    assertEquals(resultClass, val.getClass());
+
+    dataSource.close();
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java
new file mode 100644
index 0000000..eb9989c
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestLineEntityProcessor.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.io.StringReader;
+import java.util.*;
+
+
+/**
+ * <p> Test for LineEntityProcessor </p>
+ *
+ *
+ * @since solr 1.4
+ */
+public class TestLineEntityProcessor extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  /************************************************************************/
+  public void testSimple() throws IOException {
+
+    /* we want to create the equivalent of:
+     *  <entity name="list_all_files" 
+     *           processor="LineEntityProcessor"
+     *           fileName="dummy.lis"
+     *           />
+     */
+
+    Map attrs = createMap(
+            LineEntityProcessor.URL, "dummy.lis",
+            LineEntityProcessor.ACCEPT_LINE_REGEX, null,
+            LineEntityProcessor.SKIP_LINE_REGEX, null
+    );
+
+    Context c = getContext(
+            null,                          //parentEntity
+            new VariableResolver(),  //resolver
+            getDataSource(filecontents),   //parentDataSource
+            Context.FULL_DUMP,                             //currProcess
+            Collections.EMPTY_LIST,        //entityFields
+            attrs                          //entityAttrs
+    );
+    LineEntityProcessor ep = new LineEntityProcessor();
+    ep.init(c);
+
+    // call the entity processor to walk the list of lines
+    if (VERBOSE) System.out.print("\n");
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = ep.nextRow();
+      if (f == null) break;
+      fList.add((String) f.get("rawLine"));
+      if (VERBOSE) System.out.print("     rawLine='" + f.get("rawLine") + "'\n");
+    }
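+    // no accept/skip regex is set, so every line of filecontents is returned,
+    // including the leading blank line and the '#' comment lines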
+    assertEquals(24, fList.size());
+  }
+
+  @Test
+  /************************************************************************/
+  public void testOnly_xml_files() throws IOException {
+
+    /* we want to create the equivalent of:
+     *  <entity name="list_all_files" 
+     *           processor="LineEntityProcessor"
+     *           fileName="dummy.lis"
+     *           acceptLineRegex="xml"
+     *           />
+     */
+    Map attrs = createMap(
+            LineEntityProcessor.URL, "dummy.lis",
+            LineEntityProcessor.ACCEPT_LINE_REGEX, "xml",
+            LineEntityProcessor.SKIP_LINE_REGEX, null
+    );
+
+    Context c = getContext(
+            null,                          //parentEntity
+            new VariableResolver(),  //resolver
+            getDataSource(filecontents),   //parentDataSource
+            Context.FULL_DUMP,                             //currProcess
+            Collections.EMPTY_LIST,        //entityFields
+            attrs                          //entityAttrs
+    );
+    LineEntityProcessor ep = new LineEntityProcessor();
+    ep.init(c);
+
+    // call the entity processor to walk the list of lines
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = ep.nextRow();
+      if (f == null) break;
+      fList.add((String) f.get("rawLine"));
+    }
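+    // acceptLineRegex is an unanchored pattern match, so exactly the 5 lines
+    // containing the substring "xml" are returned (xml.xsd plus the four .xml files)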
+    assertEquals(5, fList.size());
+  }
+
+  @Test
+  /************************************************************************/
+  public void testOnly_xml_files_no_xsd() throws IOException {
+    /* we want to create the equivalent of:
+     *  <entity name="list_all_files" 
+     *           processor="LineEntityProcessor"
+     *           fileName="dummy.lis"
+     *           acceptLineRegex="\\.xml"
+     *           omitLineRegex="\\.xsd"
+     *           />
+     */
+    Map attrs = createMap(
+            LineEntityProcessor.URL, "dummy.lis",
+            LineEntityProcessor.ACCEPT_LINE_REGEX, "\\.xml",
+            LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd"
+    );
+
+    Context c = getContext(
+            null,                          //parentEntity
+            new VariableResolver(),  //resolver
+            getDataSource(filecontents),   //parentDataSource
+            Context.FULL_DUMP,                             //currProcess
+            Collections.EMPTY_LIST,        //entityFields
+            attrs                          //entityAttrs
+    );
+    LineEntityProcessor ep = new LineEntityProcessor();
+    ep.init(c);
+
+    // call the entity processor to walk the list of lines
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = ep.nextRow();
+      if (f == null) break;
+      fList.add((String) f.get("rawLine"));
+    }
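+    // only the four data files ending in .xml match the accept pattern; the skip
+    // pattern guards against names like xml.xsd, which already fail \.xml anyway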
+    assertEquals(4, fList.size());
+  }
+
+  @Test
+  /************************************************************************/
+  public void testNo_xsd_files() throws IOException {
+    /* we want to create the equivalent of:
+     *  <entity name="list_all_files" 
+     *           processor="LineEntityProcessor"
+     *           fileName="dummy.lis"
+     *           omitLineRegex="\\.xsd"
+     *           />
+     */
+    Map attrs = createMap(
+            LineEntityProcessor.URL, "dummy.lis",
+            LineEntityProcessor.SKIP_LINE_REGEX, "\\.xsd"
+    );
+
+    Context c = getContext(
+            null,                          //parentEntity
+            new VariableResolver(),  //resolver
+            getDataSource(filecontents),   //parentDataSource
+            Context.FULL_DUMP,                             //currProcess
+            Collections.EMPTY_LIST,        //entityFields
+            attrs                          //entityAttrs
+    );
+    LineEntityProcessor ep = new LineEntityProcessor();
+    ep.init(c);
+
+    // call the entity processor to walk the list of lines
+    List<String> fList = new ArrayList<>();
+    while (true) {
+      Map<String, Object> f = ep.nextRow();
+      if (f == null) break;
+      fList.add((String) f.get("rawLine"));
+    }
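+    // 18 = 24 total lines minus the 6 lines that match the \.xsd skip pattern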
+    assertEquals(18, fList.size());
+  }
+
+  /**
+   * ********************************************************************
+   */
+  public static Map<String, String> createField(
+          String col,   // DIH column name
+          String type,  // field type from schema.xml
+          String srcCol,  // DIH transformer attribute 'sourceColName'
+          String re,  // DIH regex attribute 'regex'
+          String rw,  // DIH regex attribute 'replaceWith'
+          String gn    // DIH regex attribute 'groupNames'
+  ) {
+    HashMap<String, String> vals = new HashMap<>();
+    vals.put("column", col);
+    vals.put("type", type);
+    vals.put("sourceColName", srcCol);
+    vals.put("regex", re);
+    vals.put("replaceWith", rw);
+    vals.put("groupNames", gn);
+    return vals;
+  }
+
+  private DataSource<Reader> getDataSource(final String xml) {
+    return new DataSource<Reader>() {
+      @Override
+      public void init(Context context, Properties initProps) {
+      }
+
+      @Override
+      public void close() {
+      }
+
+      @Override
+      public Reader getData(String query) {
+        return new StringReader(xml);
+      }
+    };
+  }
+
+  private static final String filecontents =
+          "\n" +
+                  "# this is what the output from 'find . -ls; looks like, athough the format\n" +
+                  "# of the time stamp varies depending on the age of the file and your LANG \n" +
+                  "# env setting\n" +
+                  "412577   0 drwxr-xr-x  6 user group    204 1 Apr 10:53 /Volumes/spare/ts\n" +
+                  "412582   0 drwxr-xr-x 13 user group    442 1 Apr 10:18 /Volumes/spare/ts/config\n" +
+                  "412583  24 -rwxr-xr-x  1 user group   8318 1 Apr 11:10 /Volumes/spare/ts/config/dc.xsd\n" +
+                  "412584  32 -rwxr-xr-x  1 user group  12847 1 Apr 11:10 /Volumes/spare/ts/config/dcterms.xsd\n" +
+                  "412585   8 -rwxr-xr-x  1 user group   3156 1 Apr 11:10 /Volumes/spare/ts/config/s-deliver.css\n" +
+                  "412586 192 -rwxr-xr-x  1 user group  97764 1 Apr 11:10 /Volumes/spare/ts/config/s-deliver.xsl\n" +
+                  "412587 224 -rwxr-xr-x  1 user group 112700 1 Apr 11:10 /Volumes/spare/ts/config/sml-delivery-2.1.xsd\n" +
+                  "412588 208 -rwxr-xr-x  1 user group 103419 1 Apr 11:10 /Volumes/spare/ts/config/sml-delivery-norm-2.0.dtd\n" +
+                  "412589 248 -rwxr-xr-x  1 user group 125296 1 Apr 11:10 /Volumes/spare/ts/config/sml-delivery-norm-2.1.dtd\n" +
+                  "412590  72 -rwxr-xr-x  1 user group  36256 1 Apr 11:10 /Volumes/spare/ts/config/jm.xsd\n" +
+                  "412591   8 -rwxr-xr-x  1 user group    990 1 Apr 11:10 /Volumes/spare/ts/config/video.gif\n" +
+                  "412592   8 -rwxr-xr-x  1 user group   1498 1 Apr 11:10 /Volumes/spare/ts/config/xlink.xsd\n" +
+                  "412593   8 -rwxr-xr-x  1 user group   1155 1 Apr 11:10 /Volumes/spare/ts/config/xml.xsd\n" +
+                  "412594   0 drwxr-xr-x  4 user group    136 1 Apr 10:18 /Volumes/spare/ts/acm19\n" +
+                  "412621   0 drwxr-xr-x 57 user group   1938 1 Apr 10:18 /Volumes/spare/ts/acm19/data\n" +
+                  "412622  24 -rwxr-xr-x  1 user group   8894 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00000510.xml\n" +
+                  "412623  32 -rwxr-xr-x  1 user group  14124 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00000603.xml\n" +
+                  "412624  24 -rwxr-xr-x  1 user group  11976 1 Apr 11:09 /Volumes/spare/ts/acm19/data/00001292.xml\n" +
+                  "# tacked on an extra line to cause a file to be deleted.\n" +
+                  "DELETE /Volumes/spare/ts/acm19/data/00001292old.xml\n" +
+                  "";
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNestedChildren.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNestedChildren.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNestedChildren.java
new file mode 100644
index 0000000..ca1bfda
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNestedChildren.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.lang.invoke.MethodHandles;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestNestedChildren extends AbstractDIHJdbcTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  @Test
+  public void test() throws Exception {
+    h.query("/dataimport", generateRequest());
+    assertQ(req("*:*"), "//*[@numFound='1']");
+    assertQ(req("third_s:CHICKEN"), "//*[@numFound='1']");
+  } 
+  
+  @Override
+  protected String generateConfig() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("<dataConfig> \n");
+    sb.append("<dataSource name=\"derby\" driver=\"org.apache.derby.jdbc.EmbeddedDriver\" url=\"jdbc:derby:memory:derbyDB;territory=en_US\" /> \n");
+    sb.append("<document name=\"TestSimplePropertiesWriter\"> \n");
+    sb.append("<entity name=\"FIRST\" processor=\"SqlEntityProcessor\" dataSource=\"derby\" ");
+    sb.append(" query=\"select 1 as id, 'PORK' as FIRST_S from sysibm.sysdummy1 \" >\n");
+    sb.append("  <field column=\"FIRST_S\" name=\"first_s\" /> \n");
+    sb.append("  <entity name=\"SECOND\" processor=\"SqlEntityProcessor\" dataSource=\"derby\" ");
+    sb.append("   query=\"select 1 as id, 2 as SECOND_ID, 'BEEF' as SECOND_S from sysibm.sysdummy1 WHERE 1=${FIRST.ID}\" >\n");
+    sb.append("   <field column=\"SECOND_S\" name=\"second_s\" /> \n");
+    sb.append("   <entity name=\"THIRD\" processor=\"SqlEntityProcessor\" dataSource=\"derby\" ");
+    sb.append("    query=\"select 1 as id, 'CHICKEN' as THIRD_S from sysibm.sysdummy1 WHERE 2=${SECOND.SECOND_ID}\" >\n");
+    sb.append("    <field column=\"THIRD_S\" name=\"third_s\" /> \n");
+    sb.append("   </entity>\n");
+    sb.append("  </entity>\n");
+    sb.append("</entity>\n");
+    sb.append("</document> \n");
+    sb.append("</dataConfig> \n");
+    String config = sb.toString();
+    log.debug(config); 
+    return config;
+  }
+  
+  @Override
+  protected Database setAllowedDatabases() {
+    return Database.DERBY;
+  }   
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNonWritablePersistFile.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNonWritablePersistFile.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNonWritablePersistFile.java
new file mode 100644
index 0000000..1307927
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNonWritablePersistFile.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestNonWritablePersistFile extends AbstractDataImportHandlerTestCase {
+  private static final String FULLIMPORT_QUERY = "select * from x";
+
+  private static final String DELTA_QUERY = "select id from x where last_modified > NOW";
+
+  private static final String DELETED_PK_QUERY = "select id from x where last_modified > NOW AND deleted='true'";
+
+  private static final String dataConfig_delta =
+    "<dataConfig>" +
+    "  <dataSource  type=\"MockDataSource\"/>\n" +
+    "  <document>\n" +
+    "    <entity name=\"x\" transformer=\"TemplateTransformer\"" +
+    "            query=\"" + FULLIMPORT_QUERY + "\"" +
+    "            deletedPkQuery=\"" + DELETED_PK_QUERY + "\"" +
+    "            deltaImportQuery=\"select * from x where id='${dih.delta.id}'\"" +
+    "            deltaQuery=\"" + DELTA_QUERY + "\">\n" +
+    "      <field column=\"id\" name=\"id\"/>\n" +
+    "      <entity name=\"y\" query=\"select * from y where y.A='${x.id}'\">\n" +
+    "        <field column=\"desc\" />\n" +
+    "      </entity>\n" +
+    "    </entity>\n" +
+    "  </document>\n" +
+    "</dataConfig>\n";
+  private static String tmpSolrHome;
+
+  private static File f;
+
+  @BeforeClass
+  public static void createTempSolrHomeAndCore() throws Exception {
+    tmpSolrHome = createTempDir().toFile().getAbsolutePath();
+    FileUtils.copyDirectory(getFile("dih/solr"), new File(tmpSolrHome).getAbsoluteFile());
+    initCore("dataimport-solrconfig.xml", "dataimport-schema.xml", 
+             new File(tmpSolrHome).getAbsolutePath());
+    
+    // See SOLR-2551
+    String configDir = h.getCore().getResourceLoader().getConfigDir();
+    String filePath = configDir;
+    if (configDir != null && !configDir.endsWith(File.separator)) {
+      filePath += File.separator;
+    }
+    filePath += "dataimport.properties";
+    f = new File(filePath);
+    // execute the test only if we are able to set file to read only mode
+    assumeTrue("No dataimport.properties file", f.exists() || f.createNewFile());
+    assumeTrue("dataimport.properties can't be set read only", f.setReadOnly());
+    assumeFalse("dataimport.properties is still writable even though " + 
+                "marked readonly - test running as superuser?", f.canWrite());
+  }
+  
+  @AfterClass
+  public static void afterClass() throws Exception {
+    if (f != null) {
+      f.setWritable(true);
+    }
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testNonWritablePersistFile() throws Exception {
+    ignoreException("Properties is not writable");
+
+    @SuppressWarnings("rawtypes")
+    List parentRow = new ArrayList();
+    parentRow.add(createMap("id", "1"));
+    MockDataSource.setIterator(FULLIMPORT_QUERY, parentRow.iterator());
+      
+    @SuppressWarnings("rawtypes")
+    List childRow = new ArrayList();
+    childRow.add(createMap("desc", "hello"));
+    MockDataSource.setIterator("select * from y where y.A='1'",
+                                 childRow.iterator());
+      
+    runFullImport(dataConfig_delta);
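+    // the import must abort because dataimport.properties cannot be persisted,
+    // so no documents may have been written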
+    assertQ(req("id:1"), "//*[@numFound='0']");
+  }  
+}