Posted to commits@lucene.apache.org by da...@apache.org on 2018/11/02 11:33:29 UTC

[08/25] lucene-solr:jira/gradle: Adding dataimporthandler-extras module

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java
new file mode 100644
index 0000000..91bdd00
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestNumberFormatTransformer.java
@@ -0,0 +1,160 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.junit.Test;
+
+import java.text.DecimalFormatSymbols;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+/**
+ * <p>
+ * Test for NumberFormatTransformer
+ * </p>
+ *
+ * @since solr 1.3
+ */
+public class TestNumberFormatTransformer extends AbstractDataImportHandlerTestCase {
+  private final char GROUPING_SEP = new DecimalFormatSymbols(Locale.ROOT).getGroupingSeparator();
+  private final char DECIMAL_SEP = new DecimalFormatSymbols(Locale.ROOT).getDecimalSeparator();
+
+  @SuppressWarnings("unchecked")
+  @Test
+  public void testTransformRow_SingleNumber() {
+    char GERMAN_GROUPING_SEP = new DecimalFormatSymbols(Locale.GERMANY).getGroupingSeparator();
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+    l.add(createMap("column", "localizedNum",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER, NumberFormatTransformer.LOCALE, "de-DE"));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String,Object> m = createMap("num", "123" + GROUPING_SEP + "567", "localizedNum", "123" + GERMAN_GROUPING_SEP + "567");
+    new NumberFormatTransformer().transformRow(m, c);
+    assertEquals(123567L, m.get("num"));
+    assertEquals(123567L, m.get("localizedNum"));
+  }
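+
+  // For reference, the data-config.xml equivalent of the two fields above is
+  // roughly this (attribute names per the NumberFormatTransformer constants):
+  //   <field column="num" formatStyle="number" />
+  //   <field column="localizedNum" formatStyle="number" locale="de-DE" />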
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_MultipleNumbers() throws Exception {
+    List<Map<String, String>> fields = new ArrayList<>();
+    fields.add(createMap(DataImporter.COLUMN, "inputs"));
+    fields.add(createMap(DataImporter.COLUMN,
+            "outputs", RegexTransformer.SRC_COL_NAME, "inputs",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+
+    List<String> inputs = new ArrayList<>();
+    inputs.add("123" + GROUPING_SEP + "567");
+    inputs.add("245" + GROUPING_SEP + "678");
+    Map<String, Object> row = createMap("inputs", inputs);
+
+    VariableResolver resolver = new VariableResolver();
+    resolver.addNamespace("e", row);
+
+    Context context = getContext(null, resolver, null, Context.FULL_DUMP, fields, null);
+    new NumberFormatTransformer().transformRow(row, context);
+
+    List<Long> output = new ArrayList<>();
+    output.add(123567L);
+    output.add(245678L);
+    Map<String, Object> outputRow = createMap("inputs", inputs, "outputs", output);
+
+    assertEquals(outputRow, row);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput1_Number() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + GROUPING_SEP + "5a67");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput2_Number() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + GROUPING_SEP + "567b");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput2_Currency() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.CURRENCY));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + GROUPING_SEP + "567b");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput1_Percent() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.PERCENT));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + GROUPING_SEP + "5a67");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput3_Currency() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.CURRENCY));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + DECIMAL_SEP + "456" + DECIMAL_SEP + "789");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_InvalidInput3_Number() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + DECIMAL_SEP + "456" + DECIMAL_SEP + "789");
+    new NumberFormatTransformer().transformRow(m, c);
+  }
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTransformRow_MalformedInput_Number() {
+    List<Map<String, String>> l = new ArrayList<>();
+    l.add(createMap("column", "num",
+            NumberFormatTransformer.FORMAT_STYLE, NumberFormatTransformer.NUMBER));
+    Context c = getContext(null, null, null, Context.FULL_DUMP, l, null);
+    Map<String, Object> m = createMap("num", "123" + GROUPING_SEP + GROUPING_SEP + "789");
+    new NumberFormatTransformer().transformRow(m, c);
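+    // NumberFormat tolerates the doubled grouping separator here, so unlike the
+    // invalid-input cases above this parses cleanly instead of throwing.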
+    assertEquals(123789L, m.get("num"));
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java
new file mode 100644
index 0000000..2385438
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestPlainTextEntityProcessor.java
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.StringReader;
+import java.sql.Blob;
+import java.sql.SQLException;
+import java.util.Collections;
+import java.util.Properties;
+
+import org.apache.solr.common.util.Utils;
+import org.junit.Test;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+
+/**
+ * Test for PlainTextEntityProcessor
+ *
+ * @see org.apache.solr.handler.dataimport.PlainTextEntityProcessor
+ * @since solr 1.4
+ */
+public class TestPlainTextEntityProcessor extends AbstractDataImportHandlerTestCase {
+  @Test
+  public void testSimple() throws IOException {
+    DataImporter di = new DataImporter();
+    di.loadAndInit(DATA_CONFIG);
+    redirectTempProperties(di);
+
+    TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl();
+    RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null);
+    di.runCmd(rp, sw);
+    assertEquals(DS.s, sw.docs.get(0).getFieldValue("x"));
+  }
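+
+  // testSimple wires DS (below) as the data source: its getData() returns a
+  // StringReader over "hello world", which PlainTextEntityProcessor exposes as
+  // the 'plainText' column, mapped to field 'x' by DATA_CONFIG.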
+
+  static class BlobImpl implements Blob {
+    private final byte[] bytes;
+
+    BlobImpl(byte[] bytes) {
+      this.bytes = bytes;
+    }
+
+    @Override
+    public long length() throws SQLException {
+      return 0;
+    }
+
+    @Override
+    public byte[] getBytes(long pos, int length) throws SQLException {
+      return bytes;
+    }
+
+    @Override
+    public InputStream getBinaryStream() throws SQLException {
+      return new ByteArrayInputStream(bytes);
+    }
+
+    @Override
+    public long position(byte[] pattern, long start) throws SQLException {
+      return 0;
+    }
+
+    @Override
+    public long position(Blob pattern, long start) throws SQLException {
+      return 0;
+    }
+
+    @Override
+    public int setBytes(long pos, byte[] bytes) throws SQLException {
+      return 0;
+    }
+
+    @Override
+    public int setBytes(long pos, byte[] bytes, int offset, int len) throws SQLException {
+      return 0;
+    }
+
+    @Override
+    public OutputStream setBinaryStream(long pos) throws SQLException {
+      return null;
+    }
+
+    @Override
+    public void truncate(long len) throws SQLException {
+
+    }
+
+    @Override
+    public void free() throws SQLException {
+
+    }
+
+    @Override
+    public InputStream getBinaryStream(long pos, long length) throws SQLException {
+      return new ByteArrayInputStream(bytes);
+    }
+  }
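+
+  // Only getBytes() and the getBinaryStream() variants above return real data;
+  // the remaining methods are no-op stubs required by the java.sql.Blob interface.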
+
+  @Test
+  public void testSimple2() throws IOException {
+    DataImporter di = new DataImporter();
+    MockDataSource.setIterator("select id, name, blob_field from lw_table4",
+        Collections.singletonList(Utils.makeMap("blob_field", new BlobImpl(DS.s.getBytes(UTF_8)))).iterator());
+
+    String dc =
+        " <dataConfig>" +
+            "<dataSource name=\"ds1\" type=\"MockDataSource\"/>\n" +
+        " <!-- dataSource for FieldReaderDataSource -->\n" +
+        " <dataSource dataField=\"root.blob_field\" name=\"fr\" type=\"FieldReaderDataSource\"/>\n" +
+        "\n" +
+        " <document name=\"items\">\n" +
+        "   <entity dataSource=\"ds1\" name=\"root\" pk=\"id\"  query=\"select id, name, blob_field from lw_table4\" transformer=\"TemplateTransformer\">\n" +
+        "           <field column=\"id\" name=\"id\"/>\n" +
+        "\n" +
+        "        <entity dataField=\"root.blob_field\" dataSource=\"fr\" format=\"text\" name=\"n1\" processor=\"PlainTextEntityProcessor\" url=\"blob_field\">\n" +
+        "                       <field column=\"plainText\" name=\"plainText\"/>\n" +
+        "           </entity>\n" +
+        "\n" +
+        "   </entity>\n" +
+        " </document>\n" +
+        "</dataConfig>";
+    System.out.println(dc);
+    di.loadAndInit(dc);
+    redirectTempProperties(di);
+
+    TestDocBuilder.SolrWriterImpl sw = new TestDocBuilder.SolrWriterImpl();
+    RequestInfo rp = new RequestInfo(null, createMap("command", "full-import"), null);
+    di.runCmd(rp, sw);
+    assertEquals(DS.s, sw.docs.get(0).getFieldValue("plainText"));
+  }
+
+
+  public static class DS extends DataSource {
+    static String s = "hello world";
+
+    @Override
+    public void init(Context context, Properties initProps) {
+
+    }
+
+    @Override
+    public Object getData(String query) {
+
+      return new StringReader(s);
+    }
+
+    @Override
+    public void close() {
+
+    }
+  }
+
+  static String DATA_CONFIG = "<dataConfig>\n" +
+          "\t<dataSource type=\"TestPlainTextEntityProcessor$DS\" />\n" +
+          "\t<document>\n" +
+          "\t\t<entity processor=\"PlainTextEntityProcessor\" name=\"x\" query=\"x\">\n" +
+          "\t\t\t<field column=\"plainText\" name=\"x\" />\n" +
+          "\t\t</entity>\n" +
+          "\t</document>\n" +
+          "</dataConfig>";
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java
new file mode 100644
index 0000000..e792804
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestRegexTransformer.java
@@ -0,0 +1,208 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import static org.apache.solr.handler.dataimport.RegexTransformer.REGEX;
+import static org.apache.solr.handler.dataimport.RegexTransformer.GROUP_NAMES;
+import static org.apache.solr.handler.dataimport.RegexTransformer.REPLACE_WITH;
+import static org.apache.solr.handler.dataimport.DataImporter.COLUMN;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * <p> Test for RegexTransformer </p>
+ *
+ * @since solr 1.3
+ */
+public class TestRegexTransformer extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  public void testCommaSeparated() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    // <field column="col1" sourceColName="a" splitBy="," />
+    fields.add(getField("col1", "string", null, "a", ","));
+    Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
+
+    Map<String, Object> src = new HashMap<>();
+    src.put("a", "a,bb,cc,d");
+
+    Map<String, Object> result = new RegexTransformer().transformRow(src, context);
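+    // the row keeps the source column "a" and gains the split column "col1"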
+    assertEquals(2, result.size());
+    assertEquals(4, ((List) result.get("col1")).size());
+  }
+
+
+  @Test
+  public void testGroupNames() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    // <field column="col1" regex="(\w*)(\w*) (\w*)" groupNames=",firstName,lastName"/>
+    Map<String ,String > m = new HashMap<>();
+    m.put(COLUMN,"fullName");
+    m.put(GROUP_NAMES,",firstName,lastName");
+    m.put(REGEX,"(\\w*) (\\w*) (\\w*)");
+    fields.add(m);
+    Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
+    Map<String, Object> src = new HashMap<>();
+    src.put("fullName", "Mr Noble Paul");
+
+    Map<String, Object> result = new RegexTransformer().transformRow(src, context);
+    assertEquals("Noble", result.get("firstName"));
+    assertEquals("Paul", result.get("lastName"));
+    src = new HashMap<>();
+    List<String> l = new ArrayList<>();
+    l.add("Mr Noble Paul");
+    l.add("Mr Shalin Mangar");
+    src.put("fullName", l);
+    result = new RegexTransformer().transformRow(src, context);
+    List<?> l1 = (List<?>) result.get("firstName");
+    List<?> l2 = (List<?>) result.get("lastName");
+    assertEquals("Noble", l1.get(0));
+    assertEquals("Shalin", l1.get(1));
+    assertEquals("Paul", l2.get(0));
+    assertEquals("Mangar", l2.get(1));
+  }
+
+  @Test
+  public void testReplaceWith() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    // <field column="name" regexp="'" replaceWith="''" />
+    Map<String, String> fld = getField("name", "string", "'", null, null);
+    fld.put(REPLACE_WITH, "''");
+    fields.add(fld);
+    Context context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
+
+    Map<String, Object> src = new HashMap<>();
+    String s = "D'souza";
+    src.put("name", s);
+
+    Map<String, Object> result = new RegexTransformer().transformRow(src,
+            context);
+    assertEquals("D''souza", result.get("name"));
+
+    fld = getField("title_underscore", "string", "\\s+", "title", null);
+    fld.put(REPLACE_WITH, "_");
+    fields.clear();
+    fields.add(fld);
+    context = getContext(null, null, null, Context.FULL_DUMP, fields, null);
+    src.clear();
+    src.put("title", "value with spaces"); // a value which will match the regex
+    result = new RegexTransformer().transformRow(src, context);
+    assertEquals("value_with_spaces", result.get("title_underscore"));
+    src.clear();
+    src.put("title", "valueWithoutSpaces"); // value which will not match regex
+    result = new RegexTransformer().transformRow(src, context);
+    assertEquals("valueWithoutSpaces", result.get("title_underscore")); // value should be returned as-is
+  }
+
+  @Test
+  public void testMileage() {
+    // init a whole pile of fields
+    List<Map<String, String>> fields = getFields();
+
+    // add another regex which reuses result from previous regex again!
+    // <field column="hltCityMPG" sourceColName="rowdata" regexp="(${e.city_mileage})" />
+    Map<String, String> fld = getField("hltCityMPG", "string",
+            ".*(${e.city_mileage})", "rowdata", null);
+    fld.put(REPLACE_WITH, "*** $1 ***");
+    fields.add(fld);
+
+    //  **ATTEMPTS** a match WITHOUT a replaceWith
+    // <field column="t1" sourceColName="rowdata" regexp="duff" />
+    fld = getField("t1", "string","duff", "rowdata", null);
+    fields.add(fld);
+
+    //  **ATTEMPTS** a match WITH a replaceWith (should return original data)
+    // <field column="t2" sourceColName="rowdata" regexp="duff" replaceWith="60"/>
+    fld = getField("t2", "string","duff", "rowdata", null);
+    fld.put(REPLACE_WITH, "60");
+    fields.add(fld);
+
+    //  regex WITH both replaceWith and groupName (groupName ignored!)
+    // <field column="t3" sourceColName="rowdata" regexp="(Range)" />
+    fld = getField("t3", "string","(Range)", "rowdata", null);
+    fld.put(REPLACE_WITH, "range");
+    fld.put(GROUP_NAMES,"t4,t5");
+    fields.add(fld);
+
+    Map<String, Object> row = new HashMap<>();
+    String s = "Fuel Economy Range: 26 mpg Hwy, 19 mpg City";
+    row.put("rowdata", s);
+
+    VariableResolver resolver = new VariableResolver();
+    resolver.addNamespace("e", row);
+    Map<String, String> eAttrs = createMap("name", "e");
+    Context context = getContext(null, resolver, null, Context.FULL_DUMP, fields, eAttrs);
+
+    Map<String, Object> result = new RegexTransformer().transformRow(row, context);
+    assertEquals(6, result.size());
+    assertEquals(s, result.get("t2"));
+    assertEquals(s, result.get("rowdata"));
+    assertEquals("26", result.get("highway_mileage"));
+    assertEquals("19", result.get("city_mileage"));
+    assertEquals("*** 19 *** mpg City", result.get("hltCityMPG"));
+    assertEquals("Fuel Economy range: 26 mpg Hwy, 19 mpg City", result.get("t3"));
+  }
+
+  @Test
+  public void testMultiValuedRegex() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    // <field column="participant" sourceColName="person" regex="(.*)" />
+    Map<String, String> fld = getField("participant", null, "(.*)", "person", null);
+    fields.add(fld);
+    Context context = getContext(null, null,
+            null, Context.FULL_DUMP, fields, null);
+
+    ArrayList<String> strings = new ArrayList<>();
+    strings.add("hello");
+    strings.add("world");
+    Map<String, Object> result = new RegexTransformer().transformRow(createMap("person", strings), context);
+    assertEquals(strings, result.get("participant"));
+  }
+
+  public static List<Map<String, String>> getFields() {
+    List<Map<String, String>> fields = new ArrayList<>();
+
+    // <field column="city_mileage" sourceColName="rowdata" regexp=
+    //    "Fuel Economy Range:\\s*?\\d*?\\s*?mpg Hwy,\\s*?(\\d*?)\\s*?mpg City"
+    fields.add(getField("city_mileage", "sint",
+            "Fuel Economy Range:\\s*?\\d*?\\s*?mpg Hwy,\\s*?(\\d*?)\\s*?mpg City",
+            "rowdata", null));
+
+    // <field column="highway_mileage" sourceColName="rowdata" regexp=
+    //    "Fuel Economy Range:\\s*?(\\d*?)\\s*?mpg Hwy,\\s*?\\d*?\\s*?mpg City"
+    fields.add(getField("highway_mileage", "sint",
+            "Fuel Economy Range:\\s*?(\\d*?)\\s*?mpg Hwy,\\s*?\\d*?\\s*?mpg City",
+            "rowdata", null));
+
+    // <field column="seating_capacity" sourceColName="rowdata" regexp="Seating capacity:(.*)"
+    fields.add(getField("seating_capacity", "sint", "Seating capacity:(.*)",
+            "rowdata", null));
+
+    // <field column="warranty" sourceColName="rowdata" regexp="Warranty:(.*)" />
+    fields.add(getField("warranty", "string", "Warranty:(.*)", "rowdata", null));
+
+    // <field column="rowdata" sourceColName="rowdata" />
+    fields.add(getField("rowdata", "string", null, "rowdata", null));
+    return fields;
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java
new file mode 100644
index 0000000..c106f8e
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestScriptTransformer.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.apache.solr.handler.dataimport.config.DIHConfiguration;
+import org.junit.Test;
+import org.w3c.dom.Document;
+import org.xml.sax.InputSource;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Test for ScriptTransformer
+ *
+ * @since solr 1.3
+ */
+public class TestScriptTransformer extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  public void testBasic() {
+    try {
+      String script = "function f1(row,context){"
+              + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
+      Context context = getContext("f1", script);
+      Map<String, Object> map = new HashMap<>();
+      map.put("name", "Scott");
+      EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
+      sep.init(context);
+      sep.applyTransformer(map);
+      assertEquals("Hello Scott", map.get("name").toString());
+    } catch (DataImportHandlerException e) {    
+      assumeFalse("This JVM does not have JavaScript installed.  Test Skipped.", e
+          .getMessage().startsWith("Cannot load Script Engine for language"));
+      throw e;
+    }
+  }
+
+  private Context getContext(String funcName, String script) {
+    List<Map<String, String>> fields = new ArrayList<>();
+    Map<String, String> entity = new HashMap<>();
+    entity.put("name", "hello");
+    entity.put("transformer", "script:" + funcName);
+
+    TestContext context = getContext(null, null, null,
+            Context.FULL_DUMP, fields, entity);
+    context.script = script;
+    context.scriptlang = "JavaScript";
+    return context;
+  }
+
+  @Test
+  public void testOneparam() {
+    try {
+      String script = "function f1(row){"
+              + "row.put('name','Hello ' + row.get('name'));" + "return row;\n" + "}";
+
+      Context context = getContext("f1", script);
+      Map<String, Object> map = new HashMap<>();
+      map.put("name", "Scott");
+      EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
+      sep.init(context);
+      sep.applyTransformer(map);
+      assertEquals("Hello Scott", map.get("name").toString());
+    } catch (DataImportHandlerException e) {   
+      assumeFalse("This JVM does not have JavaScript installed.  Test Skipped.", e
+          .getMessage().startsWith("Cannot load Script Engine for language"));
+      throw e;
+    }
+  }
+
+  @Test
+  public void testReadScriptTag() throws Exception {
+    try {
+      DocumentBuilder builder = DocumentBuilderFactory.newInstance()
+              .newDocumentBuilder();
+      Document document = builder.parse(new InputSource(new StringReader(xml)));
+      DataImporter di = new DataImporter();
+      DIHConfiguration dc = di.readFromXml(document);
+      assertTrue(dc.getScript().getText().indexOf("checkNextToken") > -1);
+    } catch (DataImportHandlerException e) {    
+      assumeFalse("This JVM does not have JavaScript installed.  Test Skipped.", e
+          .getMessage().startsWith("Cannot load Script Engine for language"));
+      throw e;
+    }
+  }
+
+  @Test
+  public void testCheckScript() throws Exception {
+    try {
+      DocumentBuilder builder = DocumentBuilderFactory.newInstance()
+              .newDocumentBuilder();
+      Document document = builder.parse(new InputSource(new StringReader(xml)));
+      DataImporter di = new DataImporter();
+      DIHConfiguration dc = di.readFromXml(document);
+      Context c = getContext("checkNextToken", dc.getScript().getText());
+
+      Map<String, Object> map = new HashMap<>();
+      map.put("nextToken", "hello");
+      EntityProcessorWrapper sep = new EntityProcessorWrapper(new SqlEntityProcessor(), null, null);
+      sep.init(c);
+      sep.applyTransformer(map);
+      assertEquals("true", map.get("$hasMore"));
+      map = new HashMap<>();
+      map.put("nextToken", "");
+      sep.applyTransformer(map);
+      assertNull(map.get("$hasMore"));
+    } catch (DataImportHandlerException e) {    
+      assumeFalse("This JVM does not have JavaScript installed.  Test Skipped.", e
+          .getMessage().startsWith("Cannot load Script Engine for language"));
+      throw e;
+    }
+  }
+
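+  // data-config with an inline <script>: checkNextToken sets the special
+  // $hasMore flag, which DIH consults to decide whether to fetch another page.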
+  static String xml = "<dataConfig>\n"
+          + "<script><![CDATA[\n"
+          + "function checkNextToken(row)\t{\n"
+          + " var nt = row.get('nextToken');"
+          + " if (nt && nt !='' ){ "
+          + "    row.put('$hasMore', 'true');}\n"
+          + "    return row;\n"
+          + "}]]></script>\t<document>\n"
+          + "\t\t<entity name=\"mbx\" pk=\"articleNumber\" processor=\"XPathEntityProcessor\"\n"
+          + "\t\t\turl=\"?boardId=${dataimporter.defaults.boardId}&amp;maxRecords=20&amp;includeBody=true&amp;startDate=${dataimporter.defaults.startDate}&amp;guid=:autosearch001&amp;reqId=1&amp;transactionId=stringfortracing&amp;listPos=${mbx.nextToken}\"\n"
+          + "\t\t\tforEach=\"/mbmessage/articles/navigation | /mbmessage/articles/article\" transformer=\"script:checkNextToken\">\n"
+          + "\n" + "\t\t\t<field column=\"nextToken\"\n"
+          + "\t\t\t\txpath=\"/mbmessage/articles/navigation/nextToken\" />\n"
+          + "\n" + "\t\t</entity>\n" + "\t</document>\n" + "</dataConfig>";
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java
new file mode 100644
index 0000000..93a26c3
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSimplePropertiesWriter.java
@@ -0,0 +1,151 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.lang.invoke.MethodHandles;
+import java.sql.Connection;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import org.apache.solr.common.util.SuppressForbidden;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class TestSimplePropertiesWriter extends AbstractDIHJdbcTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  
+  private boolean useJdbcEscapeSyntax;
+  private String dateFormat;
+  private String fileLocation;
+  private String fileName;
+  
+  @Before
+  public void spwBefore() throws Exception {
+    fileLocation = createTempDir().toFile().getAbsolutePath();
+    fileName = "the.properties";
+  }
+
+  @SuppressForbidden(reason = "Needs currentTimeMillis to construct date stamps")
+  @Test
+  public void testSimplePropertiesWriter() throws Exception { 
+    
+    SimpleDateFormat errMsgFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSSSSS", Locale.ROOT);
+    
+    String[] d = { 
+        "{'ts' ''yyyy-MM-dd HH:mm:ss.SSSSSS''}",
+        "{'ts' ''yyyy-MM-dd HH:mm:ss''}",
+        "yyyy-MM-dd HH:mm:ss", 
+        "yyyy-MM-dd HH:mm:ss.SSSSSS"
+    };
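+    // The first two patterns use JDBC timestamp escape syntax ({ts '...'});
+    // generateConfig() drops its own quoting in that case.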
+    for (int i = 0; i < d.length; i++) {
+      delQ("*:*");
+      commit();
+      useJdbcEscapeSyntax = (i < 2);
+      dateFormat = d[i];
+      SimpleDateFormat df = new SimpleDateFormat(dateFormat, Locale.ROOT);
+      Date oneSecondAgo = new Date(System.currentTimeMillis() - 1000);
+      
+      Map<String,String> init = new HashMap<>();
+      init.put("dateFormat", dateFormat);
+      init.put("filename", fileName);
+      init.put("directory", fileLocation);
+      SimplePropertiesWriter spw = new SimplePropertiesWriter();
+      spw.init(new DataImporter(), init);
+      Map<String, Object> props = new HashMap<>();
+      props.put("SomeDates.last_index_time", oneSecondAgo);
+      props.put("last_index_time", oneSecondAgo);
+      spw.persist(props);
+      
+      h.query("/dataimport", generateRequest());  
+      props = spw.readIndexerProperties();
+      Date entityDate = df.parse((String) props.get("SomeDates.last_index_time"));
+      Date docDate = df.parse((String) props.get("last_index_time"));
+      int year = currentYearFromDatabase();
+      
+      Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the document date: " + errMsgFormat.format(docDate), docDate.getTime() - oneSecondAgo.getTime() > 0);
+      Assert.assertTrue("This date: " + errMsgFormat.format(oneSecondAgo) + " should be prior to the entity date: " + errMsgFormat.format(entityDate), entityDate.getTime() - oneSecondAgo.getTime() > 0);   
+      assertQ(req("*:*"), "//*[@numFound='1']", "//doc/str[@name=\"ayear_s\"]=\"" + year + "\"");    
+    }
+  }
+  
+  private int currentYearFromDatabase() throws Exception {
+    try (Connection conn = newConnection();
+        Statement s = conn.createStatement();
+        ResultSet rs = s.executeQuery("select year(current_timestamp) from sysibm.sysdummy1")) {
+      if (rs.next()) {
+        return rs.getInt(1);
+      }
+      Assert.fail("We should have gotten a row from the db.");
+      return 0;
+    }
+  }
+  
+  @Override
+  protected Database setAllowedDatabases() {
+    return Database.DERBY;
+  }
+
+  @Override
+  protected String generateConfig() {
+    StringBuilder sb = new StringBuilder();
+    String q = useJdbcEscapeSyntax ? "" : "'";
+    sb.append("<dataConfig> \n");
+    sb.append("<propertyWriter dateFormat=\"" + dateFormat + "\" type=\"SimplePropertiesWriter\" directory=\"" + fileLocation + "\" filename=\"" + fileName + "\" />\n");
+    sb.append("<dataSource name=\"derby\" driver=\"org.apache.derby.jdbc.EmbeddedDriver\" url=\"jdbc:derby:memory:derbyDB;territory=en_US\" /> \n");
+    sb.append("<document name=\"TestSimplePropertiesWriter\"> \n");
+    sb.append("<entity name=\"SomeDates\" processor=\"SqlEntityProcessor\" dataSource=\"derby\" ");
+    sb.append("query=\"select 1 as id, YEAR(" + q + "${dih.last_index_time}" + q + ") as AYEAR_S from sysibm.sysdummy1 \" >\n");
+    sb.append("<field column=\"AYEAR_S\" name=\"ayear_s\" /> \n");
+    sb.append("</entity>\n");
+    sb.append("</document> \n");
+    sb.append("</dataConfig> \n");
+    String config = sb.toString();
+    log.debug(config); 
+    return config;
+  }
+    
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
new file mode 100644
index 0000000..0e9cd33
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorEndToEnd.java
@@ -0,0 +1,373 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.IOUtils;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.common.SolrInputDocument;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.file.Files;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+
+/**
+ * End-to-end test of SolrEntityProcessor: a "real" test that imports from a
+ * live Solr instance running in an embedded Jetty.
+ */
+public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTestCase {
+  
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  
+  private static final String SOLR_CONFIG = "dataimport-solrconfig.xml";
+  private static final String SOLR_SCHEMA = "dataimport-schema.xml";
+  private static final String SOURCE_CONF_DIR = "dih" + File.separator + "solr" + File.separator + "collection1" + File.separator + "conf" + File.separator;
+  private static final String ROOT_DIR = "dih" + File.separator + "solr" + File.separator;
+
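+  // deliberately unreachable endpoint (ff01:: is IPv6 interface-local multicast),
+  // used by testFullImportWrongSolrUrl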
+  private static final String DEAD_SOLR_SERVER = "http://[ff01::114]:33332/solr";
+  
+  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<>();
+  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<>();
+  
+  static {
+    // dynamic fields in the destination schema
+    Map<String,Object> dbDoc = new HashMap<>();
+    dbDoc.put("dbid_s", "1");
+    dbDoc.put("dbdesc_s", "DbDescription");
+    DB_DOCS.add(dbDoc);
+
+    Map<String,Object> solrDoc = new HashMap<>();
+    solrDoc.put("id", "1");
+    solrDoc.put("desc", "SolrDescription");
+    SOLR_DOCS.add(solrDoc);
+  }
+
+  
+  private SolrInstance instance = null;
+  private JettySolrRunner jetty;
+  
+  private String getDihConfigTagsInnerEntity() {
+    return  "<dataConfig>\r\n"
+        + "  <dataSource type='MockDataSource' />\r\n"
+        + "  <document>\r\n"
+        + "    <entity name='db' query='select * from x'>\r\n"
+        + "      <field column='dbid_s' />\r\n"
+        + "      <field column='dbdesc_s' />\r\n"
+        + "      <entity name='se' processor='SolrEntityProcessor' query='id:${db.dbid_s}'\n"
+        + "     url='" + getSourceUrl() + "' fields='id,desc'>\r\n"
+        + "        <field column='id' />\r\n"
+        + "        <field column='desc' />\r\n" + "      </entity>\r\n"
+        + "    </entity>\r\n" + "  </document>\r\n" + "</dataConfig>\r\n";
+  }
+  
+  private String generateDIHConfig(String options, boolean useDeadServer) {
+    return "<dataConfig>\r\n" + "  <document>\r\n"
+        + "    <entity name='se' processor='SolrEntityProcessor'" + "   url='"
+        + (useDeadServer ? DEAD_SOLR_SERVER : getSourceUrl()) + "' " + options + " />\r\n" + "  </document>\r\n"
+        + "</dataConfig>\r\n";
+  }
+  
+  private String getSourceUrl() {
+    return buildUrl(jetty.getLocalPort(), "/solr/collection1");
+  }
+  
+  //TODO: fix this test to close its directories
+  static String savedFactory;
+  @BeforeClass
+  public static void beforeClass() {
+    savedFactory = System.getProperty("solr.directoryFactory");
+    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
+  }
+  
+  @AfterClass
+  public static void afterClass() {
+    if (savedFactory == null) {
+      System.clearProperty("solr.directoryFactory");
+    } else {
+      System.setProperty("solr.directoryFactory", savedFactory);
+    }
+  }
+
+  @Override
+  @Before
+  public void setUp() throws Exception {
+    super.setUp();
+    // destination solr core
+    initCore(SOLR_CONFIG, SOLR_SCHEMA);
+    // data source solr instance
+    instance = new SolrInstance();
+    instance.setUp();
+    jetty = createJetty(instance);
+  }
+  
+  @Override
+  @After
+  public void tearDown() throws Exception {
+    try {
+      deleteCore();
+    } catch (Exception e) {
+      log.error("Error deleting core", e);
+    }
+    jetty.stop();
+    instance.tearDown();
+    super.tearDown();
+  }
+
+  //commented 23-AUG-2018  @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
+  public void testFullImport() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      addDocumentsToSolr(SOLR_DOCS);
+      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", false));
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='1']");
+    assertQ(req("id:1"), "//result/doc/str[@name='id'][.='1']",
+        "//result/doc/arr[@name='desc'][.='SolrDescription']");
+  }
+  
+  public void testFullImportFqParam() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      addDocumentsToSolr(generateSolrDocuments(30));
+      Map<String,String> map = new HashMap<>();
+      map.put("rows", "50");
+      runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", false), map);
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='1']");
+    assertQ(req("id:12"), "//result[@numFound='1']", "//result/doc/arr[@name='desc'][.='Description12']");
+  }
+  
+  public void testFullImportFieldsParam() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      addDocumentsToSolr(generateSolrDocuments(7));
+      runFullImport(generateDIHConfig("query='*:*' fl='id' rows='2'"+(random().nextBoolean() ?" cursorMark='true' sort='id asc'":""), false));
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='7']");
+    assertQ(req("id:1"), "//result[@numFound='1']");
+    try {
+      assertQ(req("id:1"), "//result/doc/arr[@name='desc']");
+      fail("The document has a field with name desc");
+    } catch (Exception e) {
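+      // expected: fl='id' excluded 'desc', so the assertQ above must fail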
+      
+    }
+    
+  }
+  
+  /**
+   * Receives a row from the mocked SQL source and fetches the matching row from Solr.
+   */
+  public void testFullImportInnerEntity() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      List<Map<String,Object>> DOCS = new ArrayList<>(DB_DOCS);
+      Map<String, Object> doc = new HashMap<>();
+      doc.put("dbid_s", "2");
+      doc.put("dbdesc_s", "DbDescription2");
+      DOCS.add(doc);
+      MockDataSource.setIterator("select * from x", DOCS.iterator());
+
+      DOCS = new ArrayList<>(SOLR_DOCS);
+      Map<String,Object> solrDoc = new HashMap<>();
+      solrDoc.put("id", "2");
+      solrDoc.put("desc", "SolrDescription2");
+      DOCS.add(solrDoc);
+      addDocumentsToSolr(DOCS);
+      runFullImport(getDihConfigTagsInnerEntity());
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    } finally {
+      MockDataSource.clearCache();
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='2']");
+    assertQ(req("id:1"), "//result/doc/str[@name='id'][.='1']",
+        "//result/doc/str[@name='dbdesc_s'][.='DbDescription']",
+        "//result/doc/str[@name='dbid_s'][.='1']",
+        "//result/doc/arr[@name='desc'][.='SolrDescription']");
+    assertQ(req("id:2"), "//result/doc/str[@name='id'][.='2']",
+        "//result/doc/str[@name='dbdesc_s'][.='DbDescription2']",
+        "//result/doc/str[@name='dbid_s'][.='2']",
+        "//result/doc/arr[@name='desc'][.='SolrDescription2']");
+  }
+  
+  public void testFullImportWrongSolrUrl() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", true /* use dead server */));
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='0']");
+  }
+  
+  public void testFullImportBadConfig() {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    
+    try {
+      runFullImport(generateDIHConfig("query='bogus:3' rows='2' fl='id,desc' onError='"+
+            (random().nextBoolean() ? "abort" : "justtogetcoverage")+"'", false));
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='0']");
+  }
+  
+  public void testCursorMarkNoSort() throws SolrServerException, IOException {
+    assertQ(req("*:*"), "//result[@numFound='0']");
+    addDocumentsToSolr(generateSolrDocuments(7));
+    try {     
+      List<String> errors = Arrays.asList("sort='id'", //wrong sort spec
+          "", //no sort spec
+          "sort='id asc' timeout='12345'"); // sort is fine, but set timeout
+      Collections.shuffle(errors, random());
+      String attrs = "query='*:*' rows='2' fl='id,desc' cursorMark='true' "
+                                                            + errors.get(0);
+      runFullImport(generateDIHConfig(attrs,
+            false));
+    } catch (Exception e) {
+      log.error(e.getMessage(), e);
+      fail(e.getMessage());
+    }
+    
+    assertQ(req("*:*"), "//result[@numFound='0']");
+  }
+  
+  private static List<Map<String,Object>> generateSolrDocuments(int num) {
+    List<Map<String,Object>> docList = new ArrayList<>();
+    for (int i = 1; i <= num; i++) {
+      Map<String,Object> map = new HashMap<>();
+      map.put("id", i);
+      map.put("desc", "Description" + i);
+      docList.add(map);
+    }
+    return docList;
+  }
+  
+  private void addDocumentsToSolr(List<Map<String,Object>> docs) throws SolrServerException, IOException {
+    List<SolrInputDocument> sidl = new ArrayList<>();
+    for (Map<String,Object> doc : docs) {
+      SolrInputDocument sd = new SolrInputDocument();
+      for (Entry<String,Object> entry : doc.entrySet()) {
+        sd.addField(entry.getKey(), entry.getValue());
+      }
+      sidl.add(sd);
+    }
+
+    try (HttpSolrClient solrServer = getHttpSolrClient(getSourceUrl(), 15000, 30000)) {
+      solrServer.add(sidl);
+      solrServer.commit(true, true);
+    }
+  }
+  
+  private static class SolrInstance {
+    File homeDir;
+    File confDir;
+    
+    public String getHomeDir() {
+      return homeDir.toString();
+    }
+    
+    public String getSchemaFile() {
+      return SOURCE_CONF_DIR + "dataimport-schema.xml";
+    }
+    
+    public String getDataDir() {
+      return initCoreDataDir.toString();
+    }
+    
+    public String getSolrConfigFile() {
+      return SOURCE_CONF_DIR + "dataimport-solrconfig.xml";
+    }
+
+    public String getSolrXmlFile() {
+      return ROOT_DIR + "solr.xml";
+    }
+
+    public void setUp() throws Exception {
+      homeDir = createTempDir().toFile();
+      initCoreDataDir = new File(homeDir + "/collection1", "data");
+      confDir = new File(homeDir + "/collection1", "conf");
+      
+      homeDir.mkdirs();
+      initCoreDataDir.mkdirs();
+      confDir.mkdirs();
+
+      FileUtils.copyFile(getFile(getSolrXmlFile()), new File(homeDir, "solr.xml"));
+      File f = new File(confDir, "solrconfig.xml");
+      FileUtils.copyFile(getFile(getSolrConfigFile()), f);
+      f = new File(confDir, "schema.xml");
+      
+      FileUtils.copyFile(getFile(getSchemaFile()), f);
+      f = new File(confDir, "data-config.xml");
+      FileUtils.copyFile(getFile(SOURCE_CONF_DIR + "dataconfig-contentstream.xml"), f);
+
+      Files.createFile(confDir.toPath().resolve("../core.properties"));
+    }
+
+    public void tearDown() throws Exception {
+      IOUtils.rm(homeDir.toPath());
+    }
+  }
+  
+  private JettySolrRunner createJetty(SolrInstance instance) throws Exception {
+    Properties nodeProperties = new Properties();
+    nodeProperties.setProperty("solr.data.dir", instance.getDataDir());
+    JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), nodeProperties, buildJettyConfig("/solr"));
+    jetty.start();
+    return jetty;
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
new file mode 100644
index 0000000..a2a9fff
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSolrEntityProcessorUnit.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.util.*;
+
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.CursorMarkParams;
+import org.apache.solr.handler.dataimport.SolrEntityProcessor.SolrDocumentListIterator;
+import org.junit.Test;
+
+/**
+ * Unit test of SolrEntityProcessor. A very basic test outside of the DIH.
+ */
+public class TestSolrEntityProcessorUnit extends AbstractDataImportHandlerTestCase {
+
+  private static final class NoNextMockProcessor extends SolrEntityProcessor {
+    @Override
+    protected void nextPage() {
+    }
+  }
+
+  private static final String ID = "id";
+
+  public void testQuery() {
+    List<Doc> docs = generateUniqueDocs(2);
+
+    MockSolrEntityProcessor processor = createAndInit(docs);
+    try {
+      assertExpectedDocs(docs, processor);
+      assertEquals(1, processor.getQueryCount());
+    } finally {
+      processor.destroy();
+    }
+  }
+
+  private MockSolrEntityProcessor createAndInit(List<Doc> docs) {
+    return createAndInit(docs, SolrEntityProcessor.ROWS_DEFAULT);
+  }
+
+  public void testNumDocsGreaterThanRows() {
+    List<Doc> docs = generateUniqueDocs(44);
+
+    int rowsNum = 10;
+    MockSolrEntityProcessor processor = createAndInit(docs, rowsNum);
+    try {
+      assertExpectedDocs(docs, processor);
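+      // 44 docs fetched 10 rows at a time -> 5 paged requests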
+      assertEquals(5, processor.getQueryCount());
+    } finally {
+      processor.destroy();
+    }
+  }
+
+  private MockSolrEntityProcessor createAndInit(List<Doc> docs, int rowsNum) {
+    MockSolrEntityProcessor processor = new MockSolrEntityProcessor(docs, rowsNum);
+    HashMap<String,String> entityAttrs = new HashMap<String,String>(){{put(SolrEntityProcessor.SOLR_SERVER,"http://route:66/no");}};
+    processor.init(getContext(null, null, null, null, Collections.emptyList(), 
+        entityAttrs));
+    return processor;
+  }
+
+  public void testMultiValuedFields() {
+    List<Doc> docs = new ArrayList<>();
+    List<FldType> types = new ArrayList<>();
+    types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 4)));
+    types.add(new FldType("description", new IRange(3, 3), new SVal('a', 'c', 1, 1)));
+    Doc testDoc = createDoc(types);
+    docs.add(testDoc);
+
+    MockSolrEntityProcessor processor = createAndInit(docs);
+    try {
+      Map<String, Object> next = processor.nextRow();
+      assertNotNull(next);
+  
+      @SuppressWarnings("unchecked")
+      List<Comparable> multiField = (List<Comparable>) next.get("description");
+      assertEquals(testDoc.getValues("description").size(), multiField.size());
+      assertEquals(testDoc.getValues("description"), multiField);
+      assertEquals(1, processor.getQueryCount());
+      assertNull(processor.nextRow());
+    } finally {
+      processor.destroy();
+    }
+  }
+
+  @Test(expected = DataImportHandlerException.class)
+  public void testNoQuery() {
+    SolrEntityProcessor processor = new SolrEntityProcessor();
+    
+    HashMap<String,String> entityAttrs = new HashMap<String,String>(){{put(SolrEntityProcessor.SOLR_SERVER,"http://route:66/no");}};
+    processor.init(getContext(null, null, null, null, Collections.emptyList(), 
+        entityAttrs));
+    try {
+      processor.buildIterator();
+    } finally {
+      processor.destroy();
+    }
+  }
+  
+  public void testPagingQuery() {
+    SolrEntityProcessor processor = new NoNextMockProcessor() ;
+    
+    HashMap<String,String> entityAttrs = new HashMap<String,String>() {{
+      put(SolrEntityProcessor.SOLR_SERVER, "http://route:66/no");
+      if (random().nextBoolean()) {
+        // only the literal 'true' enables paging by cursor; '', 'false', and even
+        // CURSOR_MARK_START ('*') must leave it disabled
+        List<String> noCursor = Arrays.asList("", "false", CursorMarkParams.CURSOR_MARK_START);
+        Collections.shuffle(noCursor, random());
+        put(CursorMarkParams.CURSOR_MARK_PARAM, noCursor.get(0));
+      }}};
+    processor.init(getContext(null, null, null, null, Collections.emptyList(), 
+        entityAttrs));
+    try {
+      processor.buildIterator();
+      SolrQuery query = new SolrQuery();
+      ((SolrDocumentListIterator) processor.rowIterator).passNextPage(query);
+      assertEquals("0", query.get(CommonParams.START));
+      assertNull(query.get(CursorMarkParams.CURSOR_MARK_PARAM));
+      assertNotNull(query.get(CommonParams.TIME_ALLOWED));
+    } finally {
+      processor.destroy();
+    }
+  }
+  
+  public void testCursorQuery() {
+    SolrEntityProcessor processor = new NoNextMockProcessor();
+    
+    HashMap<String,String> entityAttrs = new HashMap<>();
+    entityAttrs.put(SolrEntityProcessor.SOLR_SERVER, "http://route:66/no");
+    entityAttrs.put(CursorMarkParams.CURSOR_MARK_PARAM, "true");
+    processor.init(getContext(null, null, null, null, Collections.emptyList(), 
+        entityAttrs));
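+    // With cursorMark=true, paging is expected to go through cursors instead of start/rows:
+    // no start parameter, cursorMark begins at "*", and timeAllowed is omitted since it cannot be combined with cursors.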
+    try {
+      processor.buildIterator();
+      SolrQuery query = new SolrQuery();
+      ((SolrDocumentListIterator) processor.rowIterator).passNextPage(query);
+      assertNull(query.get(CommonParams.START));
+      assertEquals(CursorMarkParams.CURSOR_MARK_START, query.get(CursorMarkParams.CURSOR_MARK_PARAM));
+      assertNull(query.get(CommonParams.TIME_ALLOWED));
+    } finally {
+      processor.destroy();
+    }
+  }
+
+  private List<Doc> generateUniqueDocs(int numDocs) {
+    List<FldType> types = new ArrayList<>();
+    types.add(new FldType(ID, ONE_ONE, new SVal('A', 'Z', 4, 40)));
+    types.add(new FldType("description", new IRange(1, 3), new SVal('a', 'c', 1, 1)));
+
+    Set<Comparable> previousIds = new HashSet<>();
+    List<Doc> docs = new ArrayList<>(numDocs);
+    for (int i = 0; i < numDocs; i++) {
+      Doc doc = createDoc(types);
+      while (previousIds.contains(doc.id)) {
+        doc = createDoc(types);
+      }
+      previousIds.add(doc.id);
+      docs.add(doc);
+    }
+    return docs;
+  }
+
+  private static void assertExpectedDocs(List<Doc> expectedDocs, SolrEntityProcessor processor) {
+    for (Doc expectedDoc : expectedDocs) {
+      Map<String, Object> next = processor.nextRow();
+      assertNotNull(next);
+      assertEquals(expectedDoc.id, next.get("id"));
+      assertEquals(expectedDoc.getValues("description"), next.get("description"));
+    }
+    assertNull(processor.nextRow());
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
new file mode 100644
index 0000000..7b0e613
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSortedMapBackedCache.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.lang.invoke.MethodHandles;
+
+import java.math.BigDecimal;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class TestSortedMapBackedCache extends AbstractDIHCacheTestCase {
+  
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  
+  @Test
+  public void testCacheWithKeyLookup() {
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(new HashMap<String,String>()));
+      loadData(cache, data, fieldNames, true);
+      List<ControlData> testData = extractDataByKeyLookup(cache, fieldNames);
+      compareData(data, testData);
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
+
+  @Test
+  public void testCacheWithOrderedLookup() {
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(new HashMap<String,String>()));
+      loadData(cache, data, fieldNames, true);
+      List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(data, testData);
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
+  
+  @Test
+  public void testNullKeys() throws Exception {
+    //A null key should just be ignored, but not throw an exception
+    DIHCache cache = null;
+    try {
+      cache = new SortedMapBackedCache();
+      Map<String, String> cacheProps = new HashMap<>();
+      cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
+      cache.open(getContext(cacheProps));
+      
+      Map<String,Object> data = new HashMap<>();
+      data.put("a_id", null);
+      data.put("bogus", "data");
+      cache.add(data);
+      
+      Iterator<Map<String, Object>> cacheIter = cache.iterator();
+      Assert.assertFalse("cache should be empty.", cacheIter.hasNext());
+      Assert.assertNull(cache.iterator(null));
+      cache.delete(null);      
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }    
+  }
+
+  @Test
+  public void testCacheReopensWithUpdate() {
+    DIHCache cache = null;
+    try {      
+      Map<String, String> cacheProps = new HashMap<>();
+      cacheProps.put(DIHCacheSupport.CACHE_PRIMARY_KEY, "a_id");
+      
+      cache = new SortedMapBackedCache();
+      cache.open(getContext(cacheProps));
+      // We can let the data hit the cache with the fields out of order because
+      // we've identified the pk up-front.
+      loadData(cache, data, fieldNames, false);
+
+      // Close the cache.
+      cache.close();
+
+      List<ControlData> newControlData = new ArrayList<>();
+      Object[] newIdEqualsThree = null;
+      for (int i = 0; i < data.size(); i++) {
+        // We'll be deleting a_id=1 so remove it from the control data.
+        if (data.get(i).data[0].equals(1)) {
+          continue;
+        }
+
+        // We'll be changing "Cookie" to "Carrot" in a_id=3 so change it in the control data.
+        if (data.get(i).data[0].equals(3)) {
+          newIdEqualsThree = new Object[data.get(i).data.length];
+          System.arraycopy(data.get(i).data, 0, newIdEqualsThree, 0, newIdEqualsThree.length);
+          newIdEqualsThree[3] = "Carrot";
+          newControlData.add(new ControlData(newIdEqualsThree));
+        }
+        // Everything else can just be copied over.
+        else {
+          newControlData.add(data.get(i));
+        }
+      }
+
+      // These new rows of data will get added to the cache, so add them to the control data too.
+      Object[] newDataRow1 = new Object[] {99, new BigDecimal(Math.PI), "Z", "Zebra", 99.99f, Feb21_2011, null };
+      Object[] newDataRow2 = new Object[] {2, new BigDecimal(Math.PI), "B", "Ballerina", 2.22f, Feb21_2011, null };
+
+      newControlData.add(new ControlData(newDataRow1));
+      newControlData.add(new ControlData(newDataRow2));
+
+      // Re-open the cache
+      cache.open(getContext(new HashMap<String,String>()));
+
+      // Delete a_id=1 from the cache.
+      cache.delete(1);
+
+      // Because the cache allows duplicates, the only way to update is to
+      // delete first then add.
+      cache.delete(3);
+      cache.add(controlDataToMap(new ControlData(newIdEqualsThree), fieldNames, false));
+
+      // Add this row with a new Primary key.
+      cache.add(controlDataToMap(new ControlData(newDataRow1), fieldNames, false));
+
+      // Add this row, creating two records in the cache with a_id=2.
+      cache.add(controlDataToMap(new ControlData(newDataRow2), fieldNames, false));
+
+      // Read the cache back and compare to the newControlData
+      List<ControlData> testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(newControlData, testData);
+
+      // Now try reading the cache read-only.
+      cache.close();
+      cache.open(getContext(new HashMap<String,String>()));
+      testData = extractDataInKeyOrder(cache, fieldNames);
+      compareData(newControlData, testData);
+
+    } catch (Exception e) {
+      log.warn("Exception thrown: " + e.toString());
+      Assert.fail();
+    } finally {
+      try {
+        cache.destroy();
+      } catch (Exception ex) {
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java
new file mode 100644
index 0000000..f1277c9
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessor.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.junit.Ignore;
+import org.junit.Test;
+
+/**
+ * Test with various combinations of parameters, child entities, caches, transformers.
+ */
+public class TestSqlEntityProcessor extends AbstractSqlEntityProcessorTestCase { 
+   
+  @Test
+  public void testSingleEntity() throws Exception {
+    singleEntity(1);
+  }  
+  @Test
+  public void testWithSimpleTransformer() throws Exception {
+    simpleTransform(1);   
+  }
+  @Test
+  public void testWithComplexTransformer() throws Exception {
+    complexTransform(1, 0);
+  }
+  @Test
+  public void testChildEntities() throws Exception {
+    withChildEntities(false, true);
+  }
+  @Test
+  public void testCachedChildEntities() throws Exception {
+    withChildEntities(true, true);
+  }
+  
+  @Test
+  public void testSportZipperChildEntities() throws Exception {
+    sportsZipper = true;
+    withChildEntities(true, true);
+  }
+
+  @Test
+  public void testCountryZipperChildEntities() throws Exception {
+    countryZipper = true;
+    withChildEntities(true, true);
+  }
+  
+  @Test
+  public void testBothZipperChildEntities() throws Exception {
+    countryZipper = true;
+    sportsZipper = true;
+    withChildEntities(true, true);
+  }
+  
+  @Test(expected = RuntimeException.class /* DIH exceptions are not propagated; here we capture the exceptions thrown by assertQ */)
+  public void testSportZipperChildEntitiesWrongOrder() throws Exception {
+    if (random().nextBoolean()) {
+      wrongPeopleOrder = true;
+    } else {
+      wrongSportsOrder = true;
+    }
+    testSportZipperChildEntities();
+  }
+
+  @Test(expected=RuntimeException.class )
+  public void testCountryZipperChildEntitiesWrongOrder() throws Exception {
+    if (random().nextBoolean()) {
+      wrongPeopleOrder = true;
+    } else {
+      wrongCountryOrder = true;
+    }
+    testCountryZipperChildEntities();
+  }
+  
+  @Test(expected=RuntimeException.class)
+  public void testBothZipperChildEntitiesWrongOrder() throws Exception {
+    if (random().nextBoolean()) {
+      wrongPeopleOrder = true;
+    } else {
+      if (random().nextBoolean()) {
+        wrongSportsOrder = true;
+      } else {
+        wrongCountryOrder = true;
+      }
+    }
+    testBothZipperChildEntities();
+  }
+  
+  @Test
+  @Ignore("broken see SOLR-3857")
+  public void testSimpleCacheChildEntities() throws Exception {
+    simpleCacheChildEntities(true);
+  }
+   
+  @Override
+  protected String deltaQueriesCountryTable() {
+    return "";
+  }
+  @Override
+  protected String deltaQueriesPersonTable() {
+    return "";
+  }  
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java
new file mode 100644
index 0000000..1f22a8c
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestSqlEntityProcessorDelta.java
@@ -0,0 +1,209 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.lang.invoke.MethodHandles;
+
+import org.apache.solr.request.LocalSolrQueryRequest;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test with various combinations of parameters, child entities, transformers.
+ */
+public class TestSqlEntityProcessorDelta extends AbstractSqlEntityProcessorTestCase {
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private boolean delta = false;
+  private boolean useParentDeltaQueryParam = false;
+  private IntChanges personChanges = null;
+  private String[] countryChanges = null;
+  
+  @Before
+  public void setupDeltaTest() {
+    delta = false;
+    personChanges = null;
+    countryChanges = null;
+  }
+  @Test
+  public void testSingleEntity() throws Exception {
+    log.debug("testSingleEntity full-import...");
+    singleEntity(1);
+    logPropertiesFile();
+    changeStuff();
+    int c = calculateDatabaseCalls();
+    log.debug("testSingleEntity delta-import (" + c + " database calls expected)...");
+    singleEntity(c);
+    validateChanges();
+  }
+  
+  @Test
+  public void testDeltaImportWithoutInitialFullImport() throws Exception {
+    log.debug("testDeltaImportWithoutInitialFullImport delta-import...");
+    countryEntity = false;
+    delta = true;
+    /*
+     * Two extra database calls are expected:
+     * +1 for the deltaQuery, which identifies the ids of the items to update,
+     * +1 for the deletedPkQuery, which identifies the ids of deleted items.
+     */
+    singleEntity(totalPeople() + 2);
+    validateChanges();
+  }
+
+  @Test
+  public void testWithSimpleTransformer() throws Exception {
+    log.debug("testWithSimpleTransformer full-import...");    
+    simpleTransform(1); 
+    logPropertiesFile(); 
+    changeStuff();
+    int c = calculateDatabaseCalls();
+    simpleTransform(c);
+    log.debug("testWithSimpleTransformer delta-import (" + c + " database calls expected)...");
+    validateChanges(); 
+  }
+  @Test
+  public void testWithComplexTransformer() throws Exception {
+    log.debug("testWithComplexTransformer full-import...");     
+    complexTransform(1, 0);
+    logPropertiesFile();
+    changeStuff();
+    int c = calculateDatabaseCalls();
+    log.debug("testWithComplexTransformer delta-import (" + c + " database calls expected)...");
+    complexTransform(c, personChanges.deletedKeys.length);
+    validateChanges();  
+  }
+  @Test
+  public void testChildEntities() throws Exception {
+    log.debug("testChildEntities full-import...");
+    useParentDeltaQueryParam = random().nextBoolean();
+    log.debug("using parent delta? " + useParentDeltaQueryParam);
+    withChildEntities(false, true);
+    logPropertiesFile();
+    changeStuff();
+    log.debug("testChildEntities delta-import...");
+    withChildEntities(false, false);
+    validateChanges();
+  }
+    
+  
+  private int calculateDatabaseCalls() {
+    // The main query generates 1 call.
+    // Deletes generate 1 call.
+    // Each add/modify generates 1 call.
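+    // Example: 2 added + 1 changed person (2+1+1=4) and 1 changed country (1+1=2), plus the main query, give 7 calls.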
+    int c = 1;
+    if (countryChanges != null) {
+      c += countryChanges.length + 1;
+    }
+    if (personChanges != null) {
+      c += personChanges.addedKeys.length + personChanges.changedKeys.length + 1;
+    }
+    return c;    
+  }
+  private void validateChanges() throws Exception {
+    if (personChanges != null) {
+      for (int id : personChanges.addedKeys) {
+        assertQ(req("id:" + id), "//*[@numFound='1']");
+      }
+      for (int id : personChanges.deletedKeys) {
+        assertQ(req("id:" + id), "//*[@numFound='0']");
+      }
+      for (int id : personChanges.changedKeys) {
+        assertQ(req("id:" + id), "//*[@numFound='1']", "substring(//doc/arr[@name='NAME_mult_s']/str[1], 1, 8)='MODIFIED'");
+      }
+    }
+    if (countryChanges != null) {
+      for (String code : countryChanges) {
+        assertQ(req("COUNTRY_CODE_s:" + code), "//*[@numFound='" + numberPeopleByCountryCode(code) + "']", "substring((//doc/str[@name='COUNTRY_NAME_s'])[1], 1, 8)='MODIFIED'");
+      }
+    }
+  }
+  private void changeStuff() throws Exception {
+    if(countryEntity)
+    {
+      int n = random().nextInt(3); // includes case 2 so both change types can occur together
+      switch(n) {
+        case 0:
+          personChanges = modifySomePeople();
+          break;
+        case 1:
+          countryChanges = modifySomeCountries();  
+          break;
+        case 2:
+          personChanges = modifySomePeople();
+          countryChanges = modifySomeCountries();
+          break;
+      }
+    } else {
+      personChanges = modifySomePeople();
+    }
+    countryChangesLog();
+    personChangesLog();
+    delta = true;
+  }
+  private void countryChangesLog() {
+    if (countryChanges != null) {
+      StringBuilder sb = new StringBuilder();
+      sb.append("country changes { ");
+      for (String s : countryChanges) {
+        sb.append(s).append(" ");
+      }
+      sb.append(" }");
+      log.debug(sb.toString());
+    }
+  }
+  private void personChangesLog() {
+    if (personChanges != null) {
+      log.debug("person changes { {} } ", personChanges);
+    }
+  }
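+  // Delta runs use clean=false so documents from the initial full-import are preserved.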
+  @Override
+  protected LocalSolrQueryRequest generateRequest() {
+    return lrf.makeRequest("command", (delta ? "delta-import" : "full-import"), "dataConfig", generateConfig(), 
+        "clean", (delta ? "false" : "true"), "commit", "true", "synchronous", "true", "indent", "true");
+  }
+  @Override
+  protected String deltaQueriesPersonTable() {
+    return 
+        "deletedPkQuery=''SELECT ID FROM PEOPLE WHERE DELETED='Y' AND last_modified &gt;='${dih.People.last_index_time}' '' " +
+        "deltaImportQuery=''SELECT ID, NAME, COUNTRY_CODE FROM PEOPLE where ID=${dih.delta.ID} '' " +
+        "deltaQuery=''" +
+        "SELECT ID FROM PEOPLE WHERE DELETED!='Y' AND last_modified &gt;='${dih.People.last_index_time}' " +
+        (useParentDeltaQueryParam ? "" : 
+        "UNION DISTINCT " +
+        "SELECT ID FROM PEOPLE WHERE DELETED!='Y' AND COUNTRY_CODE IN (SELECT CODE FROM COUNTRIES WHERE last_modified &gt;='${dih.People.last_index_time}') "
+        ) + "'' "
+    ;
+  }
+  @Override
+  protected String deltaQueriesCountryTable() {
+    if(useParentDeltaQueryParam) {
+      return 
+          "deltaQuery=''SELECT CODE FROM COUNTRIES WHERE DELETED != 'Y' AND last_modified &gt;='${dih.last_index_time}' ''  " +
+          "parentDeltaQuery=''SELECT ID FROM PEOPLE WHERE DELETED != 'Y' AND COUNTRY_CODE='${Countries.CODE}' '' "
+      ;
+          
+    }
+    return "";
+  }
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java
new file mode 100644
index 0000000..b5c3811
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestTemplateTransformer.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import org.junit.Test;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.Arrays;
+
+/**
+ * <p>
+ * Test for TemplateTransformer
+ * </p>
+ *
+ *
+ * @since solr 1.3
+ */
+public class TestTemplateTransformer extends AbstractDataImportHandlerTestCase {
+
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTransformRow() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    fields.add(createMap("column", "firstName"));
+    fields.add(createMap("column", "lastName"));
+    fields.add(createMap("column", "middleName"));
+    fields.add(createMap("column", "name",
+            TemplateTransformer.TEMPLATE,
+            "${e.lastName}, ${e.firstName} ${e.middleName}"));
+    fields.add(createMap("column", "emails",
+            TemplateTransformer.TEMPLATE,
+            "${e.mail}"));
+
+    // test reuse of template output in another template 
+    fields.add(createMap("column", "mrname",
+            TemplateTransformer.TEMPLATE,"Mr ${e.name}"));
+
+    List<String> mails = Arrays.asList("a@b.com", "c@d.com");
+    Map<String, Object> row = createMap(
+            "firstName", "Shalin",
+            "middleName", "Shekhar", 
+            "lastName", "Mangar",
+            "mail", mails);
+
+    VariableResolver resolver = new VariableResolver();
+    resolver.addNamespace("e", row);
+    Map<String, String> entityAttrs = createMap("name", "e");
+
+    Context context = getContext(null, resolver,
+            null, Context.FULL_DUMP, fields, entityAttrs);
+    new TemplateTransformer().transformRow(row, context);
+    assertEquals("Mangar, Shalin Shekhar", row.get("name"));
+    assertEquals("Mr Mangar, Shalin Shekhar", row.get("mrname"));
+    assertEquals(mails,row.get("emails"));
+  }
+    
+  @Test
+  @SuppressWarnings("unchecked")
+  public void testTransformRowMultiValue() {
+    List<Map<String, String>> fields = new ArrayList<>();
+    fields.add(createMap("column", "year"));
+    fields.add(createMap("column", "month"));
+    fields.add(createMap("column", "day"));
+      
+    // create three variations of date format
+    fields.add(createMap( "column", "date",
+                          TemplateTransformer.TEMPLATE,
+                          "${e.day} ${e.month}, ${e.year}" ));
+    fields.add(createMap( "column", "date",
+                          TemplateTransformer.TEMPLATE,
+                          "${e.month} ${e.day}, ${e.year}" ));
+    fields.add(createMap("column", "date",
+                          TemplateTransformer.TEMPLATE,
+                          "${e.year}-${e.month}-${e.day}" ));
+      
+    Map row = createMap( "year", "2016",
+                         "month", "Apr",
+                         "day", "30" );
+    VariableResolver resolver = new VariableResolver();
+    resolver.addNamespace("e", row);
+    Map<String, String> entityAttrs = createMap("date", "e");
+      
+    Context context = getContext(null, resolver,
+                                 null, Context.FULL_DUMP, fields, entityAttrs);
+    new TemplateTransformer().transformRow(row, context);
+    assertTrue( row.get( "date" ) instanceof List );
+    
+    List<Object> dates = (List<Object>)row.get( "date" );
+    assertEquals( dates.size(), 3 );
+    assertEquals( dates.get(0).toString(), "30 Apr, 2016" );
+    assertEquals( dates.get(1).toString(), "Apr 30, 2016" );
+    assertEquals( dates.get(2).toString(), "2016-Apr-30" );
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/lucene-solr/blob/d7c03684/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java
----------------------------------------------------------------------
diff --git a/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java
new file mode 100644
index 0000000..c1acc54
--- /dev/null
+++ b/solr/contrib/dataimporthandler/src/test/java/org/apache/solr/handler/dataimport/TestURLDataSource.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.handler.dataimport;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+import org.junit.Test;
+
+public class TestURLDataSource extends AbstractDataImportHandlerTestCase {
+  private List<Map<String, String>> fields = new ArrayList<>();
+  private URLDataSource dataSource = new URLDataSource();
+  private VariableResolver variableResolver = new VariableResolver();
+  private Context context = AbstractDataImportHandlerTestCase.getContext(null, variableResolver,
+      dataSource, Context.FULL_DUMP, fields, null);
+  private Properties initProps = new Properties();
+  
+  @Test
+  public void substitutionsOnBaseUrl() throws Exception {
+    String url = "http://example.com/";
+    
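+    // Register the raw URL under the "dataimporter.request" namespace so the ${...} placeholder below can resolve to it.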
+    variableResolver.addNamespace("dataimporter.request", Collections.<String,Object>singletonMap("baseurl", url));
+    
+    initProps.setProperty(URLDataSource.BASE_URL, "${dataimporter.request.baseurl}");
+    dataSource.init(context, initProps);
+    assertEquals(url, dataSource.getBaseUrl());
+  }
+}