You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@nifi.apache.org by mc...@apache.org on 2018/10/04 13:32:58 UTC

[1/4] nifi git commit: NIFI-4806 updated tika and a ton of other deps as found by dependency versions plugin

Repository: nifi
Updated Branches:
  refs/heads/master de685a7a7 -> 8e233ca2e


http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-provenance-repository-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/pom.xml b/nifi-nar-bundles/nifi-provenance-repository-bundle/pom.xml
index 39e41ff..517622b 100644
--- a/nifi-nar-bundles/nifi-provenance-repository-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/pom.xml
@@ -59,12 +59,12 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-lang3</artifactId>
-                <version>3.7</version>
+                <version>3.8.1</version>
             </dependency>
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcprov-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
         </dependencies>
     </dependencyManagement>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-redis-bundle/nifi-redis-extensions/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-redis-bundle/nifi-redis-extensions/pom.xml b/nifi-nar-bundles/nifi-redis-bundle/nifi-redis-extensions/pom.xml
index 045dc4d..d7e8b07 100644
--- a/nifi-nar-bundles/nifi-redis-bundle/nifi-redis-extensions/pom.xml
+++ b/nifi-nar-bundles/nifi-redis-bundle/nifi-redis-extensions/pom.xml
@@ -54,7 +54,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-registry-bundle/nifi-registry-service/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-registry-bundle/nifi-registry-service/pom.xml b/nifi-nar-bundles/nifi-registry-bundle/nifi-registry-service/pom.xml
index 7138820..57515ce 100644
--- a/nifi-nar-bundles/nifi-registry-bundle/nifi-registry-service/pom.xml
+++ b/nifi-nar-bundles/nifi-registry-bundle/nifi-registry-service/pom.xml
@@ -48,7 +48,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/pom.xml b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/pom.xml
index 330b78e..e451fa0 100644
--- a/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-rethinkdb-bundle/nifi-rethinkdb-processors/pom.xml
@@ -49,7 +49,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>com.google.code.gson</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-riemann-bundle/nifi-riemann-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-riemann-bundle/nifi-riemann-processors/pom.xml b/nifi-nar-bundles/nifi-riemann-bundle/nifi-riemann-processors/pom.xml
index 3502816..b3a72f7 100644
--- a/nifi-nar-bundles/nifi-riemann-bundle/nifi-riemann-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-riemann-bundle/nifi-riemann-processors/pom.xml
@@ -44,7 +44,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-riemann-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-riemann-bundle/pom.xml b/nifi-nar-bundles/nifi-riemann-bundle/pom.xml
index 986325a..8f0f8b2 100644
--- a/nifi-nar-bundles/nifi-riemann-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-riemann-bundle/pom.xml
@@ -37,7 +37,7 @@
             <dependency>
                 <groupId>com.aphyr</groupId>
                 <artifactId>riemann-java-client</artifactId>
-                <version>0.4.0</version>
+                <version>0.4.1</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/pom.xml b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/pom.xml
index 925b2e2..f4bcdfa 100644
--- a/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/pom.xml
@@ -79,7 +79,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/pom.xml b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/pom.xml
index 18befab..2d0fee7 100755
--- a/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-solr-bundle/nifi-solr-processors/pom.xml
@@ -53,12 +53,12 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-core</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>commons-codec</groupId>
             <artifactId>commons-codec</artifactId>
-            <version>1.10</version>
+            <version>1.11</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/pom.xml b/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/pom.xml
index 42d9bee..04b98ca 100644
--- a/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-spark-bundle/nifi-livy-processors/pom.xml
@@ -101,7 +101,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-text</artifactId>
-            <version>1.3</version>
+            <version>1.4</version>
         </dependency>
     </dependencies>
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-splunk-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-splunk-bundle/pom.xml b/nifi-nar-bundles/nifi-splunk-bundle/pom.xml
index 0f422d7..a792d5b 100644
--- a/nifi-nar-bundles/nifi-splunk-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-splunk-bundle/pom.xml
@@ -51,7 +51,7 @@
             <dependency>
                 <groupId>com.splunk</groupId>
                 <artifactId>splunk</artifactId>
-                <version>1.6.3.0</version>
+                <version>1.6.4.0</version>
             </dependency>
         </dependencies>
     </dependencyManagement>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/pom.xml b/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/pom.xml
index 9b6fb6d..95902b2 100644
--- a/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-spring-bundle/nifi-spring-processors/pom.xml
@@ -28,7 +28,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
index bc031fe..889c567 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
+++ b/nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/main/java/org/apache/nifi/processors/standard/SplitXml.java
@@ -36,7 +36,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicInteger;
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.parsers.SAXParserFactory;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
 import org.apache.nifi.annotation.behavior.SystemResourceConsideration;
 import org.apache.nifi.annotation.behavior.InputRequirement;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-bundle/pom.xml b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
index 0a0952b..e06a766 100644
--- a/nifi-nar-bundles/nifi-standard-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-bundle/pom.xml
@@ -33,9 +33,9 @@
         <module>nifi-standard-web-test-utils</module>
     </modules>
     <properties>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
         <yammer.metrics.version>2.2.0</yammer.metrics.version>
-        <jolt.version>0.1.0</jolt.version>
+        <jolt.version>0.1.1</jolt.version>
         <jersey.version>2.26</jersey.version>
     </properties>
     <dependencyManagement>
@@ -170,27 +170,27 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-compress</artifactId>
-                <version>1.16.1</version>
+                <version>1.18</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-lang3</artifactId>
-                <version>3.7</version>
+                <version>3.8.1</version>
             </dependency>
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcprov-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcpg-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcpkix-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
             <dependency>
                 <groupId>commons-codec</groupId>
@@ -230,7 +230,7 @@
             <dependency>
                 <groupId>org.tukaani</groupId>
                 <artifactId>xz</artifactId>
-                <version>1.6</version>
+                <version>1.8</version>
             </dependency>
             <dependency>
                 <groupId>net.sf.saxon</groupId>
@@ -250,12 +250,12 @@
             <dependency>
                 <groupId>org.apache.activemq</groupId>
                 <artifactId>activemq-client</artifactId>
-                <version>5.15.3</version>
+                <version>5.15.6</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.activemq</groupId>
                 <artifactId>activemq-broker</artifactId>
-                <version>5.15.3</version>
+                <version>5.15.6</version>
                 <scope>test</scope>
             </dependency>
             <dependency>
@@ -271,7 +271,7 @@
             <dependency>
                 <groupId>org.apache.tika</groupId>
                 <artifactId>tika-core</artifactId>
-                <version>1.17</version>
+                <version>1.19</version>
             </dependency>
             <dependency>
                 <groupId>com.squareup.okhttp3</groupId>
@@ -281,7 +281,7 @@
             <dependency>
                 <groupId>com.burgstaller</groupId>
                 <artifactId>okhttp-digest</artifactId>
-                <version>1.13</version>
+                <version>1.18</version>
                 <type>jar</type>
             </dependency>
             <dependency>
@@ -297,7 +297,7 @@
             <dependency>
                 <groupId>org.xerial.snappy</groupId>
                 <artifactId>snappy-java</artifactId>
-                <version>1.1.2</version>
+                <version>1.1.7.2</version>
             </dependency>
             <dependency>
                 <groupId>com.h2database</groupId>
@@ -330,7 +330,7 @@
             <dependency>
                 <groupId>com.github.wnameless</groupId>
                 <artifactId>json-flattener</artifactId>
-                <version>0.5.0</version>
+                <version>0.6.0</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.bval</groupId>
@@ -373,7 +373,7 @@
             <dependency>
                 <groupId>org.apache.calcite</groupId>
                 <artifactId>calcite-core</artifactId>
-                <version>1.12.0</version>
+                <version>1.17.0</version>
             </dependency>
             <dependency>
                 <groupId>org.apache.avro</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
index 41f29fc..1423ea3 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
@@ -93,7 +93,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.4</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.slf4j</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-hwx-schema-registry-bundle/nifi-hwx-schema-registry-service/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hwx-schema-registry-bundle/nifi-hwx-schema-registry-service/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hwx-schema-registry-bundle/nifi-hwx-schema-registry-service/pom.xml
index e923cf1..aa521ca 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hwx-schema-registry-bundle/nifi-hwx-schema-registry-service/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-hwx-schema-registry-bundle/nifi-hwx-schema-registry-service/pom.xml
@@ -28,8 +28,8 @@ limitations under the License.
     <artifactId>nifi-hwx-schema-registry-service</artifactId>
     <packaging>jar</packaging>
     <properties>
-        <hwx.registry.version>0.5.1</hwx.registry.version>
-        <jackson.version>2.9.5</jackson.version>
+        <hwx.registry.version>0.5.3</hwx.registry.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
     <dependencies>
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/pom.xml
index 7fd6336..26a8839 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-lookup-services-bundle/nifi-lookup-services/pom.xml
@@ -53,12 +53,12 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-configuration2</artifactId>
-            <version>2.1.1</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-csv</artifactId>
-            <version>1.4</version>
+            <version>1.5</version>
         </dependency>
         <dependency>
             <groupId>commons-beanutils</groupId>
@@ -143,7 +143,7 @@
         <dependency>
             <groupId>com.burgstaller</groupId>
             <artifactId>okhttp-digest</artifactId>
-            <version>1.13</version>
+            <version>1.18</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/pom.xml
index 30694b5..260fabb 100755
--- a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/pom.xml
@@ -59,17 +59,12 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-csv</artifactId>
-            <version>1.4</version>
+            <version>1.5</version>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.dataformat</groupId>
@@ -114,6 +109,11 @@
             <version>2.2.1</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
+        </dependency>
     </dependencies>
     <build>
         <plugins>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestCSVRecordReader.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestCSVRecordReader.java b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestCSVRecordReader.java
index a4415b7..5095767 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestCSVRecordReader.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestCSVRecordReader.java
@@ -18,7 +18,7 @@
 package org.apache.nifi.csv;
 
 import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.serialization.MalformedRecordException;
 import org.apache.nifi.serialization.SimpleRecordSchema;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java
index 66486ee..d83cbfb 100644
--- a/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java
+++ b/nifi-nar-bundles/nifi-standard-services/nifi-record-serialization-services-bundle/nifi-record-serialization-services/src/test/java/org/apache/nifi/csv/TestJacksonCSVRecordReader.java
@@ -18,7 +18,7 @@
 package org.apache.nifi.csv;
 
 import org.apache.commons.csv.CSVFormat;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.logging.ComponentLog;
 import org.apache.nifi.serialization.MalformedRecordException;
 import org.apache.nifi.serialization.SimpleRecordSchema;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/pom.xml b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/pom.xml
index 70e8833..4ce6566 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/pom.xml
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-processor/pom.xml
@@ -35,7 +35,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-ui/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-ui/pom.xml b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-ui/pom.xml
index bcad29d..309bf3a 100644
--- a/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-ui/pom.xml
+++ b/nifi-nar-bundles/nifi-update-attribute-bundle/nifi-update-attribute-ui/pom.xml
@@ -25,7 +25,7 @@
     <properties>
         <maven.javadoc.skip>true</maven.javadoc.skip>
         <source.skip>true</source.skip>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
         <jersey.version>2.26</jersey.version>
     </properties>
     <build>
@@ -137,7 +137,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-admin/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-admin/pom.xml b/nifi-toolkit/nifi-toolkit-admin/pom.xml
index a6f4685..e7c9ca2 100644
--- a/nifi-toolkit/nifi-toolkit-admin/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-admin/pom.xml
@@ -24,7 +24,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>
@@ -39,7 +39,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -118,7 +118,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-compress</artifactId>
-            <version>1.16.1</version>
+            <version>1.18</version>
         </dependency>
         <!-- Spock testing dependencies-->
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-cli/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-cli/pom.xml b/nifi-toolkit/nifi-toolkit-cli/pom.xml
index 5728eeb..8fb718a 100644
--- a/nifi-toolkit/nifi-toolkit-cli/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-cli/pom.xml
@@ -48,7 +48,7 @@
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -69,7 +69,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.jline</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-encrypt-config/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-encrypt-config/pom.xml b/nifi-toolkit/nifi-toolkit-encrypt-config/pom.xml
index 764fcbf..a292518 100644
--- a/nifi-toolkit/nifi-toolkit-encrypt-config/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-encrypt-config/pom.xml
@@ -54,7 +54,7 @@
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>com.github.stefanbirkner</groupId>
@@ -77,7 +77,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-configuration2</artifactId>
-            <version>2.0</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>commons-beanutils</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-s2s/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-s2s/pom.xml b/nifi-toolkit/nifi-toolkit-s2s/pom.xml
index b38baae..3a9c4ae 100644
--- a/nifi-toolkit/nifi-toolkit-s2s/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-s2s/pom.xml
@@ -24,7 +24,7 @@
     <description>Site-to-site cli</description>
 
     <properties>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
 
     <dependencies>
@@ -36,7 +36,7 @@
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-tls/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-tls/pom.xml b/nifi-toolkit/nifi-toolkit-tls/pom.xml
index f2b813f..445e46c 100644
--- a/nifi-toolkit/nifi-toolkit-tls/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-tls/pom.xml
@@ -24,7 +24,7 @@
     <description>Tooling to make tls configuration easier</description>
     <properties>
         <jersey.version>2.26</jersey.version>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
     <dependencies>
         <dependency>
@@ -52,17 +52,17 @@
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcpkix-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
         </dependency>
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcprov-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
         </dependency>
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-toolkit/nifi-toolkit-zookeeper-migrator/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-toolkit/nifi-toolkit-zookeeper-migrator/pom.xml b/nifi-toolkit/nifi-toolkit-zookeeper-migrator/pom.xml
index 5d8b97f..ce61401 100644
--- a/nifi-toolkit/nifi-toolkit-zookeeper-migrator/pom.xml
+++ b/nifi-toolkit/nifi-toolkit-zookeeper-migrator/pom.xml
@@ -28,7 +28,7 @@
         <dependency>
             <groupId>commons-cli</groupId>
             <artifactId>commons-cli</artifactId>
-            <version>1.3.1</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>com.google.guava</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 80662ee..7d433b4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -433,7 +433,7 @@
                         <dependency>
                             <groupId>com.puppycrawl.tools</groupId>
                             <artifactId>checkstyle</artifactId>
-                            <version>8.5</version>
+                            <version>8.12</version>
                         </dependency>
                     </dependencies>
                 </plugin>


[2/4] nifi git commit: NIFI-4806 updated tika and a ton of other deps as found by dependency versions plugin

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
deleted file mode 100644
index f031710..0000000
--- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/TestPersistentProvenanceRepository.java
+++ /dev/null
@@ -1,2356 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.provenance;
-
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.core.SimpleAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.FSDirectory;
-import org.apache.nifi.authorization.AccessDeniedException;
-import org.apache.nifi.authorization.user.NiFiUser;
-import org.apache.nifi.events.EventReporter;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.provenance.index.EventIndexSearcher;
-import org.apache.nifi.provenance.index.EventIndexWriter;
-import org.apache.nifi.provenance.lineage.EventNode;
-import org.apache.nifi.provenance.lineage.Lineage;
-import org.apache.nifi.provenance.lineage.LineageEdge;
-import org.apache.nifi.provenance.lineage.LineageNode;
-import org.apache.nifi.provenance.lineage.LineageNodeType;
-import org.apache.nifi.provenance.lucene.CachingIndexManager;
-import org.apache.nifi.provenance.lucene.IndexManager;
-import org.apache.nifi.provenance.lucene.IndexingAction;
-import org.apache.nifi.provenance.search.Query;
-import org.apache.nifi.provenance.search.QueryResult;
-import org.apache.nifi.provenance.search.QuerySubmission;
-import org.apache.nifi.provenance.search.SearchTerms;
-import org.apache.nifi.provenance.search.SearchableField;
-import org.apache.nifi.provenance.serialization.RecordReader;
-import org.apache.nifi.provenance.serialization.RecordReaders;
-import org.apache.nifi.provenance.serialization.RecordWriter;
-import org.apache.nifi.provenance.serialization.RecordWriters;
-import org.apache.nifi.reporting.Severity;
-import org.apache.nifi.util.NiFiProperties;
-import org.apache.nifi.util.file.FileUtils;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Ignore;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
-import org.junit.rules.TestName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.lang.reflect.Field;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.UUID;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.stream.Collectors;
-import java.util.zip.GZIPOutputStream;
-
-import static org.apache.nifi.provenance.TestUtil.createFlowFile;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeFalse;
-import static org.mockito.Mockito.mock;
-
-public class TestPersistentProvenanceRepository {
-
-    @Rule
-    public TestName name = new TestName();
-
-    @ClassRule
-    public static TemporaryFolder tempFolder = new TemporaryFolder();
-
-    private PersistentProvenanceRepository repo;
-    private static RepositoryConfiguration config;
-
-    public static final int DEFAULT_ROLLOVER_MILLIS = 2000;
-    private EventReporter eventReporter;
-    private List<ReportedEvent> reportedEvents = Collections.synchronizedList(new ArrayList<ReportedEvent>());
-
-    private static int headerSize;
-    private static int recordSize;
-    private static int recordSize2;
-
-    private static RepositoryConfiguration createConfiguration() {
-        config = new RepositoryConfiguration();
-        config.addStorageDirectory("1", new File("target/storage/" + UUID.randomUUID().toString()));
-        config.setCompressOnRollover(true);
-        config.setMaxEventFileLife(2000L, TimeUnit.SECONDS);
-        config.setCompressionBlockBytes(100);
-        return config;
-    }
-
-    @BeforeClass
-    public static void setLogLevel() {
-        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
-    }
-
-    @BeforeClass
-    public static void findJournalSizes() throws IOException {
-        // determine header and record size
-
-        final Map<String, String> attributes = new HashMap<>();
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-        final ProvenanceEventRecord record = builder.build();
-        builder.setComponentId("2345");
-        final ProvenanceEventRecord record2 = builder.build();
-
-        final File tempRecordFile = tempFolder.newFile("record.tmp");
-        System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());
-
-        final AtomicLong idGenerator = new AtomicLong(0L);
-        final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false);
-        writer.writeHeader(12345L);
-        writer.flush();
-        headerSize = Long.valueOf(tempRecordFile.length()).intValue();
-        writer.writeRecord(record);
-        writer.flush();
-        recordSize = Long.valueOf(tempRecordFile.length()).intValue() - headerSize;
-        writer.writeRecord(record2);
-        writer.flush();
-        recordSize2 = Long.valueOf(tempRecordFile.length()).intValue() - headerSize - recordSize;
-        writer.close();
-
-        System.out.println("headerSize =" + headerSize);
-        System.out.println("recordSize =" + recordSize);
-        System.out.println("recordSize2=" + recordSize2);
-    }
-
-    @Before
-    public void printTestName() {
-        System.out.println("\n\n\n***********************  " + name.getMethodName() + "  *****************************");
-
-        reportedEvents.clear();
-        eventReporter = new EventReporter() {
-            private static final long serialVersionUID = 1L;
-
-            @Override
-            public void reportEvent(Severity severity, String category, String message) {
-                reportedEvents.add(new ReportedEvent(severity, category, message));
-                System.out.println(severity + " : " + category + " : " + message);
-            }
-        };
-    }
-
-    @After
-    public void closeRepo() throws IOException {
-        if (repo == null) {
-            return;
-        }
-
-        try {
-            repo.close();
-        } catch (final IOException ioe) {
-        }
-
-        // Delete all of the storage files. We do this in order to clean up the tons of files that
-        // we create but also to ensure that we have closed all of the file handles. If we leave any
-        // streams open, for instance, this will throw an IOException, causing our unit test to fail.
-        if (config != null) {
-            for (final File storageDir : config.getStorageDirectories().values()) {
-                int i;
-                for (i = 0; i < 3; i++) {
-                    try {
-                        FileUtils.deleteFile(storageDir, true);
-                        break;
-                    } catch (final IOException ioe) {
-                        // if there is a virus scanner, etc. running in the background we may not be able to
-                        // delete the file. Wait a sec and try again.
-                        if (i == 2) {
-                            throw ioe;
-                        } else {
-                            try {
-                                System.out.println("file: " + storageDir.toString() + " exists=" + storageDir.exists());
-                                FileUtils.deleteFile(storageDir, true);
-                                break;
-                            } catch (final IOException ioe2) {
-                                // if there is a virus scanner, etc. running in the background we may not be able to
-                                // delete the file. Wait a sec and try again.
-                                if (i == 2) {
-                                    throw ioe2;
-                                } else {
-                                    try {
-                                        Thread.sleep(1000L);
-                                    } catch (final InterruptedException ie) {
-                                    }
-                                }
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    private EventReporter getEventReporter() {
-        return eventReporter;
-    }
-
-    @Test
-    @Ignore("For local testing of performance only")
-    public void testPerformance() throws IOException, InterruptedException {
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileCapacity(1024 * 1024 * 1024L);
-        config.setMaxEventFileLife(20, TimeUnit.SECONDS);
-        config.setCompressOnRollover(false);
-        config.setJournalCount(10);
-        config.setQueryThreadPoolSize(10);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("uuid", UUID.randomUUID().toString());
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-        final ProvenanceEventRecord record = builder.build();
-
-        final Runnable r = new Runnable() {
-            @Override
-            public void run() {
-                for (int i = 0; i < 100000; i++) {
-                    repo.registerEvent(record);
-                }
-            }
-        };
-
-        final Thread[] threads = new Thread[10];
-        for (int i = 0; i < threads.length; i++) {
-            threads[i] = new Thread(r);
-        }
-
-        final long start = System.nanoTime();
-        for (final Thread t : threads) {
-            t.start();
-        }
-
-        for (final Thread t : threads) {
-            t.join();
-        }
-        final long nanos = System.nanoTime() - start;
-
-        final long millis = TimeUnit.NANOSECONDS.toMillis(nanos);
-        final long recsPerMilli = 1000000 / millis;
-        final long recsPerSec = recsPerMilli * 1000;
-        System.out.println(millis + " millis to insert 1M records (" + recsPerSec + " recs/sec)");
-
-        System.out.println("Closing and re-initializing");
-        repo.close();
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-        System.out.println("Re-initialized");
-
-        final long fetchStart = System.nanoTime();
-        final List<ProvenanceEventRecord> records = repo.getEvents(0L, 1000000);
-        final long fetchMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fetchStart);
-        assertEquals(1000000, records.size());
-        final long fetchRecsPerMilli = 1000000 / fetchMillis;
-        final long fetchRecsPerSec = fetchRecsPerMilli * 1000L;
-        System.out.println(fetchMillis + " millis to fetch 1M records (" + fetchRecsPerSec + " recs/sec)");
-
-        repo.close();
-    }
-
-    private NiFiProperties properties = new NiFiProperties() {
-        @Override
-        public String getProperty(String key) {
-            if (key.equals(NiFiProperties.PROVENANCE_COMPRESS_ON_ROLLOVER)) {
-                return "true";
-            } else if (key.equals(NiFiProperties.PROVENANCE_ROLLOVER_TIME)) {
-                return "2000 millis";
-            } else if (key.equals(NiFiProperties.PROVENANCE_REPO_DIRECTORY_PREFIX + ".default")) {
-                createConfiguration();
-                return config.getStorageDirectories().values().iterator().next().getAbsolutePath();
-            } else {
-                return null;
-            }
-        }
-
-        @Override
-        public Set<String> getPropertyKeys() {
-            return new HashSet<>(Arrays.asList(
-                    NiFiProperties.PROVENANCE_COMPRESS_ON_ROLLOVER,
-                    NiFiProperties.PROVENANCE_ROLLOVER_TIME,
-                    NiFiProperties.PROVENANCE_REPO_DIRECTORY_PREFIX + ".default"));
-        }
-    };
-
-    @Test
-    public void constructorNoArgs() {
-        TestablePersistentProvenanceRepository tppr = new TestablePersistentProvenanceRepository();
-        assertEquals(0, tppr.getRolloverCheckMillis());
-    }
-
-    @Test
-    public void constructorNiFiProperties() throws IOException {
-        TestablePersistentProvenanceRepository tppr = new TestablePersistentProvenanceRepository(properties);
-        assertEquals(10000, tppr.getRolloverCheckMillis());
-    }
-
-    @Test
-    public void constructorConfig() throws IOException {
-        RepositoryConfiguration configuration = RepositoryConfiguration.create(properties);
-        new TestablePersistentProvenanceRepository(configuration, 20000);
-    }
-
-    @Test
-    public void testAddAndRecover() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileCapacity(1L);
-        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("uuid", UUID.randomUUID().toString());
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-        final ProvenanceEventRecord record = builder.build();
-
-        for (int i = 0; i < 10; i++) {
-            repo.registerEvent(record);
-        }
-
-        Thread.sleep(1000L);
-
-        repo.close();
-        Thread.sleep(500L); // Give the repo time to shutdown (i.e., close all file handles, etc.)
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-        final List<ProvenanceEventRecord> recoveredRecords = repo.getEvents(0L, 12);
-
-        //just test however many were actually recovered since it is timing sensitive
-        final int numRecovered = recoveredRecords.size();
-        for (int i = 0; i < numRecovered; i++) {
-            final ProvenanceEventRecord recovered = recoveredRecords.get(i);
-            assertEquals(i, recovered.getEventId());
-            assertEquals("nifi://unit-test", recovered.getTransitUri());
-            assertEquals(ProvenanceEventType.RECEIVE, recovered.getEventType());
-            assertEquals(attributes, recovered.getAttributes());
-        }
-    }
-
-    @Test
-    public void testAddToMultipleLogsAndRecover() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final List<SearchableField> searchableFields = new ArrayList<>();
-        searchableFields.add(SearchableFields.ComponentID);
-
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(2, TimeUnit.SECONDS);
-        config.setSearchableFields(searchableFields);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("uuid", UUID.randomUUID().toString());
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-        final ProvenanceEventRecord record = builder.build();
-
-        for (int i = 0; i < 10; i++) {
-            repo.registerEvent(record);
-        }
-
-        builder.setComponentId("XXXX"); // create a different component id so that we can make sure we query this record.
-
-        attributes.put("uuid", "11111111-1111-1111-1111-111111111111");
-
-        builder.fromFlowFile(createFlowFile(11L, 11L, attributes));
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-        Thread.sleep(500L); // Give the repo time to shutdown (i.e., close all file handles, etc.)
-
-        // Create a new repo and add another record with component id XXXX so that we can ensure that it's added to a different
-        // log file than the previous one.
-        attributes.put("uuid", "22222222-2222-2222-2222-222222222222");
-        builder.fromFlowFile(createFlowFile(11L, 11L, attributes));
-        repo.registerEvent(builder.build());
-        repo.waitForRollover();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "XXXX"));
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(2, result.getMatchingEvents().size());
-        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
-            System.out.println(match);
-        }
-    }
-
-    @Test
-    public void testIndexOnRolloverWithImmenseAttribute() throws IOException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setSearchableAttributes(SearchableFieldParser.extractSearchableFields("immense", false));
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        int immenseAttrSize = 33000; // must be greater than 32766 for a meaningful test
-        StringBuilder immenseBldr = new StringBuilder(immenseAttrSize);
-        for (int i = 0; i < immenseAttrSize; i++) {
-            immenseBldr.append('0');
-        }
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-        attributes.put("immense", immenseBldr.toString());
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.newSearchableAttribute("immense"), "000*"));
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-    }
-
-    @Test
-    public void testIndexOnRolloverAndSubsequentSearch() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "000000*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
-            System.out.println(match);
-        }
-    }
-
-    @Test
-    public void testCompressOnRollover() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setCompressOnRollover(true);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-        final File storageDir = config.getStorageDirectories().values().iterator().next();
-        final File compressedLogFile = new File(storageDir, "0.prov.gz");
-        assertTrue(compressedLogFile.exists());
-    }
-
-    @Test
-    public void testIndexAndCompressOnRolloverAndSubsequentSearch() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(30, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L * 10);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L * 10);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "10000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        // query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "00000000-0000-0000-0000*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
-            System.out.println(match);
-        }
-
-        Thread.sleep(2000L);
-
-        config.setMaxStorageCapacity(100L);
-        config.setMaxRecordLife(500, TimeUnit.MILLISECONDS);
-        repo.purgeOldEvents();
-        Thread.sleep(2000L);
-
-        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
-        assertTrue(newRecordSet.getMatchingEvents().isEmpty());
-    }
-
-    @Test(timeout = 10000)
-    public void testModifyIndexWhileSearching() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(30, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L * 10);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L * 10);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        final CountDownLatch obtainIndexSearcherLatch = new CountDownLatch(2);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            private CachingIndexManager wrappedManager = null;
-
-            // Create an IndexManager that adds a delay before returning the Index Searcher.
-            @Override
-            protected synchronized CachingIndexManager getIndexManager() {
-                if (wrappedManager == null) {
-                    final IndexManager mgr = super.getIndexManager();
-                    final Logger logger = LoggerFactory.getLogger("IndexManager");
-
-                    wrappedManager = new CachingIndexManager() {
-                        final AtomicInteger indexSearcherCount = new AtomicInteger(0);
-
-                        @Override
-                        public EventIndexSearcher borrowIndexSearcher(File indexDir) throws IOException {
-                            final EventIndexSearcher searcher = mgr.borrowIndexSearcher(indexDir);
-                            final int idx = indexSearcherCount.incrementAndGet();
-                            obtainIndexSearcherLatch.countDown();
-
-                            // The first searcher should sleep for 3 seconds. The second searcher should
-                            // sleep for 5 seconds. This allows us to have two threads each obtain a Searcher
-                            // and then have one of them finish searching and close the searcher if it's poisoned while the
-                            // second thread is still holding the searcher
-                            try {
-                                if (idx == 1) {
-                                    Thread.sleep(3000L);
-                                } else {
-                                    Thread.sleep(5000L);
-                                }
-                            } catch (InterruptedException e) {
-                                throw new IOException("Interrupted", e);
-                            }
-
-                            logger.info("Releasing index searcher");
-                            return searcher;
-                        }
-
-                        @Override
-                        public EventIndexWriter borrowIndexWriter(File indexingDirectory) throws IOException {
-                            return mgr.borrowIndexWriter(indexingDirectory);
-                        }
-
-                        @Override
-                        public void close() throws IOException {
-                            mgr.close();
-                        }
-
-                        @Override
-                        public boolean removeIndex(File indexDirectory) {
-                            mgr.removeIndex(indexDirectory);
-                            return true;
-                        }
-
-                        @Override
-                        public void returnIndexSearcher(EventIndexSearcher searcher) {
-                            mgr.returnIndexSearcher(searcher);
-                        }
-
-                        @Override
-                        public void returnIndexWriter(EventIndexWriter writer) {
-                            mgr.returnIndexWriter(writer);
-                        }
-                    };
-                }
-
-                return wrappedManager;
-            }
-        };
-
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "10000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        // Perform a query. This will ensure that an IndexSearcher is created and cached.
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
-        query.setMaxResults(100);
-
-        // Run a query in a background thread. When this thread goes to obtain the IndexSearcher, it will have a 5 second delay.
-        // That delay will occur as the main thread is updating the index. This should result in the search creating a new Index Reader
-        // that can properly query the index.
-        final int numThreads = 2;
-        final CountDownLatch performSearchLatch = new CountDownLatch(numThreads);
-        final Runnable searchRunnable = new Runnable() {
-            @Override
-            public void run() {
-                QueryResult result;
-                try {
-                    result = repo.queryEvents(query, createUser());
-                } catch (IOException e) {
-                    e.printStackTrace();
-                    Assert.fail(e.toString());
-                    return;
-                }
-
-                System.out.println("Finished search: " + result);
-                performSearchLatch.countDown();
-            }
-        };
-
-        // Kick off the searcher threads
-        for (int i = 0; i < numThreads; i++) {
-            final Thread searchThread = new Thread(searchRunnable);
-            searchThread.start();
-        }
-
-        // Wait until we've obtained the Index Searchers before modifying the index.
-        obtainIndexSearcherLatch.await();
-
-        // add more events to the repo
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        // Force a rollover to occur. This will modify the index.
-        repo.rolloverWithLock(true);
-
-        // Wait for the repository to roll over.
-        repo.waitForRollover();
-
-        // Wait for the searches to complete.
-        performSearchLatch.await();
-    }
-
-    @Test
-    public void testIndexAndCompressOnRolloverAndSubsequentSearchMultipleStorageDirs() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.addStorageDirectory("2", new File("target/storage/" + UUID.randomUUID().toString()));
-        config.setMaxRecordLife(30, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        for (int j = 0; j < 3; j++) {
-            attributes.put("iteration", String.valueOf(j));
-
-            builder.setEventTime(System.currentTimeMillis());
-            builder.setEventType(ProvenanceEventType.RECEIVE);
-            builder.setTransitUri("nifi://unit-test");
-            builder.setComponentId("1234");
-            builder.setComponentType("dummy processor");
-            builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-
-            for (int i = 0; i < 10; i++) {
-                String uuidSuffix = String.valueOf(i + j * 10);
-                if (uuidSuffix.length() < 2) {
-                    uuidSuffix = "0" + uuidSuffix;
-                }
-
-                attributes.put("uuid", "00000000-0000-0000-0000-0000000000" + uuidSuffix);
-                builder.fromFlowFile(createFlowFile(i + j * 10, 3000L, attributes));
-                repo.registerEvent(builder.build());
-            }
-
-            repo.waitForRollover();
-        }
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
-        query.setMaxResults(100);
-
-        final QuerySubmission submission = repo.submitQuery(query, createUser());
-        while (!submission.getResult().isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        assertEquals(30, submission.getResult().getMatchingEvents().size());
-        final Map<String, Integer> counts = new HashMap<>();
-        for (final ProvenanceEventRecord match : submission.getResult().getMatchingEvents()) {
-            System.out.println(match);
-
-            final String index = match.getAttributes().get("iteration");
-            Integer count = counts.get(index);
-            if (count == null) {
-                count = 0;
-            }
-            counts.put(index, count + 1);
-        }
-
-        assertEquals(3, counts.size());
-        assertEquals(10, counts.get("0").intValue());
-        assertEquals(10, counts.get("1").intValue());
-        assertEquals(10, counts.get("2").intValue());
-
-        config.setMaxRecordLife(1, TimeUnit.MILLISECONDS);
-
-        repo.purgeOldEvents();
-
-        Thread.sleep(2000L); // purge is async. Give it time to do its job.
-
-        query.setMaxResults(100);
-        final QuerySubmission noResultSubmission = repo.submitQuery(query, createUser());
-        while (!noResultSubmission.getResult().isFinished()) {
-            Thread.sleep(10L);
-        }
-
-        assertEquals(0, noResultSubmission.getResult().getTotalHitCount());
-    }
-
-    @Test
-    public void testIndexAndCompressOnRolloverAndSubsequentEmptySearch() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(30, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            repo.registerEvent(builder.build());
-        }
-
-        // Give time for rollover to happen
-        repo.waitForRollover();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
-            System.out.println(match);
-        }
-
-        Thread.sleep(2000L);
-
-        config.setMaxStorageCapacity(100L);
-        config.setMaxRecordLife(500, TimeUnit.MILLISECONDS);
-        repo.purgeOldEvents();
-
-        Thread.sleep(1000L);
-
-        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
-        assertTrue(newRecordSet.getMatchingEvents().isEmpty());
-    }
-
-    @Test
-    public void testLineageReceiveDrop() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(3, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000001";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("uuid", uuid);
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        repo.registerEvent(builder.build());
-
-        builder.setEventTime(System.currentTimeMillis() + 1);
-        builder.setEventType(ProvenanceEventType.DROP);
-        builder.setTransitUri(null);
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-
-        final Lineage lineage = repo.computeLineage(uuid, createUser());
-        assertNotNull(lineage);
-
-        // Nodes should consist of a RECEIVE followed by FlowFileNode, followed by a DROP
-        final List<LineageNode> nodes = lineage.getNodes();
-        final List<LineageEdge> edges = lineage.getEdges();
-        assertEquals(3, nodes.size());
-
-        for (final LineageEdge edge : edges) {
-            if (edge.getSource().getNodeType() == LineageNodeType.FLOWFILE_NODE) {
-                assertTrue(edge.getDestination().getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE);
-                assertTrue(((EventNode) edge.getDestination()).getEventType() == ProvenanceEventType.DROP);
-            } else {
-                assertTrue(((EventNode) edge.getSource()).getEventType() == ProvenanceEventType.RECEIVE);
-                assertTrue(edge.getDestination().getNodeType() == LineageNodeType.FLOWFILE_NODE);
-            }
-        }
-    }
-
-    @Test
-    public void testLineageReceiveDropAsync() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(3, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000001";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("uuid", uuid);
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        repo.registerEvent(builder.build());
-
-        builder.setEventTime(System.currentTimeMillis() + 1);
-        builder.setEventType(ProvenanceEventType.DROP);
-        builder.setTransitUri(null);
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-
-        final AsyncLineageSubmission submission = repo.submitLineageComputation(uuid, createUser());
-        while (!submission.getResult().isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        assertNotNull(submission);
-
-        // Nodes should consist of a RECEIVE followed by FlowFileNode, followed by a DROP
-        final List<LineageNode> nodes = submission.getResult().getNodes();
-        final List<LineageEdge> edges = submission.getResult().getEdges();
-        assertEquals(3, nodes.size());
-
-        for (final LineageEdge edge : edges) {
-            if (edge.getSource().getNodeType() == LineageNodeType.FLOWFILE_NODE) {
-                assertTrue(edge.getDestination().getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE);
-                assertTrue(((EventNode) edge.getDestination()).getEventType() == ProvenanceEventType.DROP);
-            } else {
-                assertTrue(((EventNode) edge.getSource()).getEventType() == ProvenanceEventType.RECEIVE);
-                assertTrue(edge.getDestination().getNodeType() == LineageNodeType.FLOWFILE_NODE);
-            }
-        }
-    }
-
-    @Test
-    public void testLineageManyToOneSpawn() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(3, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String childId = "00000000-0000-0000-0000-000000000000";
-
-        final String parentId1 = "00000000-0000-0000-0001-000000000001";
-        final String parentId2 = "00000000-0000-0000-0001-000000000002";
-        final String parentId3 = "00000000-0000-0000-0001-000000000003";
-
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("uuid", childId);
-        attributes.put("filename", "file-" + childId);
-
-        final StandardProvenanceEventRecord.Builder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.FORK);
-        attributes.put("uuid", childId);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        builder.addChildUuid(childId);
-        builder.addParentUuid(parentId1);
-        builder.addParentUuid(parentId2);
-        builder.addParentUuid(parentId3);
-
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-
-        final Lineage lineage = repo.computeLineage(childId, createUser());
-        assertNotNull(lineage);
-
-        // these are not necessarily accurate asserts....
-        final List<LineageNode> nodes = lineage.getNodes();
-        final List<LineageEdge> edges = lineage.getEdges();
-        assertEquals(2, nodes.size());
-        assertEquals(1, edges.size());
-    }
-
-    @Test
-    public void testLineageManyToOneSpawnAsync() throws IOException, InterruptedException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(3, TimeUnit.SECONDS);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String childId = "00000000-0000-0000-0000-000000000000";
-
-        final String parentId1 = "00000000-0000-0000-0001-000000000001";
-        final String parentId2 = "00000000-0000-0000-0001-000000000002";
-        final String parentId3 = "00000000-0000-0000-0001-000000000003";
-
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("uuid", childId);
-        attributes.put("filename", "file-" + childId);
-
-        final StandardProvenanceEventRecord.Builder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.FORK);
-        attributes.put("uuid", childId);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        builder.addChildUuid(childId);
-        builder.addParentUuid(parentId1);
-        builder.addParentUuid(parentId2);
-        builder.addParentUuid(parentId3);
-
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-
-        final AsyncLineageSubmission submission = repo.submitLineageComputation(childId, createUser());
-        while (!submission.getResult().isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        // these are not accurate asserts....
-        final List<LineageNode> nodes = submission.getResult().getNodes();
-        final List<LineageEdge> edges = submission.getResult().getEdges();
-        assertEquals(2, nodes.size());
-        assertEquals(1, edges.size());
-    }
-
-    @Test
-    public void testCorrectProvenanceEventIdOnRestore() throws IOException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        repo.close();
-
-        final PersistentProvenanceRepository secondRepo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        secondRepo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        try {
-            final ProvenanceEventRecord event11 = builder.build();
-            secondRepo.registerEvent(event11);
-            secondRepo.waitForRollover();
-            final ProvenanceEventRecord event11Retrieved = secondRepo.getEvent(10L, null);
-            assertNotNull(event11Retrieved);
-            assertEquals(10, event11Retrieved.getEventId());
-        } finally {
-            secondRepo.close();
-        }
-    }
-
-    /**
-     * Here the event file is simply corrupted by virtue of not having any event
-     * records while having correct headers
-     */
-    @Test
-    public void testWithWithEventFileMissingRecord() throws Exception {
-        assumeFalse(isWindowsEnvironment());
-        File eventFile = this.prepCorruptedEventFileTests();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "foo-*"));
-        query.setMaxResults(100);
-
-        DataOutputStream in = new DataOutputStream(new GZIPOutputStream(new FileOutputStream(eventFile)));
-        in.writeUTF("BlahBlah");
-        in.writeInt(4);
-        in.close();
-        assertTrue(eventFile.exists());
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-    }
-
-    /**
-     * Here the event file is simply corrupted by virtue of being empty (0
-     * bytes)
-     */
-    @Test
-    public void testWithWithEventFileCorrupted() throws Exception {
-        assumeFalse(isWindowsEnvironment());
-        File eventFile = this.prepCorruptedEventFileTests();
-
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "foo-*"));
-        query.setMaxResults(100);
-        DataOutputStream in = new DataOutputStream(new GZIPOutputStream(new FileOutputStream(eventFile)));
-        in.close();
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(10, result.getMatchingEvents().size());
-    }
-
-    private File prepCorruptedEventFileTests() throws Exception {
-        RepositoryConfiguration config = createConfiguration();
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10);
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        String uuid = UUID.randomUUID().toString();
-        for (int i = 0; i < 20; i++) {
-            ProvenanceEventRecord record = repo.eventBuilder().fromFlowFile(mock(FlowFile.class))
-                    .setEventType(ProvenanceEventType.CREATE).setComponentId("foo-" + i).setComponentType("myComponent")
-                    .setFlowFileUUID(uuid).build();
-            repo.registerEvent(record);
-            if (i == 9) {
-                repo.waitForRollover();
-                Thread.sleep(2000L);
-            }
-        }
-        repo.waitForRollover();
-        File eventFile = new File(config.getStorageDirectories().values().iterator().next(), "10.prov.gz");
-        assertTrue(eventFile.delete());
-        return eventFile;
-    }
-
-    @Test
-    @Ignore("This test relies too much on timing of background events by using Thread.sleep().")
-    public void testIndexDirectoryRemoved() throws InterruptedException, IOException, ParseException {
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(5, TimeUnit.MINUTES);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10); // force new index to be created for each rollover
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        Thread.sleep(2000L);
-
-        final FileFilter indexFileFilter = file -> file.getName().startsWith("index");
-        final int numIndexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter).length;
-        assertEquals(1, numIndexDirs);
-
-        // add more records so that we will create a new index
-        final long secondBatchStartTime = System.currentTimeMillis();
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000001" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            builder.setEventTime(System.currentTimeMillis());
-            repo.registerEvent(builder.build());
-        }
-
-        // wait for indexing to happen
-        repo.waitForRollover();
-
-        // verify we get the results expected
-        final Query query = new Query(UUID.randomUUID().toString());
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
-        query.setMaxResults(100);
-
-        final QueryResult result = repo.queryEvents(query, createUser());
-        assertEquals(20, result.getMatchingEvents().size());
-
-        // Ensure index directories exists
-        File[] indexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter);
-        assertEquals(2, indexDirs.length);
-
-        // expire old events and indexes
-        final long timeSinceSecondBatch = System.currentTimeMillis() - secondBatchStartTime;
-        config.setMaxRecordLife(timeSinceSecondBatch + 1000L, TimeUnit.MILLISECONDS);
-        repo.purgeOldEvents();
-        Thread.sleep(2000L);
-
-        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
-        assertEquals(10, newRecordSet.getMatchingEvents().size());
-
-        // Ensure that one index directory is gone
-        indexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter);
-        assertEquals(1, indexDirs.length);
-    }
-
-    @Test
-    public void testNotAuthorizedGetSpecificEvent() throws IOException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(5, TimeUnit.MINUTES);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10); // force new index to be created for each rollover
-
-        final AccessDeniedException expectedException = new AccessDeniedException("Unit Test - Intentionally Thrown");
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            @Override
-            public void authorize(ProvenanceEventRecord event, NiFiUser user) {
-                throw expectedException;
-            }
-        };
-
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        try {
-            repo.getEvent(0L, null);
-            Assert.fail("getEvent() did not throw an Exception");
-        } catch (final Exception e) {
-            Assert.assertSame(expectedException, e);
-        }
-    }
-
-    @Test
-    public void testNotAuthorizedGetEventRange() throws IOException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(5, TimeUnit.MINUTES);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10); // force new index to be created for each rollover
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            @Override
-            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
-                return event.getEventId() > 2;
-            }
-        };
-
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final List<ProvenanceEventRecord> events = repo.getEvents(0L, 10, null);
-
-        // Ensure that we gets events with ID's 3 through 10.
-        assertEquals(7, events.size());
-        final List<Long> eventIds = events.stream().map(event -> event.getEventId()).sorted().collect(Collectors.toList());
-        for (int i = 0; i < 7; i++) {
-            Assert.assertEquals(i + 3, eventIds.get(i).intValue());
-        }
-    }
-
-    @Test(timeout = 10000)
-    public void testNotAuthorizedQuery() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(5, TimeUnit.MINUTES);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10); // force new index to be created for each rollover
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            @Override
-            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
-                return event.getEventId() > 2;
-            }
-        };
-
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final Query query = new Query("1234");
-        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "1234"));
-        final QuerySubmission submission = repo.submitQuery(query, createUser());
-
-        final QueryResult result = submission.getResult();
-        while (!result.isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        // Ensure that we gets events with ID's 3 through 10.
-        final List<ProvenanceEventRecord> events = result.getMatchingEvents();
-        assertEquals(7, events.size());
-        final List<Long> eventIds = events.stream().map(event -> event.getEventId()).sorted().collect(Collectors.toList());
-        for (int i = 0; i < 7; i++) {
-            Assert.assertEquals(i + 3, eventIds.get(i).intValue());
-        }
-    }
-
-    @Test(timeout = 1000000)
-    public void testNotAuthorizedLineage() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxRecordLife(5, TimeUnit.MINUTES);
-        config.setMaxStorageCapacity(1024L * 1024L);
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setMaxEventFileCapacity(1024L * 1024L);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        config.setDesiredIndexSize(10); // force new index to be created for each rollover
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            @Override
-            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
-                return event.getEventType() != ProvenanceEventType.ATTRIBUTES_MODIFIED;
-            }
-        };
-
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-" + uuid);
-        attributes.put("uuid", uuid);
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-        builder.setEventTime(10L); // make sure the events are destroyed when we call purge
-
-        builder.fromFlowFile(createFlowFile(1, 3000L, attributes));
-        repo.registerEvent(builder.build());
-
-        builder.setEventType(ProvenanceEventType.CONTENT_MODIFIED);
-        builder.fromFlowFile(createFlowFile(2, 2000L, attributes));
-        repo.registerEvent(builder.build());
-
-        builder.setEventType(ProvenanceEventType.CONTENT_MODIFIED);
-        builder.fromFlowFile(createFlowFile(3, 2000L, attributes));
-        repo.registerEvent(builder.build());
-
-        builder.setEventType(ProvenanceEventType.ATTRIBUTES_MODIFIED);
-        attributes.put("new-attr", "yes");
-        builder.fromFlowFile(createFlowFile(4, 2000L, attributes));
-        repo.registerEvent(builder.build());
-
-        final Map<String, String> childAttributes = new HashMap<>(attributes);
-        childAttributes.put("uuid", "00000000-0000-0000-0000-000000000001");
-        builder.setEventType(ProvenanceEventType.FORK);
-        builder.fromFlowFile(createFlowFile(4, 2000L, attributes));
-        builder.addChildFlowFile(createFlowFile(5, 2000L, childAttributes));
-        builder.addParentFlowFile(createFlowFile(4, 2000L, attributes));
-        repo.registerEvent(builder.build());
-
-        builder.setEventType(ProvenanceEventType.ATTRIBUTES_MODIFIED);
-        builder.fromFlowFile(createFlowFile(6, 2000L, childAttributes));
-        repo.registerEvent(builder.build());
-
-        builder.setEventType(ProvenanceEventType.DROP);
-        builder.fromFlowFile(createFlowFile(6, 2000L, childAttributes));
-        repo.registerEvent(builder.build());
-
-        repo.waitForRollover();
-
-        final AsyncLineageSubmission originalLineage = repo.submitLineageComputation(uuid, createUser());
-
-        final StandardLineageResult result = originalLineage.getResult();
-        while (!result.isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        final List<LineageNode> lineageNodes = result.getNodes();
-        assertEquals(6, lineageNodes.size());
-
-        assertEquals(1, lineageNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.FLOWFILE_NODE).count());
-        assertEquals(5, lineageNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.PROVENANCE_EVENT_NODE).count());
-
-        final Set<EventNode> eventNodes = lineageNodes.stream()
-                .filter(node -> node.getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE)
-                .map(node -> (EventNode) node)
-                .collect(Collectors.toSet());
-
-        final Map<ProvenanceEventType, List<EventNode>> nodesByType = eventNodes.stream().collect(Collectors.groupingBy(EventNode::getEventType));
-        assertEquals(1, nodesByType.get(ProvenanceEventType.RECEIVE).size());
-        assertEquals(2, nodesByType.get(ProvenanceEventType.CONTENT_MODIFIED).size());
-        assertEquals(1, nodesByType.get(ProvenanceEventType.FORK).size());
-
-        assertEquals(1, nodesByType.get(ProvenanceEventType.UNKNOWN).size());
-        assertNull(nodesByType.get(ProvenanceEventType.ATTRIBUTES_MODIFIED));
-
-        // Test filtering on expandChildren
-        final AsyncLineageSubmission expandChild = repo.submitExpandChildren(4L, createUser());
-        final StandardLineageResult expandChildResult = expandChild.getResult();
-        while (!expandChildResult.isFinished()) {
-            Thread.sleep(100L);
-        }
-
-        final List<LineageNode> expandChildNodes = expandChildResult.getNodes();
-        assertEquals(4, expandChildNodes.size());
-
-        assertEquals(1, expandChildNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.FLOWFILE_NODE).count());
-        assertEquals(3, expandChildNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.PROVENANCE_EVENT_NODE).count());
-
-        final Set<EventNode> childEventNodes = expandChildNodes.stream()
-                .filter(node -> node.getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE)
-                .map(node -> (EventNode) node)
-                .collect(Collectors.toSet());
-
-        final Map<ProvenanceEventType, List<EventNode>> childNodesByType = childEventNodes.stream().collect(Collectors.groupingBy(EventNode::getEventType));
-        assertEquals(1, childNodesByType.get(ProvenanceEventType.FORK).size());
-        assertEquals(1, childNodesByType.get(ProvenanceEventType.DROP).size());
-        assertEquals(1, childNodesByType.get(ProvenanceEventType.UNKNOWN).size());
-        assertNull(childNodesByType.get(ProvenanceEventType.ATTRIBUTES_MODIFIED));
-    }
-
-    @Test
-    public void testBackPressure() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileCapacity(1L); // force rollover on each record.
-        config.setJournalCount(1);
-
-        final AtomicInteger journalCountRef = new AtomicInteger(0);
-
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
-            @Override
-            protected int getJournalCount() {
-                return journalCountRef.get();
-            }
-        };
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final Map<String, String> attributes = new HashMap<>();
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", UUID.randomUUID().toString());
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        // ensure that we can register the events.
-        for (int i = 0; i < 10; i++) {
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        // set number of journals to 6 so that we will block.
-        journalCountRef.set(6);
-
-        final AtomicLong threadNanos = new AtomicLong(0L);
-        final Thread t = new Thread(new Runnable() {
-            @Override
-            public void run() {
-                final long start = System.nanoTime();
-                builder.fromFlowFile(createFlowFile(13, 3000L, attributes));
-                attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 13);
-                repo.registerEvent(builder.build());
-                threadNanos.set(System.nanoTime() - start);
-            }
-        });
-        t.start();
-
-        Thread.sleep(1500L);
-
-        journalCountRef.set(1);
-        t.join();
-
-        final int threadMillis = (int) TimeUnit.NANOSECONDS.toMillis(threadNanos.get());
-        assertTrue(threadMillis > 1200); // use 1200 to account for the fact that the timing is not exact
-
-        builder.fromFlowFile(createFlowFile(15, 3000L, attributes));
-        attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 15);
-        repo.registerEvent(builder.build());
-
-        Thread.sleep(3000L);
-    }
-
-    @Test
-    public void testTextualQuery() throws InterruptedException, IOException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
-        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final String uuid = "00000000-0000-0000-0000-000000000000";
-        final Map<String, String> attributes = new HashMap<>();
-        attributes.put("abc", "xyz");
-        attributes.put("xyz", "abc");
-        attributes.put("filename", "file-unnamed");
-
-        final long now = System.currentTimeMillis();
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(now - TimeUnit.MILLISECONDS.convert(30, TimeUnit.SECONDS));
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", uuid);
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        for (int i = 0; i < 10; i++) {
-            if (i > 5) {
-                attributes.put("filename", "file-" + i);
-                builder.setEventTime(System.currentTimeMillis());
-            }
-            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
-            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
-            repo.registerEvent(builder.build());
-        }
-
-        repo.waitForRollover();
-
-        final IndexConfiguration indexConfig = new IndexConfiguration(config);
-        final List<File> indexDirs = indexConfig.getIndexDirectories();
-
-        final String query = "uuid:00000000-0000-0000-0000-0000000000* AND NOT filename:file-?";
-        final List<Document> results = runQuery(indexDirs.get(0), new ArrayList<>(config.getStorageDirectories().values()), query);
-
-        assertEquals(6, results.size());
-    }
-
-    private List<Document> runQuery(final File indexDirectory, final List<File> storageDirs, final String query) throws IOException, ParseException {
-        assumeFalse(isWindowsEnvironment());
-        try (final DirectoryReader directoryReader = DirectoryReader.open(FSDirectory.open(indexDirectory))) {
-            final IndexSearcher searcher = new IndexSearcher(directoryReader);
-
-            final Analyzer analyzer = new SimpleAnalyzer();
-            final org.apache.lucene.search.Query luceneQuery = new QueryParser("uuid", analyzer).parse(query);
-
-            final Query q = new Query("");
-            q.setMaxResults(1000);
-            final TopDocs topDocs = searcher.search(luceneQuery, 1000);
-
-            final List<Document> docs = new ArrayList<>();
-            for (final ScoreDoc scoreDoc : topDocs.scoreDocs) {
-                final int docId = scoreDoc.doc;
-                final Document d = directoryReader.document(docId);
-                docs.add(d);
-            }
-
-            return docs;
-        }
-    }
-
-    private long checkJournalRecords(final File storageDir, final Boolean exact) throws IOException {
-        File[] storagefiles = storageDir.listFiles();
-        long counter = 0;
-        assertNotNull(storagefiles);
-        for (final File file : storagefiles) {
-            if (file.isFile()) {
-                try (RecordReader reader = RecordReaders.newRecordReader(file, null, 2048)) {
-                    ProvenanceEventRecord r;
-                    ProvenanceEventRecord last = null;
-                    while ((r = reader.nextRecord()) != null) {
-                        if (exact) {
-                            assertTrue(counter++ == r.getEventId());
-                        } else {
-                            assertTrue(counter++ <= r.getEventId());
-                        }
-                    }
-                }
-            }
-        }
-        return counter;
-    }
-
-    @Test
-    public void testMergeJournals() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(3, TimeUnit.SECONDS);
-        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
-
-        final Map<String, String> attributes = new HashMap<>();
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
-        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
-        builder.setComponentId("1234");
-        builder.setComponentType("dummy processor");
-
-        final ProvenanceEventRecord record = builder.build();
-
-        final ExecutorService exec = Executors.newFixedThreadPool(10);
-        for (int i = 0; i < 10000; i++) {
-            exec.submit(new Runnable() {
-                @Override
-                public void run() {
-                    repo.registerEvent(record);
-                }
-            });
-        }
-
-        repo.waitForRollover();
-
-        final File storageDir = config.getStorageDirectories().values().iterator().next();
-        long counter = 0;
-        for (final File file : storageDir.listFiles()) {
-            if (file.isFile()) {
-
-                try (RecordReader reader = RecordReaders.newRecordReader(file, null, 2048)) {
-                    ProvenanceEventRecord r = null;
-
-                    while ((r = reader.nextRecord()) != null) {
-                        assertEquals(counter++, r.getEventId());
-                    }
-                }
-            }
-        }
-
-        assertEquals(10000, counter);
-    }
-
-    private void corruptJournalFile(final File journalFile, final int position,
-            final String original, final String replacement) throws IOException {
-        final int journalLength = Long.valueOf(journalFile.length()).intValue();
-        final byte[] origBytes = original.getBytes();
-        final byte[] replBytes = replacement.getBytes();
-        FileInputStream journalIn = new FileInputStream(journalFile);
-        byte[] content = new byte[journalLength];
-        assertEquals(journalLength, journalIn.read(content, 0, journalLength));
-        journalIn.close();
-        assertEquals(original, new String(Arrays.copyOfRange(content, position, position + origBytes.length)));
-        System.arraycopy(replBytes, 0, content, position, replBytes.length);
-        FileOutputStream journalOut = new FileOutputStream(journalFile);
-        journalOut.write(content, 0, journalLength);
-        journalOut.flush();
-        journalOut.close();
-    }
-
-    @Test
-    public void testMergeJournalsBadFirstRecord() throws IOException, InterruptedException {
-        assumeFalse(isWindowsEnvironment());
-        final RepositoryConfiguration config = createConfiguration();
-        config.setMaxEventFileLife(3, TimeUnit.SECONDS);
-        TestablePersistentProvenanceRepository testRepo = new TestablePersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
-        testRepo.initialize(getEventReporter(), null, null, null);
-
-        final Map<String, String> attributes = new HashMap<>();
-
-        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
-        builder.setEventTime(System.currentTimeMillis());
-        builder.setEventType(ProvenanceEventType.RECEIVE);
-        builder.setTransitUri("nifi://unit-test");
-        attributes.put("uuid", "12345678-0000-0000-0000-012345678912

<TRUNCATED>

[3/4] nifi git commit: NIFI-4806 updated tika and a ton of other deps as found by dependency versions plugin

Posted by mc...@apache.org.
http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/ITestPersistentProvenanceRepository.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/ITestPersistentProvenanceRepository.java b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/ITestPersistentProvenanceRepository.java
new file mode 100644
index 0000000..db87c97
--- /dev/null
+++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/java/org/apache/nifi/provenance/ITestPersistentProvenanceRepository.java
@@ -0,0 +1,2356 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.nifi.provenance;
+
+import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.analysis.core.SimpleAnalyzer;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.queryparser.classic.ParseException;
+import org.apache.lucene.queryparser.classic.QueryParser;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.ScoreDoc;
+import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.store.FSDirectory;
+import org.apache.nifi.authorization.AccessDeniedException;
+import org.apache.nifi.authorization.user.NiFiUser;
+import org.apache.nifi.events.EventReporter;
+import org.apache.nifi.flowfile.FlowFile;
+import org.apache.nifi.provenance.index.EventIndexSearcher;
+import org.apache.nifi.provenance.index.EventIndexWriter;
+import org.apache.nifi.provenance.lineage.EventNode;
+import org.apache.nifi.provenance.lineage.Lineage;
+import org.apache.nifi.provenance.lineage.LineageEdge;
+import org.apache.nifi.provenance.lineage.LineageNode;
+import org.apache.nifi.provenance.lineage.LineageNodeType;
+import org.apache.nifi.provenance.lucene.CachingIndexManager;
+import org.apache.nifi.provenance.lucene.IndexManager;
+import org.apache.nifi.provenance.lucene.IndexingAction;
+import org.apache.nifi.provenance.search.Query;
+import org.apache.nifi.provenance.search.QueryResult;
+import org.apache.nifi.provenance.search.QuerySubmission;
+import org.apache.nifi.provenance.search.SearchTerms;
+import org.apache.nifi.provenance.search.SearchableField;
+import org.apache.nifi.provenance.serialization.RecordReader;
+import org.apache.nifi.provenance.serialization.RecordReaders;
+import org.apache.nifi.provenance.serialization.RecordWriter;
+import org.apache.nifi.provenance.serialization.RecordWriters;
+import org.apache.nifi.reporting.Severity;
+import org.apache.nifi.util.NiFiProperties;
+import org.apache.nifi.util.file.FileUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.rules.TestName;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.UUID;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
+import java.util.zip.GZIPOutputStream;
+
+import static org.apache.nifi.provenance.TestUtil.createFlowFile;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeFalse;
+import static org.mockito.Mockito.mock;
+
+public class ITestPersistentProvenanceRepository {
+
+    // Exposes the currently running test's name for the banner printed in printTestName().
+    @Rule
+    public TestName name = new TestName();
+
+    // Class-wide scratch directory, used by findJournalSizes() for its temp record file.
+    @ClassRule
+    public static TemporaryFolder tempFolder = new TemporaryFolder();
+
+    // Repository under test; created per-test, closed and cleaned up in closeRepo().
+    private PersistentProvenanceRepository repo;
+    // Static so that closeRepo() can delete the storage dirs created by createConfiguration().
+    private static RepositoryConfiguration config;
+
+    public static final int DEFAULT_ROLLOVER_MILLIS = 2000;
+    private EventReporter eventReporter;
+    // Synchronized: events may be reported from repository background threads.
+    private List<ReportedEvent> reportedEvents = Collections.synchronizedList(new ArrayList<ReportedEvent>());
+
+    // On-disk sizes (bytes) of a journal header and two sample records, measured once in findJournalSizes().
+    private static int headerSize;
+    private static int recordSize;
+    private static int recordSize2;
+
+    /**
+     * Builds a fresh RepositoryConfiguration rooted in a unique directory under target/storage
+     * so concurrent/repeated runs do not collide. NOTE: also assigns the static {@code config}
+     * field as a side effect — closeRepo() depends on that for post-test cleanup.
+     */
+    private static RepositoryConfiguration createConfiguration() {
+        config = new RepositoryConfiguration();
+        config.addStorageDirectory("1", new File("target/storage/" + UUID.randomUUID().toString()));
+        config.setCompressOnRollover(true);
+        config.setMaxEventFileLife(2000L, TimeUnit.SECONDS);
+        // Tiny block size forces frequent compression-block boundaries during tests.
+        config.setCompressionBlockBytes(100);
+        return config;
+    }
+
+    /** Raises provenance logging to DEBUG (SLF4J simple logger) before any test in the class runs. */
+    @BeforeClass
+    public static void setLogLevel() {
+        System.setProperty("org.slf4j.simpleLogger.log.org.apache.nifi.provenance", "DEBUG");
+    }
+
+    /**
+     * Measures, once per class, the on-disk byte size of a journal header and of two sample
+     * records by writing them to a temp file and diffing the file length after each flush.
+     * Results are cached in the static headerSize/recordSize/recordSize2 fields.
+     */
+    @BeforeClass
+    public static void findJournalSizes() throws IOException {
+        // determine header and record size
+
+        final Map<String, String> attributes = new HashMap<>();
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+        final ProvenanceEventRecord record = builder.build();
+        // Second record differs only in component id; its size may differ from the first
+        // (e.g. due to serialization/encoding details), hence the separate recordSize2.
+        builder.setComponentId("2345");
+        final ProvenanceEventRecord record2 = builder.build();
+
+        final File tempRecordFile = tempFolder.newFile("record.tmp");
+        System.out.println("findJournalSizes position 0 = " + tempRecordFile.length());
+
+        final AtomicLong idGenerator = new AtomicLong(0L);
+        final RecordWriter writer = RecordWriters.newSchemaRecordWriter(tempRecordFile, idGenerator, false, false);
+        writer.writeHeader(12345L);
+        writer.flush();
+        // File length after the header alone = header size.
+        headerSize = Long.valueOf(tempRecordFile.length()).intValue();
+        writer.writeRecord(record);
+        writer.flush();
+        recordSize = Long.valueOf(tempRecordFile.length()).intValue() - headerSize;
+        writer.writeRecord(record2);
+        writer.flush();
+        recordSize2 = Long.valueOf(tempRecordFile.length()).intValue() - headerSize - recordSize;
+        writer.close();
+
+        System.out.println("headerSize =" + headerSize);
+        System.out.println("recordSize =" + recordSize);
+        System.out.println("recordSize2=" + recordSize2);
+    }
+
+    /**
+     * Per-test setup: prints a banner with the test name and installs a fresh in-memory
+     * EventReporter that records every reported event into {@code reportedEvents}.
+     */
+    @Before
+    public void printTestName() {
+        System.out.println("\n\n\n***********************  " + name.getMethodName() + "  *****************************");
+
+        reportedEvents.clear();
+        eventReporter = new EventReporter() {
+            private static final long serialVersionUID = 1L;
+
+            @Override
+            public void reportEvent(Severity severity, String category, String message) {
+                reportedEvents.add(new ReportedEvent(severity, category, message));
+                System.out.println(severity + " : " + category + " : " + message);
+            }
+        };
+    }
+
+    /**
+     * Per-test teardown: closes the repository (best effort) and deletes its storage
+     * directories. Deletion failure is the signal that a file handle was leaked.
+     */
+    @After
+    public void closeRepo() throws IOException {
+        if (repo == null) {
+            return;
+        }
+
+        try {
+            repo.close();
+        // Deliberately ignored: a failed close must not mask the deletion check below.
+        } catch (final IOException ioe) {
+        }
+
+        // Delete all of the storage files. We do this in order to clean up the tons of files that
+        // we create but also to ensure that we have closed all of the file handles. If we leave any
+        // streams open, for instance, this will throw an IOException, causing our unit test to fail.
+        if (config != null) {
+            for (final File storageDir : config.getStorageDirectories().values()) {
+                int i;
+                // Up to 3 outer attempts per directory.
+                for (i = 0; i < 3; i++) {
+                    try {
+                        FileUtils.deleteFile(storageDir, true);
+                        break;
+                    } catch (final IOException ioe) {
+                        // if there is a virus scanner, etc. running in the background we may not be able to
+                        // delete the file. Wait a sec and try again.
+                        if (i == 2) {
+                            throw ioe;
+                        } else {
+                            try {
+                                // NOTE(review): this inner retry repeats the delete immediately (no sleep
+                                // before it), so each outer iteration actually tries twice.
+                                System.out.println("file: " + storageDir.toString() + " exists=" + storageDir.exists());
+                                FileUtils.deleteFile(storageDir, true);
+                                break;
+                            } catch (final IOException ioe2) {
+                                // if there is a virus scanner, etc. running in the background we may not be able to
+                                // delete the file. Wait a sec and try again.
+                                if (i == 2) {
+                                    throw ioe2;
+                                } else {
+                                    try {
+                                        Thread.sleep(1000L);
+                                    } catch (final InterruptedException ie) {
+                                    }
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /** Returns the recording EventReporter installed by printTestName() for the current test. */
+    private EventReporter getEventReporter() {
+        return eventReporter;
+    }
+
+    /**
+     * Local-only benchmark (hence @Ignore): 10 threads each register 100,000 copies of the
+     * same event (1M total), then the repo is reopened and all 1M events are fetched back.
+     * Prints insert and fetch throughput; asserts only that 1M records are returned.
+     */
+    @Test
+    @Ignore("For local testing of performance only")
+    public void testPerformance() throws IOException, InterruptedException {
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileCapacity(1024 * 1024 * 1024L);
+        config.setMaxEventFileLife(20, TimeUnit.SECONDS);
+        config.setCompressOnRollover(false);
+        config.setJournalCount(10);
+        config.setQueryThreadPoolSize(10);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("uuid", UUID.randomUUID().toString());
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+        final ProvenanceEventRecord record = builder.build();
+
+        // Each worker registers 100k events; 10 workers => 1M events total.
+        final Runnable r = new Runnable() {
+            @Override
+            public void run() {
+                for (int i = 0; i < 100000; i++) {
+                    repo.registerEvent(record);
+                }
+            }
+        };
+
+        final Thread[] threads = new Thread[10];
+        for (int i = 0; i < threads.length; i++) {
+            threads[i] = new Thread(r);
+        }
+
+        final long start = System.nanoTime();
+        for (final Thread t : threads) {
+            t.start();
+        }
+
+        for (final Thread t : threads) {
+            t.join();
+        }
+        final long nanos = System.nanoTime() - start;
+
+        final long millis = TimeUnit.NANOSECONDS.toMillis(nanos);
+        final long recsPerMilli = 1000000 / millis;
+        final long recsPerSec = recsPerMilli * 1000;
+        System.out.println(millis + " millis to insert 1M records (" + recsPerSec + " recs/sec)");
+
+        // Reopen the repository to measure read-side throughput from a cold start.
+        System.out.println("Closing and re-initializing");
+        repo.close();
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+        System.out.println("Re-initialized");
+
+        final long fetchStart = System.nanoTime();
+        final List<ProvenanceEventRecord> records = repo.getEvents(0L, 1000000);
+        final long fetchMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - fetchStart);
+        assertEquals(1000000, records.size());
+        final long fetchRecsPerMilli = 1000000 / fetchMillis;
+        final long fetchRecsPerSec = fetchRecsPerMilli * 1000L;
+        System.out.println(fetchMillis + " millis to fetch 1M records (" + fetchRecsPerSec + " recs/sec)");
+
+        repo.close();
+    }
+
+    /**
+     * Minimal NiFiProperties stub exposing only the three provenance-related properties
+     * the constructor tests need. NOTE: getProperty() for the repo-directory key calls
+     * createConfiguration() as a side effect, (re)assigning the static {@code config}.
+     */
+    private NiFiProperties properties = new NiFiProperties() {
+        @Override
+        public String getProperty(String key) {
+            if (key.equals(NiFiProperties.PROVENANCE_COMPRESS_ON_ROLLOVER)) {
+                return "true";
+            } else if (key.equals(NiFiProperties.PROVENANCE_ROLLOVER_TIME)) {
+                return "2000 millis";
+            } else if (key.equals(NiFiProperties.PROVENANCE_REPO_DIRECTORY_PREFIX + ".default")) {
+                createConfiguration();
+                return config.getStorageDirectories().values().iterator().next().getAbsolutePath();
+            } else {
+                // All other properties are unset; callers fall back to their defaults.
+                return null;
+            }
+        }
+
+        @Override
+        public Set<String> getPropertyKeys() {
+            return new HashSet<>(Arrays.asList(
+                    NiFiProperties.PROVENANCE_COMPRESS_ON_ROLLOVER,
+                    NiFiProperties.PROVENANCE_ROLLOVER_TIME,
+                    NiFiProperties.PROVENANCE_REPO_DIRECTORY_PREFIX + ".default"));
+        }
+    };
+
+    /** No-arg constructor must leave the rollover-check interval at 0. */
+    @Test
+    public void constructorNoArgs() {
+        TestablePersistentProvenanceRepository tppr = new TestablePersistentProvenanceRepository();
+        assertEquals(0, tppr.getRolloverCheckMillis());
+    }
+
+    /**
+     * Constructing from NiFiProperties must yield a 10000 ms rollover-check interval
+     * (presumably the repository's default for property-based construction — the
+     * derivation is internal to PersistentProvenanceRepository).
+     */
+    @Test
+    public void constructorNiFiProperties() throws IOException {
+        TestablePersistentProvenanceRepository tppr = new TestablePersistentProvenanceRepository(properties);
+        assertEquals(10000, tppr.getRolloverCheckMillis());
+    }
+
+    /** Smoke test: constructing from an explicit RepositoryConfiguration must not throw. */
+    @Test
+    public void constructorConfig() throws IOException {
+        RepositoryConfiguration configuration = RepositoryConfiguration.create(properties);
+        new TestablePersistentProvenanceRepository(configuration, 20000);
+    }
+
+    /**
+     * Registers 10 events, closes the repo, reopens it, and verifies that whatever
+     * events were recovered have sequential ids and the original field values.
+     * Skipped on Windows (file-locking prevents the storage-dir cleanup pattern).
+     */
+    @Test
+    public void testAddAndRecover() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        // Capacity of 1 byte forces a rollover after essentially every event.
+        config.setMaxEventFileCapacity(1L);
+        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("uuid", UUID.randomUUID().toString());
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+        final ProvenanceEventRecord record = builder.build();
+
+        for (int i = 0; i < 10; i++) {
+            repo.registerEvent(record);
+        }
+
+        Thread.sleep(1000L);
+
+        repo.close();
+        Thread.sleep(500L); // Give the repo time to shutdown (i.e., close all file handles, etc.)
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+        final List<ProvenanceEventRecord> recoveredRecords = repo.getEvents(0L, 12);
+
+        //just test however many were actually recovered since it is timing sensitive
+        final int numRecovered = recoveredRecords.size();
+        for (int i = 0; i < numRecovered; i++) {
+            final ProvenanceEventRecord recovered = recoveredRecords.get(i);
+            assertEquals(i, recovered.getEventId());
+            assertEquals("nifi://unit-test", recovered.getTransitUri());
+            assertEquals(ProvenanceEventType.RECEIVE, recovered.getEventType());
+            assertEquals(attributes, recovered.getAttributes());
+        }
+    }
+
+    /**
+     * Writes events across two rollovers — one "XXXX"-component event before and one after —
+     * then queries by ComponentID=XXXX and expects both matches, proving the index spans
+     * multiple event-log files.
+     */
+    @Test
+    public void testAddToMultipleLogsAndRecover() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final List<SearchableField> searchableFields = new ArrayList<>();
+        searchableFields.add(SearchableFields.ComponentID);
+
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(2, TimeUnit.SECONDS);
+        config.setSearchableFields(searchableFields);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("uuid", UUID.randomUUID().toString());
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+        final ProvenanceEventRecord record = builder.build();
+
+        // Filler events under component 1234 that the later query must NOT match.
+        for (int i = 0; i < 10; i++) {
+            repo.registerEvent(record);
+        }
+
+        builder.setComponentId("XXXX"); // create a different component id so that we can make sure we query this record.
+
+        attributes.put("uuid", "11111111-1111-1111-1111-111111111111");
+
+        builder.fromFlowFile(createFlowFile(11L, 11L, attributes));
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+        Thread.sleep(500L); // Give the repo time to shutdown (i.e., close all file handles, etc.)
+
+        // Create a new repo and add another record with component id XXXX so that we can ensure that it's added to a different
+        // log file than the previous one.
+        attributes.put("uuid", "22222222-2222-2222-2222-222222222222");
+        builder.fromFlowFile(createFlowFile(11L, 11L, attributes));
+        repo.registerEvent(builder.build());
+        repo.waitForRollover();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "XXXX"));
+        query.setMaxResults(100);
+
+        // Both XXXX events — one per log file — must be found.
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(2, result.getMatchingEvents().size());
+        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
+            System.out.println(match);
+        }
+    }
+
+    /**
+     * Indexes events carrying a 33,000-char attribute value — larger than Lucene's 32,766-byte
+     * single-term limit — and verifies a prefix query on that attribute still matches all 10
+     * events (i.e. oversized values don't break or silently drop indexing).
+     */
+    @Test
+    public void testIndexOnRolloverWithImmenseAttribute() throws IOException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setSearchableAttributes(SearchableFieldParser.extractSearchableFields("immense", false));
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        int immenseAttrSize = 33000; // must be greater than 32766 for a meaningful test
+        StringBuilder immenseBldr = new StringBuilder(immenseAttrSize);
+        for (int i = 0; i < immenseAttrSize; i++) {
+            immenseBldr.append('0');
+        }
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+        attributes.put("immense", immenseBldr.toString());
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.newSearchableAttribute("immense"), "000*"));
+        query.setMaxResults(100);
+
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+    }
+
+    /**
+     * Registers 10 events, waits for rollover (which triggers indexing), then runs a query
+     * combining wildcard terms on UUID, filename, component id, and transit URI — all 10
+     * events must match.
+     */
+    @Test
+    public void testIndexOnRolloverAndSubsequentSearch() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // Each event gets a distinct uuid sharing the "000000" prefix the query relies on.
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "000000*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
+        query.setMaxResults(100);
+
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
+            System.out.println(match);
+        }
+    }
+
+    /**
+     * With compress-on-rollover enabled, the first rolled-over event file must appear on
+     * disk as the gzip-compressed "0.prov.gz".
+     */
+    @Test
+    public void testCompressOnRollover() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setCompressOnRollover(true);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+        // "0.prov.gz": file named for the first event id it contains, with gzip suffix.
+        final File storageDir = config.getStorageDirectories().values().iterator().next();
+        final File compressedLogFile = new File(storageDir, "0.prov.gz");
+        assertTrue(compressedLogFile.exists());
+    }
+
+    /**
+     * End-to-end index + compress + purge test: after rollover, a multi-term query must match
+     * all 10 events; then, after shrinking max storage/record-life and purging, the same query
+     * must return nothing.
+     */
+    @Test
+    public void testIndexAndCompressOnRolloverAndSubsequentSearch() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(30, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L * 10);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L * 10);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "10000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        // query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.FlowFileUUID, "00000000-0000-0000-0000*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
+        query.setMaxResults(100);
+
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
+            System.out.println(match);
+        }
+
+        Thread.sleep(2000L);
+
+        // Shrink retention limits so everything already written is now eligible for purge.
+        config.setMaxStorageCapacity(100L);
+        config.setMaxRecordLife(500, TimeUnit.MILLISECONDS);
+        repo.purgeOldEvents();
+        // Purge runs asynchronously; allow it time to complete before re-querying.
+        Thread.sleep(2000L);
+
+        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
+        assertTrue(newRecordSet.getMatchingEvents().isEmpty());
+    }
+
+    @Test(timeout = 10000)
+    public void testModifyIndexWhileSearching() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(30, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L * 10);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L * 10);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        final CountDownLatch obtainIndexSearcherLatch = new CountDownLatch(2);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            private CachingIndexManager wrappedManager = null;
+
+            // Create an IndexManager that adds a delay before returning the Index Searcher.
+            @Override
+            protected synchronized CachingIndexManager getIndexManager() {
+                if (wrappedManager == null) {
+                    final IndexManager mgr = super.getIndexManager();
+                    final Logger logger = LoggerFactory.getLogger("IndexManager");
+
+                    wrappedManager = new CachingIndexManager() {
+                        final AtomicInteger indexSearcherCount = new AtomicInteger(0);
+
+                        @Override
+                        public EventIndexSearcher borrowIndexSearcher(File indexDir) throws IOException {
+                            final EventIndexSearcher searcher = mgr.borrowIndexSearcher(indexDir);
+                            final int idx = indexSearcherCount.incrementAndGet();
+                            obtainIndexSearcherLatch.countDown();
+
+                            // The first searcher should sleep for 3 seconds. The second searcher should
+                            // sleep for 5 seconds. This allows us to have two threads each obtain a Searcher
+                            // and then have one of them finish searching and close the searcher if it's poisoned while the
+                            // second thread is still holding the searcher
+                            try {
+                                if (idx == 1) {
+                                    Thread.sleep(3000L);
+                                } else {
+                                    Thread.sleep(5000L);
+                                }
+                            } catch (InterruptedException e) {
+                                throw new IOException("Interrupted", e);
+                            }
+
+                            logger.info("Releasing index searcher");
+                            return searcher;
+                        }
+
+                        @Override
+                        public EventIndexWriter borrowIndexWriter(File indexingDirectory) throws IOException {
+                            return mgr.borrowIndexWriter(indexingDirectory);
+                        }
+
+                        @Override
+                        public void close() throws IOException {
+                            mgr.close();
+                        }
+
+                        @Override
+                        public boolean removeIndex(File indexDirectory) {
+                            mgr.removeIndex(indexDirectory);
+                            return true;
+                        }
+
+                        @Override
+                        public void returnIndexSearcher(EventIndexSearcher searcher) {
+                            mgr.returnIndexSearcher(searcher);
+                        }
+
+                        @Override
+                        public void returnIndexWriter(EventIndexWriter writer) {
+                            mgr.returnIndexWriter(writer);
+                        }
+                    };
+                }
+
+                return wrappedManager;
+            }
+        };
+
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "10000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        // Perform a query. This will ensure that an IndexSearcher is created and cached.
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
+        query.setMaxResults(100);
+
+        // Run a query in a background thread. When this thread goes to obtain the IndexSearcher, it will have a 5 second delay.
+        // That delay will occur as the main thread is updating the index. This should result in the search creating a new Index Reader
+        // that can properly query the index.
+        final int numThreads = 2;
+        final CountDownLatch performSearchLatch = new CountDownLatch(numThreads);
+        final Runnable searchRunnable = new Runnable() {
+            @Override
+            public void run() {
+                QueryResult result;
+                try {
+                    result = repo.queryEvents(query, createUser());
+                } catch (IOException e) {
+                    e.printStackTrace();
+                    Assert.fail(e.toString());
+                    return;
+                }
+
+                System.out.println("Finished search: " + result);
+                performSearchLatch.countDown();
+            }
+        };
+
+        // Kick off the searcher threads
+        for (int i = 0; i < numThreads; i++) {
+            final Thread searchThread = new Thread(searchRunnable);
+            searchThread.start();
+        }
+
+        // Wait until we've obtained the Index Searchers before modifying the index.
+        obtainIndexSearcherLatch.await();
+
+        // add more events to the repo
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        // Force a rollover to occur. This will modify the index.
+        repo.rolloverWithLock(true);
+
+        // Wait for the repository to roll over.
+        repo.waitForRollover();
+
+        // Wait for the searches to complete.
+        performSearchLatch.await();
+    }
+
+    @Test
+    public void testIndexAndCompressOnRolloverAndSubsequentSearchMultipleStorageDirs() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        // Add a second storage directory so that events are spread across multiple partitions.
+        config.addStorageDirectory("2", new File("target/storage/" + UUID.randomUUID().toString()));
+        config.setMaxRecordLife(30, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        // Write 3 batches of 10 events, waiting for a rollover after each batch so that
+        // each batch is indexed separately. The "iteration" attribute records the batch number.
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        for (int j = 0; j < 3; j++) {
+            attributes.put("iteration", String.valueOf(j));
+
+            builder.setEventTime(System.currentTimeMillis());
+            builder.setEventType(ProvenanceEventType.RECEIVE);
+            builder.setTransitUri("nifi://unit-test");
+            builder.setComponentId("1234");
+            builder.setComponentType("dummy processor");
+            builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+
+            for (int i = 0; i < 10; i++) {
+                // Zero-pad the suffix so each event gets a unique, well-formed UUID.
+                String uuidSuffix = String.valueOf(i + j * 10);
+                if (uuidSuffix.length() < 2) {
+                    uuidSuffix = "0" + uuidSuffix;
+                }
+
+                attributes.put("uuid", "00000000-0000-0000-0000-0000000000" + uuidSuffix);
+                builder.fromFlowFile(createFlowFile(i + j * 10, 3000L, attributes));
+                repo.registerEvent(builder.build());
+            }
+
+            repo.waitForRollover();
+        }
+
+        // Wildcard search terms that match every event written above.
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
+        query.setMaxResults(100);
+
+        final QuerySubmission submission = repo.submitQuery(query, createUser());
+        while (!submission.getResult().isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        assertEquals(30, submission.getResult().getMatchingEvents().size());
+        final Map<String, Integer> counts = new HashMap<>();
+        for (final ProvenanceEventRecord match : submission.getResult().getMatchingEvents()) {
+            System.out.println(match);
+
+            // Tally matches per batch; Map.merge replaces the manual null-check-then-put idiom.
+            counts.merge(match.getAttributes().get("iteration"), 1, Integer::sum);
+        }
+
+        // Each of the 3 batches should contribute exactly 10 matching events.
+        assertEquals(3, counts.size());
+        assertEquals(10, counts.get("0").intValue());
+        assertEquals(10, counts.get("1").intValue());
+        assertEquals(10, counts.get("2").intValue());
+
+        // Expire all events, then verify the same query no longer returns any hits.
+        config.setMaxRecordLife(1, TimeUnit.MILLISECONDS);
+
+        repo.purgeOldEvents();
+
+        Thread.sleep(2000L); // purge is async. Give it time to do its job.
+
+        query.setMaxResults(100);
+        final QuerySubmission noResultSubmission = repo.submitQuery(query, createUser());
+        while (!noResultSubmission.getResult().isFinished()) {
+            Thread.sleep(10L);
+        }
+
+        assertEquals(0, noResultSubmission.getResult().getTotalHitCount());
+    }
+
+    @Test
+    public void testIndexAndCompressOnRolloverAndSubsequentEmptySearch() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(30, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // Register 10 events, all sharing the same flowfile UUID.
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            repo.registerEvent(builder.build());
+        }
+
+        // Give time for rollover to happen
+        repo.waitForRollover();
+
+        // A query with no search terms should return all events, up to maxResults.
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.setMaxResults(100);
+
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+        for (final ProvenanceEventRecord match : result.getMatchingEvents()) {
+            System.out.println(match);
+        }
+
+        Thread.sleep(2000L);
+
+        // Shrink the retention limits so every previously written event becomes eligible for removal.
+        config.setMaxStorageCapacity(100L);
+        config.setMaxRecordLife(500, TimeUnit.MILLISECONDS);
+        repo.purgeOldEvents();
+
+        Thread.sleep(1000L); // purge is asynchronous; give it time to complete
+
+        // After the purge, the same term-less query should match nothing.
+        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
+        assertTrue(newRecordSet.getMatchingEvents().isEmpty());
+    }
+
+    @Test
+    public void testLineageReceiveDrop() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(3, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000001";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("uuid", uuid);
+        attributes.put("filename", "file-" + uuid);
+
+        // Register a RECEIVE event followed by a DROP event for the same flowfile.
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        repo.registerEvent(builder.build());
+
+        builder.setEventTime(System.currentTimeMillis() + 1);
+        builder.setEventType(ProvenanceEventType.DROP);
+        builder.setTransitUri(null);
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+
+        final Lineage lineage = repo.computeLineage(uuid, createUser());
+        assertNotNull(lineage);
+
+        // Nodes should consist of a RECEIVE followed by FlowFileNode, followed by a DROP
+        final List<LineageNode> nodes = lineage.getNodes();
+        final List<LineageEdge> edges = lineage.getEdges();
+        assertEquals(3, nodes.size());
+
+        // Use assertEquals(expected, actual) rather than assertTrue(a == b) so that a
+        // failure reports the actual value instead of just "expected true".
+        for (final LineageEdge edge : edges) {
+            if (edge.getSource().getNodeType() == LineageNodeType.FLOWFILE_NODE) {
+                // FlowFile -> event edge: the destination must be the DROP event node.
+                assertEquals(LineageNodeType.PROVENANCE_EVENT_NODE, edge.getDestination().getNodeType());
+                assertEquals(ProvenanceEventType.DROP, ((EventNode) edge.getDestination()).getEventType());
+            } else {
+                // Event -> FlowFile edge: the source must be the RECEIVE event node.
+                assertEquals(ProvenanceEventType.RECEIVE, ((EventNode) edge.getSource()).getEventType());
+                assertEquals(LineageNodeType.FLOWFILE_NODE, edge.getDestination().getNodeType());
+            }
+        }
+    }
+
+    @Test
+    public void testLineageReceiveDropAsync() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(3, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000001";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("uuid", uuid);
+        attributes.put("filename", "file-" + uuid);
+
+        // Register a RECEIVE event followed by a DROP event for the same flowfile.
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        repo.registerEvent(builder.build());
+
+        builder.setEventTime(System.currentTimeMillis() + 1);
+        builder.setEventType(ProvenanceEventType.DROP);
+        builder.setTransitUri(null);
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+
+        // Compute the lineage asynchronously and poll until the computation finishes.
+        final AsyncLineageSubmission submission = repo.submitLineageComputation(uuid, createUser());
+        while (!submission.getResult().isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        assertNotNull(submission);
+
+        // Nodes should consist of a RECEIVE followed by FlowFileNode, followed by a DROP
+        final List<LineageNode> nodes = submission.getResult().getNodes();
+        final List<LineageEdge> edges = submission.getResult().getEdges();
+        assertEquals(3, nodes.size());
+
+        // Use assertEquals(expected, actual) rather than assertTrue(a == b) so that a
+        // failure reports the actual value instead of just "expected true".
+        for (final LineageEdge edge : edges) {
+            if (edge.getSource().getNodeType() == LineageNodeType.FLOWFILE_NODE) {
+                // FlowFile -> event edge: the destination must be the DROP event node.
+                assertEquals(LineageNodeType.PROVENANCE_EVENT_NODE, edge.getDestination().getNodeType());
+                assertEquals(ProvenanceEventType.DROP, ((EventNode) edge.getDestination()).getEventType());
+            } else {
+                // Event -> FlowFile edge: the source must be the RECEIVE event node.
+                assertEquals(ProvenanceEventType.RECEIVE, ((EventNode) edge.getSource()).getEventType());
+                assertEquals(LineageNodeType.FLOWFILE_NODE, edge.getDestination().getNodeType());
+            }
+        }
+    }
+
+    @Test
+    public void testLineageManyToOneSpawn() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(3, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        // One child flowfile produced by a FORK from three parent flowfiles.
+        final String childId = "00000000-0000-0000-0000-000000000000";
+
+        final String parentId1 = "00000000-0000-0000-0001-000000000001";
+        final String parentId2 = "00000000-0000-0000-0001-000000000002";
+        final String parentId3 = "00000000-0000-0000-0001-000000000003";
+
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("uuid", childId);
+        attributes.put("filename", "file-" + childId);
+
+        final StandardProvenanceEventRecord.Builder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.FORK);
+        attributes.put("uuid", childId);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        builder.addChildUuid(childId);
+        builder.addParentUuid(parentId1);
+        builder.addParentUuid(parentId2);
+        builder.addParentUuid(parentId3);
+
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+
+        // Compute the lineage starting from the child's UUID.
+        final Lineage lineage = repo.computeLineage(childId, createUser());
+        assertNotNull(lineage);
+
+        // these are not necessarily accurate asserts....
+        final List<LineageNode> nodes = lineage.getNodes();
+        final List<LineageEdge> edges = lineage.getEdges();
+        assertEquals(2, nodes.size());
+        assertEquals(1, edges.size());
+    }
+
+    @Test
+    public void testLineageManyToOneSpawnAsync() throws IOException, InterruptedException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(3, TimeUnit.SECONDS);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        // One child flowfile produced by a FORK from three parent flowfiles.
+        final String childId = "00000000-0000-0000-0000-000000000000";
+
+        final String parentId1 = "00000000-0000-0000-0001-000000000001";
+        final String parentId2 = "00000000-0000-0000-0001-000000000002";
+        final String parentId3 = "00000000-0000-0000-0001-000000000003";
+
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("uuid", childId);
+        attributes.put("filename", "file-" + childId);
+
+        final StandardProvenanceEventRecord.Builder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.FORK);
+        attributes.put("uuid", childId);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        builder.addChildUuid(childId);
+        builder.addParentUuid(parentId1);
+        builder.addParentUuid(parentId2);
+        builder.addParentUuid(parentId3);
+
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+
+        // Compute the lineage asynchronously and poll until it finishes.
+        final AsyncLineageSubmission submission = repo.submitLineageComputation(childId, createUser());
+        while (!submission.getResult().isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        // these are not accurate asserts....
+        final List<LineageNode> nodes = submission.getResult().getNodes();
+        final List<LineageEdge> edges = submission.getResult().getEdges();
+        assertEquals(2, nodes.size());
+        assertEquals(1, edges.size());
+    }
+
+    @Test
+    public void testCorrectProvenanceEventIdOnRestore() throws IOException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(1, TimeUnit.SECONDS);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // Register 10 events; they should be assigned event ids 0 through 9.
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        repo.close();
+
+        // Re-open a new repository over the same storage. The id sequence must be
+        // restored from disk, so the 11th event registered should receive id 10.
+        final PersistentProvenanceRepository secondRepo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        secondRepo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        try {
+            final ProvenanceEventRecord event11 = builder.build();
+            secondRepo.registerEvent(event11);
+            secondRepo.waitForRollover();
+            final ProvenanceEventRecord event11Retrieved = secondRepo.getEvent(10L, null);
+            assertNotNull(event11Retrieved);
+            assertEquals(10, event11Retrieved.getEventId());
+        } finally {
+            // Always close the second repository, even if an assertion fails.
+            secondRepo.close();
+        }
+    }
+
+    /**
+     * Here the event file is corrupted by virtue of containing data that holds no
+     * event records while still having correct headers.
+     */
+    @Test
+    public void testWithWithEventFileMissingRecord() throws Exception {
+        assumeFalse(isWindowsEnvironment());
+        File eventFile = this.prepCorruptedEventFileTests();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "foo-*"));
+        query.setMaxResults(100);
+
+        // Recreate the deleted event file with garbage content. Use try-with-resources
+        // (and a name that reflects the stream's direction) so the stream is closed and
+        // the GZIP trailer written even if a write fails.
+        try (DataOutputStream out = new DataOutputStream(new GZIPOutputStream(new FileOutputStream(eventFile)))) {
+            out.writeUTF("BlahBlah");
+            out.writeInt(4);
+        }
+        assertTrue(eventFile.exists());
+        // The corrupted file must not prevent querying the remaining 10 valid events.
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+    }
+
+    /**
+     * Here the event file is corrupted by virtue of holding no data beyond the
+     * GZIP header/trailer written when the empty stream is closed.
+     */
+    @Test
+    public void testWithWithEventFileCorrupted() throws Exception {
+        assumeFalse(isWindowsEnvironment());
+        File eventFile = this.prepCorruptedEventFileTests();
+
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "foo-*"));
+        query.setMaxResults(100);
+        // Recreate the deleted event file but write nothing into it; try-with-resources
+        // guarantees the stream is closed (the original left it open on exception).
+        try (DataOutputStream out = new DataOutputStream(new GZIPOutputStream(new FileOutputStream(eventFile)))) {
+            // intentionally empty: the file ends up with no event data at all
+        }
+        // The corrupted file must not prevent querying the remaining 10 valid events.
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(10, result.getMatchingEvents().size());
+    }
+
+    /**
+     * Creates a repository containing two event files (events 0-9 and 10-19), then
+     * deletes the second file ("10.prov.gz") and returns its path so that callers
+     * can re-create it with corrupted contents.
+     */
+    private File prepCorruptedEventFileTests() throws Exception {
+        RepositoryConfiguration config = createConfiguration();
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10);
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        String uuid = UUID.randomUUID().toString();
+        for (int i = 0; i < 20; i++) {
+            ProvenanceEventRecord record = repo.eventBuilder().fromFlowFile(mock(FlowFile.class))
+                    .setEventType(ProvenanceEventType.CREATE).setComponentId("foo-" + i).setComponentType("myComponent")
+                    .setFlowFileUUID(uuid).build();
+            repo.registerEvent(record);
+            // Roll over after the first 10 events so the remaining 10 land in a second event file.
+            if (i == 9) {
+                repo.waitForRollover();
+                Thread.sleep(2000L);
+            }
+        }
+        repo.waitForRollover();
+        // Delete the second event file; the caller replaces it with corrupted data.
+        File eventFile = new File(config.getStorageDirectories().values().iterator().next(), "10.prov.gz");
+        assertTrue(eventFile.delete());
+        return eventFile;
+    }
+
+    @Test
+    @Ignore("This test relies too much on timing of background events by using Thread.sleep().")
+    public void testIndexDirectoryRemoved() throws InterruptedException, IOException, ParseException {
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(5, TimeUnit.MINUTES);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10); // force new index to be created for each rollover
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // First batch: 10 events with an artificially old event time so they can be purged later.
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        Thread.sleep(2000L);
+
+        // After the first rollover, exactly one index directory should exist.
+        final FileFilter indexFileFilter = file -> file.getName().startsWith("index");
+        final int numIndexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter).length;
+        assertEquals(1, numIndexDirs);
+
+        // add more records so that we will create a new index
+        final long secondBatchStartTime = System.currentTimeMillis();
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000001" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            builder.setEventTime(System.currentTimeMillis());
+            repo.registerEvent(builder.build());
+        }
+
+        // wait for indexing to happen
+        repo.waitForRollover();
+
+        // verify we get the results expected
+        final Query query = new Query(UUID.randomUUID().toString());
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.Filename, "file-*"))
        ;
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "12?4"));
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.TransitURI, "nifi://*"));
+        query.setMaxResults(100);
+
+        final QueryResult result = repo.queryEvents(query, createUser());
+        assertEquals(20, result.getMatchingEvents().size());
+
+        // Ensure index directories exists
+        File[] indexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter);
+        assertEquals(2, indexDirs.length);
+
+        // expire old events and indexes
+        final long timeSinceSecondBatch = System.currentTimeMillis() - secondBatchStartTime;
+        config.setMaxRecordLife(timeSinceSecondBatch + 1000L, TimeUnit.MILLISECONDS);
+        repo.purgeOldEvents();
+        Thread.sleep(2000L);
+
+        // Only the second batch should remain after the purge.
+        final QueryResult newRecordSet = repo.queryEvents(query, createUser());
+        assertEquals(10, newRecordSet.getMatchingEvents().size());
+
+        // Ensure that one index directory is gone
+        indexDirs = config.getStorageDirectories().values().iterator().next().listFiles(indexFileFilter);
+        assertEquals(1, indexDirs.length);
+    }
+
+    @Test
+    public void testNotAuthorizedGetSpecificEvent() throws IOException {
+        assumeFalse(isWindowsEnvironment())
        ;
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(5, TimeUnit.MINUTES);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10); // force new index to be created for each rollover
+
+        // Repository subclass whose authorize() always denies access by throwing a known exception.
+        final AccessDeniedException expectedException = new AccessDeniedException("Unit Test - Intentionally Thrown");
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            public void authorize(ProvenanceEventRecord event, NiFiUser user) {
+                throw expectedException;
+            }
+        };
+
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        // getEvent() must propagate the exact exception instance thrown by authorize().
+        try {
+            repo.getEvent(0L, null);
+            Assert.fail("getEvent() did not throw an Exception");
+        } catch (final Exception e) {
+            Assert.assertSame(expectedException, e);
+        }
+    }
+
+    /**
+     * Verifies that getEvents() silently filters out events the user is not authorized to see:
+     * with isAuthorized() allowing only event IDs &gt; 2, a range query over IDs 0-9 must return
+     * exactly the 7 events with IDs 3 through 9.
+     */
+    @Test
+    public void testNotAuthorizedGetEventRange() throws IOException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(5, TimeUnit.MINUTES);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10); // force new index to be created for each rollover
+
+        // only events with ID greater than 2 are visible to the (null) user
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
+                return event.getEventId() > 2;
+            }
+        };
+
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // register events that will receive IDs 0 through 9
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final List<ProvenanceEventRecord> events = repo.getEvents(0L, 10, null);
+
+        // Only IDs 3 through 9 are authorized, so we expect exactly 7 events back.
+        assertEquals(7, events.size());
+        final List<Long> eventIds = events.stream().map(event -> event.getEventId()).sorted().collect(Collectors.toList());
+        for (int i = 0; i < 7; i++) {
+            Assert.assertEquals(i + 3, eventIds.get(i).intValue());
+        }
+    }
+
+    /**
+     * Verifies that an asynchronous query submission filters out unauthorized events:
+     * with isAuthorized() allowing only event IDs &gt; 2, a ComponentID query matching all
+     * 10 registered events must return only the 7 events with IDs 3 through 9.
+     */
+    @Test(timeout = 10000)
+    public void testNotAuthorizedQuery() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(5, TimeUnit.MINUTES);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10); // force new index to be created for each rollover
+
+        // only events with ID greater than 2 are visible to the querying user
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
+                return event.getEventId() > 2;
+            }
+        };
+
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // register events that will receive IDs 0 through 9; all share component ID "1234"
+        for (int i = 0; i < 10; i++) {
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            builder.setEventTime(10L); // make sure the events are destroyed when we call purge
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final Query query = new Query("1234");
+        query.addSearchTerm(SearchTerms.newSearchTerm(SearchableFields.ComponentID, "1234"));
+        final QuerySubmission submission = repo.submitQuery(query, createUser());
+
+        // poll until the asynchronous query completes; the @Test timeout bounds this loop
+        final QueryResult result = submission.getResult();
+        while (!result.isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        // Only IDs 3 through 9 are authorized, so we expect exactly 7 events back.
+        final List<ProvenanceEventRecord> events = result.getMatchingEvents();
+        assertEquals(7, events.size());
+        final List<Long> eventIds = events.stream().map(event -> event.getEventId()).sorted().collect(Collectors.toList());
+        for (int i = 0; i < 7; i++) {
+            Assert.assertEquals(i + 3, eventIds.get(i).intValue());
+        }
+    }
+
+    /**
+     * Verifies lineage computation against an authorization filter that denies all
+     * ATTRIBUTES_MODIFIED events. Denied events are expected to appear in the lineage graph
+     * as placeholder nodes of type UNKNOWN rather than being dropped entirely, so the graph
+     * shape is preserved without revealing the unauthorized event details. Checked both for
+     * a full lineage computation and for an expand-children computation.
+     */
+    @Test(timeout = 1000000)
+    public void testNotAuthorizedLineage() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxRecordLife(5, TimeUnit.MINUTES);
+        config.setMaxStorageCapacity(1024L * 1024L);
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setMaxEventFileCapacity(1024L * 1024L);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        config.setDesiredIndexSize(10); // force new index to be created for each rollover
+
+        // deny every ATTRIBUTES_MODIFIED event; all other event types are authorized
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            public boolean isAuthorized(ProvenanceEventRecord event, NiFiUser user) {
+                return event.getEventType() != ProvenanceEventType.ATTRIBUTES_MODIFIED;
+            }
+        };
+
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-" + uuid);
+        attributes.put("uuid", uuid);
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+        builder.setEventTime(10L); // make sure the events are destroyed when we call purge
+
+        // Build a small lineage: RECEIVE -> CONTENT_MODIFIED x2 -> ATTRIBUTES_MODIFIED (denied)
+        // -> FORK (spawns child flowfile) -> ATTRIBUTES_MODIFIED (denied) -> DROP
+        builder.fromFlowFile(createFlowFile(1, 3000L, attributes));
+        repo.registerEvent(builder.build());
+
+        builder.setEventType(ProvenanceEventType.CONTENT_MODIFIED);
+        builder.fromFlowFile(createFlowFile(2, 2000L, attributes));
+        repo.registerEvent(builder.build());
+
+        builder.setEventType(ProvenanceEventType.CONTENT_MODIFIED);
+        builder.fromFlowFile(createFlowFile(3, 2000L, attributes));
+        repo.registerEvent(builder.build());
+
+        builder.setEventType(ProvenanceEventType.ATTRIBUTES_MODIFIED);
+        attributes.put("new-attr", "yes");
+        builder.fromFlowFile(createFlowFile(4, 2000L, attributes));
+        repo.registerEvent(builder.build());
+
+        final Map<String, String> childAttributes = new HashMap<>(attributes);
+        childAttributes.put("uuid", "00000000-0000-0000-0000-000000000001");
+        builder.setEventType(ProvenanceEventType.FORK);
+        builder.fromFlowFile(createFlowFile(4, 2000L, attributes));
+        builder.addChildFlowFile(createFlowFile(5, 2000L, childAttributes));
+        builder.addParentFlowFile(createFlowFile(4, 2000L, attributes));
+        repo.registerEvent(builder.build());
+
+        builder.setEventType(ProvenanceEventType.ATTRIBUTES_MODIFIED);
+        builder.fromFlowFile(createFlowFile(6, 2000L, childAttributes));
+        repo.registerEvent(builder.build());
+
+        builder.setEventType(ProvenanceEventType.DROP);
+        builder.fromFlowFile(createFlowFile(6, 2000L, childAttributes));
+        repo.registerEvent(builder.build());
+
+        repo.waitForRollover();
+
+        final AsyncLineageSubmission originalLineage = repo.submitLineageComputation(uuid, createUser());
+
+        // poll until the asynchronous lineage computation completes
+        final StandardLineageResult result = originalLineage.getResult();
+        while (!result.isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        final List<LineageNode> lineageNodes = result.getNodes();
+        assertEquals(6, lineageNodes.size());
+
+        assertEquals(1, lineageNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.FLOWFILE_NODE).count());
+        assertEquals(5, lineageNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.PROVENANCE_EVENT_NODE).count());
+
+        final Set<EventNode> eventNodes = lineageNodes.stream()
+                .filter(node -> node.getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE)
+                .map(node -> (EventNode) node)
+                .collect(Collectors.toSet());
+
+        // the denied ATTRIBUTES_MODIFIED event surfaces as an UNKNOWN placeholder node
+        final Map<ProvenanceEventType, List<EventNode>> nodesByType = eventNodes.stream().collect(Collectors.groupingBy(EventNode::getEventType));
+        assertEquals(1, nodesByType.get(ProvenanceEventType.RECEIVE).size());
+        assertEquals(2, nodesByType.get(ProvenanceEventType.CONTENT_MODIFIED).size());
+        assertEquals(1, nodesByType.get(ProvenanceEventType.FORK).size());
+
+        assertEquals(1, nodesByType.get(ProvenanceEventType.UNKNOWN).size());
+        assertNull(nodesByType.get(ProvenanceEventType.ATTRIBUTES_MODIFIED));
+
+        // Test filtering on expandChildren
+        final AsyncLineageSubmission expandChild = repo.submitExpandChildren(4L, createUser());
+        final StandardLineageResult expandChildResult = expandChild.getResult();
+        while (!expandChildResult.isFinished()) {
+            Thread.sleep(100L);
+        }
+
+        final List<LineageNode> expandChildNodes = expandChildResult.getNodes();
+        assertEquals(4, expandChildNodes.size());
+
+        assertEquals(1, expandChildNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.FLOWFILE_NODE).count());
+        assertEquals(3, expandChildNodes.stream().map(node -> node.getNodeType()).filter(t -> t == LineageNodeType.PROVENANCE_EVENT_NODE).count());
+
+        final Set<EventNode> childEventNodes = expandChildNodes.stream()
+                .filter(node -> node.getNodeType() == LineageNodeType.PROVENANCE_EVENT_NODE)
+                .map(node -> (EventNode) node)
+                .collect(Collectors.toSet());
+
+        // again, the denied ATTRIBUTES_MODIFIED child event must show up as UNKNOWN
+        final Map<ProvenanceEventType, List<EventNode>> childNodesByType = childEventNodes.stream().collect(Collectors.groupingBy(EventNode::getEventType));
+        assertEquals(1, childNodesByType.get(ProvenanceEventType.FORK).size());
+        assertEquals(1, childNodesByType.get(ProvenanceEventType.DROP).size());
+        assertEquals(1, childNodesByType.get(ProvenanceEventType.UNKNOWN).size());
+        assertNull(childNodesByType.get(ProvenanceEventType.ATTRIBUTES_MODIFIED));
+    }
+
+    /**
+     * Verifies that registerEvent() applies back pressure: when the number of outstanding
+     * journals (stubbed via journalCountRef) exceeds the configured journal count, a caller
+     * registering an event must block until the journal count drops again. Timing-based:
+     * the blocked thread is released after ~1.5 s and its measured wall time is asserted.
+     */
+    @Test
+    public void testBackPressure() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileCapacity(1L); // force rollover on each record.
+        config.setJournalCount(1);
+
+        // lets the test control what getJournalCount() reports to the repository
+        final AtomicInteger journalCountRef = new AtomicInteger(0);
+
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS) {
+            @Override
+            protected int getJournalCount() {
+                return journalCountRef.get();
+            }
+        };
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final Map<String, String> attributes = new HashMap<>();
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", UUID.randomUUID().toString());
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        // ensure that we can register the events.
+        for (int i = 0; i < 10; i++) {
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        // set number of journals to 6 so that we will block.
+        journalCountRef.set(6);
+
+        // register one more event on a separate thread and measure how long it blocks
+        final AtomicLong threadNanos = new AtomicLong(0L);
+        final Thread t = new Thread(new Runnable() {
+            @Override
+            public void run() {
+                final long start = System.nanoTime();
+                builder.fromFlowFile(createFlowFile(13, 3000L, attributes));
+                attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 13);
+                repo.registerEvent(builder.build());
+                threadNanos.set(System.nanoTime() - start);
+            }
+        });
+        t.start();
+
+        // let the thread block for ~1.5 s, then release the back pressure
+        Thread.sleep(1500L);
+
+        journalCountRef.set(1);
+        t.join();
+
+        final int threadMillis = (int) TimeUnit.NANOSECONDS.toMillis(threadNanos.get());
+        assertTrue(threadMillis > 1200); // use 1200 to account for the fact that the timing is not exact
+
+        // with back pressure released, registration must proceed without blocking
+        builder.fromFlowFile(createFlowFile(15, 3000L, attributes));
+        attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + 15);
+        repo.registerEvent(builder.build());
+
+        Thread.sleep(3000L);
+    }
+
+    /**
+     * Verifies that the Lucene index built during rollover can be queried directly with a
+     * textual Lucene query. Ten events are registered; the last four (i &gt; 5) get distinct
+     * filenames "file-6".."file-9", so the query excluding "file-?" must match the other six.
+     */
+    @Test
+    public void testTextualQuery() throws InterruptedException, IOException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(500, TimeUnit.MILLISECONDS);
+        config.setSearchableFields(new ArrayList<>(SearchableFields.getStandardFields()));
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final String uuid = "00000000-0000-0000-0000-000000000000";
+        final Map<String, String> attributes = new HashMap<>();
+        attributes.put("abc", "xyz");
+        attributes.put("xyz", "abc");
+        attributes.put("filename", "file-unnamed");
+
+        final long now = System.currentTimeMillis();
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(now - TimeUnit.MILLISECONDS.convert(30, TimeUnit.SECONDS));
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", uuid);
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        for (int i = 0; i < 10; i++) {
+            // events 6-9 get a one-character filename suffix so "file-?" matches only them
+            if (i > 5) {
+                attributes.put("filename", "file-" + i);
+                builder.setEventTime(System.currentTimeMillis());
+            }
+            builder.fromFlowFile(createFlowFile(i, 3000L, attributes));
+            attributes.put("uuid", "00000000-0000-0000-0000-00000000000" + i);
+            repo.registerEvent(builder.build());
+        }
+
+        repo.waitForRollover();
+
+        final IndexConfiguration indexConfig = new IndexConfiguration(config);
+        final List<File> indexDirs = indexConfig.getIndexDirectories();
+
+        // match all uuids but exclude the four events whose filename is "file-" + one char
+        final String query = "uuid:00000000-0000-0000-0000-0000000000* AND NOT filename:file-?";
+        final List<Document> results = runQuery(indexDirs.get(0), new ArrayList<>(config.getStorageDirectories().values()), query);
+
+        assertEquals(6, results.size());
+    }
+
+    /**
+     * Executes a raw Lucene query against the given index directory and returns the
+     * matching Lucene Documents (at most 1000).
+     *
+     * @param indexDirectory the Lucene index directory to open and search
+     * @param storageDirs unused here; retained so existing callers keep compiling
+     * @param query the Lucene query string, parsed with "uuid" as the default field
+     * @return the Documents matching the query
+     * @throws IOException if the index cannot be opened or read
+     * @throws ParseException if the query string is not valid Lucene syntax
+     */
+    private List<Document> runQuery(final File indexDirectory, final List<File> storageDirs, final String query) throws IOException, ParseException {
+        assumeFalse(isWindowsEnvironment());
+        try (final DirectoryReader directoryReader = DirectoryReader.open(FSDirectory.open(indexDirectory))) {
+            final IndexSearcher searcher = new IndexSearcher(directoryReader);
+
+            final Analyzer analyzer = new SimpleAnalyzer();
+            final org.apache.lucene.search.Query luceneQuery = new QueryParser("uuid", analyzer).parse(query);
+
+            final TopDocs topDocs = searcher.search(luceneQuery, 1000);
+
+            // resolve each hit's doc ID to its stored Document
+            final List<Document> docs = new ArrayList<>(topDocs.scoreDocs.length);
+            for (final ScoreDoc scoreDoc : topDocs.scoreDocs) {
+                docs.add(directoryReader.document(scoreDoc.doc));
+            }
+
+            return docs;
+        }
+    }
+
+    private long checkJournalRecords(final File storageDir, final Boolean exact) throws IOException {
+        File[] storagefiles = storageDir.listFiles();
+        long counter = 0;
+        assertNotNull(storagefiles);
+        for (final File file : storagefiles) {
+            if (file.isFile()) {
+                try (RecordReader reader = RecordReaders.newRecordReader(file, null, 2048)) {
+                    ProvenanceEventRecord r;
+                    ProvenanceEventRecord last = null;
+                    while ((r = reader.nextRecord()) != null) {
+                        if (exact) {
+                            assertTrue(counter++ == r.getEventId());
+                        } else {
+                            assertTrue(counter++ <= r.getEventId());
+                        }
+                    }
+                }
+            }
+        }
+        return counter;
+    }
+
+    /**
+     * Registers the same event 10,000 times from 10 concurrent threads and verifies that,
+     * after the journals are merged on rollover, the merged event files contain exactly
+     * 10,000 records with strictly sequential event IDs.
+     */
+    @Test
+    public void testMergeJournals() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(3, TimeUnit.SECONDS);
+        repo = new PersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        repo.initialize(getEventReporter(), null, null, IdentifierLookup.EMPTY);
+
+        final Map<String, String> attributes = new HashMap<>();
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", "12345678-0000-0000-0000-012345678912");
+        builder.fromFlowFile(createFlowFile(3L, 3000L, attributes));
+        builder.setComponentId("1234");
+        builder.setComponentType("dummy processor");
+
+        final ProvenanceEventRecord record = builder.build();
+
+        final ExecutorService exec = Executors.newFixedThreadPool(10);
+        for (int i = 0; i < 10000; i++) {
+            exec.submit(new Runnable() {
+                @Override
+                public void run() {
+                    repo.registerEvent(record);
+                }
+            });
+        }
+
+        // Wait for all 10,000 registrations to finish before forcing the rollover; otherwise
+        // the merge could run while events are still being registered. Also prevents the
+        // thread pool from leaking past the test.
+        exec.shutdown();
+        assertTrue("Timed out waiting for event registration to complete", exec.awaitTermination(30, TimeUnit.SECONDS));
+
+        repo.waitForRollover();
+
+        final File storageDir = config.getStorageDirectories().values().iterator().next();
+        final File[] eventFiles = storageDir.listFiles();
+        assertNotNull(eventFiles);
+        long counter = 0;
+        for (final File file : eventFiles) {
+            if (file.isFile()) {
+                try (RecordReader reader = RecordReaders.newRecordReader(file, null, 2048)) {
+                    ProvenanceEventRecord r;
+                    // IDs must be strictly sequential across the merged files
+                    while ((r = reader.nextRecord()) != null) {
+                        assertEquals(counter++, r.getEventId());
+                    }
+                }
+            }
+        }
+
+        assertEquals(10000, counter);
+    }
+
+    private void corruptJournalFile(final File journalFile, final int position,
+            final String original, final String replacement) throws IOException {
+        final int journalLength = Long.valueOf(journalFile.length()).intValue();
+        final byte[] origBytes = original.getBytes();
+        final byte[] replBytes = replacement.getBytes();
+        FileInputStream journalIn = new FileInputStream(journalFile);
+        byte[] content = new byte[journalLength];
+        assertEquals(journalLength, journalIn.read(content, 0, journalLength));
+        journalIn.close();
+        assertEquals(original, new String(Arrays.copyOfRange(content, position, position + origBytes.length)));
+        System.arraycopy(replBytes, 0, content, position, replBytes.length);
+        FileOutputStream journalOut = new FileOutputStream(journalFile);
+        journalOut.write(content, 0, journalLength);
+        journalOut.flush();
+        journalOut.close();
+    }
+
+    @Test
+    public void testMergeJournalsBadFirstRecord() throws IOException, InterruptedException {
+        assumeFalse(isWindowsEnvironment());
+        final RepositoryConfiguration config = createConfiguration();
+        config.setMaxEventFileLife(3, TimeUnit.SECONDS);
+        TestablePersistentProvenanceRepository testRepo = new TestablePersistentProvenanceRepository(config, DEFAULT_ROLLOVER_MILLIS);
+        testRepo.initialize(getEventReporter(), null, null, null);
+
+        final Map<String, String> attributes = new HashMap<>();
+
+        final ProvenanceEventBuilder builder = new StandardProvenanceEventRecord.Builder();
+        builder.setEventTime(System.currentTimeMillis());
+        builder.setEventType(ProvenanceEventType.RECEIVE);
+        builder.setTransitUri("nifi://unit-test");
+        attributes.put("uuid", "12345678-0000-0000-0000-01234567891

<TRUNCATED>

[4/4] nifi git commit: NIFI-4806 updated tika and a ton of other deps as found by dependency versions plugin

Posted by mc...@apache.org.
NIFI-4806 updated tika and a ton of other deps as found by dependency versions plugin

This closes #3028


Project: http://git-wip-us.apache.org/repos/asf/nifi/repo
Commit: http://git-wip-us.apache.org/repos/asf/nifi/commit/8e233ca2
Tree: http://git-wip-us.apache.org/repos/asf/nifi/tree/8e233ca2
Diff: http://git-wip-us.apache.org/repos/asf/nifi/diff/8e233ca2

Branch: refs/heads/master
Commit: 8e233ca2ef55b2100174ee2b32a8668e9190a85c
Parents: de685a7
Author: joewitt <jo...@apache.org>
Authored: Thu Sep 20 23:24:17 2018 -0400
Committer: Matt Gilman <ma...@gmail.com>
Committed: Thu Oct 4 09:32:04 2018 -0400

----------------------------------------------------------------------
 .../src/test/resources/localhost-ks.jks         |  Bin
 .../src/test/resources/localhost-ts.jks         |  Bin
 nifi-commons/nifi-data-provenance-utils/pom.xml |    2 +-
 nifi-commons/nifi-expression-language/pom.xml   |    6 +-
 .../CharSequenceTranslatorEvaluator.java        |    4 +-
 nifi-commons/nifi-flowfile-packager/pom.xml     |    9 +-
 .../apache/nifi/util/FlowFilePackagerV1.java    |    2 +-
 nifi-commons/nifi-hl7-query-language/pom.xml    |   18 +-
 .../org/apache/nifi/hl7/hapi/HapiField.java     |    4 +-
 nifi-commons/nifi-json-utils/pom.xml            |    2 +-
 nifi-commons/nifi-schema-utils/pom.xml          |    2 +-
 nifi-commons/nifi-security-utils/pom.xml        |    6 +-
 nifi-commons/nifi-site-to-site-client/pom.xml   |    4 +-
 nifi-commons/nifi-socket-utils/pom.xml          |    2 +-
 nifi-commons/nifi-web-utils/pom.xml             |   10 +-
 nifi-external/nifi-spark-receiver/pom.xml       |    2 +-
 nifi-external/nifi-storm-spout/pom.xml          |    2 +-
 .../nifi-amqp-processors/pom.xml                |    2 +-
 .../emulator/AtlasAPIV2ServerEmulator.java      |   11 +-
 .../nifi-avro-processors/pom.xml                |    4 +-
 .../nifi-aws-abstract-processors/pom.xml        |    2 +-
 nifi-nar-bundles/nifi-aws-bundle/pom.xml        |    2 +-
 .../nifi-cassandra-processors/pom.xml           |    5 +
 .../processors/cassandra/QueryCassandra.java    |    2 +-
 .../nifi-ccda-processors/pom.xml                |    2 +-
 nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml  |    4 +-
 .../nifi-couchbase-processors/pom.xml           |    7 +-
 .../nifi-druid-controller-service-api/pom.xml   |    2 +-
 .../nifi-elasticsearch-client-service/pom.xml   |    4 +-
 .../nifi-elasticsearch-processors/pom.xml       |    4 +-
 .../pom.xml                                     |    2 +-
 .../nifi-email-processors/pom.xml               |    6 +-
 .../nifi-enrich-processors/pom.xml              |    2 +-
 .../nifi-hadoop-utils/pom.xml                   |    2 +-
 .../nifi-processor-utils/pom.xml                |    2 +-
 .../nifi-standard-record-utils/pom.xml          |    5 +
 .../main/java/org/apache/nifi/csv/CSVUtils.java |    2 +-
 .../java/org/apache/nifi/csv/CSVValidators.java |    2 +-
 .../nifi-reporting-utils/pom.xml                |    2 +-
 .../nifi-syslog-utils/pom.xml                   |    2 +-
 .../nifi-framework/nifi-web/nifi-jetty/pom.xml  |    7 +-
 .../nifi/web/server/HostHeaderHandler.java      |    2 +-
 nifi-nar-bundles/nifi-framework-bundle/pom.xml  |   14 +-
 .../nifi-gcp-services-api/pom.xml               |    3 +-
 .../nifi-grpc-processors/pom.xml                |    2 +-
 .../nifi-hbase-processors/pom.xml               |   13 +-
 .../nifi/hbase/io/JsonFullRowSerializer.java    |    2 +-
 .../io/JsonQualifierAndValueRowSerializer.java  |    2 +-
 .../apache/nifi/hbase/io/JsonRowSerializer.java |    2 +-
 .../nifi-hive-processors/pom.xml                |    5 +
 .../apache/nifi/util/hive/HiveJdbcCommon.java   |    2 +-
 .../apache/nifi/util/hive/HiveJdbcCommon.java   |    2 +-
 .../nifi-hl7-bundle/nifi-hl7-processors/pom.xml |   20 +-
 .../nifi-html-processors/pom.xml                |    2 +-
 .../nifi-ignite-processors/pom.xml              |    2 +-
 .../nifi-influxdb-processors/pom.xml            |    2 +-
 .../nifi-jms-bundle/nifi-jms-processors/pom.xml |    4 +-
 .../nifi-jolt-record-bundle/pom.xml             |    2 +-
 .../nifi-kerberos-iaa-providers/pom.xml         |    2 +-
 .../nifi-kite-bundle/nifi-kite-nar/pom.xml      |    2 +-
 .../nifi-kite-processors/pom.xml                |    7 +-
 .../nifi/processors/kite/ConvertCSVToAvro.java  |    2 +-
 .../nifi/processors/kite/InferAvroSchema.java   |    2 +-
 .../nifi-ldap-iaa-providers/pom.xml             |   11 +-
 .../nifi-media-processors/pom.xml               |    8 +-
 .../nifi-mongodb-services/pom.xml               |    2 +-
 .../nifi-network-processors/pom.xml             |    2 +-
 .../nifi-network-utils/pom.xml                  |   52 +-
 .../nifi-poi-bundle/nifi-poi-processors/pom.xml |    2 +-
 .../poi/ConvertExcelToCSVProcessor.java         |    2 +-
 ...tedWriteAheadProvenanceRepositoryTest.groovy |    2 +
 .../ITestPersistentProvenanceRepository.java    | 2356 ++++++++++++++++++
 .../TestPersistentProvenanceRepository.java     | 2356 ------------------
 .../nifi-provenance-repository-bundle/pom.xml   |    4 +-
 .../nifi-redis-extensions/pom.xml               |    2 +-
 .../nifi-registry-service/pom.xml               |    2 +-
 .../nifi-rethinkdb-processors/pom.xml           |    2 +-
 .../nifi-riemann-processors/pom.xml             |    2 +-
 nifi-nar-bundles/nifi-riemann-bundle/pom.xml    |    2 +-
 .../nifi-scripting-processors/pom.xml           |    2 +-
 .../nifi-solr-processors/pom.xml                |    4 +-
 .../nifi-livy-processors/pom.xml                |    2 +-
 nifi-nar-bundles/nifi-splunk-bundle/pom.xml     |    2 +-
 .../nifi-spring-processors/pom.xml              |    2 +-
 .../nifi/processors/standard/SplitXml.java      |    2 +-
 nifi-nar-bundles/nifi-standard-bundle/pom.xml   |   30 +-
 .../nifi-hbase_1_1_2-client-service/pom.xml     |    2 +-
 .../nifi-hwx-schema-registry-service/pom.xml    |    4 +-
 .../nifi-lookup-services/pom.xml                |    6 +-
 .../nifi-record-serialization-services/pom.xml  |   14 +-
 .../apache/nifi/csv/TestCSVRecordReader.java    |    2 +-
 .../nifi/csv/TestJacksonCSVRecordReader.java    |    2 +-
 .../nifi-update-attribute-processor/pom.xml     |    2 +-
 .../nifi-update-attribute-ui/pom.xml            |    4 +-
 nifi-toolkit/nifi-toolkit-admin/pom.xml         |    6 +-
 nifi-toolkit/nifi-toolkit-cli/pom.xml           |    4 +-
 .../nifi-toolkit-encrypt-config/pom.xml         |    4 +-
 nifi-toolkit/nifi-toolkit-s2s/pom.xml           |    4 +-
 nifi-toolkit/nifi-toolkit-tls/pom.xml           |    8 +-
 .../nifi-toolkit-zookeeper-migrator/pom.xml     |    2 +-
 pom.xml                                         |    2 +-
 101 files changed, 2592 insertions(+), 2581 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-bootstrap/src/test/resources/localhost-ks.jks
----------------------------------------------------------------------
diff --git a/nifi-bootstrap/src/test/resources/localhost-ks.jks b/nifi-bootstrap/src/test/resources/localhost-ks.jks
old mode 100755
new mode 100644

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-bootstrap/src/test/resources/localhost-ts.jks
----------------------------------------------------------------------
diff --git a/nifi-bootstrap/src/test/resources/localhost-ts.jks b/nifi-bootstrap/src/test/resources/localhost-ts.jks
old mode 100755
new mode 100644

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-data-provenance-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-data-provenance-utils/pom.xml b/nifi-commons/nifi-data-provenance-utils/pom.xml
index 5598b59..3fd1924 100644
--- a/nifi-commons/nifi-data-provenance-utils/pom.xml
+++ b/nifi-commons/nifi-data-provenance-utils/pom.xml
@@ -45,7 +45,7 @@
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcprov-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-expression-language/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-expression-language/pom.xml b/nifi-commons/nifi-expression-language/pom.xml
index 5a5a096..91a9844 100644
--- a/nifi-commons/nifi-expression-language/pom.xml
+++ b/nifi-commons/nifi-expression-language/pom.xml
@@ -81,12 +81,12 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/evaluation/functions/CharSequenceTranslatorEvaluator.java
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/evaluation/functions/CharSequenceTranslatorEvaluator.java b/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/evaluation/functions/CharSequenceTranslatorEvaluator.java
index 9a671c1..9f86fa4 100644
--- a/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/evaluation/functions/CharSequenceTranslatorEvaluator.java
+++ b/nifi-commons/nifi-expression-language/src/main/java/org/apache/nifi/attribute/expression/language/evaluation/functions/CharSequenceTranslatorEvaluator.java
@@ -18,8 +18,8 @@ package org.apache.nifi.attribute.expression.language.evaluation.functions;
 
 import java.util.Map;
 
-import org.apache.commons.lang3.StringEscapeUtils;
-import org.apache.commons.lang3.text.translate.CharSequenceTranslator;
+import org.apache.commons.text.StringEscapeUtils;
+import org.apache.commons.text.translate.CharSequenceTranslator;
 import org.apache.nifi.attribute.expression.language.evaluation.Evaluator;
 import org.apache.nifi.attribute.expression.language.evaluation.QueryResult;
 import org.apache.nifi.attribute.expression.language.evaluation.StringEvaluator;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-flowfile-packager/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-flowfile-packager/pom.xml b/nifi-commons/nifi-flowfile-packager/pom.xml
index 99af5fb..cd995da 100644
--- a/nifi-commons/nifi-flowfile-packager/pom.xml
+++ b/nifi-commons/nifi-flowfile-packager/pom.xml
@@ -25,12 +25,17 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-compress</artifactId>
-            <version>1.16.1</version>
+            <version>1.18</version>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-flowfile-packager/src/main/java/org/apache/nifi/util/FlowFilePackagerV1.java
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-flowfile-packager/src/main/java/org/apache/nifi/util/FlowFilePackagerV1.java b/nifi-commons/nifi-flowfile-packager/src/main/java/org/apache/nifi/util/FlowFilePackagerV1.java
index 479ac58..2e73e81 100644
--- a/nifi-commons/nifi-flowfile-packager/src/main/java/org/apache/nifi/util/FlowFilePackagerV1.java
+++ b/nifi-commons/nifi-flowfile-packager/src/main/java/org/apache/nifi/util/FlowFilePackagerV1.java
@@ -24,7 +24,7 @@ import java.util.Map;
 
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
 import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 
 public class FlowFilePackagerV1 implements FlowFilePackager {
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-hl7-query-language/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-hl7-query-language/pom.xml b/nifi-commons/nifi-hl7-query-language/pom.xml
index ffb22a8..cbe9007 100644
--- a/nifi-commons/nifi-hl7-query-language/pom.xml
+++ b/nifi-commons/nifi-hl7-query-language/pom.xml
@@ -65,47 +65,47 @@
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-base</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v21</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v22</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v23</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v231</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v24</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v25</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v251</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v26</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/hapi/HapiField.java
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/hapi/HapiField.java b/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/hapi/HapiField.java
index 94cce5c..f9624d0 100644
--- a/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/hapi/HapiField.java
+++ b/nifi-commons/nifi-hl7-query-language/src/main/java/org/apache/nifi/hl7/hapi/HapiField.java
@@ -27,7 +27,6 @@ import ca.uhn.hl7v2.model.Composite;
 import ca.uhn.hl7v2.model.ExtraComponents;
 import ca.uhn.hl7v2.model.Primitive;
 import ca.uhn.hl7v2.model.Type;
-import ca.uhn.hl7v2.model.Varies;
 import ca.uhn.hl7v2.parser.EncodingCharacters;
 import ca.uhn.hl7v2.parser.PipeParser;
 
@@ -59,8 +58,7 @@ public class HapiField implements HL7Field, HL7Component {
             componentList.add(new SingleValueField(singleFieldValue));
 
             for (int i = 0; i < extra.numComponents(); i++) {
-                final Varies varies = extra.getComponent(i);
-                componentList.add(new HapiField(varies));
+                componentList.add(new HapiField(extra.getComponent(i)));
             }
         }
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-json-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-json-utils/pom.xml b/nifi-commons/nifi-json-utils/pom.xml
index 655ab51..d4fdb16 100644
--- a/nifi-commons/nifi-json-utils/pom.xml
+++ b/nifi-commons/nifi-json-utils/pom.xml
@@ -32,7 +32,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
             <scope>compile</scope>
         </dependency>
     </dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-schema-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-schema-utils/pom.xml b/nifi-commons/nifi-schema-utils/pom.xml
index 561b36f..8c53b61 100644
--- a/nifi-commons/nifi-schema-utils/pom.xml
+++ b/nifi-commons/nifi-schema-utils/pom.xml
@@ -24,7 +24,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
             <scope>test</scope>
         </dependency>
     </dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-security-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-security-utils/pom.xml b/nifi-commons/nifi-security-utils/pom.xml
index bc5bc5e..da684c5 100644
--- a/nifi-commons/nifi-security-utils/pom.xml
+++ b/nifi-commons/nifi-security-utils/pom.xml
@@ -44,7 +44,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-codec</groupId>
@@ -54,12 +54,12 @@
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcprov-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
         </dependency>
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcpkix-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-site-to-site-client/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-site-to-site-client/pom.xml b/nifi-commons/nifi-site-to-site-client/pom.xml
index ace8db1..cf9ab8c 100644
--- a/nifi-commons/nifi-site-to-site-client/pom.xml
+++ b/nifi-commons/nifi-site-to-site-client/pom.xml
@@ -48,12 +48,12 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-socket-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-socket-utils/pom.xml b/nifi-commons/nifi-socket-utils/pom.xml
index eccf216..f4a4668 100644
--- a/nifi-commons/nifi-socket-utils/pom.xml
+++ b/nifi-commons/nifi-socket-utils/pom.xml
@@ -45,7 +45,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-commons/nifi-web-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-commons/nifi-web-utils/pom.xml b/nifi-commons/nifi-web-utils/pom.xml
index 2830e58..4d38aad 100644
--- a/nifi-commons/nifi-web-utils/pom.xml
+++ b/nifi-commons/nifi-web-utils/pom.xml
@@ -23,7 +23,7 @@
     <artifactId>nifi-web-utils</artifactId>
     <properties>
         <jersey.version>2.26</jersey.version>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
     <dependencies>
         <dependency>
@@ -39,7 +39,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.glassfish.jersey.core</groupId>
@@ -91,12 +91,6 @@
             <version>4.5.6</version>
             <scope>compile</scope>
         </dependency>
-        <dependency>
-            <groupId>org.apache.httpcomponents</groupId>
-            <artifactId>httpclient</artifactId>
-            <version>4.5.6</version>
-            <scope>compile</scope>
-        </dependency>
     </dependencies>
     <build>
         <plugins>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-external/nifi-spark-receiver/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-external/nifi-spark-receiver/pom.xml b/nifi-external/nifi-spark-receiver/pom.xml
index 9a7083b..581ee4c 100644
--- a/nifi-external/nifi-spark-receiver/pom.xml
+++ b/nifi-external/nifi-spark-receiver/pom.xml
@@ -43,7 +43,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-external/nifi-storm-spout/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-external/nifi-storm-spout/pom.xml b/nifi-external/nifi-storm-spout/pom.xml
index 45fc7a3..d81249f 100644
--- a/nifi-external/nifi-storm-spout/pom.xml
+++ b/nifi-external/nifi-storm-spout/pom.xml
@@ -32,7 +32,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/pom.xml b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/pom.xml
index f317e8c..4a61ee4 100644
--- a/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-amqp-bundle/nifi-amqp-processors/pom.xml
@@ -20,7 +20,7 @@ language governing permissions and limitations under the License. -->
     <packaging>jar</packaging>
 
     <properties>
-        <amqp-client.version>5.2.0</amqp-client.version>
+        <amqp-client.version>5.4.1</amqp-client.version>
     </properties>
 
     <dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/emulator/AtlasAPIV2ServerEmulator.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/emulator/AtlasAPIV2ServerEmulator.java b/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/emulator/AtlasAPIV2ServerEmulator.java
index 577e58b..b147810 100644
--- a/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/emulator/AtlasAPIV2ServerEmulator.java
+++ b/nifi-nar-bundles/nifi-atlas-bundle/nifi-atlas-reporting-task/src/test/java/org/apache/nifi/atlas/emulator/AtlasAPIV2ServerEmulator.java
@@ -171,8 +171,7 @@ public class AtlasAPIV2ServerEmulator {
                 final Object r;
                 switch (k) {
                     case "inputs":
-                    case "outputs":
-                    {
+                    case "outputs": {
                         // If a reference doesn't have guid, then find it.
                         r = resolveIOReference(v);
                     }
@@ -211,7 +210,7 @@ public class AtlasAPIV2ServerEmulator {
         httpConnector = new ServerConnector(server);
         httpConnector.setPort(21000);
 
-        server.setConnectors(new Connector[] {httpConnector});
+        server.setConnectors(new Connector[]{httpConnector});
 
         servletHandler.addServletWithMapping(TypeDefsServlet.class, "/types/typedefs/");
         servletHandler.addServletWithMapping(EntityBulkServlet.class, "/entity/bulk/");
@@ -334,6 +333,7 @@ public class AtlasAPIV2ServerEmulator {
     public static class EntityGuidServlet extends HttpServlet {
 
         private static Pattern URL_PATTERN = Pattern.compile(".+/guid/([^/]+)");
+
         @Override
         protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
             final Matcher matcher = URL_PATTERN.matcher(req.getRequestURI());
@@ -358,6 +358,7 @@ public class AtlasAPIV2ServerEmulator {
     public static class SearchByUniqueAttributeServlet extends HttpServlet {
 
         private static Pattern URL_PATTERN = Pattern.compile(".+/uniqueAttribute/type/([^/]+)");
+
         @Override
         protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
             // http://localhost:21000/api/atlas/v2/entity/uniqueAttribute/type/nifi_flow_path?attr:qualifiedName=2e9a2852-228f-379b-0000-000000000000@example
@@ -479,7 +480,6 @@ public class AtlasAPIV2ServerEmulator {
                 }
             }
 
-
             // Traverse entities those consume this entity as their input.
             final List<AtlasEntity> outGoings = Stream.of(outgoingEntities.getOrDefault(toTypedQname(s), Collections.emptyList()),
                     outgoingEntities.getOrDefault(s.getGuid(), Collections.emptyList())).flatMap(List::stream).collect(Collectors.toList());
@@ -567,7 +567,6 @@ public class AtlasAPIV2ServerEmulator {
                         traverse(seen, s, links, nodeIndices, outgoingEntities);
                     });
 
-
                 }
             }
 
@@ -596,7 +595,7 @@ public class AtlasAPIV2ServerEmulator {
                     // Group links by its target, and configure each weight value.
                     // E.g. 1 -> 3 and 2 -> 3, then 1 (0.5) -> 3 and 2 (0.5) -> 3.
                     ls.stream().collect(Collectors.groupingBy(Link::getTarget))
-                        .forEach((t, ls2SameTgt) -> ls2SameTgt.forEach(l -> l.setValue(1.0 / (double) ls2SameTgt.size())));
+                            .forEach((t, ls2SameTgt) -> ls2SameTgt.forEach(l -> l.setValue(1.0 / (double) ls2SameTgt.size())));
                 }
             });
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/pom.xml b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/pom.xml
index ae1758b..4776b21 100644
--- a/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-avro-bundle/nifi-avro-processors/pom.xml
@@ -34,7 +34,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.avro</groupId>
@@ -44,7 +44,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>commons-codec</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/pom.xml b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/pom.xml
index 5a9c357..cdd9325 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-aws-bundle/nifi-aws-abstract-processors/pom.xml
@@ -61,7 +61,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-aws-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-aws-bundle/pom.xml b/nifi-nar-bundles/nifi-aws-bundle/pom.xml
index ed42117..b3bb943 100644
--- a/nifi-nar-bundles/nifi-aws-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-aws-bundle/pom.xml
@@ -26,7 +26,7 @@
     <packaging>pom</packaging>
 
     <properties>
-        <aws-java-sdk-version>1.11.319</aws-java-sdk-version>
+        <aws-java-sdk-version>1.11.412</aws-java-sdk-version>
     </properties>
 
     <dependencyManagement>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/pom.xml b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/pom.xml
index d5c36d4..3359b3d 100644
--- a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/pom.xml
@@ -79,5 +79,10 @@
             <artifactId>nifi-mock-record-utils</artifactId>
             <version>1.8.0-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
+        </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/QueryCassandra.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/QueryCassandra.java b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/QueryCassandra.java
index 40b88cc..75a66f0 100644
--- a/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/QueryCassandra.java
+++ b/nifi-nar-bundles/nifi-cassandra-bundle/nifi-cassandra-processors/src/main/java/org/apache/nifi/processors/cassandra/QueryCassandra.java
@@ -33,7 +33,7 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.nifi.annotation.behavior.EventDriven;
 import org.apache.nifi.annotation.behavior.InputRequirement;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-ccda-bundle/nifi-ccda-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-ccda-bundle/nifi-ccda-processors/pom.xml b/nifi-nar-bundles/nifi-ccda-bundle/nifi-ccda-processors/pom.xml
index 6aee1f3..7d7dc82 100644
--- a/nifi-nar-bundles/nifi-ccda-bundle/nifi-ccda-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-ccda-bundle/nifi-ccda-processors/pom.xml
@@ -38,7 +38,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-jexl3</artifactId>
-            <version>3.0</version>
+            <version>3.1</version>
         </dependency>
         <dependency>
             <groupId>org.openehealth.ipf.oht.mdht</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml b/nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml
index 2169711..4abe273 100644
--- a/nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml
+++ b/nifi-nar-bundles/nifi-cdc/nifi-cdc-api/pom.xml
@@ -34,12 +34,12 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
index f358a92..1770694 100644
--- a/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-couchbase-bundle/nifi-couchbase-processors/pom.xml
@@ -48,7 +48,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -64,11 +64,6 @@
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-utils</artifactId>
-            <version>1.8.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-mock</artifactId>
             <version>1.8.0-SNAPSHOT</version>
             <scope>test</scope>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-druid-bundle/nifi-druid-controller-service-api/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-druid-bundle/nifi-druid-controller-service-api/pom.xml b/nifi-nar-bundles/nifi-druid-bundle/nifi-druid-controller-service-api/pom.xml
index d922e97..9a7f2b3 100644
--- a/nifi-nar-bundles/nifi-druid-bundle/nifi-druid-controller-service-api/pom.xml
+++ b/nifi-nar-bundles/nifi-druid-bundle/nifi-druid-controller-service-api/pom.xml
@@ -130,7 +130,7 @@
         <dependency>
             <groupId>org.bouncycastle</groupId>
             <artifactId>bcprov-jdk15on</artifactId>
-            <version>1.59</version>
+            <version>1.60</version>
             <scope>provided</scope>
         </dependency>
     </dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/pom.xml b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/pom.xml
index b0c7842..5334aa2 100644
--- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/pom.xml
+++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-client-service/pom.xml
@@ -66,7 +66,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
 
         <dependency>
@@ -83,7 +83,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.4</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.slf4j</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml
index 94a07fc..dfe7208 100644
--- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-processors/pom.xml
@@ -24,7 +24,7 @@ language governing permissions and limitations under the License. -->
         <slf4jversion>1.7.12</slf4jversion>
         <es.version>2.1.0</es.version>
         <lucene.version>5.3.1</lucene.version>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
 
     <dependencies>
@@ -63,7 +63,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-text</artifactId>
-            <version>1.3</version>
+            <version>1.4</version>
         </dependency>
         <dependency>
             <groupId>org.apache.lucene</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/pom.xml b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/pom.xml
index 54cc431..eecad55 100644
--- a/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-elasticsearch-bundle/nifi-elasticsearch-restapi-processors/pom.xml
@@ -80,7 +80,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/pom.xml b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/pom.xml
index cd93e23..c3b3e52 100644
--- a/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-email-bundle/nifi-email-processors/pom.xml
@@ -43,7 +43,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-email</artifactId>
-            <version>1.4</version>
+            <version>1.5</version>
             <exclusions>
                 <exclusion>
                     <groupId>com.sun.mail</groupId>
@@ -104,7 +104,7 @@
         <dependency>
             <groupId>org.apache.poi</groupId>
             <artifactId>poi-scratchpad</artifactId>
-            <version>3.17</version>
+            <version>4.0.0</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -120,7 +120,7 @@
         <dependency>
             <groupId>com.icegreen</groupId>
             <artifactId>greenmail</artifactId>
-            <version>1.5.2</version>
+            <version>1.5.8</version>
             <scope>test</scope>
         </dependency>
     </dependencies>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/pom.xml b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/pom.xml
index 6c55cce..2e55a4c 100644
--- a/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-enrich-bundle/nifi-enrich-processors/pom.xml
@@ -31,7 +31,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/pom.xml
index 8c7aa2c..f1a36b0 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-hadoop-utils/pom.xml
@@ -43,7 +43,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <!-- Other modules using nifi-hadoop-utils are expected to have the below dependencies available, typically through a NAR dependency -->
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/pom.xml
index 4f59729..e4e30d9 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-processor-utils/pom.xml
@@ -59,7 +59,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/pom.xml
index e82bf62..36f565f 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/pom.xml
@@ -54,6 +54,11 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-csv</artifactId>
+            <version>1.5</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
             <version>1.4</version>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVUtils.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVUtils.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVUtils.java
index f379bea..3f3814e 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVUtils.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVUtils.java
@@ -19,7 +19,7 @@ package org.apache.nifi.csv;
 
 import org.apache.commons.csv.CSVFormat;
 import org.apache.commons.csv.QuoteMode;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.components.AllowableValue;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.PropertyValue;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVValidators.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVValidators.java b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVValidators.java
index 5979407..0f6a22f 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVValidators.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-record-utils/nifi-standard-record-utils/src/main/java/org/apache/nifi/csv/CSVValidators.java
@@ -17,7 +17,7 @@
 
 package org.apache.nifi.csv;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
 import org.apache.nifi.components.Validator;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/pom.xml
index 87a388c..681589c 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-reporting-utils/pom.xml
@@ -38,7 +38,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>com.yammer.metrics</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-extension-utils/nifi-syslog-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-syslog-utils/pom.xml b/nifi-nar-bundles/nifi-extension-utils/nifi-syslog-utils/pom.xml
index dd94370..e78e694 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-syslog-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-syslog-utils/pom.xml
@@ -26,7 +26,7 @@
         <dependency>
             <groupId>com.github.palindromicity</groupId>
             <artifactId>simple-syslog-5424</artifactId>
-            <version>0.0.7</version>
+            <version>0.0.8</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/pom.xml b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/pom.xml
index a220826..4318c38 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/pom.xml
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/pom.xml
@@ -189,9 +189,14 @@
             <scope>compile</scope>
         </dependency>
         <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
+        </dependency>
+        <dependency>
             <groupId>com.github.stefanbirkner</groupId>
             <artifactId>system-rules</artifactId>
-            <version>1.16.0</version>
+            <version>1.18.0</version>
             <scope>test</scope>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java
index 72e8d84..5ccf1e0 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java
+++ b/nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-jetty/src/main/java/org/apache/nifi/web/server/HostHeaderHandler.java
@@ -17,7 +17,7 @@
 package org.apache.nifi.web.server;
 
 import com.google.common.base.Strings;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.http.conn.util.InetAddressUtils;
 import org.apache.nifi.util.NiFiProperties;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-framework-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-framework-bundle/pom.xml b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
index 3d82203..60fa7ba 100644
--- a/nifi-nar-bundles/nifi-framework-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-framework-bundle/pom.xml
@@ -27,7 +27,7 @@
         <jersey.version>2.26</jersey.version>
         <spring.version>4.3.10.RELEASE</spring.version>
         <spring.security.version>4.2.4.RELEASE</spring.security.version>    
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
     <modules>
         <module>nifi-framework</module>
@@ -194,7 +194,7 @@
             <dependency>
                 <groupId>org.apache.tika</groupId>
                 <artifactId>tika-core</artifactId>
-                <version>1.17</version>
+                <version>1.19</version>
             </dependency>
             <dependency>
                 <groupId>commons-codec</groupId>
@@ -258,7 +258,7 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-lang3</artifactId>
-                <version>3.7</version>
+                <version>3.8.1</version>
             </dependency>
             <dependency>
                 <groupId>org.quartz-scheduler</groupId>
@@ -302,12 +302,12 @@
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcprov-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
             <dependency>
                 <groupId>org.bouncycastle</groupId>
                 <artifactId>bcpkix-jdk15on</artifactId>
-                <version>1.59</version>
+                <version>1.60</version>
             </dependency>
             <dependency>
                 <groupId>com.google.guava</groupId>
@@ -547,7 +547,7 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-collections4</artifactId>
-                <version>4.1</version>
+                <version>4.2</version>
             </dependency>
             <dependency>
                 <groupId>org.jasypt</groupId>
@@ -594,7 +594,7 @@
             <dependency>
                 <groupId>org.apache.commons</groupId>
                 <artifactId>commons-compress</artifactId>
-                <version>1.16.1</version>
+                <version>1.18</version>
             </dependency>
             <dependency>
                 <groupId>commons-net</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-services-api/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-services-api/pom.xml b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-services-api/pom.xml
index 8f8979d..5515e17 100644
--- a/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-services-api/pom.xml
+++ b/nifi-nar-bundles/nifi-gcp-bundle/nifi-gcp-services-api/pom.xml
@@ -33,7 +33,6 @@
         <dependency>
             <groupId>com.google.auth</groupId>
             <artifactId>google-auth-library-oauth2-http</artifactId>
-            <version>0.6.0</version>
             <exclusions>
                 <exclusion>
                     <groupId>com.google.code.findbugs</groupId>
@@ -49,7 +48,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency> <!-- TODO: Remove this when the next version of google-auth-library-oauth2-http is released and brings this in-->
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-grpc-bundle/nifi-grpc-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-grpc-bundle/nifi-grpc-processors/pom.xml b/nifi-nar-bundles/nifi-grpc-bundle/nifi-grpc-processors/pom.xml
index 1ce4f00..e6d9d1e 100644
--- a/nifi-nar-bundles/nifi-grpc-bundle/nifi-grpc-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-grpc-bundle/nifi-grpc-processors/pom.xml
@@ -38,7 +38,7 @@ language governing permissions and limitations under the License. -->
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>commons-io</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/pom.xml b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/pom.xml
index cf6e09d..c2866fa 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/pom.xml
@@ -24,7 +24,7 @@
     <description>Support for interacting with HBase</description>
 
     <properties>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
     </properties>
 
     <dependencies>
@@ -55,12 +55,12 @@
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-record</artifactId>
-            <version>${project.version}</version>
+            <version>1.8.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.4</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
@@ -93,7 +93,12 @@
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-record-path</artifactId>
-            <version>${project.version}</version>
+            <version>1.8.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
         </dependency>
     </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonFullRowSerializer.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonFullRowSerializer.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonFullRowSerializer.java
index 837f14d..a518400 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonFullRowSerializer.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonFullRowSerializer.java
@@ -16,7 +16,7 @@
  */
 package org.apache.nifi.hbase.io;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.hbase.scan.ResultCell;
 import org.apache.nifi.hbase.util.RowSerializerUtil;
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonQualifierAndValueRowSerializer.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonQualifierAndValueRowSerializer.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonQualifierAndValueRowSerializer.java
index 0eb18ff..ee05f04 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonQualifierAndValueRowSerializer.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonQualifierAndValueRowSerializer.java
@@ -16,7 +16,7 @@
  */
 package org.apache.nifi.hbase.io;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.hbase.scan.ResultCell;
 import org.apache.nifi.hbase.util.RowSerializerUtil;
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonRowSerializer.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonRowSerializer.java b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonRowSerializer.java
index 0ea0804..fc903e4 100644
--- a/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonRowSerializer.java
+++ b/nifi-nar-bundles/nifi-hbase-bundle/nifi-hbase-processors/src/main/java/org/apache/nifi/hbase/io/JsonRowSerializer.java
@@ -16,7 +16,7 @@
  */
 package org.apache.nifi.hbase.io;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.hbase.scan.ResultCell;
 
 import java.io.IOException;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/pom.xml b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/pom.xml
index fda010e..30e641b 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/pom.xml
@@ -107,6 +107,11 @@
             <version>1.3.9-1</version>
         </dependency>
         <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
+        </dependency>
+        <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-mock</artifactId>
             <version>1.8.0-SNAPSHOT</version>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
index ff06495..6e28f94 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
@@ -24,7 +24,7 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
index ff06495..6e28f94 100644
--- a/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
+++ b/nifi-nar-bundles/nifi-hive-bundle/nifi-hive3-processors/src/main/java/org/apache/nifi/util/hive/HiveJdbcCommon.java
@@ -24,7 +24,7 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 import org.apache.avro.io.DatumWriter;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/pom.xml b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/pom.xml
index 2b43a50..fb53d5c 100644
--- a/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-hl7-bundle/nifi-hl7-processors/pom.xml
@@ -52,7 +52,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
@@ -66,47 +66,47 @@
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-base</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v21</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v22</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v23</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v231</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v24</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v25</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v251</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>ca.uhn.hapi</groupId>
             <artifactId>hapi-structures-v26</artifactId>
-            <version>2.2</version>
+            <version>2.3</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/pom.xml b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/pom.xml
index 06bdf88..25c1143 100644
--- a/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-html-bundle/nifi-html-processors/pom.xml
@@ -34,7 +34,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/pom.xml b/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/pom.xml
index 0dc7880..fc694be 100644
--- a/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/pom.xml
@@ -45,7 +45,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.ignite</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/pom.xml b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/pom.xml
index aad47b9..9e94c1a 100644
--- a/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-influxdb-bundle/nifi-influxdb-processors/pom.xml
@@ -34,7 +34,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
index 9b711eb..87ba89d 100644
--- a/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-jms-bundle/nifi-jms-processors/pom.xml
@@ -51,7 +51,7 @@
         <dependency>
             <groupId>org.apache.activemq</groupId>
             <artifactId>activemq-client</artifactId>
-            <version>5.15.3</version>
+            <version>5.15.6</version>
             <exclusions>
                 <!-- -->
                 <exclusion>
@@ -64,7 +64,7 @@
         <dependency>
             <groupId>org.apache.activemq</groupId>
             <artifactId>activemq-broker</artifactId>
-            <version>5.15.3</version>
+            <version>5.15.6</version>
             <scope>test</scope>
         </dependency>
         <dependency>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-jolt-record-bundle/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-jolt-record-bundle/pom.xml b/nifi-nar-bundles/nifi-jolt-record-bundle/pom.xml
index 3a7c4fa..b1c0c8d 100644
--- a/nifi-nar-bundles/nifi-jolt-record-bundle/pom.xml
+++ b/nifi-nar-bundles/nifi-jolt-record-bundle/pom.xml
@@ -33,7 +33,7 @@
     </modules>
 
     <properties>
-        <jackson.version>2.9.5</jackson.version>
+        <jackson.version>2.9.7</jackson.version>
         <yammer.metrics.version>2.2.0</yammer.metrics.version>
         <jolt.version>0.1.1</jolt.version>
         <jersey.version>2.26</jersey.version>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-kerberos-iaa-providers-bundle/nifi-kerberos-iaa-providers/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kerberos-iaa-providers-bundle/nifi-kerberos-iaa-providers/pom.xml b/nifi-nar-bundles/nifi-kerberos-iaa-providers-bundle/nifi-kerberos-iaa-providers/pom.xml
index 008b0d8..8ac3b7a 100644
--- a/nifi-nar-bundles/nifi-kerberos-iaa-providers-bundle/nifi-kerberos-iaa-providers/pom.xml
+++ b/nifi-nar-bundles/nifi-kerberos-iaa-providers-bundle/nifi-kerberos-iaa-providers/pom.xml
@@ -68,7 +68,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
     </dependencies>
     <name>nifi-kerberos-iaa-providers</name>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-nar/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-nar/pom.xml b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-nar/pom.xml
index cb95490..a10f5a9 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-nar/pom.xml
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-nar/pom.xml
@@ -97,7 +97,7 @@
                     <artifactId>commons-lang</artifactId>
                 </exclusion>
                 <exclusion>
-                    <groupId>commons-lang3</groupId>
+                    <groupId>org.apache.commons</groupId>
                     <artifactId>commons-lang3</artifactId>
                 </exclusion>
                 <exclusion>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml
index 9f87b36..0ff664a 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/pom.xml
@@ -60,7 +60,7 @@
         <dependency>
             <groupId>org.apache.commons</groupId>
             <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <version>3.8.1</version>
         </dependency>
 
         <dependency>
@@ -280,6 +280,11 @@
             <artifactId>nifi-hadoop-utils</artifactId>
             <version>1.8.0-SNAPSHOT</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-text</artifactId>
+            <version>1.4</version>
+        </dependency>
 
     </dependencies>
 

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
index 3646680..bacef3b 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/ConvertCSVToAvro.java
@@ -29,7 +29,7 @@ import java.util.Set;
 import org.apache.avro.Schema;
 import org.apache.avro.file.DataFileWriter;
 import org.apache.avro.generic.GenericData.Record;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.InputRequirement.Requirement;
 import org.apache.nifi.annotation.documentation.CapabilityDescription;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/InferAvroSchema.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/InferAvroSchema.java b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/InferAvroSchema.java
index 4344ce0..69545dd 100644
--- a/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/InferAvroSchema.java
+++ b/nifi-nar-bundles/nifi-kite-bundle/nifi-kite-processors/src/main/java/org/apache/nifi/processors/kite/InferAvroSchema.java
@@ -20,7 +20,7 @@ package org.apache.nifi.processors.kite;
 
 import org.apache.avro.Schema;
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 import org.apache.nifi.annotation.behavior.InputRequirement;
 import org.apache.nifi.annotation.behavior.ReadsAttribute;
 import org.apache.nifi.annotation.behavior.ReadsAttributes;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-ldap-iaa-providers-bundle/nifi-ldap-iaa-providers/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-ldap-iaa-providers-bundle/nifi-ldap-iaa-providers/pom.xml b/nifi-nar-bundles/nifi-ldap-iaa-providers-bundle/nifi-ldap-iaa-providers/pom.xml
index e6f7c3a..1782a92 100644
--- a/nifi-nar-bundles/nifi-ldap-iaa-providers-bundle/nifi-ldap-iaa-providers/pom.xml
+++ b/nifi-nar-bundles/nifi-ldap-iaa-providers-bundle/nifi-ldap-iaa-providers/pom.xml
@@ -82,15 +82,10 @@
             <artifactId>spring-context</artifactId>
             <version>${spring.version}</version>
         </dependency>
-            <dependency>
-                <groupId>org.springframework</groupId>
-                <artifactId>spring-tx</artifactId>
-                <version>${spring.version}</version>
-            </dependency>
         <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-            <version>3.7</version>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-tx</artifactId>
+            <version>${spring.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.directory.server</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
index 8fd828e..7166397 100644
--- a/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-media-bundle/nifi-media-processors/pom.xml
@@ -49,7 +49,7 @@
         <dependency>
             <groupId>org.apache.tika</groupId>
             <artifactId>tika-parsers</artifactId>
-            <version>1.17</version>
+            <version>1.19</version>
             <exclusions>
                 <exclusion>
                     <groupId>com.fasterxml.jackson.core</groupId>
@@ -66,12 +66,6 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        <!-- Once Tika-Parsers 1.18 comes out this explicit dependency ref forcing 1.16.1 can be removed -->
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-compress</artifactId>
-            <version>1.16.1</version>
-        </dependency>
     </dependencies>
     <build>
         <plugins>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/pom.xml b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/pom.xml
index 9bfe27f..f4dfa8f 100644
--- a/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/pom.xml
+++ b/nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-services/pom.xml
@@ -55,7 +55,7 @@
         <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
-            <version>2.9.5</version>
+            <version>2.9.7</version>
         </dependency>
         <dependency>
             <groupId>org.mongodb</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-network-bundle/nifi-network-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-network-bundle/nifi-network-processors/pom.xml b/nifi-nar-bundles/nifi-network-bundle/nifi-network-processors/pom.xml
index 3491cd9..9996ea9 100644
--- a/nifi-nar-bundles/nifi-network-bundle/nifi-network-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-network-bundle/nifi-network-processors/pom.xml
@@ -44,7 +44,7 @@
 		<dependency>
 			<groupId>com.fasterxml.jackson.core</groupId>
 			<artifactId>jackson-databind</artifactId>
-			<version>2.9.5</version>
+			<version>2.9.7</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.nifi</groupId>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-network-bundle/nifi-network-utils/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-network-bundle/nifi-network-utils/pom.xml b/nifi-nar-bundles/nifi-network-bundle/nifi-network-utils/pom.xml
index 0a72a1c..55f0277 100644
--- a/nifi-nar-bundles/nifi-network-bundle/nifi-network-utils/pom.xml
+++ b/nifi-nar-bundles/nifi-network-bundle/nifi-network-utils/pom.xml
@@ -14,30 +14,30 @@
   limitations under the License.
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-	<parent>
-		<artifactId>nifi-network-bundle</artifactId>
-		<groupId>org.apache.nifi</groupId>
-		<version>1.8.0-SNAPSHOT</version>
-	</parent>
-	<modelVersion>4.0.0</modelVersion>
-	<artifactId>nifi-network-utils</artifactId>
-	<packaging>jar</packaging>
-	<dependencies>
-		<dependency>
-			<groupId>com.fasterxml.jackson.core</groupId>
-			<artifactId>jackson-databind</artifactId>
-			<version>2.9.5</version>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-simple</artifactId>
-			<scope>test</scope>
-		</dependency>
-		<dependency>
-			<groupId>junit</groupId>
-			<artifactId>junit</artifactId>
-			<scope>test</scope>
-		</dependency>
-	</dependencies>
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>nifi-network-bundle</artifactId>
+        <groupId>org.apache.nifi</groupId>
+        <version>1.8.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>nifi-network-utils</artifactId>
+    <packaging>jar</packaging>
+    <dependencies>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.9.7</version>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-simple</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
 </project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/pom.xml
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/pom.xml b/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/pom.xml
index 3de45c9..ccc6892 100644
--- a/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/pom.xml
+++ b/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/pom.xml
@@ -17,7 +17,7 @@
     <modelVersion>4.0.0</modelVersion>
 
     <properties>
-        <poi.version>3.17</poi.version>
+        <poi.version>4.0.0</poi.version>
     </properties>
 
     <parent>

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/main/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessor.java
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/main/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessor.java b/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/main/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessor.java
index 7a762f4..51abc27 100644
--- a/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/main/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessor.java
+++ b/nifi-nar-bundles/nifi-poi-bundle/nifi-poi-processors/src/main/java/org/apache/nifi/processors/poi/ConvertExcelToCSVProcessor.java
@@ -55,7 +55,7 @@ import org.apache.poi.openxml4j.opc.OPCPackage;
 import org.apache.poi.ss.usermodel.DataFormatter;
 import org.apache.poi.ss.util.CellAddress;
 import org.apache.poi.ss.util.CellReference;
-import org.apache.poi.util.SAXHelper;
+import org.apache.poi.ooxml.util.SAXHelper;
 import org.apache.poi.xssf.eventusermodel.ReadOnlySharedStringsTable;
 import org.apache.poi.xssf.eventusermodel.XSSFReader;
 import org.apache.poi.xssf.eventusermodel.XSSFSheetXMLHandler;

http://git-wip-us.apache.org/repos/asf/nifi/blob/8e233ca2/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/groovy/org/apache/nifi/provenance/EncryptedWriteAheadProvenanceRepositoryTest.groovy
----------------------------------------------------------------------
diff --git a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/groovy/org/apache/nifi/provenance/EncryptedWriteAheadProvenanceRepositoryTest.groovy b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/groovy/org/apache/nifi/provenance/EncryptedWriteAheadProvenanceRepositoryTest.groovy
index 582a805..ce1ebc7 100644
--- a/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/groovy/org/apache/nifi/provenance/EncryptedWriteAheadProvenanceRepositoryTest.groovy
+++ b/nifi-nar-bundles/nifi-provenance-repository-bundle/nifi-persistent-provenance-repository/src/test/groovy/org/apache/nifi/provenance/EncryptedWriteAheadProvenanceRepositoryTest.groovy
@@ -29,6 +29,7 @@ import org.junit.Before
 import org.junit.BeforeClass
 import org.junit.ClassRule
 import org.junit.Test
+import org.junit.Ignore
 import org.junit.rules.TemporaryFolder
 import org.junit.runner.RunWith
 import org.junit.runners.JUnit4
@@ -249,6 +250,7 @@ class EncryptedWriteAheadProvenanceRepositoryTest {
     }
 
     @Test
+    @Ignore("test is unstable. NIFI-5624 to improve it")
     void testShouldRegisterAndGetEvent() {
         // Arrange