Posted to commits@nifi.apache.org by mt...@apache.org on 2023/02/22 00:53:55 UTC

[nifi] branch main updated (57a1144f34 -> 85f2162021)

This is an automated email from the ASF dual-hosted git repository.

mthomsen pushed a change to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git


    from 57a1144f34 NIFI-11187 Removed ActiveMQ from Standard Processors
     new a0ef378a42 NIFI-11207 Removed HBase 1.1.2 Services
     new 85f2162021 NIFI-11201 Added include-iotdb Maven build profile

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
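
NIFI-11201 makes the IoTDB bundle an opt-in part of the build via a Maven
profile. Going only by the profile name in the commit subject (the details
live in the updated developer-guide.adoc listed below), a build that pulls
the IoTDB components back in would look roughly like this:

    # sketch only; the profile name is taken from the commit subject
    mvn clean install -P include-iotdb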


Summary of changes:
 nifi-assembly/pom.xml                              |  23 +-
 nifi-docs/src/main/asciidoc/developer-guide.adoc   |  28 +-
 .../apache/nifi/processors/QueryIoTDBRecord.java   |   2 +-
 .../nifi-hbase_1_1_2-client-service-nar/pom.xml    |  45 -
 .../src/main/resources/META-INF/LICENSE            | 357 --------
 .../src/main/resources/META-INF/NOTICE             | 322 -------
 .../nifi-hbase_1_1_2-client-service/pom.xml        | 142 ----
 .../nifi/hbase/AbstractHBaseLookupService.java     | 157 ----
 .../hbase/HBase_1_1_2_ClientMapCacheService.java   | 316 -------
 .../nifi/hbase/HBase_1_1_2_ClientService.java      | 946 ---------------------
 .../nifi/hbase/HBase_1_1_2_ListLookupService.java  | 115 ---
 .../hbase/HBase_1_1_2_RecordLookupService.java     |  85 --
 .../apache/nifi/hbase/VisibilityLabelUtils.java    |  50 --
 .../org.apache.nifi.controller.ControllerService   |  18 -
 .../apache/nifi/hbase/MockHBaseClientService.java  | 235 -----
 .../TestHBase_1_1_2_ClientMapCacheService.java     | 453 ----------
 .../nifi/hbase/TestHBase_1_1_2_ClientService.java  | 529 ------------
 .../hbase/TestHBase_1_1_2_ListLookupService.java   | 128 ---
 .../hbase/TestHBase_1_1_2_RecordLookupService.java | 124 ---
 .../java/org/apache/nifi/hbase/TestProcessor.java  |  56 --
 .../nifi/hbase/TestRecordLookupProcessor.java      | 112 ---
 .../src/test/resources/core-site-security.xml      |  30 -
 .../src/test/resources/core-site.xml               |  22 -
 .../src/test/resources/fake.keytab                 |   0
 .../src/test/resources/hbase-site-security.xml     |  30 -
 .../src/test/resources/hbase-site.xml              |  22 -
 .../src/test/resources/krb5.conf                   |   0
 .../nifi-hbase_1_1_2-client-service-bundle/pom.xml | 114 ---
 nifi-nar-bundles/nifi-standard-services/pom.xml    |   1 -
 29 files changed, 32 insertions(+), 4430 deletions(-)
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/pom.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/LICENSE
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/AbstractHBaseLookupService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientMapCacheService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ListLookupService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_RecordLookupService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/VisibilityLabelUtils.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/MockHBaseClientService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientMapCacheService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ListLookupService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_RecordLookupService.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestRecordLookupProcessor.java
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site-security.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/fake.keytab
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site-security.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site.xml
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/krb5.conf
 delete mode 100644 nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml


[nifi] 01/02: NIFI-11207 Removed HBase 1.1.2 Services

Posted by mt...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mthomsen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit a0ef378a42e2a3951e085d7f8566ed81d8f37902
Author: exceptionfactory <ex...@apache.org>
AuthorDate: Tue Feb 21 10:33:26 2023 -0600

    NIFI-11207 Removed HBase 1.1.2 Services
    
    This closes #6979
    
    Signed-off-by: Mike Thomsen <mt...@apache.org>
---
 nifi-assembly/pom.xml                              |   6 -
 .../nifi-hbase_1_1_2-client-service-nar/pom.xml    |  45 -
 .../src/main/resources/META-INF/LICENSE            | 357 --------
 .../src/main/resources/META-INF/NOTICE             | 322 -------
 .../nifi-hbase_1_1_2-client-service/pom.xml        | 142 ----
 .../nifi/hbase/AbstractHBaseLookupService.java     | 157 ----
 .../hbase/HBase_1_1_2_ClientMapCacheService.java   | 316 -------
 .../nifi/hbase/HBase_1_1_2_ClientService.java      | 946 ---------------------
 .../nifi/hbase/HBase_1_1_2_ListLookupService.java  | 115 ---
 .../hbase/HBase_1_1_2_RecordLookupService.java     |  85 --
 .../apache/nifi/hbase/VisibilityLabelUtils.java    |  50 --
 .../org.apache.nifi.controller.ControllerService   |  18 -
 .../apache/nifi/hbase/MockHBaseClientService.java  | 235 -----
 .../TestHBase_1_1_2_ClientMapCacheService.java     | 453 ----------
 .../nifi/hbase/TestHBase_1_1_2_ClientService.java  | 529 ------------
 .../hbase/TestHBase_1_1_2_ListLookupService.java   | 128 ---
 .../hbase/TestHBase_1_1_2_RecordLookupService.java | 124 ---
 .../java/org/apache/nifi/hbase/TestProcessor.java  |  56 --
 .../nifi/hbase/TestRecordLookupProcessor.java      | 112 ---
 .../src/test/resources/core-site-security.xml      |  30 -
 .../src/test/resources/core-site.xml               |  22 -
 .../src/test/resources/fake.keytab                 |   0
 .../src/test/resources/hbase-site-security.xml     |  30 -
 .../src/test/resources/hbase-site.xml              |  22 -
 .../src/test/resources/krb5.conf                   |   0
 .../nifi-hbase_1_1_2-client-service-bundle/pom.xml | 114 ---
 nifi-nar-bundles/nifi-standard-services/pom.xml    |   1 -
 27 files changed, 4415 deletions(-)
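
Note for flows using the removed services: this commit drops the whole
nifi-hbase_1_1_2-client-service-bundle, so controller services such as
HBase_1_1_2_ClientService no longer ship with the assembly. The HBase 2
client NAR stays in nifi-assembly/pom.xml (visible in the first hunk
below); a deployment that needs HBase access would now rely on that
surviving dependency, roughly:

    <!-- sketch of the remaining HBase client NAR entry in nifi-assembly/pom.xml;
         the 1.1.2 counterpart removed in the hunk below has no replacement here -->
    <dependency>
        <groupId>org.apache.nifi</groupId>
        <artifactId>nifi-hbase_2-client-service-nar</artifactId>
        <version>2.0.0-SNAPSHOT</version>
        <type>nar</type>
    </dependency>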

diff --git a/nifi-assembly/pom.xml b/nifi-assembly/pom.xml
index 093bba00e3..5a6f523f0e 100644
--- a/nifi-assembly/pom.xml
+++ b/nifi-assembly/pom.xml
@@ -492,12 +492,6 @@ language governing permissions and limitations under the License. -->
             <version>2.0.0-SNAPSHOT</version>
             <type>nar</type>
         </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-hbase_1_1_2-client-service-nar</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <type>nar</type>
-        </dependency>
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-hbase_2-client-service-nar</artifactId>
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/pom.xml
deleted file mode 100644
index f5096abc8b..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/pom.xml
+++ /dev/null
@@ -1,45 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-  http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-hbase_1_1_2-client-service-bundle</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>nifi-hbase_1_1_2-client-service-nar</artifactId>
-    <packaging>nar</packaging>
-    <properties>
-        <maven.javadoc.skip>true</maven.javadoc.skip>
-        <source.skip>true</source.skip>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-standard-services-api-nar</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <type>nar</type>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-hbase_1_1_2-client-service</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/LICENSE b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/LICENSE
deleted file mode 100644
index d167721fb4..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/LICENSE
+++ /dev/null
@@ -1,357 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
-APACHE NIFI SUBCOMPONENTS:
-
-The Apache NiFi project contains subcomponents with separate copyright
-notices and license terms. Your use of the source code for these
-subcomponents is subject to the terms and conditions of the following
-licenses. 
-
-The binary distribution of this product bundles 'Jcodings' under an MIT style
-license.
-
-  Permission is hereby granted, free of charge, to any person obtaining a copy of
-  this software and associated documentation files (the "Software"), to deal in
-  the Software without restriction, including without limitation the rights to
-  use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-  of the Software, and to permit persons to whom the Software is furnished to do
-  so, subject to the following conditions:
-
-  The above copyright notice and this permission notice shall be included in all
-  copies or substantial portions of the Software.
-
-  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-  SOFTWARE.
-
-The binary distribution of this product bundles 'Joni' under an MIT style
-license.
-
-  Permission is hereby granted, free of charge, to any person obtaining a copy of
-  this software and associated documentation files (the "Software"), to deal in
-  the Software without restriction, including without limitation the rights to
-  use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-  of the Software, and to permit persons to whom the Software is furnished to do
-  so, subject to the following conditions:
-
-  The above copyright notice and this permission notice shall be included in all
-  copies or substantial portions of the Software.
-
-  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-  SOFTWARE.
-
-The binary distribution of this product bundles 'Google Protocol Buffers Java 2.5.0'
-which is licensed under a BSD license.
-
-  This license applies to all parts of Protocol Buffers except the following:
-
-    - Atomicops support for generic gcc, located in
-      src/google/protobuf/stubs/atomicops_internals_generic_gcc.h.
-      This file is copyrighted by Red Hat Inc.
-
-    - Atomicops support for AIX/POWER, located in
-      src/google/protobuf/stubs/atomicops_internals_aix.h.
-      This file is copyrighted by Bloomberg Finance LP.
-
-  Copyright 2014, Google Inc.  All rights reserved.
-
-  Redistribution and use in source and binary forms, with or without
-  modification, are permitted provided that the following conditions are
-  met:
-
-      * Redistributions of source code must retain the above copyright
-  notice, this list of conditions and the following disclaimer.
-      * Redistributions in binary form must reproduce the above
-  copyright notice, this list of conditions and the following disclaimer
-  in the documentation and/or other materials provided with the
-  distribution.
-      * Neither the name of Google Inc. nor the names of its
-  contributors may be used to endorse or promote products derived from
-  this software without specific prior written permission.
-
-  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-  Code generated by the Protocol Buffer compiler is owned by the owner
-  of the input file used when generating it.  This code is not
-  standalone and requires a support library to be linked with it.  This
-  support library is itself covered by the above license.
-
-The binary distribution of this product bundles 'Paranamer Core' which is available
-under a BSD style license.
-
-    Copyright (c) 2006 Paul Hammant & ThoughtWorks Inc
-     All rights reserved.
-
-     Redistribution and use in source and binary forms, with or without
-     modification, are permitted provided that the following conditions
-     are met:
-     1. Redistributions of source code must retain the above copyright
-        notice, this list of conditions and the following disclaimer.
-     2. Redistributions in binary form must reproduce the above copyright
-        notice, this list of conditions and the following disclaimer in the
-        documentation and/or other materials provided with the distribution.
-     3. Neither the name of the copyright holders nor the names of its
-        contributors may be used to endorse or promote products derived from
-        this software without specific prior written permission.
-
-     THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-     AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-     IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-     ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
-     LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
-     CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
-     SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-     INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
-     CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-     ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
-     THE POSSIBILITY OF SUCH DAMAGE.
-
-The binary distribution of this product bundles 'JCraft Jsch' which is available
-under a BSD style license.
-
-     Copyright (c) 2002-2014 Atsuhiko Yamanaka, JCraft,Inc.
-     All rights reserved.
-
-     Redistribution and use in source and binary forms, with or without
-     modification, are permitted provided that the following conditions are met:
-
-       1. Redistributions of source code must retain the above copyright notice,
-          this list of conditions and the following disclaimer.
-
-       2. Redistributions in binary form must reproduce the above copyright
-          notice, this list of conditions and the following disclaimer in
-          the documentation and/or other materials provided with the distribution.
-
-       3. The names of the authors may not be used to endorse or promote products
-          derived from this software without specific prior written permission.
-
-     THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES,
-     INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
-     FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL JCRAFT,
-     INC. OR ANY CONTRIBUTORS TO THIS SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT,
-     INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-     LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
-     OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
-     LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-     NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
-     EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
deleted file mode 100644
index d0ca1d3b78..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service-nar/src/main/resources/META-INF/NOTICE
+++ /dev/null
@@ -1,322 +0,0 @@
-nifi-hbase_1_1_2-client-service-nar
-Copyright 2014-2023 The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
-
-******************
-Apache Software License v2
-******************
-
- (ASLv2) Apache Commons CLI
-    The following NOTICE information applies:
-      Apache Commons CLI
-      Copyright 2001-2009 The Apache Software Foundation
-
-  (ASLv2) Apache Curator
-    The following NOTICE information applies:
-      Curator Framework
-      Copyright 2011-2014 The Apache Software Foundation
-
-      Curator Client
-      Copyright 2011-2014 The Apache Software Foundation
-
-      Curator Recipes
-      Copyright 2011-2014 The Apache Software Foundation
-
-  (ASLv2) Apache Directory Server
-    The following NOTICE information applies:
-      ApacheDS Protocol Kerberos Codec
-      Copyright 2003-2013 The Apache Software Foundation
-
-      ApacheDS I18n
-      Copyright 2003-2013 The Apache Software Foundation
-
-      Apache Directory API ASN.1 API
-      Copyright 2003-2013 The Apache Software Foundation
-
-      Apache Directory LDAP API Utilities
-      Copyright 2003-2013 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Math
-    The following NOTICE information applies:
-      Apache Commons Math
-      Copyright 2001-2012 The Apache Software Foundation
-
-      This product includes software developed by
-      The Apache Software Foundation (http://www.apache.org/).
-
-      ===============================================================================
-
-      The BracketFinder (package org.apache.commons.math3.optimization.univariate)
-      and PowellOptimizer (package org.apache.commons.math3.optimization.general)
-      classes are based on the Python code in module "optimize.py" (version 0.5)
-      developed by Travis E. Oliphant for the SciPy library (http://www.scipy.org/)
-      Copyright © 2003-2009 SciPy Developers.
-      ===============================================================================
-
-      The LinearConstraint, LinearObjectiveFunction, LinearOptimizer,
-      RelationShip, SimplexSolver and SimplexTableau classes in package
-      org.apache.commons.math3.optimization.linear include software developed by
-      Benjamin McCann (http://www.benmccann.com) and distributed with
-      the following copyright: Copyright 2009 Google Inc.
-      ===============================================================================
-
-      This product includes software developed by the
-      University of Chicago, as Operator of Argonne National
-      Laboratory.
-      The LevenbergMarquardtOptimizer class in package
-      org.apache.commons.math3.optimization.general includes software
-      translated from the lmder, lmpar and qrsolv Fortran routines
-      from the Minpack package
-      Minpack Copyright Notice (1999) University of Chicago.  All rights reserved
-      ===============================================================================
-
-      The GraggBulirschStoerIntegrator class in package
-      org.apache.commons.math3.ode.nonstiff includes software translated
-      from the odex Fortran routine developed by E. Hairer and G. Wanner.
-      Original source copyright:
-      Copyright (c) 2004, Ernst Hairer
-      ===============================================================================
-
-      The EigenDecompositionImpl class in package
-      org.apache.commons.math3.linear includes software translated
-      from some LAPACK Fortran routines.  Original source copyright:
-      Copyright (c) 1992-2008 The University of Tennessee.  All rights reserved.
-      ===============================================================================
-
-      The MersenneTwister class in package org.apache.commons.math3.random
-      includes software translated from the 2002-01-26 version of
-      the Mersenne-Twister generator written in C by Makoto Matsumoto and Takuji
-      Nishimura. Original source copyright:
-      Copyright (C) 1997 - 2002, Makoto Matsumoto and Takuji Nishimura,
-      All rights reserved
-      ===============================================================================
-
-      The LocalizedFormatsTest class in the unit tests is an adapted version of
-      the OrekitMessagesTest class from the orekit library distributed under the
-      terms of the Apache 2 licence. Original source copyright:
-      Copyright 2010 CS Systèmes d'Information
-      ===============================================================================
-
-      The HermiteInterpolator class and its corresponding test have been imported from
-      the orekit library distributed under the terms of the Apache 2 licence. Original
-      source copyright:
-      Copyright 2010-2012 CS Systèmes d'Information
-      ===============================================================================
-
-      The creation of the package "o.a.c.m.analysis.integration.gauss" was inspired
-      by an original code donated by Sébastien Brisard.
-      ===============================================================================
-
-  (ASLv2) Apache Jakarta HttpClient
-    The following NOTICE information applies:
-      Apache Jakarta HttpClient
-      Copyright 1999-2007 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Codec
-    The following NOTICE information applies:
-      Apache Commons Codec
-      Copyright 2002-2014 The Apache Software Foundation
-
-      src/test/org/apache/commons/codec/language/DoubleMetaphoneTest.java
-      contains test data from http://aspell.net/test/orig/batch0.tab.
-      Copyright (C) 2002 Kevin Atkinson (kevina@gnu.org)
-
-      ===============================================================================
-
-      The content of package org.apache.commons.codec.language.bm has been translated
-      from the original php source code available at http://stevemorse.org/phoneticinfo.htm
-      with permission from the original authors.
-      Original source copyright:
-      Copyright (c) 2008 Alexander Beider & Stephen P. Morse.
-
-  (ASLv2) Apache Commons IO
-    The following NOTICE information applies:
-      Apache Commons IO
-      Copyright 2002-2016 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Net
-    The following NOTICE information applies:
-      Apache Commons Net
-      Copyright 2001-2013 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Collections
-    The following NOTICE information applies:
-      Apache Commons Collections
-      Copyright 2001-2008 The Apache Software Foundation
-
-  (ASLv2) Jettison
-    The following NOTICE information applies:
-         Copyright 2006 Envoi Solutions LLC
-
-  (ASLv2) Apache Commons Lang
-    The following NOTICE information applies:
-      Apache Commons Lang
-      Copyright 2001-2011 The Apache Software Foundation
-
-  (ASLv2) Apache log4j
-    The following NOTICE information applies:
-      Apache log4j
-      Copyright 2007 The Apache Software Foundation
-
-  (ASLv2) Apache HttpComponents
-    The following NOTICE information applies:
-      Apache HttpClient
-      Copyright 1999-2015 The Apache Software Foundation
-
-      Apache HttpComponents HttpCore
-      Copyright 2005-2011 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Configuration
-    The following NOTICE information applies:
-      Apache Commons Configuration
-      Copyright 2001-2008 The Apache Software Foundation
-
-  (ASLv2) Apache Jakarta Commons Digester
-    The following NOTICE information applies:
-      Apache Jakarta Commons Digester
-      Copyright 2001-2006 The Apache Software Foundation
-
-  (ASLv2) Apache Commons BeanUtils
-    The following NOTICE information applies:
-      Apache Commons BeanUtils
-      Copyright 2000-2008 The Apache Software Foundation
-
-  (ASLv2) Apache Avro
-    The following NOTICE information applies:
-      Apache Avro
-      Copyright 2009-2017 The Apache Software Foundation
-
-  (ASLv2) Snappy Java
-    The following NOTICE information applies:
-      This product includes software developed by Google
-       Snappy: http://code.google.com/p/snappy/ (New BSD License)
-
-      This product includes software developed by Apache
-       PureJavaCrc32C from apache-hadoop-common http://hadoop.apache.org/
-       (Apache 2.0 license)
-
-      This library contains statically linked libstdc++. This inclusion is allowed by
-      "GCC Runtime Library Exception"
-      http://gcc.gnu.org/onlinedocs/libstdc++/manual/license.html
-
-  (ASLv2) ApacheDS
-    The following NOTICE information applies:
-      ApacheDS
-      Copyright 2003-2013 The Apache Software Foundation
-
-  (ASLv2) Apache ZooKeeper
-    The following NOTICE information applies:
-      Apache ZooKeeper
-      Copyright 2009-2012 The Apache Software Foundation
-
-  (ASLv2) Apache Commons Compress
-    The following NOTICE information applies:
-      Apache Commons Compress
-      Copyright 2002-2017 The Apache Software Foundation
-
-      The files in the package org.apache.commons.compress.archivers.sevenz
-      were derived from the LZMA SDK, version 9.20 (C/ and CPP/7zip/),
-      which has been placed in the public domain:
-
-      "LZMA SDK is placed in the public domain." (http://www.7-zip.org/sdk.html)
-
-  (ASLv2) Apache Commons Daemon
-    The following NOTICE information applies:
-      Apache Commons Daemon
-      Copyright 1999-2013 The Apache Software Foundation
-
-  (ASLv2) The Netty Project
-    The following NOTICE information applies:
-      The Netty Project
-      Copyright 2011 The Netty Project
-
-  (ASLv2) Apache Xerces Java
-    The following NOTICE information applies:
-      Apache Xerces Java
-      Copyright 1999-2007 The Apache Software Foundation
-
-      This product includes software developed at
-      The Apache Software Foundation (http://www.apache.org/).
-
-      Portions of this software were originally based on the following:
-        - software copyright (c) 1999, IBM Corporation., http://www.ibm.com.
-        - software copyright (c) 1999, Sun Microsystems., http://www.sun.com.
-        - voluntary contributions made by Paul Eng on behalf of the
-          Apache Software Foundation that were originally developed at iClick, Inc.,
-          software copyright (c) 1999.
-
-  (ASLv2) Google Guice
-    The following NOTICE information applies:
-      Google Guice - Core Library
-      Copyright 2006-2011 Google, Inc.
-
-      Google Guice - Extensions - Servlet
-      Copyright 2006-2011 Google, Inc.
-
-  (ASLv2) HBase Common
-      The following NOTICE information applies:
-        This product includes portions of the Guava project v14, specifically
-        'hbase-common/src/main/java/org/apache/hadoop/hbase/io/LimitInputStream.java'
-
-        Copyright (C) 2007 The Guava Authors
-
-        Licensed under the Apache License, Version 2.0
-
-    (ASLv2) Apache HTrace Core
-      The following NOTICE information applies:
-        Copyright 2016 The Apache Software Foundation
-
-        Apache HTrace includes an Apache Thrift connector to Zipkin. Zipkin
-        is a distributed tracing system that is Apache 2.0 Licensed.
-        Copyright 2012 Twitter, Inc.
-
-  (ASLv2) Jackson Core ASL
-      The following NOTICE information applies:
-        This product currently only contains code developed by authors
-        of specific components, as identified by the source code files;
-        if such notes are missing files have been created by
-        Tatu Saloranta.
-
-        For additional credits (generally to people who reported problems)
-        see CREDITS file.
-
-    (ASLv2) Jackson Mapper ASL
-      The following NOTICE information applies:
-        This product currently only contains code developed by authors
-        of specific components, as identified by the source code files;
-        if such notes are missing files have been created by
-        Tatu Saloranta.
-
-        For additional credits (generally to people who reported problems)
-        see CREDITS file.
-
-************************
-Common Development and Distribution License 1.1
-************************
-
-The following binary components are provided under the Common Development and Distribution License 1.1. See project link for details.
-
-    (CDDL 1.1) (GPL2 w/ CPE) jersey-client (com.sun.jersey:jersey-client:jar:1.9 - https://jersey.java.net/jersey-client/)
-    (CDDL 1.1) (GPL2 w/ CPE) Old JAXB Runtime (com.sun.xml.bind:jaxb-impl:jar:2.2.3-1 - http://jaxb.java.net/)
-    (CDDL 1.1) (GPL2 w/ CPE) Java Architecture For XML Binding (javax.xml.bind:jaxb-api:jar:2.2.2 - https://jaxb.dev.java.net/)
-
-************************
-Common Development and Distribution License 1.0
-************************
-
-The following binary components are provided under the Common Development and Distribution License 1.0.  See project link for details.
-
-    (CDDL 1.0) JavaServlet(TM) Specification (javax.servlet:servlet-api:jar:2.5 - no url available)
-    (CDDL 1.0) (GPL3) Streaming API For XML (javax.xml.stream:stax-api:jar:1.0-2 - no url provided)
-    (CDDL 1.0) JavaBeans Activation Framework (JAF) (javax.activation:activation:jar:1.1 - http://java.sun.com/products/javabeans/jaf/index.jsp)
-    (CDDL 1.0) JavaServer Pages(TM) API (javax.servlet.jsp:jsp-api:jar:2.1 - http://jsp.java.net)
-
-*****************
-Public Domain
-*****************
-
-The following binary components are provided to the 'Public Domain'.  See project link for details.
-
-    (Public Domain) AOP Alliance 1.0 (http://aopalliance.sourceforge.net/)
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
deleted file mode 100644
index 0a2cc97e57..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/pom.xml
+++ /dev/null
@@ -1,142 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-  http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-hbase_1_1_2-client-service-bundle</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-
-    <artifactId>nifi-hbase_1_1_2-client-service</artifactId>
-    <packaging>jar</packaging>
-    <properties>
-        <hbase.version>1.1.13</hbase.version>
-    </properties>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-hbase-client-service-api</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-lookup-service-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-api</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-utils</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-security-kerberos</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-hadoop-utils</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-distributed-cache-client-service-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-record</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-credentials-service-api</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-kerberos-user-service-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hbase</groupId>
-            <artifactId>hbase-client</artifactId>
-            <version>${hbase.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-codec</groupId>
-                    <artifactId>commons-codec</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.google.code.findbugs</groupId>
-                    <artifactId>jsr305</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>commons-logging</groupId>
-                    <artifactId>commons-logging</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>jdk.tools</groupId>
-                    <artifactId>jdk.tools</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-
-        <dependency>
-            <groupId>commons-codec</groupId>
-            <artifactId>commons-codec</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>com.github.stephenc.findbugs</groupId>
-            <artifactId>findbugs-annotations</artifactId>
-            <version>1.3.9-1</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.commons</groupId>
-            <artifactId>commons-lang3</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>log4j-over-slf4j</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>jcl-over-slf4j</artifactId>
-        </dependency>
-        <!-- test dependencies -->
-        <dependency>
-            <groupId>org.apache.nifi</groupId>
-            <artifactId>nifi-mock</artifactId>
-            <version>2.0.0-SNAPSHOT</version>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/AbstractHBaseLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/AbstractHBaseLookupService.java
deleted file mode 100644
index ded7ee43b5..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/AbstractHBaseLookupService.java
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.annotation.lifecycle.OnDisabled;
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.hbase.scan.Column;
-import org.apache.nifi.hbase.scan.ResultCell;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import static org.apache.nifi.hbase.VisibilityLabelUtils.AUTHORIZATIONS;
-
-public abstract class AbstractHBaseLookupService extends AbstractControllerService {
-    static final PropertyDescriptor HBASE_CLIENT_SERVICE = new PropertyDescriptor.Builder()
-            .name("hbase-client-service")
-            .displayName("HBase Client Service")
-            .description("Specifies the HBase Client Controller Service to use for accessing HBase.")
-            .required(true)
-            .identifiesControllerService(HBaseClientService.class)
-            .build();
-
-    static final PropertyDescriptor TABLE_NAME = new PropertyDescriptor.Builder()
-            .name("hb-lu-table-name")
-            .displayName("Table Name")
-            .description("The name of the table where look ups will be run.")
-            .required(true)
-            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-            .build();
-
-    static final PropertyDescriptor RETURN_COLUMNS = new PropertyDescriptor.Builder()
-            .name("hb-lu-return-cols")
-            .displayName("Columns")
-            .description("A comma-separated list of \\\"<colFamily>:<colQualifier>\\\" pairs to return when scanning. " +
-                    "To return all columns for a given family, leave off the qualifier such as \\\"<colFamily1>,<colFamily2>\\\".")
-            .required(false)
-            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-            .build();
-
-    static final PropertyDescriptor CHARSET = new PropertyDescriptor.Builder()
-            .name("hb-lu-charset")
-            .displayName("Character Set")
-            .description("Specifies the character set used to decode bytes retrieved from HBase.")
-            .required(true)
-            .defaultValue("UTF-8")
-            .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR)
-            .build();
-
-    static final String ROW_KEY_KEY = "rowKey";
-    protected static final Set<String> REQUIRED_KEYS = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(ROW_KEY_KEY)));
-
-    static final List<PropertyDescriptor> PROPERTIES;
-    static {
-        final List<PropertyDescriptor> props = new ArrayList<>();
-        props.add(HBASE_CLIENT_SERVICE);
-        props.add(TABLE_NAME);
-        props.add(AUTHORIZATIONS);
-        props.add(RETURN_COLUMNS);
-        props.add(CHARSET);
-        PROPERTIES = Collections.unmodifiableList(props);
-    }
-
-    protected String tableName;
-    protected List<Column> columns;
-    protected Charset charset;
-    protected HBaseClientService hBaseClientService;
-    protected List<String> authorizations;
-
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return PROPERTIES;
-    }
-
-    @OnEnabled
-    public void onEnabled(final ConfigurationContext context) throws InitializationException, IOException, InterruptedException {
-        this.hBaseClientService = context.getProperty(HBASE_CLIENT_SERVICE).asControllerService(HBaseClientService.class);
-        this.tableName = context.getProperty(TABLE_NAME).getValue();
-        this.columns = getColumns(context.getProperty(RETURN_COLUMNS).getValue());
-        this.charset = Charset.forName(context.getProperty(CHARSET).getValue());
-        this.authorizations = VisibilityLabelUtils.getAuthorizations(context);
-    }
-
-    @OnDisabled
-    public void onDisabled() {
-        this.hBaseClientService = null;
-        this.tableName = null;
-        this.columns = null;
-        this.charset = null;
-    }
-
-    protected List<Column> getColumns(final String columnsValue) {
-        final String[] columns = (columnsValue == null || columnsValue.isEmpty() ? new String[0] : columnsValue.split(","));
-
-        final List<Column> columnsList = new ArrayList<>();
-
-        for (final String column : columns) {
-            if (column.contains(":"))  {
-                final String[] parts = column.trim().split(":");
-                final byte[] cf = parts[0].getBytes(StandardCharsets.UTF_8);
-                final byte[] cq = parts[1].getBytes(StandardCharsets.UTF_8);
-                columnsList.add(new Column(cf, cq));
-            } else {
-                final byte[] cf = column.trim().getBytes(StandardCharsets.UTF_8);
-                columnsList.add(new Column(cf, null));
-            }
-        }
-
-        return columnsList;
-    }
-
-    protected Map<String, Object> scan(byte[] rowKeyBytes) throws IOException {
-        final Map<String, Object> values = new HashMap<>();
-
-        hBaseClientService.scan(tableName, rowKeyBytes, rowKeyBytes, columns, authorizations, (byte[] row, ResultCell[] resultCells) ->  {
-            for (final ResultCell cell : resultCells) {
-                final byte[] qualifier = Arrays.copyOfRange(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierOffset() + cell.getQualifierLength());
-                final byte[] value = Arrays.copyOfRange(cell.getValueArray(), cell.getValueOffset(), cell.getValueOffset() + cell.getValueLength());
-                values.put(new String(qualifier, charset), new String(value, charset));
-            }
-        });
-
-        return values;
-    }
-
-
-}
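
For reference, the "Columns" property of the removed lookup service accepted a
comma-separated list of "<colFamily>:<colQualifier>" entries, and a bare family
name selected every column in that family. A minimal standalone sketch of that
parsing convention follows; the Column record is an illustrative stand-in for
org.apache.nifi.hbase.scan.Column, not the NiFi API itself:

    import java.nio.charset.StandardCharsets;
    import java.util.ArrayList;
    import java.util.List;

    public class ColumnSpecSketch {
        // Illustrative stand-in for org.apache.nifi.hbase.scan.Column;
        // a null qualifier means "return the whole column family".
        record Column(byte[] family, byte[] qualifier) { }

        static List<Column> parse(final String spec) {
            final List<Column> columns = new ArrayList<>();
            if (spec == null || spec.isEmpty()) {
                return columns;
            }
            for (final String entry : spec.split(",")) {
                final String trimmed = entry.trim();
                final int sep = trimmed.indexOf(':');
                if (sep < 0) {
                    // Family only: scan every qualifier in the family.
                    columns.add(new Column(trimmed.getBytes(StandardCharsets.UTF_8), null));
                } else {
                    columns.add(new Column(
                            trimmed.substring(0, sep).getBytes(StandardCharsets.UTF_8),
                            trimmed.substring(sep + 1).getBytes(StandardCharsets.UTF_8)));
                }
            }
            return columns;
        }

        public static void main(final String[] args) {
            // "f1:q1" selects a single cell, "f2" selects the whole family.
            for (final Column c : parse("f1:q1, f2")) {
                System.out.println(new String(c.family(), StandardCharsets.UTF_8) + " -> "
                        + (c.qualifier() == null ? "<all>"
                           : new String(c.qualifier(), StandardCharsets.UTF_8)));
            }
        }
    }
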
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientMapCacheService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientMapCacheService.java
deleted file mode 100644
index a194a631d7..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientMapCacheService.java
+++ /dev/null
@@ -1,316 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.SeeAlso;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
-import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
-import org.apache.nifi.distributed.cache.client.Deserializer;
-import org.apache.nifi.distributed.cache.client.Serializer;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.hbase.put.PutColumn;
-import org.apache.nifi.hbase.put.PutFlowFile;
-import org.apache.nifi.hbase.scan.Column;
-import org.apache.nifi.hbase.scan.ResultCell;
-import org.apache.nifi.hbase.scan.ResultHandler;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-import static org.apache.nifi.hbase.VisibilityLabelUtils.AUTHORIZATIONS;
-
-@Tags({"distributed", "cache", "state", "map", "cluster","hbase"})
-@SeeAlso(classNames = {"org.apache.nifi.hbase.HBase_1_1_2_ClientService"})
-@CapabilityDescription("Provides the ability to use an HBase table as a cache, in place of a DistributedMapCache."
-    + " Uses a HBase_1_1_2_ClientService controller to communicate with HBase.")
-
-public class HBase_1_1_2_ClientMapCacheService extends AbstractControllerService implements AtomicDistributedMapCacheClient<byte[]> {
-
-    static final PropertyDescriptor HBASE_CLIENT_SERVICE = new PropertyDescriptor.Builder()
-        .name("HBase Client Service")
-        .description("Specifies the HBase Client Controller Service to use for accessing HBase.")
-        .required(true)
-        .identifiesControllerService(HBaseClientService.class)
-        .build();
-
-    public static final PropertyDescriptor HBASE_CACHE_TABLE_NAME = new PropertyDescriptor.Builder()
-        .name("HBase Cache Table Name")
-        .description("Name of the table on HBase to use for the cache.")
-        .required(true)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .build();
-
-    public static final PropertyDescriptor HBASE_COLUMN_FAMILY = new PropertyDescriptor.Builder()
-        .name("HBase Column Family")
-        .description("Name of the column family on HBase to use for the cache.")
-        .required(true)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .defaultValue("f")
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .build();
-
-    public static final PropertyDescriptor HBASE_COLUMN_QUALIFIER = new PropertyDescriptor.Builder()
-        .name("HBase Column Qualifier")
-        .description("Name of the column qualifier on HBase to use for the cache")
-        .defaultValue("q")
-        .required(true)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .build();
-    public static final PropertyDescriptor VISIBILITY_EXPRESSION = new PropertyDescriptor.Builder()
-        .name("hbase-cache-visibility-expression")
-        .displayName("Visibility Expression")
-        .description("The default visibility expression to apply to cells when visibility expression support is enabled.")
-        .defaultValue("")
-        .addValidator(Validator.VALID)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .required(false)
-        .build();
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        final List<PropertyDescriptor> descriptors = new ArrayList<>();
-        descriptors.add(HBASE_CACHE_TABLE_NAME);
-        descriptors.add(AUTHORIZATIONS);
-        descriptors.add(VISIBILITY_EXPRESSION);
-        descriptors.add(HBASE_CLIENT_SERVICE);
-        descriptors.add(HBASE_COLUMN_FAMILY);
-        descriptors.add(HBASE_COLUMN_QUALIFIER);
-        return descriptors;
-    }
-
-    // Other threads may call @OnEnabled so these are marked volatile to ensure other class methods read the updated value
-    private volatile String hBaseCacheTableName;
-    private volatile HBaseClientService hBaseClientService;
-
-    private volatile String hBaseColumnFamily;
-    private volatile byte[] hBaseColumnFamilyBytes;
-
-    private volatile String hBaseColumnQualifier;
-    private volatile byte[] hBaseColumnQualifierBytes;
-
-    private List<String> authorizations;
-    private String defaultVisibilityExpression;
-
-    @OnEnabled
-    public void onConfigured(final ConfigurationContext context) throws InitializationException {
-        hBaseClientService   = context.getProperty(HBASE_CLIENT_SERVICE).asControllerService(HBaseClientService.class);
-
-        hBaseCacheTableName  = context.getProperty(HBASE_CACHE_TABLE_NAME).evaluateAttributeExpressions().getValue();
-        hBaseColumnFamily    = context.getProperty(HBASE_COLUMN_FAMILY).evaluateAttributeExpressions().getValue();
-        hBaseColumnQualifier = context.getProperty(HBASE_COLUMN_QUALIFIER).evaluateAttributeExpressions().getValue();
-
-        hBaseColumnFamilyBytes    = hBaseColumnFamily.getBytes(StandardCharsets.UTF_8);
-        hBaseColumnQualifierBytes = hBaseColumnQualifier.getBytes(StandardCharsets.UTF_8);
-
-        authorizations = VisibilityLabelUtils.getAuthorizations(context);
-        if (context.getProperty(VISIBILITY_EXPRESSION).isSet()) {
-            defaultVisibilityExpression = context.getProperty(VISIBILITY_EXPRESSION).evaluateAttributeExpressions().getValue();
-        } else {
-            defaultVisibilityExpression = null;
-        }
-    }
-
-    private <T> byte[] serialize(final T value, final Serializer<T> serializer) throws IOException {
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        serializer.serialize(value, baos);
-        return baos.toByteArray();
-    }
-    private <T> T deserialize(final byte[] value, final Deserializer<T> deserializer) throws IOException {
-        return deserializer.deserialize(value);
-    }
-
-
-    @Override
-    public <K, V> boolean putIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
-
-      final byte[] rowIdBytes = serialize(key, keySerializer);
-      final byte[] valueBytes = serialize(value, valueSerializer);
-      final PutColumn putColumn = new PutColumn(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, valueBytes, defaultVisibilityExpression);
-
-      return hBaseClientService.checkAndPut(hBaseCacheTableName, rowIdBytes, hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, null, putColumn);
-    }
-
-    @Override
-    public <K, V> void put(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer) throws IOException {
-
-        List<PutColumn> putColumns = new ArrayList<PutColumn>(1);
-        final byte[] rowIdBytes = serialize(key, keySerializer);
-        final byte[] valueBytes = serialize(value, valueSerializer);
-
-        final PutColumn putColumn = new PutColumn(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, valueBytes, defaultVisibilityExpression);
-        putColumns.add(putColumn);
-
-        hBaseClientService.put(hBaseCacheTableName, rowIdBytes, putColumns);
-    }
-
-    @Override
-    public <K, V> void putAll(Map<K, V> keysAndValues, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException {
-        List<PutFlowFile> puts = new ArrayList<>();
-        for (Map.Entry<K, V> entry : keysAndValues.entrySet()) {
-            List<PutColumn> putColumns = new ArrayList<PutColumn>(1);
-            final byte[] rowIdBytes = serialize(entry.getKey(), keySerializer);
-            final byte[] valueBytes = serialize(entry.getValue(), valueSerializer);
-
-            final PutColumn putColumn = new PutColumn(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, valueBytes, defaultVisibilityExpression);
-            putColumns.add(putColumn);
-            puts.add(new PutFlowFile(hBaseCacheTableName, rowIdBytes, putColumns, null));
-        }
-        hBaseClientService.put(hBaseCacheTableName, puts);
-    }
-
-    @Override
-    public <K> boolean containsKey(final K key, final Serializer<K> keySerializer) throws IOException {
-      final byte[] rowIdBytes = serialize(key, keySerializer);
-      final HBaseRowHandler handler = new HBaseRowHandler();
-
-      final List<Column> columnsList = new ArrayList<>(1);
-      columnsList.add(new Column(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes));
-
-      hBaseClientService.scan(hBaseCacheTableName, rowIdBytes, rowIdBytes, columnsList, authorizations, handler);
-      return (handler.numRows() > 0);
-    }
-
-    /**
-     *  Note that the implementation of getAndPutIfAbsent is not atomic.
-     *  The putIfAbsent is atomic, but a getAndPutIfAbsent does a get and then a putIfAbsent.
-     *  If there is an existing value and it is updated in between the two steps, then the existing (unmodified) value will be returned.
-     *  If the existing value was deleted between the two steps, getAndPutIfAbsent will correctly return null.
-     *  This should not generally be an issue with cache processors such as DetectDuplicate.
-     *
-     */
-    @Override
-    public <K, V> V getAndPutIfAbsent(final K key, final V value, final Serializer<K> keySerializer, final Serializer<V> valueSerializer, final Deserializer<V> valueDeserializer) throws IOException {
-      // Between the get and the putIfAbsent, the value could be deleted or updated.
-      // Logic below takes care of the deleted case but not the updated case.
-      // This is probably fine since DistributedMapCache and DetectDuplicate expect to receive the original cache value
-      // Could possibly be fixed by implementing AtomicDistributedMapCache (Map Cache protocol version 2)
-      final V got = get(key, keySerializer, valueDeserializer);
-      final boolean wasAbsent = putIfAbsent(key, value, keySerializer, valueSerializer);
-
-      if (! wasAbsent) return got;
-      else return null;
-   }
-
-    @Override
-    public <K, V> V get(final K key, final Serializer<K> keySerializer, final Deserializer<V> valueDeserializer) throws IOException {
-      final byte[] rowIdBytes = serialize(key, keySerializer);
-      final HBaseRowHandler handler = new HBaseRowHandler();
-
-      final List<Column> columnsList = new ArrayList<>(1);
-      columnsList.add(new Column(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes));
-
-      hBaseClientService.scan(hBaseCacheTableName, rowIdBytes, rowIdBytes, columnsList, authorizations, handler);
-      if (handler.numRows() > 1) {
-          throw new IOException("Found multiple rows in HBase for key");
-      } else if(handler.numRows() == 1) {
-          return deserialize( handler.getLastResultBytes(), valueDeserializer);
-      } else {
-          return null;
-      }
-    }
-
-    @Override
-    public <K> boolean remove(final K key, final Serializer<K> keySerializer) throws IOException {
-        final boolean contains = containsKey(key, keySerializer);
-        if (contains) {
-            final byte[] rowIdBytes = serialize(key, keySerializer);
-            final DeleteRequest deleteRequest = new DeleteRequest(rowIdBytes, hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, null);
-            hBaseClientService.deleteCells(hBaseCacheTableName, Collections.singletonList(deleteRequest));
-        }
-        return contains;
-    }
-
-    @Override
-    public long removeByPattern(String regex) throws IOException {
-        throw new IOException("HBase removeByPattern is not implemented");
-    }
-
-    @Override
-    public void close() throws IOException {
-    }
-
-    @Override
-    protected void finalize() throws Throwable {
-    }
-
-    @Override
-    public <K, V> AtomicCacheEntry<K, V, byte[]> fetch(K key, Serializer<K> keySerializer, Deserializer<V> valueDeserializer) throws IOException {
-        final byte[] rowIdBytes = serialize(key, keySerializer);
-        final HBaseRowHandler handler = new HBaseRowHandler();
-
-        final List<Column> columnsList = new ArrayList<>(1);
-        columnsList.add(new Column(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes));
-
-        hBaseClientService.scan(hBaseCacheTableName, rowIdBytes, rowIdBytes, columnsList, authorizations, handler);
-
-        if (handler.numRows() > 1) {
-            throw new IOException("Found multiple rows in HBase for key");
-        } else if (handler.numRows() == 1) {
-            return new AtomicCacheEntry<>(key, deserialize(handler.getLastResultBytes(), valueDeserializer), handler.getLastResultBytes());
-        } else {
-            return null;
-        }
-    }
-
-    @Override
-    public <K, V> boolean replace(AtomicCacheEntry<K, V, byte[]> entry, Serializer<K> keySerializer, Serializer<V> valueSerializer) throws IOException {
-        final byte[] rowIdBytes = serialize(entry.getKey(), keySerializer);
-        final byte[] valueBytes = serialize(entry.getValue(), valueSerializer);
-        final byte[] revision = entry.getRevision().orElse(null);
-        final PutColumn putColumn = new PutColumn(hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, valueBytes, defaultVisibilityExpression);
-
-        // If the current revision is unset then only insert the row if it doesn't already exist.
-        return hBaseClientService.checkAndPut(hBaseCacheTableName, rowIdBytes, hBaseColumnFamilyBytes, hBaseColumnQualifierBytes, revision, putColumn);
-    }
-
-    private class HBaseRowHandler implements ResultHandler {
-        private int numRows = 0;
-        private byte[] lastResultBytes;
-
-        @Override
-        public void handle(byte[] row, ResultCell[] resultCells) {
-            numRows += 1;
-            for( final ResultCell resultCell : resultCells ){
-                lastResultBytes = Arrays.copyOfRange(resultCell.getValueArray(), resultCell.getValueOffset(), resultCell.getValueLength() + resultCell.getValueOffset());
-            }
-        }
-        public int numRows() {
-            return numRows;
-        }
-        public byte[] getLastResultBytes() {
-           return lastResultBytes;
-        }
-    }
-
-}
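
As background on the cache semantics above: putIfAbsent mapped directly onto
HBase's atomic checkAndPut with a null expected value ("write only if the cell
does not exist yet"), while getAndPutIfAbsent composed a plain get with that
call and was therefore not atomic, as the javadoc notes. A minimal sketch
against the HBase 1.x client API; the table, family, and qualifier names here
are illustrative:

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.Connection;
    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;

    public class PutIfAbsentSketch {
        private static final byte[] FAMILY = "f".getBytes(StandardCharsets.UTF_8);
        private static final byte[] QUALIFIER = "q".getBytes(StandardCharsets.UTF_8);

        // Returns true only when the cell did not exist yet: passing null as
        // the expected value turns checkAndPut into an atomic insert-if-absent.
        static boolean putIfAbsent(final Connection connection, final byte[] rowKey,
                                   final byte[] value) throws IOException {
            try (final Table table = connection.getTable(TableName.valueOf("cache-table"))) {
                final Put put = new Put(rowKey);
                put.addColumn(FAMILY, QUALIFIER, value);
                return table.checkAndPut(rowKey, FAMILY, QUALIFIER, null, put);
            }
        }
    }

The replace() method worked the same way, passing the caller's revision bytes
as the expected value so the put only succeeded if the cell was unchanged.
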
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
deleted file mode 100644
index bd029c8dd8..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ClientService.java
+++ /dev/null
@@ -1,946 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.ClusterStatus;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.ServerName;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.ConnectionFactory;
-import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.exceptions.DeserializationException;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.hbase.filter.ParseFilter;
-import org.apache.hadoop.hbase.security.visibility.Authorizations;
-import org.apache.hadoop.hbase.security.visibility.CellVisibility;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.nifi.annotation.behavior.DynamicProperty;
-import org.apache.nifi.annotation.behavior.RequiresInstanceClassLoading;
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.annotation.lifecycle.OnDisabled;
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.ValidationContext;
-import org.apache.nifi.components.ValidationResult;
-import org.apache.nifi.components.resource.ResourceCardinality;
-import org.apache.nifi.components.resource.ResourceType;
-import org.apache.nifi.controller.AbstractControllerService;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.controller.ControllerServiceInitializationContext;
-import org.apache.nifi.expression.ExpressionLanguageScope;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.hadoop.SecurityUtil;
-import org.apache.nifi.hbase.put.PutColumn;
-import org.apache.nifi.hbase.put.PutFlowFile;
-import org.apache.nifi.hbase.scan.Column;
-import org.apache.nifi.hbase.scan.ResultCell;
-import org.apache.nifi.hbase.scan.ResultHandler;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.security.krb.KerberosKeytabUser;
-import org.apache.nifi.security.krb.KerberosPasswordUser;
-import org.apache.nifi.security.krb.KerberosUser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-
-@RequiresInstanceClassLoading
-@Tags({ "hbase", "client"})
-@CapabilityDescription("Implementation of HBaseClientService using the HBase 1.1.x client. Although this service was originally built with the 1.1.2 " +
-        "client and has 1_1_2 in it's name, the client library has since been upgraded to 1.1.13 to leverage bug fixes. This service can be configured " +
-        "by providing a comma-separated list of configuration files, or by specifying values for the other properties. If configuration files " +
-        "are provided, they will be loaded first, and the values of the additional properties will override the values from " +
-        "the configuration files. In addition, any user defined properties on the processor will also be passed to the HBase " +
-        "configuration.")
-@DynamicProperty(name="The name of an HBase configuration property.", value="The value of the given HBase configuration property.",
-        description="These properties will be set on the HBase configuration after loading any provided configuration files.")
-public class HBase_1_1_2_ClientService extends AbstractControllerService implements HBaseClientService {
-    private static final String ALLOW_EXPLICIT_KEYTAB = "NIFI_ALLOW_EXPLICIT_KEYTAB";
-
-    private static final Logger logger = LoggerFactory.getLogger(HBase_1_1_2_ClientService.class);
-
-    static final PropertyDescriptor KERBEROS_CREDENTIALS_SERVICE = new PropertyDescriptor.Builder()
-        .name("kerberos-credentials-service")
-        .displayName("Kerberos Credentials Service")
-        .description("Specifies the Kerberos Credentials Controller Service that should be used for authenticating with Kerberos")
-        .identifiesControllerService(KerberosCredentialsService.class)
-        .required(false)
-        .build();
-
-    static final PropertyDescriptor KERBEROS_USER_SERVICE = new PropertyDescriptor.Builder()
-            .name("kerberos-user-service")
-            .displayName("Kerberos User Service")
-            .description("Specifies the Kerberos User Controller Service that should be used for authenticating with Kerberos")
-            .identifiesControllerService(KerberosUserService.class)
-            .required(false)
-            .build();
-
-    static final PropertyDescriptor HADOOP_CONF_FILES = new PropertyDescriptor.Builder()
-        .name("Hadoop Configuration Files")
-        .description("Comma-separated list of Hadoop Configuration files," +
-            " such as hbase-site.xml and core-site.xml for kerberos, " +
-            "including full paths to the files.")
-        .identifiesExternalResource(ResourceCardinality.MULTIPLE, ResourceType.FILE, ResourceType.DIRECTORY)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .build();
-
-    static final PropertyDescriptor ZOOKEEPER_QUORUM = new PropertyDescriptor.Builder()
-        .name("ZooKeeper Quorum")
-        .description("Comma-separated list of ZooKeeper hosts for HBase. Required if Hadoop Configuration Files are not provided.")
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .build();
-
-    static final PropertyDescriptor ZOOKEEPER_CLIENT_PORT = new PropertyDescriptor.Builder()
-        .name("ZooKeeper Client Port")
-        .description("The port on which ZooKeeper is accepting client connections. Required if Hadoop Configuration Files are not provided.")
-        .addValidator(StandardValidators.PORT_VALIDATOR)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .build();
-
-    static final PropertyDescriptor ZOOKEEPER_ZNODE_PARENT = new PropertyDescriptor.Builder()
-        .name("ZooKeeper ZNode Parent")
-        .description("The ZooKeeper ZNode Parent value for HBase (example: /hbase). Required if Hadoop Configuration Files are not provided.")
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .build();
-
-    static final PropertyDescriptor HBASE_CLIENT_RETRIES = new PropertyDescriptor.Builder()
-        .name("HBase Client Retries")
-        .description("The number of times the HBase client will retry connecting. Required if Hadoop Configuration Files are not provided.")
-        .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
-        .defaultValue("1")
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .build();
-
-    static final PropertyDescriptor PHOENIX_CLIENT_JAR_LOCATION = new PropertyDescriptor.Builder()
-        .name("Phoenix Client JAR Location")
-        .description("The full path to the Phoenix client JAR. Required if Phoenix is installed on top of HBase.")
-        .identifiesExternalResource(ResourceCardinality.SINGLE, ResourceType.FILE, ResourceType.DIRECTORY, ResourceType.URL)
-        .expressionLanguageSupported(ExpressionLanguageScope.VARIABLE_REGISTRY)
-        .dynamicallyModifiesClasspath(true)
-        .build();
-
-    static final String HBASE_CONF_ZK_QUORUM = "hbase.zookeeper.quorum";
-    static final String HBASE_CONF_ZK_PORT = "hbase.zookeeper.property.clientPort";
-    static final String HBASE_CONF_ZNODE_PARENT = "zookeeper.znode.parent";
-    static final String HBASE_CONF_CLIENT_RETRIES = "hbase.client.retries.number";
-
-    private volatile Connection connection;
-    private volatile UserGroupInformation ugi;
-    private final AtomicReference<KerberosUser> kerberosUserReference = new AtomicReference<>();
-    private volatile String masterAddress;
-
-    private List<PropertyDescriptor> properties;
-    private KerberosProperties kerberosProperties;
-    private volatile File kerberosConfigFile = null;
-
-    // Holder of cached Configuration information so validation does not reload the same config over and over
-    private final AtomicReference<ValidationResources> validationResourceHolder = new AtomicReference<>();
-
-    protected Connection getConnection() {
-        return connection;
-    }
-
-    protected void setConnection(Connection connection) {
-        this.connection = connection;
-    }
-
-    @Override
-    protected void init(ControllerServiceInitializationContext config) throws InitializationException {
-        kerberosConfigFile = config.getKerberosConfigurationFile();
-        kerberosProperties = getKerberosProperties(kerberosConfigFile);
-
-        List<PropertyDescriptor> props = new ArrayList<>();
-        props.add(HADOOP_CONF_FILES);
-        props.add(KERBEROS_USER_SERVICE);
-        props.add(KERBEROS_CREDENTIALS_SERVICE);
-        props.add(kerberosProperties.getKerberosPrincipal());
-        props.add(kerberosProperties.getKerberosKeytab());
-        props.add(kerberosProperties.getKerberosPassword());
-        props.add(ZOOKEEPER_QUORUM);
-        props.add(ZOOKEEPER_CLIENT_PORT);
-        props.add(ZOOKEEPER_ZNODE_PARENT);
-        props.add(HBASE_CLIENT_RETRIES);
-        props.add(PHOENIX_CLIENT_JAR_LOCATION);
-        props.addAll(getAdditionalProperties());
-        this.properties = Collections.unmodifiableList(props);
-    }
-
-    protected List<PropertyDescriptor> getAdditionalProperties() {
-        return new ArrayList<>();
-    }
-
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return new KerberosProperties(kerberosConfigFile);
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return properties;
-    }
-
-    @Override
-    protected PropertyDescriptor getSupportedDynamicPropertyDescriptor(String propertyDescriptorName) {
-        return new PropertyDescriptor.Builder()
-                .description("Specifies the value for '" + propertyDescriptorName + "' in the HBase configuration.")
-                .name(propertyDescriptorName)
-                .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-                .dynamic(true)
-                .build();
-    }
-
-    @Override
-    protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
-        boolean confFileProvided = validationContext.getProperty(HADOOP_CONF_FILES).isSet();
-        boolean zkQuorumProvided = validationContext.getProperty(ZOOKEEPER_QUORUM).isSet();
-        boolean zkPortProvided = validationContext.getProperty(ZOOKEEPER_CLIENT_PORT).isSet();
-        boolean znodeParentProvided = validationContext.getProperty(ZOOKEEPER_ZNODE_PARENT).isSet();
-        boolean retriesProvided = validationContext.getProperty(HBASE_CLIENT_RETRIES).isSet();
-
-        final String explicitPrincipal = validationContext.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        final String explicitKeytab = validationContext.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        final String explicitPassword = validationContext.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-        final KerberosCredentialsService credentialsService = validationContext.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        final KerberosUserService kerberosUserService = validationContext.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-
-        final String resolvedPrincipal;
-        final String resolvedKeytab;
-        if (credentialsService == null) {
-            resolvedPrincipal = explicitPrincipal;
-            resolvedKeytab = explicitKeytab;
-        } else {
-            resolvedPrincipal = credentialsService.getPrincipal();
-            resolvedKeytab = credentialsService.getKeytab();
-        }
-
-        final List<ValidationResult> problems = new ArrayList<>();
-
-        if (!confFileProvided && (!zkQuorumProvided || !zkPortProvided || !znodeParentProvided || !retriesProvided)) {
-            problems.add(new ValidationResult.Builder()
-                    .valid(false)
-                    .subject(this.getClass().getSimpleName())
-                    .explanation("ZooKeeper Quorum, ZooKeeper Client Port, ZooKeeper ZNode Parent, and HBase Client Retries are required " +
-                            "when Hadoop Configuration Files are not provided.")
-                    .build());
-        }
-
-        if (confFileProvided) {
-            final String configFiles = validationContext.getProperty(HADOOP_CONF_FILES).evaluateAttributeExpressions().getValue();
-            ValidationResources resources = validationResourceHolder.get();
-
-            // if no resources in the holder, or if the holder has different resources loaded,
-            // then load the Configuration and set the new resources in the holder
-            if (resources == null || !configFiles.equals(resources.getConfigResources())) {
-                getLogger().debug("Reloading validation resources");
-                resources = new ValidationResources(configFiles, getConfigurationFromFiles(configFiles));
-                validationResourceHolder.set(resources);
-            }
-
-            final Configuration hbaseConfig = resources.getConfiguration();
-            if (kerberosUserService == null) {
-                problems.addAll(KerberosProperties.validatePrincipalWithKeytabOrPassword(getClass().getSimpleName(), hbaseConfig,
-                        resolvedPrincipal, resolvedKeytab, explicitPassword, getLogger()));
-            } else {
-                final boolean securityEnabled = SecurityUtil.isSecurityEnabled(hbaseConfig);
-                if (!securityEnabled) {
-                    getLogger().warn("Hadoop Configuration does not have security enabled, KerberosUserService will be ignored");
-                }
-            }
-        }
-
-        if (credentialsService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("Cannot specify a Kerberos Credentials Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                .build());
-        }
-
-        if (kerberosUserService != null && (explicitPrincipal != null || explicitKeytab != null || explicitPassword != null)) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos User")
-                    .valid(false)
-                    .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Principal, Kerberos Keytab, or Kerberos Password")
-                    .build());
-        }
-
-        if (kerberosUserService != null && credentialsService != null) {
-            problems.add(new ValidationResult.Builder()
-                    .subject("Kerberos User")
-                    .valid(false)
-                    .explanation("Cannot specify a Kerberos User Service while also specifying a Kerberos Credentials Service")
-                    .build());
-        }
-
-        if (!isAllowExplicitKeytab() && explicitKeytab != null) {
-            problems.add(new ValidationResult.Builder()
-                .subject("Kerberos Credentials")
-                .valid(false)
-                .explanation("The '" + ALLOW_EXPLICIT_KEYTAB + "' system environment variable is configured to forbid explicitly configuring Kerberos Keytab in processors. "
-                    + "The Kerberos Credentials Service should be used instead of setting the Kerberos Keytab or Kerberos Principal property.")
-                .build());
-        }
-
-        return problems;
-    }
-
-    /**
-     * As of Apache NiFi 1.5.0, due to changes made to
-     * {@link SecurityUtil#loginKerberos(Configuration, String, String)}, which is used by this
-     * class to authenticate a principal with Kerberos, HBase controller services no longer
-     * attempt relogins explicitly.  For more information, please read the documentation for
-     * {@link SecurityUtil#loginKerberos(Configuration, String, String)}.
-     * <p/>
-     * In previous versions of NiFi, a {@link org.apache.nifi.hadoop.KerberosTicketRenewer} was started
-     * when the HBase controller service was enabled.  The use of a separate thread to explicitly relogin could cause
-     * race conditions with the implicit relogin attempts made by hadoop/HBase code on a thread that references the same
-     * {@link UserGroupInformation} instance.  One of these threads could leave the
-     * {@link javax.security.auth.Subject} in {@link UserGroupInformation} to be cleared or in an unexpected state
-     * while the other thread is attempting to use the {@link javax.security.auth.Subject}, resulting in failed
-     * authentication attempts that would leave the HBase controller service in an unrecoverable state.
-     *
-     * @see SecurityUtil#loginKerberos(Configuration, String, String)
-     */
-    @OnEnabled
-    public void onEnabled(final ConfigurationContext context) throws InitializationException, IOException, InterruptedException {
-        this.connection = createConnection(context);
-
-        // connection check
-        if (this.connection != null) {
-            final Admin admin = this.connection.getAdmin();
-            if (admin != null) {
-                admin.listTableNames();
-
-                final ClusterStatus clusterStatus = admin.getClusterStatus();
-                if (clusterStatus != null) {
-                    final ServerName master = clusterStatus.getMaster();
-                    if (master != null) {
-                        masterAddress = master.getHostAndPort();
-                    } else {
-                        masterAddress = null;
-                    }
-                }
-            }
-        }
-    }
-
-    protected Connection createConnection(final ConfigurationContext context) throws IOException, InterruptedException {
-        final String configFiles = context.getProperty(HADOOP_CONF_FILES).evaluateAttributeExpressions().getValue();
-        final Configuration hbaseConfig = getConfigurationFromFiles(configFiles);
-
-        // override with any properties that are provided
-        if (context.getProperty(ZOOKEEPER_QUORUM).isSet()) {
-            hbaseConfig.set(HBASE_CONF_ZK_QUORUM, context.getProperty(ZOOKEEPER_QUORUM).evaluateAttributeExpressions().getValue());
-        }
-        if (context.getProperty(ZOOKEEPER_CLIENT_PORT).isSet()) {
-            hbaseConfig.set(HBASE_CONF_ZK_PORT, context.getProperty(ZOOKEEPER_CLIENT_PORT).evaluateAttributeExpressions().getValue());
-        }
-        if (context.getProperty(ZOOKEEPER_ZNODE_PARENT).isSet()) {
-            hbaseConfig.set(HBASE_CONF_ZNODE_PARENT, context.getProperty(ZOOKEEPER_ZNODE_PARENT).evaluateAttributeExpressions().getValue());
-        }
-        if (context.getProperty(HBASE_CLIENT_RETRIES).isSet()) {
-            hbaseConfig.set(HBASE_CONF_CLIENT_RETRIES, context.getProperty(HBASE_CLIENT_RETRIES).evaluateAttributeExpressions().getValue());
-        }
-
-        // add any dynamic properties to the HBase configuration
-        for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
-            final PropertyDescriptor descriptor = entry.getKey();
-            if (descriptor.isDynamic()) {
-                hbaseConfig.set(descriptor.getName(), entry.getValue());
-            }
-        }
-
-        if (SecurityUtil.isSecurityEnabled(hbaseConfig)) {
-            getLogger().debug("HBase Security Enabled, creating KerberosUser");
-            final KerberosUser kerberosUser = createKerberosUser(context);
-            ugi = SecurityUtil.getUgiForKerberosUser(hbaseConfig, kerberosUser);
-            kerberosUserReference.set(kerberosUser);
-            getLogger().info("Successfully logged in as principal {}", kerberosUser.getPrincipal());
-            return getUgi().doAs((PrivilegedExceptionAction<Connection>)() ->  ConnectionFactory.createConnection(hbaseConfig));
-        } else {
-            getLogger().debug("Simple Authentication");
-            return ConnectionFactory.createConnection(hbaseConfig);
-        }
-    }
-
-    protected KerberosUser createKerberosUser(final ConfigurationContext context) {
-        // Check Kerberos User Service first, if present then get the KerberosUser from the service
-        // The customValidate method ensures that KerberosUserService can't be set at the same time as the credentials service or explicit properties
-        final KerberosUserService kerberosUserService = context.getProperty(KERBEROS_USER_SERVICE).asControllerService(KerberosUserService.class);
-        if (kerberosUserService != null) {
-            return kerberosUserService.createKerberosUser();
-        }
-
-        String principal = context.getProperty(kerberosProperties.getKerberosPrincipal()).evaluateAttributeExpressions().getValue();
-        String keyTab = context.getProperty(kerberosProperties.getKerberosKeytab()).evaluateAttributeExpressions().getValue();
-        String password = context.getProperty(kerberosProperties.getKerberosPassword()).getValue();
-
-        // If the Kerberos Credentials Service is specified, we need to use its configuration, not the explicit properties for principal/keytab.
-        // The customValidate method ensures that only one can be set, so we know that the principal & keytab above are null.
-        final KerberosCredentialsService credentialsService = context.getProperty(KERBEROS_CREDENTIALS_SERVICE).asControllerService(KerberosCredentialsService.class);
-        if (credentialsService != null) {
-            principal = credentialsService.getPrincipal();
-            keyTab = credentialsService.getKeytab();
-        }
-
-        if (keyTab != null) {
-            return new KerberosKeytabUser(principal, keyTab);
-        } else if (password != null) {
-            return new KerberosPasswordUser(principal, password);
-        } else {
-            throw new IllegalStateException("Unable to authenticate with Kerberos, no keytab or password was provided");
-        }
-    }
-
-    protected Configuration getConfigurationFromFiles(final String configFiles) {
-        final Configuration hbaseConfig = HBaseConfiguration.create();
-        if (StringUtils.isNotBlank(configFiles)) {
-            for (final String configFile : configFiles.split(",")) {
-                hbaseConfig.addResource(new Path(configFile.trim()));
-            }
-        }
-        return hbaseConfig;
-    }
-
-    @OnDisabled
-    public void shutdown() {
-        if (connection != null) {
-            try {
-                connection.close();
-            } catch (final IOException ioe) {
-                getLogger().warn("Failed to close connection to HBase due to {}", new Object[]{ioe});
-            }
-        }
-
-        final KerberosUser kerberosUser = kerberosUserReference.get();
-        if (kerberosUser != null) {
-            try {
-                kerberosUser.logout();
-            } catch (final Exception e) {
-                getLogger().warn("Error logging out KerberosUser: {}", e.getMessage(), e);
-            } finally {
-                ugi = null;
-                kerberosUserReference.set(null);
-            }
-        }
-    }
-
-    protected List<Put> buildPuts(byte[] rowKey, List<PutColumn> columns) {
-        List<Put> retVal = new ArrayList<>();
-
-        try {
-            Put put = null;
-
-            for (final PutColumn column : columns) {
-                if (put == null || (put.getCellVisibility() == null && column.getVisibility() != null)
-                        || (put.getCellVisibility() != null
-                            && !put.getCellVisibility().getExpression().equals(column.getVisibility()))) {
-                    put = new Put(rowKey);
-
-                    if (column.getVisibility() != null) {
-                        put.setCellVisibility(new CellVisibility(column.getVisibility()));
-                    }
-                    retVal.add(put);
-                }
-
-                if (column.getTimestamp() != null) {
-                    put.addColumn(
-                            column.getColumnFamily(),
-                            column.getColumnQualifier(),
-                            column.getTimestamp(),
-                            column.getBuffer());
-                } else {
-                    put.addColumn(
-                            column.getColumnFamily(),
-                            column.getColumnQualifier(),
-                            column.getBuffer());
-                }
-            }
-        } catch (DeserializationException de) {
-            getLogger().error("Error writing cell visibility statement.", de);
-            throw new RuntimeException(de);
-        }
-
-        return retVal;
-    }
-
-    @Override
-    public void put(final String tableName, final Collection<PutFlowFile> puts) throws IOException {
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
-                // Create one Put per row.
-                final Map<String, List<PutColumn>> sorted = new HashMap<>();
-                final List<Put> newPuts = new ArrayList<>();
-
-                for (final PutFlowFile putFlowFile : puts) {
-                    final String rowKeyString = new String(putFlowFile.getRow(), StandardCharsets.UTF_8);
-                    List<PutColumn> columns = sorted.get(rowKeyString);
-                    if (columns == null) {
-                        columns = new ArrayList<>();
-                        sorted.put(rowKeyString, columns);
-                    }
-
-                    columns.addAll(putFlowFile.getColumns());
-                }
-
-                for (final Map.Entry<String, List<PutColumn>> entry : sorted.entrySet()) {
-                    newPuts.addAll(buildPuts(entry.getKey().getBytes(StandardCharsets.UTF_8), entry.getValue()));
-                }
-
-                table.put(newPuts);
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public void put(final String tableName, final byte[] rowId, final Collection<PutColumn> columns) throws IOException {
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
-                table.put(buildPuts(rowId, new ArrayList<>(columns)));
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public boolean checkAndPut(final String tableName, final byte[] rowId, final byte[] family, final byte[] qualifier, final byte[] value, final PutColumn column) throws IOException {
-        return SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
-                Put put = new Put(rowId);
-                put.addColumn(
-                    column.getColumnFamily(),
-                    column.getColumnQualifier(),
-                    column.getBuffer());
-                return table.checkAndPut(rowId, family, qualifier, value, put);
-            }
-        });
-    }
-
-    @Override
-    public void delete(final String tableName, final byte[] rowId) throws IOException {
-        delete(tableName, rowId, null);
-    }
-
-    @Override
-    public void delete(String tableName, byte[] rowId, String visibilityLabel) throws IOException {
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
-                Delete delete = new Delete(rowId);
-                if (!StringUtils.isEmpty(visibilityLabel)) {
-                    delete.setCellVisibility(new CellVisibility(visibilityLabel));
-                }
-                table.delete(delete);
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public void delete(String tableName, List<byte[]> rowIds) throws IOException {
-        delete(tableName, rowIds, null);
-    }
-
-    @Override
-    public void deleteCells(String tableName, List<DeleteRequest> deletes) throws IOException {
-        List<Delete> deleteRequests = new ArrayList<>();
-        for (int index = 0; index < deletes.size(); index++) {
-            DeleteRequest req = deletes.get(index);
-            Delete delete = new Delete(req.getRowId())
-                .addColumn(req.getColumnFamily(), req.getColumnQualifier());
-            if (!StringUtils.isEmpty(req.getVisibilityLabel())) {
-                delete.setCellVisibility(new CellVisibility(req.getVisibilityLabel()));
-            }
-            deleteRequests.add(delete);
-        }
-        batchDelete(tableName, deleteRequests);
-    }
-
-    @Override
-    public void delete(String tableName, List<byte[]> rowIds, String visibilityLabel) throws IOException {
-        List<Delete> deletes = new ArrayList<>();
-        for (int index = 0; index < rowIds.size(); index++) {
-            Delete delete = new Delete(rowIds.get(index));
-            if (!StringUtils.isBlank(visibilityLabel)) {
-                delete.setCellVisibility(new CellVisibility(visibilityLabel));
-            }
-            deletes.add(delete);
-        }
-        batchDelete(tableName, deletes);
-    }
-
-    private void batchDelete(String tableName, List<Delete> deletes) throws IOException {
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName))) {
-                table.delete(deletes);
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public void scan(final String tableName, final Collection<Column> columns, final String filterExpression, final long minTime, final ResultHandler handler)
-            throws IOException {
-        scan(tableName, columns, filterExpression, minTime, null, handler);
-    }
-
-    @Override
-    public void scan(String tableName, Collection<Column> columns, String filterExpression, long minTime, List<String> visibilityLabels, ResultHandler handler) throws IOException {
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            Filter filter = null;
-            if (!StringUtils.isBlank(filterExpression)) {
-                ParseFilter parseFilter = new ParseFilter();
-                filter = parseFilter.parseFilterString(filterExpression);
-            }
-
-            try (final Table table = connection.getTable(TableName.valueOf(tableName));
-                 final ResultScanner scanner = getResults(table, columns, filter, minTime, visibilityLabels)) {
-
-                for (final Result result : scanner) {
-                    final byte[] rowKey = result.getRow();
-                    final Cell[] cells = result.rawCells();
-
-                    if (cells == null) {
-                        continue;
-                    }
-
-                    // convert HBase cells to NiFi cells
-                    final ResultCell[] resultCells = new ResultCell[cells.length];
-                    for (int i = 0; i < cells.length; i++) {
-                        final Cell cell = cells[i];
-                        final ResultCell resultCell = getResultCell(cell);
-                        resultCells[i] = resultCell;
-                    }
-
-                    // delegate to the handler
-                    handler.handle(rowKey, resultCells);
-                }
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public void scan(final String tableName, final byte[] startRow, final byte[] endRow, final Collection<Column> columns, List<String> authorizations, final ResultHandler handler)
-            throws IOException {
-
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName));
-                 final ResultScanner scanner = getResults(table, startRow, endRow, columns, authorizations)) {
-
-                for (final Result result : scanner) {
-                    final byte[] rowKey = result.getRow();
-                    final Cell[] cells = result.rawCells();
-
-                    if (cells == null) {
-                        continue;
-                    }
-
-                    // convert HBase cells to NiFi cells
-                    final ResultCell[] resultCells = new ResultCell[cells.length];
-                    for (int i = 0; i < cells.length; i++) {
-                        final Cell cell = cells[i];
-                        final ResultCell resultCell = getResultCell(cell);
-                        resultCells[i] = resultCell;
-                    }
-
-                    // delegate to the handler
-                    handler.handle(rowKey, resultCells);
-                }
-            }
-            return null;
-        });
-    }
-
-    @Override
-    public void scan(final String tableName, final String startRow, final String endRow, String filterExpression,
-            final Long timerangeMin, final Long timerangeMax, final Integer limitRows, final Boolean isReversed,
-            final Boolean blockCache, final Collection<Column> columns, List<String> visibilityLabels, final ResultHandler handler) throws IOException {
-
-        SecurityUtil.callWithUgi(getUgi(), () -> {
-            try (final Table table = connection.getTable(TableName.valueOf(tableName));
-                 final ResultScanner scanner = getResults(table, startRow, endRow, filterExpression, timerangeMin,
-                     timerangeMax, limitRows, isReversed, blockCache, columns, visibilityLabels)) {
-
-                int cnt = 0;
-                final int lim = limitRows != null ? limitRows : 0;
-                for (final Result result : scanner) {
-
-                    if (lim > 0 && ++cnt > lim) {
-                        break;
-                    }
-
-                    final byte[] rowKey = result.getRow();
-                    final Cell[] cells = result.rawCells();
-
-                    if (cells == null) {
-                        continue;
-                    }
-
-                    // convert HBase cells to NiFi cells
-                    final ResultCell[] resultCells = new ResultCell[cells.length];
-                    for (int i = 0; i < cells.length; i++) {
-                        final Cell cell = cells[i];
-                        final ResultCell resultCell = getResultCell(cell);
-                        resultCells[i] = resultCell;
-                    }
-
-                    // delegate to the handler
-                    handler.handle(rowKey, resultCells);
-                }
-            }
-            return null;
-        });
-    }
-
-    // protected and extracted into separate method for testing
-    protected ResultScanner getResults(final Table table, final String startRow, final String endRow, final String filterExpression, final Long timerangeMin, final Long timerangeMax,
-            final Integer limitRows, final Boolean isReversed, final Boolean blockCache, final Collection<Column> columns, List<String> authorizations)  throws IOException {
-        final Scan scan = new Scan();
-        if (!StringUtils.isBlank(startRow)) {
-            scan.setStartRow(startRow.getBytes(StandardCharsets.UTF_8));
-        }
-        if (!StringUtils.isBlank(endRow)) {
-            scan.setStopRow(endRow.getBytes(StandardCharsets.UTF_8));
-        }
-
-        if (authorizations != null && authorizations.size() > 0) {
-            scan.setAuthorizations(new Authorizations(authorizations));
-        }
-
-        Filter filter = null;
-        if (columns != null) {
-            for (Column col : columns) {
-                if (col.getQualifier() == null) {
-                    scan.addFamily(col.getFamily());
-                } else {
-                    scan.addColumn(col.getFamily(), col.getQualifier());
-                }
-            }
-        }
-        if (!StringUtils.isBlank(filterExpression)) {
-            ParseFilter parseFilter = new ParseFilter();
-            filter = parseFilter.parseFilterString(filterExpression);
-        }
-        if (filter != null) {
-            scan.setFilter(filter);
-        }
-
-        if (timerangeMin != null && timerangeMax != null) {
-            scan.setTimeRange(timerangeMin, timerangeMax);
-        }
-
-        // Reserved for HBase 2.x or later:
-        //if (limitRows != null && limitRows > 0) {
-        //    scan.setLimit(limitRows);
-        //}
-
-        if (isReversed != null) {
-            scan.setReversed(isReversed);
-        }
-
-        scan.setCacheBlocks(blockCache);
-
-        return table.getScanner(scan);
-    }
-
-    // protected and extracted into separate method for testing
-    protected ResultScanner getResults(final Table table, final byte[] startRow, final byte[] endRow, final Collection<Column> columns, List<String> authorizations) throws IOException {
-        final Scan scan = new Scan();
-        scan.setStartRow(startRow);
-        scan.setStopRow(endRow);
-
-        if (authorizations != null && authorizations.size() > 0) {
-            scan.setAuthorizations(new Authorizations(authorizations));
-        }
-
-        if (columns != null && columns.size() > 0) {
-            for (Column col : columns) {
-                if (col.getQualifier() == null) {
-                    scan.addFamily(col.getFamily());
-                } else {
-                    scan.addColumn(col.getFamily(), col.getQualifier());
-                }
-            }
-        }
-
-        return table.getScanner(scan);
-    }
-
-    // protected and extracted into separate method for testing
-    protected ResultScanner getResults(final Table table, final Collection<Column> columns, final Filter filter, final long minTime, List<String> authorizations) throws IOException {
-        // Create a new scan. We will set the min timerange as the latest timestamp that
-        // we have seen so far. The minimum timestamp is inclusive, so we will get duplicates.
-        // We will record any cells that have the latest timestamp, so that when we scan again,
-        // we know to throw away those duplicates.
-        final Scan scan = new Scan();
-        scan.setTimeRange(minTime, Long.MAX_VALUE);
-
-        if (authorizations != null && authorizations.size() > 0) {
-            scan.setAuthorizations(new Authorizations(authorizations));
-        }
-
-        if (filter != null) {
-            scan.setFilter(filter);
-        }
-
-        if (columns != null) {
-            for (Column col : columns) {
-                if (col.getQualifier() == null) {
-                    scan.addFamily(col.getFamily());
-                } else {
-                    scan.addColumn(col.getFamily(), col.getQualifier());
-                }
-            }
-        }
-
-        return table.getScanner(scan);
-    }
-
-    private ResultCell getResultCell(Cell cell) {
-        final ResultCell resultCell = new ResultCell();
-        resultCell.setRowArray(cell.getRowArray());
-        resultCell.setRowOffset(cell.getRowOffset());
-        resultCell.setRowLength(cell.getRowLength());
-
-        resultCell.setFamilyArray(cell.getFamilyArray());
-        resultCell.setFamilyOffset(cell.getFamilyOffset());
-        resultCell.setFamilyLength(cell.getFamilyLength());
-
-        resultCell.setQualifierArray(cell.getQualifierArray());
-        resultCell.setQualifierOffset(cell.getQualifierOffset());
-        resultCell.setQualifierLength(cell.getQualifierLength());
-
-        resultCell.setTimestamp(cell.getTimestamp());
-        resultCell.setTypeByte(cell.getTypeByte());
-        resultCell.setSequenceId(cell.getSequenceId());
-
-        resultCell.setValueArray(cell.getValueArray());
-        resultCell.setValueOffset(cell.getValueOffset());
-        resultCell.setValueLength(cell.getValueLength());
-
-        resultCell.setTagsArray(cell.getTagsArray());
-        resultCell.setTagsOffset(cell.getTagsOffset());
-        resultCell.setTagsLength(cell.getTagsLength());
-        return resultCell;
-    }
-
-    protected static class ValidationResources {
-        private final String configResources;
-        private final Configuration configuration;
-
-        public ValidationResources(String configResources, Configuration configuration) {
-            this.configResources = configResources;
-            this.configuration = configuration;
-        }
-
-        public String getConfigResources() {
-            return configResources;
-        }
-
-        public Configuration getConfiguration() {
-            return configuration;
-        }
-    }
-
-    @Override
-    public byte[] toBytes(boolean b) {
-        return Bytes.toBytes(b);
-    }
-
-    @Override
-    public byte[] toBytes(float f) {
-        return Bytes.toBytes(f);
-    }
-
-    @Override
-    public byte[] toBytes(int i) {
-        return Bytes.toBytes(i);
-    }
-
-    @Override
-    public byte[] toBytes(long l) {
-        return Bytes.toBytes(l);
-    }
-
-    @Override
-    public byte[] toBytes(double d) {
-        return Bytes.toBytes(d);
-    }
-
-    @Override
-    public byte[] toBytes(String s) {
-        return Bytes.toBytes(s);
-    }
-
-    @Override
-    public byte[] toBytesBinary(String s) {
-        return Bytes.toBytesBinary(s);
-    }
-
-    @Override
-    public String toTransitUri(String tableName, String rowKey) {
-        if (connection == null) {
-            logger.warn("Connection has not been established, could not create a transit URI. Returning null.");
-            return null;
-        }
-        final String transitUriMasterAddress = StringUtils.isEmpty(masterAddress) ? "unknown" : masterAddress;
-        return "hbase://" + transitUriMasterAddress + "/" + tableName + (StringUtils.isEmpty(rowKey) ? "" : "/" + rowKey);
-    }
-
-    /*
-     * Overridable by subclasses in the same package, mainly intended for testing purposes to allow verification without having to set environment variables.
-     */
-    boolean isAllowExplicitKeytab() {
-        return Boolean.parseBoolean(System.getenv(ALLOW_EXPLICIT_KEYTAB));
-    }
-
-    UserGroupInformation getUgi() {
-        getLogger().trace("getting UGI instance");
-        // if there is a KerberosUser associated with UGI, call checkTGTAndRelogin to ensure UGI's underlying Subject has a valid ticket
-        SecurityUtil.checkTGTAndRelogin(getLogger(), kerberosUserReference.get());
-        return ugi;
-    }
-
-}
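
The comment above the minTime-based getResults(...) describes the incremental-scan strategy: each scan starts at the latest timestamp seen so far, and because the lower bound is inclusive, cells at exactly that timestamp come back again. A minimal sketch of that bookkeeping, independent of the HBase API (class and method names here are illustrative, not part of the removed service):

    import java.util.HashSet;
    import java.util.Set;

    class IncrementalScanSketch {
        private long previousMaxTimestamp = 0L;
        private Set<String> rowsAtPreviousMax = new HashSet<>();

        private long currentMaxTimestamp = 0L;
        private Set<String> rowsAtCurrentMax = new HashSet<>();

        // true if this cell was not already delivered by the previous scan
        boolean accept(final String rowKey, final long timestamp) {
            // track the high-water mark of the current scan for the next round
            if (timestamp > currentMaxTimestamp) {
                currentMaxTimestamp = timestamp;
                rowsAtCurrentMax = new HashSet<>();
            }
            if (timestamp == currentMaxTimestamp) {
                rowsAtCurrentMax.add(rowKey);
            }
            // the range [previousMaxTimestamp, Long.MAX_VALUE) is inclusive at
            // the lower end, so cells at exactly previousMaxTimestamp may be replays
            return timestamp != previousMaxTimestamp || !rowsAtPreviousMax.contains(rowKey);
        }

        // call when a scan completes; returns the inclusive minTime for the next scan
        long finishScan() {
            previousMaxTimestamp = currentMaxTimestamp;
            rowsAtPreviousMax = rowsAtCurrentMax;
            rowsAtCurrentMax = new HashSet<>();
            return previousMaxTimestamp;
        }
    }
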
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ListLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ListLookupService.java
deleted file mode 100644
index 694885470b..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_ListLookupService.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.annotation.lifecycle.OnEnabled;
-import org.apache.nifi.components.AllowableValue;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.components.Validator;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.lookup.LookupFailureException;
-import org.apache.nifi.lookup.LookupService;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.util.StringUtils;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-public class HBase_1_1_2_ListLookupService extends AbstractHBaseLookupService implements LookupService<List> {
-    public static final AllowableValue KEY_LIST = new AllowableValue("key_list", "List of keys",
-            "Return the row as a list of the column qualifiers (keys)");
-    public static final AllowableValue VALUE_LIST = new AllowableValue("value_list", "List of values",
-            "Return the row as a list of the values associated with each column qualifier.");
-    public static final PropertyDescriptor RETURN_TYPE = new PropertyDescriptor.Builder()
-        .name("hb-lu-list-return-type")
-        .displayName("Return Type")
-        .description("Choose whether to return a list of the keys or a list of the values for the supplied row key.")
-        .allowableValues(KEY_LIST, VALUE_LIST)
-        .defaultValue(KEY_LIST.getValue())
-        .required(true)
-        .addValidator(Validator.VALID)
-        .build();
-
-    public static final List<PropertyDescriptor> _PROPERTIES;
-    static {
-        List<PropertyDescriptor> _temp = new ArrayList<>();
-        _temp.addAll(PROPERTIES);
-        _temp.add(RETURN_TYPE);
-        _PROPERTIES = Collections.unmodifiableList(_temp);
-    }
-
-    @Override
-    public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        return _PROPERTIES;
-    }
-
-    @Override
-    public Optional<List> lookup(Map<String, Object> coordinates) throws LookupFailureException {
-        if (coordinates.get(ROW_KEY_KEY) == null) {
-            return Optional.empty();
-        }
-
-        final String rowKey = coordinates.get(ROW_KEY_KEY).toString();
-        if (StringUtils.isBlank(rowKey)) {
-            return Optional.empty();
-        }
-
-        final byte[] rowKeyBytes = rowKey.getBytes(StandardCharsets.UTF_8);
-
-        try {
-            final Map<String, Object> values = scan(rowKeyBytes);
-
-            if (!values.isEmpty()) {
-                List<String> retVal = returnType.equals(KEY_LIST.getValue())
-                    ? new ArrayList<>(values.keySet())
-                    : values.values().stream().map(Object::toString).collect(Collectors.toList());
-                return Optional.ofNullable(retVal);
-            } else {
-                return Optional.empty();
-            }
-        } catch (IOException e) {
-            getLogger().error("Error occurred loading row {}", new Object[] { coordinates.get(ROW_KEY_KEY) }, e);
-            throw new LookupFailureException(e);
-        }
-    }
-
-    private String returnType;
-
-    @OnEnabled
-    public void onEnabled(ConfigurationContext context) throws InterruptedException, IOException, InitializationException {
-        super.onEnabled(context);
-        returnType = context.getProperty(RETURN_TYPE).getValue();
-    }
-
-    @Override
-    public Class<?> getValueType() {
-        return List.class;
-    }
-
-    @Override
-    public Set<String> getRequiredKeys() {
-        return REQUIRED_KEYS;
-    }
-}
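
For reference, a hypothetical caller of the removed list lookup service. The only required coordinate is named "rowKey"; 'lookupService' stands in for an enabled HBase_1_1_2_ListLookupService obtained from the controller service registry, and the helper name is made up for illustration:

    import org.apache.nifi.lookup.LookupFailureException;
    import org.apache.nifi.lookup.LookupService;

    import java.util.Collections;
    import java.util.List;
    import java.util.Map;
    import java.util.Optional;

    class ListLookupExample {
        static List<?> rowAsList(final LookupService<List> lookupService, final String rowKey)
                throws LookupFailureException {
            final Map<String, Object> coordinates = Collections.singletonMap("rowKey", rowKey);
            final Optional<List> row = lookupService.lookup(coordinates);
            // an absent row (or a blank key) yields an empty Optional
            return row.orElse(Collections.emptyList());
        }
    }
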
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_RecordLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_RecordLookupService.java
deleted file mode 100644
index 23ac3d047d..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/HBase_1_1_2_RecordLookupService.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.annotation.documentation.CapabilityDescription;
-import org.apache.nifi.annotation.documentation.Tags;
-import org.apache.nifi.lookup.LookupFailureException;
-import org.apache.nifi.lookup.LookupService;
-import org.apache.nifi.serialization.SimpleRecordSchema;
-import org.apache.nifi.serialization.record.MapRecord;
-import org.apache.nifi.serialization.record.Record;
-import org.apache.nifi.serialization.record.RecordField;
-import org.apache.nifi.serialization.record.RecordFieldType;
-import org.apache.nifi.serialization.record.RecordSchema;
-import org.apache.nifi.util.StringUtils;
-
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-
-@Tags({"hbase", "record", "lookup", "service"})
-@CapabilityDescription("A lookup service that retrieves one or more columns from HBase and returns them as a record. The lookup coordinates " +
-        "must contain 'rowKey' which will be the HBase row id.")
-public class HBase_1_1_2_RecordLookupService extends AbstractHBaseLookupService implements LookupService<Record> {
-    @Override
-    public Optional<Record> lookup(Map<String, Object> coordinates) throws LookupFailureException {
-        if (coordinates.get(ROW_KEY_KEY) == null) {
-            return Optional.empty();
-        }
-
-        final String rowKey = coordinates.get(ROW_KEY_KEY).toString();
-        if (StringUtils.isBlank(rowKey)) {
-            return Optional.empty();
-        }
-
-        final byte[] rowKeyBytes = rowKey.getBytes(StandardCharsets.UTF_8);
-        try {
-            final Map<String, Object> values = scan(rowKeyBytes);
-
-            if (!values.isEmpty()) {
-                final List<RecordField> fields = new ArrayList<>();
-                for (String key : values.keySet()) {
-                    fields.add(new RecordField(key, RecordFieldType.STRING.getDataType()));
-                }
-                final RecordSchema schema = new SimpleRecordSchema(fields);
-                return Optional.ofNullable(new MapRecord(schema, values));
-            } else {
-                return Optional.empty();
-            }
-        } catch (IOException e) {
-            getLogger().error("Error occurred loading row {}", new Object[] { coordinates.get(ROW_KEY_KEY) }, e);
-            throw new LookupFailureException(e);
-        }
-    }
-
-    @Override
-    public Class<?> getValueType() {
-        return Record.class;
-    }
-
-    @Override
-    public Set<String> getRequiredKeys() {
-        return REQUIRED_KEYS;
-    }
-}
-
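
The record variant above returns every column of the row as a STRING field named after its qualifier. A hypothetical caller, assuming NiFi's Record#getAsString accessor; class and method names here are illustrative:

    import org.apache.nifi.lookup.LookupFailureException;
    import org.apache.nifi.lookup.LookupService;
    import org.apache.nifi.serialization.record.Record;

    import java.util.Collections;
    import java.util.Map;
    import java.util.Optional;

    class RecordLookupExample {
        static String columnValue(final LookupService<Record> lookupService,
                                  final String rowKey, final String qualifier) throws LookupFailureException {
            final Map<String, Object> coordinates = Collections.singletonMap("rowKey", rowKey);
            final Optional<Record> row = lookupService.lookup(coordinates);
            // a missing row or a missing qualifier both surface as null here
            return row.map(record -> record.getAsString(qualifier)).orElse(null);
        }
    }
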
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/VisibilityLabelUtils.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/VisibilityLabelUtils.java
deleted file mode 100644
index 8366696553..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/java/org/apache/nifi/hbase/VisibilityLabelUtils.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.util.StringUtils;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
-class VisibilityLabelUtils {
-    static final PropertyDescriptor AUTHORIZATIONS = new PropertyDescriptor.Builder()
-        .name("hb-lu-authorizations")
-        .displayName("Authorizations")
-        .description("The list of authorization tokens to be used with cell visibility if it is enabled. These will be used to " +
-                "override the default authorization list for the user accessing HBase.")
-        .required(false)
-        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
-        .build();
-
-    static List<String> getAuthorizations(ConfigurationContext context) {
-        List<String> tokens = new ArrayList<>();
-        String authorizationString = context.getProperty(AUTHORIZATIONS).isSet()
-                ? context.getProperty(AUTHORIZATIONS).getValue()
-                : "";
-        if (!StringUtils.isEmpty(authorizationString)) {
-            tokens = Arrays.asList(authorizationString.split(",[\\s]*"));
-        }
-
-        return tokens;
-    }
-}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
deleted file mode 100644
index 1bef44fe3a..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/main/resources/META-INF/services/org.apache.nifi.controller.ControllerService
+++ /dev/null
@@ -1,18 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-org.apache.nifi.hbase.HBase_1_1_2_ClientService
-org.apache.nifi.hbase.HBase_1_1_2_ClientMapCacheService
-org.apache.nifi.hbase.HBase_1_1_2_ListLookupService
-org.apache.nifi.hbase.HBase_1_1_2_RecordLookupService
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/MockHBaseClientService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/MockHBaseClientService.java
deleted file mode 100644
index 59f88a86e9..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/MockHBaseClientService.java
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Result;
-import org.apache.hadoop.hbase.client.ResultScanner;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.filter.Filter;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.nifi.controller.ConfigurationContext;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.hbase.put.PutColumn;
-import org.apache.nifi.hbase.put.PutFlowFile;
-import org.apache.nifi.hbase.scan.Column;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-/**
- * Override methods to create a mock service that can return staged data
- */
-public class MockHBaseClientService extends HBase_1_1_2_ClientService {
-
-    private Table table;
-    private String family;
-    private Map<String, Result> results = new HashMap<>();
-    private KerberosProperties kerberosProperties;
-    private boolean allowExplicitKeytab;
-    private UserGroupInformation mockUgi;
-
-    {
-        mockUgi = mock(UserGroupInformation.class);
-        try {
-            doAnswer(invocation -> {
-                PrivilegedExceptionAction<?> action = invocation.getArgument(0);
-                return action.run();
-            }).when(mockUgi).doAs(any(PrivilegedExceptionAction.class));
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-    }
-
-    public MockHBaseClientService(final Table table, final String family, final KerberosProperties kerberosProperties) {
-        this(table, family, kerberosProperties, false);
-    }
-
-    public MockHBaseClientService(final Table table, final String family, final KerberosProperties kerberosProperties, boolean allowExplicitKeytab) {
-        this.table = table;
-        this.family = family;
-        this.kerberosProperties = kerberosProperties;
-        this.allowExplicitKeytab = allowExplicitKeytab;
-    }
-
-    @Override
-    protected KerberosProperties getKerberosProperties(File kerberosConfigFile) {
-        return kerberosProperties;
-    }
-
-    protected void setKerberosProperties(KerberosProperties properties) {
-        this.kerberosProperties = properties;
-    }
-
-    public void addResult(final String rowKey, final Map<String, String> cells, final long timestamp) {
-        final byte[] rowArray = rowKey.getBytes(StandardCharsets.UTF_8);
-        final Cell[] cellArray = new Cell[cells.size()];
-        int i = 0;
-        for (final Map.Entry<String, String> cellEntry : cells.entrySet()) {
-            final Cell cell = Mockito.mock(Cell.class);
-            when(cell.getRowArray()).thenReturn(rowArray);
-            when(cell.getRowOffset()).thenReturn(0);
-            when(cell.getRowLength()).thenReturn((short) rowArray.length);
-
-            final String cellValue = cellEntry.getValue();
-            final byte[] valueArray = cellValue.getBytes(StandardCharsets.UTF_8);
-            when(cell.getValueArray()).thenReturn(valueArray);
-            when(cell.getValueOffset()).thenReturn(0);
-            when(cell.getValueLength()).thenReturn(valueArray.length);
-
-            final byte[] familyArray = family.getBytes(StandardCharsets.UTF_8);
-            when(cell.getFamilyArray()).thenReturn(familyArray);
-            when(cell.getFamilyOffset()).thenReturn(0);
-            when(cell.getFamilyLength()).thenReturn((byte) familyArray.length);
-
-            final String qualifier = cellEntry.getKey();
-            final byte[] qualifierArray = qualifier.getBytes(StandardCharsets.UTF_8);
-            when(cell.getQualifierArray()).thenReturn(qualifierArray);
-            when(cell.getQualifierOffset()).thenReturn(0);
-            when(cell.getQualifierLength()).thenReturn(qualifierArray.length);
-
-            when(cell.getTimestamp()).thenReturn(timestamp);
-
-            cellArray[i++] = cell;
-        }
-
-        final Result result = Mockito.mock(Result.class);
-        when(result.getRow()).thenReturn(rowArray);
-        when(result.rawCells()).thenReturn(cellArray);
-        results.put(rowKey, result);
-    }
-
-    @Override
-    public void put(final String tableName, final byte[] rowId, final Collection<PutColumn> columns) throws IOException {
-        Put put = new Put(rowId);
-        Map<String, String> map = new HashMap<>();
-        for (final PutColumn column : columns) {
-            put.addColumn(
-                    column.getColumnFamily(),
-                    column.getColumnQualifier(),
-                    column.getBuffer());
-            map.put(new String(column.getColumnQualifier()), new String(column.getBuffer()));
-        }
-
-        table.put(put);
-        addResult(new String(rowId), map, 1);
-    }
-
-    @Override
-    public void put(final String tableName, final Collection<PutFlowFile> puts) throws IOException {
-        final Map<String, List<PutColumn>> sorted = new HashMap<>();
-        final List<Put> newPuts = new ArrayList<>();
-
-        for (final PutFlowFile putFlowFile : puts) {
-            final Map<String, String> map = new HashMap<>();
-            final String rowKeyString = new String(putFlowFile.getRow(), StandardCharsets.UTF_8);
-            final List<PutColumn> columns = sorted.computeIfAbsent(rowKeyString, k -> new ArrayList<>());
-
-            columns.addAll(putFlowFile.getColumns());
-            for (PutColumn column : putFlowFile.getColumns()) {
-                map.put(new String(column.getColumnQualifier()), new String(column.getBuffer()));
-            }
-
-            addResult(new String(putFlowFile.getRow()), map, 1);
-        }
-
-        for (final Map.Entry<String, List<PutColumn>> entry : sorted.entrySet()) {
-            newPuts.addAll(buildPuts(entry.getKey().getBytes(StandardCharsets.UTF_8), entry.getValue()));
-        }
-
-        table.put(newPuts);
-    }
-
-    @Override
-    public boolean checkAndPut(final String tableName, final byte[] rowId, final byte[] family, final byte[] qualifier, final byte[] value, final PutColumn column) throws IOException {
-        for (Result result : results.values()) {
-            if (Arrays.equals(result.getRow(), rowId)) {
-                Cell[] cellArray = result.rawCells();
-                for (Cell cell : cellArray) {
-                    if (Arrays.equals(cell.getFamilyArray(), family) && Arrays.equals(cell.getQualifierArray(), qualifier)) {
-                         if (value == null || !Arrays.equals(cell.getValueArray(), value)) {
-                             return false;
-                         }
-                    }
-                }
-            }
-        }
-
-        final List<PutColumn> putColumns = new ArrayList<>();
-        putColumns.add(column);
-        put(tableName, rowId, putColumns);
-        return true;
-    }
-
-    @Override
-    protected ResultScanner getResults(Table table, byte[] startRow, byte[] endRow, Collection<Column> columns, List<String> labels) throws IOException {
-        final ResultScanner scanner = Mockito.mock(ResultScanner.class);
-        Mockito.when(scanner.iterator()).thenReturn(results.values().iterator());
-        return scanner;
-    }
-
-    @Override
-    protected ResultScanner getResults(Table table, Collection<Column> columns, Filter filter, long minTime, List<String> labels) throws IOException {
-        final ResultScanner scanner = Mockito.mock(ResultScanner.class);
-        Mockito.when(scanner.iterator()).thenReturn(results.values().iterator());
-        return scanner;
-    }
-
-    protected ResultScanner getResults(final Table table, final String startRow, final String endRow, final String filterExpression, final Long timerangeMin, final Long timerangeMax,
-            final Integer limitRows, final Boolean isReversed, final Collection<Column> columns)  throws IOException {
-        final ResultScanner scanner = Mockito.mock(ResultScanner.class);
-        Mockito.when(scanner.iterator()).thenReturn(results.values().iterator());
-        return scanner;
-    }
-
-    @Override
-    protected Connection createConnection(ConfigurationContext context) throws IOException {
-        Connection connection = Mockito.mock(Connection.class);
-        Mockito.when(connection.getTable(table.getName())).thenReturn(table);
-        return connection;
-    }
-
-    @Override
-    boolean isAllowExplicitKeytab() {
-        return allowExplicitKeytab;
-    }
-
-    @Override
-    UserGroupInformation getUgi() {
-        return mockUgi;
-    }
-}
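
The mock above routes UserGroupInformation.doAs(...) straight through so the tests never need a real Kerberos login. The same pass-through trick, extracted as a stand-alone helper (a sketch; the class name is made up):

    import org.apache.hadoop.security.UserGroupInformation;

    import java.security.PrivilegedExceptionAction;

    import static org.mockito.ArgumentMatchers.any;
    import static org.mockito.Mockito.doAnswer;
    import static org.mockito.Mockito.mock;

    final class PassthroughUgi {
        static UserGroupInformation create() throws Exception {
            final UserGroupInformation ugi = mock(UserGroupInformation.class);
            doAnswer(invocation -> {
                final PrivilegedExceptionAction<?> action = invocation.getArgument(0);
                return action.run(); // execute inline, no security context switch
            }).when(ugi).doAs(any(PrivilegedExceptionAction.class));
            return ugi;
        }
    }
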
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientMapCacheService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientMapCacheService.java
deleted file mode 100644
index f62691edbc..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientMapCacheService.java
+++ /dev/null
@@ -1,453 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
-import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
-import org.apache.nifi.distributed.cache.client.Deserializer;
-import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
-import org.apache.nifi.distributed.cache.client.Serializer;
-import org.apache.nifi.distributed.cache.client.exception.DeserializationException;
-import org.apache.nifi.distributed.cache.client.exception.SerializationException;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.hbase.scan.ResultCell;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-
-import static org.junit.jupiter.api.Assertions.assertArrayEquals;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
-import static org.junit.jupiter.api.Assertions.assertNull;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class TestHBase_1_1_2_ClientMapCacheService {
-
-    private KerberosProperties kerberosPropsWithFile;
-    private KerberosProperties kerberosPropsWithoutFile;
-
-    private Serializer<String> stringSerializer = new StringSerializer();
-    private Deserializer<String> stringDeserializer = new StringDeserializer();
-
-    @BeforeEach
-    public void setup() {
-        // needed for calls to UserGroupInformation.setConfiguration() to work when passing in
-        // config with Kerberos authentication enabled
-        System.setProperty("java.security.krb5.realm", "nifi.com");
-        System.setProperty("java.security.krb5.kdc", "nifi.kdc");
-
-        kerberosPropsWithFile = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-
-        kerberosPropsWithoutFile = new KerberosProperties(null);
-    }
-
-    private final String tableName = "nifi";
-    private final String columnFamily = "family1";
-    private final String columnQualifier = "qualifier1";
-
-    @Test
-    public void testPut() throws InitializationException, IOException {
-        final String row = "row1";
-        final String content = "content1";
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-            .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        // try to put a single cell
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        hBaseCacheService.put(row, content, stringSerializer, stringSerializer);
-
-        // verify only one call to put was made
-        ArgumentCaptor<Put> capture = ArgumentCaptor.forClass(Put.class);
-        verify(table, times(1)).put(capture.capture());
-
-        verifyPut(row, columnFamily, columnQualifier, content, capture.getValue());
-    }
-
-    @Test
-    public void testPutAll() throws InitializationException, IOException {
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        // try to put a single cell
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        Map<String, String> putz = new HashMap<>();
-        List<String> content = new ArrayList<>();
-        List<String> rows = new ArrayList<>();
-        for (int x = 1; x <= 5; x++) {
-            putz.put(String.format("row-%d", x), String.format("content-%d", x));
-            content.add(String.format("content-%d", x));
-            rows.add(String.format("row-%d", x));
-        }
-
-        hBaseCacheService.putAll(putz, stringSerializer, stringSerializer);
-
-        // verify only one call to put was made
-        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
-        verify(table, times(1)).put(capture.capture());
-
-        List<Put> captured = capture.getValue();
-
-        for (int x = 0; x < 5; x++) {
-            Put put = captured.get(x);
-
-            String row = new String(put.getRow());
-            assertTrue(rows.contains(row));
-
-            NavigableMap<byte [], List<Cell>> familyCells = put.getFamilyCellMap();
-            assertEquals(1, familyCells.size());
-
-            Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
-            assertEquals(columnFamily, new String(entry.getKey()));
-            assertEquals(1, entry.getValue().size());
-
-            Cell cell = entry.getValue().get(0);
-            String contentString = new String(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
-            assertEquals(columnQualifier, new String(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
-            assertTrue(content.contains(contentString));
-
-            content.remove(contentString);
-            rows.remove(row);
-        }
-    }
-
-    @Test
-    public void testGet() throws InitializationException, IOException {
-        final String row = "row1";
-        final String content = "content1";
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-            .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        hBaseCacheService.put(row, content, stringSerializer, stringSerializer);
-
-        final String result = hBaseCacheService.get(row, stringSerializer, stringDeserializer);
-
-        assertEquals(content, result);
-    }
-
-    @Test
-    public void testContainsKey() throws InitializationException, IOException {
-        final String row = "row1";
-        final String content = "content1";
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-            .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        assertFalse(hBaseCacheService.containsKey(row, stringSerializer));
-
-        hBaseCacheService.put(row, content, stringSerializer, stringSerializer);
-
-        assertTrue(hBaseCacheService.containsKey(row, stringSerializer));
-    }
-
-    @Test
-    public void testPutIfAbsent() throws InitializationException, IOException {
-        final String row = "row1";
-        final String content = "content1";
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-            .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        assertTrue(hBaseCacheService.putIfAbsent(row, content, stringSerializer, stringSerializer));
-
-        // verify only one call to put was made
-        ArgumentCaptor<Put> capture = ArgumentCaptor.forClass(Put.class);
-        verify(table, times(1)).put(capture.capture());
-
-        verifyPut(row, columnFamily, columnQualifier, content, capture.getValue());
-
-        assertFalse(hBaseCacheService.putIfAbsent(row, content, stringSerializer, stringSerializer));
-
-        verify(table, times(1)).put(capture.capture());
-    }
-
-    @Test
-    public void testGetAndPutIfAbsent() throws InitializationException, IOException {
-        final String row = "row1";
-        final String content = "content1";
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-            .asControllerService(HBaseClientService.class);
-
-        final DistributedMapCacheClient cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final DistributedMapCacheClient hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(DistributedMapCacheClient.class);
-
-        assertNull(hBaseCacheService.getAndPutIfAbsent(row, content, stringSerializer, stringSerializer, stringDeserializer));
-
-        // verify only one call to put was made
-        ArgumentCaptor<Put> capture = ArgumentCaptor.forClass(Put.class);
-        verify(table, times(1)).put(capture.capture());
-
-        verifyPut(row, columnFamily, columnQualifier, content, capture.getValue());
-
-        final String result = hBaseCacheService.getAndPutIfAbsent(row, content, stringSerializer, stringSerializer, stringDeserializer);
-
-        verify(table, times(1)).put(capture.capture());
-
-        assertEquals(content, result);
-    }
-
-    @Test
-    public void testFetch() throws InitializationException, IOException {
-        final String key = "key1";
-        final String value = "value1";
-        final byte[] revision = value.getBytes(StandardCharsets.UTF_8);
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        final AtomicDistributedMapCacheClient<byte[]> cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final AtomicDistributedMapCacheClient<byte[]> hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(AtomicDistributedMapCacheClient.class);
-
-        hBaseCacheService.put(key, value, stringSerializer, stringSerializer);
-
-        final AtomicCacheEntry<String, String, byte[]> atomicCacheEntry = hBaseCacheService.fetch(key, stringSerializer, stringDeserializer);
-
-        assertEquals(key, atomicCacheEntry.getKey());
-        assertEquals(value, atomicCacheEntry.getValue());
-        assertArrayEquals(revision, atomicCacheEntry.getRevision().get());
-    }
-
-    @Test
-    public void testReplace() throws InitializationException, IOException {
-        final String key = "key1";
-        final String value = "value1";
-        final byte[] revision = value.getBytes(StandardCharsets.UTF_8);
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        final AtomicDistributedMapCacheClient<byte[]> cacheService = configureHBaseCacheService(runner, hBaseClientService);
-        runner.assertValid(cacheService);
-
-        final AtomicDistributedMapCacheClient<byte[]> hBaseCacheService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CACHE_SERVICE)
-                .asControllerService(AtomicDistributedMapCacheClient.class);
-
-        // First time the value is not yet in the cache, so replace should return true
-        final boolean newResult = hBaseCacheService.replace(new AtomicCacheEntry<>(key, value, null), stringSerializer, stringSerializer);
-        assertTrue(newResult);
-
-        // Second time the value is already in the cache, but the supplied revision matches, so replace should succeed
-        final boolean existingResult = hBaseCacheService.replace(new AtomicCacheEntry<>(key, value, revision), stringSerializer, stringSerializer);
-        assertTrue(existingResult);
-
-        // Third time we're replacing with a new value at the current revision, so this should also return true
-        final boolean replaceResult = hBaseCacheService.replace(new AtomicCacheEntry<>(key, "value2", revision), stringSerializer, stringSerializer);
-        assertTrue(replaceResult);
-    }
-
-    private MockHBaseClientService configureHBaseClientService(final TestRunner runner, final Table table) throws InitializationException {
-        final MockHBaseClientService service = new MockHBaseClientService(table, "family1", kerberosPropsWithFile);
-        runner.addControllerService("hbaseClient", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.enableControllerService(service);
-        runner.setProperty(TestProcessor.HBASE_CLIENT_SERVICE, "hbaseClient");
-        return service;
-    }
-
-    private AtomicDistributedMapCacheClient<byte[]> configureHBaseCacheService(final TestRunner runner, final HBaseClientService service) throws InitializationException {
-        final HBase_1_1_2_ClientMapCacheService cacheService = new HBase_1_1_2_ClientMapCacheService();
-        runner.addControllerService("hbaseCache", cacheService);
-        runner.setProperty(cacheService, HBase_1_1_2_ClientMapCacheService.HBASE_CLIENT_SERVICE, "hbaseClient");
-        runner.setProperty(cacheService, HBase_1_1_2_ClientMapCacheService.HBASE_CACHE_TABLE_NAME, tableName);
-        runner.setProperty(cacheService, HBase_1_1_2_ClientMapCacheService.HBASE_COLUMN_FAMILY, columnFamily);
-        runner.setProperty(cacheService, HBase_1_1_2_ClientMapCacheService.HBASE_COLUMN_QUALIFIER, columnQualifier);
-        runner.enableControllerService(cacheService);
-        runner.setProperty(TestProcessor.HBASE_CACHE_SERVICE,"hbaseCache");
-        return cacheService;
-    }
-
-    private void verifyResultCell(final ResultCell result, final String cf, final String cq, final String val) {
-        final String colFamily = new String(result.getFamilyArray(), result.getFamilyOffset(), result.getFamilyLength());
-        assertEquals(cf, colFamily);
-
-        final String colQualifier = new String(result.getQualifierArray(), result.getQualifierOffset(), result.getQualifierLength());
-        assertEquals(cq, colQualifier);
-
-        final String value = new String(result.getValueArray(), result.getValueOffset(), result.getValueLength());
-        assertEquals(val, value);
-    }
-
-    private void verifyPut(String row, String columnFamily, String columnQualifier, String content, Put put) {
-        assertEquals(row, new String(put.getRow()));
-
-        NavigableMap<byte [], List<Cell>> familyCells = put.getFamilyCellMap();
-        assertEquals(1, familyCells.size());
-
-        Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
-        assertEquals(columnFamily, new String(entry.getKey()));
-        assertEquals(1, entry.getValue().size());
-
-        Cell cell = entry.getValue().get(0);
-        assertEquals(columnQualifier, new String(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
-        assertEquals(content, new String(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
-    }
-
-    private static class StringSerializer implements Serializer<String> {
-        @Override
-        public void serialize(final String value, final OutputStream out) throws SerializationException, IOException {
-            out.write(value.getBytes(StandardCharsets.UTF_8));
-        }
-    }
-
-    private static class StringDeserializer implements Deserializer<String> {
-        @Override
-        public String deserialize(byte[] input) throws DeserializationException, IOException {
-            return new String(input, StandardCharsets.UTF_8);
-        }
-    }
-}
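
The fetch/replace tests above exercise a revision-based compare-and-set: fetch returns the stored cell bytes as the revision, and replace succeeds only while that revision still matches. A sketch of the optimistic-update loop a client might build on that API (the helper name and retry policy are assumptions, not part of the removed code):

    import org.apache.nifi.distributed.cache.client.AtomicCacheEntry;
    import org.apache.nifi.distributed.cache.client.AtomicDistributedMapCacheClient;
    import org.apache.nifi.distributed.cache.client.Deserializer;
    import org.apache.nifi.distributed.cache.client.Serializer;

    import java.io.IOException;
    import java.util.function.UnaryOperator;

    final class OptimisticUpdate {
        static <K> void update(final AtomicDistributedMapCacheClient<byte[]> cache,
                               final K key,
                               final UnaryOperator<String> transform,
                               final Serializer<K> keySerializer,
                               final Serializer<String> valueSerializer,
                               final Deserializer<String> valueDeserializer) throws IOException {
            while (true) {
                final AtomicCacheEntry<K, String, byte[]> current = cache.fetch(key, keySerializer, valueDeserializer);
                final byte[] revision = current == null ? null : current.getRevision().orElse(null);
                final String newValue = transform.apply(current == null ? null : current.getValue());
                if (cache.replace(new AtomicCacheEntry<>(key, newValue, revision), keySerializer, valueSerializer)) {
                    return; // revision still matched; write committed
                }
                // a concurrent writer changed the row between fetch and replace; retry
            }
        }
    }
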
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
deleted file mode 100644
index a25eefdede..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ClientService.java
+++ /dev/null
@@ -1,529 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Put;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.hbase.put.PutColumn;
-import org.apache.nifi.hbase.put.PutFlowFile;
-import org.apache.nifi.hbase.scan.Column;
-import org.apache.nifi.hbase.scan.ResultCell;
-import org.apache.nifi.hbase.scan.ResultHandler;
-import org.apache.nifi.kerberos.KerberosCredentialsService;
-import org.apache.nifi.kerberos.KerberosUserService;
-import org.apache.nifi.reporting.InitializationException;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.ArgumentCaptor;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.NavigableMap;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.times;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-public class TestHBase_1_1_2_ClientService {
-
-    static final String COL_FAM = "nifi1";
-
-    private KerberosProperties kerberosPropsWithFile;
-    private KerberosProperties kerberosPropsWithoutFile;
-
-    @BeforeEach
-    public void setup() {
-        // needed for calls to UserGroupInformation.setConfiguration() to work when passing in
-        // config with Kerberos authentication enabled
-        System.setProperty("java.security.krb5.realm", "nifi.com");
-        System.setProperty("java.security.krb5.kdc", "nifi.kdc");
-
-        kerberosPropsWithFile = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-
-        kerberosPropsWithoutFile = new KerberosProperties(null);
-    }
-
-    @Test
-    public void testCustomValidate() throws InitializationException, IOException {
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        final String tableName = "nifi";
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // no conf file or zk properties so should be invalid
-        MockHBaseClientService service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        runner.setVariable("hadoop-conf-files", "src/test/resources/hbase-site.xml");
-        runner.setVariable("zk-quorum", "localhost");
-        runner.setVariable("zk-client-port", "2181");
-        runner.setVariable("zk-znode", "/hbase");
-
-        // conf file with no zk properties should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "${hadoop-conf-files}");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // only quorum and no conf file should be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        // quorum and port, no znode, no conf file, should be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_CLIENT_PORT, "${zk-client-port}");
-
-        runner.assertNotValid(service);
-        runner.removeControllerService(service);
-
-        // quorum, port, and znode, no conf file, should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_QUORUM, "${zk-quorum}");
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_CLIENT_PORT, "${zk-client-port}");
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_ZNODE_PARENT, "${zk-znode}");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // quorum and port with conf file should be valid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_QUORUM, "localhost");
-        runner.setProperty(service, HBase_1_1_2_ClientService.ZOOKEEPER_CLIENT_PORT, "2181");
-        runner.enableControllerService(service);
-
-        runner.assertValid(service);
-        runner.removeControllerService(service);
-
-        // Kerberos - principal set, keytab not set, and only hbase-site-security - valid because core-site-security is required to turn on security
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile, true);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.enableControllerService(service);
-        runner.assertValid(service);
-
-        // Kerberos - principal set, keytab not set, and both config files
-        runner.disableControllerService(service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site-security.xml, src/test/resources/core-site-security.xml");
-        runner.assertNotValid(service);
-
-        // Kerberos - add valid options
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.enableControllerService(service);
-        runner.assertValid(service);
-
-        // Kerberos - add invalid non-existent keytab file
-        runner.disableControllerService(service);
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/missing.keytab");
-        runner.assertNotValid(service);
-
-        // Kerberos - add invalid principal
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "");
-        runner.assertNotValid(service);
-
-        // Kerberos - valid props, but the KerberosProperties has a null Kerberos config file, so the service should be invalid
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithoutFile);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site-security.xml, src/test/resources/core-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithoutFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.setProperty(service, kerberosPropsWithoutFile.getKerberosPrincipal(), "test@REALM");
-        runner.assertNotValid(service);
-
-        // Kerberos - add valid options with password
-        service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile, true);
-        runner.addControllerService("hbaseClientService", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES,
-                "src/test/resources/hbase-site.xml, src/test/resources/core-site-security.xml");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPassword(), "password");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.assertValid(service);
-
-        // Kerberos - keytab and password at same time should be invalid
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
-        runner.assertValid(service);
-
-        // Kerberos - credentials service not valid when other kerberos properties set
-        final KerberosCredentialsService credentialsService = enabledKerberosCredentialsService(runner);
-        runner.setProperty(service, HBase_1_1_2_ClientService.KERBEROS_CREDENTIALS_SERVICE, credentialsService.getIdentifier());
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosPassword());
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosPrincipal());
-        runner.assertValid(service);
-
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
-        runner.assertValid(service);
-
-        // Kerberos - user service with credentials service is invalid
-        final KerberosUserService userService = enableKerberosUserService(runner);
-        runner.setProperty(service, HBase_1_1_2_ClientService.KERBEROS_USER_SERVICE, userService.getIdentifier());
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, HBase_1_1_2_ClientService.KERBEROS_CREDENTIALS_SERVICE);
-        runner.assertValid(service);
-
-        // Kerberos - user service with other kerberos properties is invalid
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPassword(), "password");
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosPrincipal(), "test@REALM");
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosPassword());
-        runner.setProperty(service, kerberosPropsWithFile.getKerberosKeytab(), "src/test/resources/fake.keytab");
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosKeytab());
-        runner.assertNotValid(service);
-
-        runner.removeProperty(service, kerberosPropsWithFile.getKerberosPrincipal());
-        runner.assertValid(service);
-    }
-
-    private KerberosUserService enableKerberosUserService(final TestRunner runner) throws InitializationException {
-        final KerberosUserService kerberosUserService = mock(KerberosUserService.class);
-        when(kerberosUserService.getIdentifier()).thenReturn("userService1");
-        runner.addControllerService(kerberosUserService.getIdentifier(), kerberosUserService);
-        runner.enableControllerService(kerberosUserService);
-        return kerberosUserService;
-    }
-
-    private KerberosCredentialsService enabledKerberosCredentialsService(final TestRunner runner) throws InitializationException {
-        final KerberosCredentialsService credentialsService = mock(KerberosCredentialsService.class);
-        when(credentialsService.getIdentifier()).thenReturn("credsService1");
-        when(credentialsService.getPrincipal()).thenReturn("principal1");
-        when(credentialsService.getKeytab()).thenReturn("keytab1");
-
-        runner.addControllerService(credentialsService.getIdentifier(), credentialsService);
-        runner.enableControllerService(credentialsService);
-        return credentialsService;
-    }
-
-    @Test
-    public void testSinglePut() throws InitializationException, IOException {
-        final String tableName = "nifi";
-        final String row = "row1";
-        final String columnFamily = "family1";
-        final String columnQualifier = "qualifier1";
-        final String content = "content1";
-
-        final Collection<PutColumn> columns = Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8), columnQualifier.getBytes(StandardCharsets.UTF_8),
-                content.getBytes(StandardCharsets.UTF_8)));
-        final PutFlowFile putFlowFile = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8), columns, null);
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final HBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // try to put a single cell
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        hBaseClientService.put(tableName, Arrays.asList(putFlowFile));
-
-        // verify only one call to put was made
-        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
-        verify(table, times(1)).put(capture.capture());
-
-        // verify only one put was in the list of puts
-        final List<Put> puts = capture.getValue();
-        assertEquals(1, puts.size());
-        verifyPut(row, columnFamily, columnQualifier, content, puts.get(0));
-    }
-
-    @Test
-    public void testMultiplePutsSameRow() throws IOException, InitializationException {
-        final String tableName = "nifi";
-        final String row = "row1";
-        final String columnFamily = "family1";
-        final String columnQualifier = "qualifier1";
-        final String content1 = "content1";
-        final String content2 = "content2";
-
-        final Collection<PutColumn> columns1 = Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
-                columnQualifier.getBytes(StandardCharsets.UTF_8),
-                content1.getBytes(StandardCharsets.UTF_8)));
-        final PutFlowFile putFlowFile1 = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8), columns1, null);
-
-        final Collection<PutColumn> columns2 = Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
-                columnQualifier.getBytes(StandardCharsets.UTF_8),
-                content2.getBytes(StandardCharsets.UTF_8)));
-        final PutFlowFile putFlowFile2 = new PutFlowFile(tableName, row.getBytes(StandardCharsets.UTF_8), columns2, null);
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final HBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // try to put multiple cells for the same row
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        hBaseClientService.put(tableName, Arrays.asList(putFlowFile1, putFlowFile2));
-
-        // verify put was only called once
-        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
-        verify(table, times(1)).put(capture.capture());
-
-        // verify there was only one put in the list of puts
-        final List<Put> puts = capture.getValue();
-        assertEquals(1, puts.size());
-
-        // verify two cells were added to this one put operation
-        final NavigableMap<byte[], List<Cell>> familyCells = puts.get(0).getFamilyCellMap();
-        Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
-        assertEquals(2, entry.getValue().size());
-    }
-
-    @Test
-    public void testMultiplePutsDifferentRow() throws IOException, InitializationException {
-        final String tableName = "nifi";
-        final String row1 = "row1";
-        final String row2 = "row2";
-        final String columnFamily = "family1";
-        final String columnQualifier = "qualifier1";
-        final String content1 = "content1";
-        final String content2 = "content2";
-
-        final Collection<PutColumn> columns1 = Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
-                columnQualifier.getBytes(StandardCharsets.UTF_8),
-                content1.getBytes(StandardCharsets.UTF_8)));
-        final PutFlowFile putFlowFile1 = new PutFlowFile(tableName, row1.getBytes(StandardCharsets.UTF_8), columns1, null);
-
-        final Collection<PutColumn> columns2 = Collections.singletonList(new PutColumn(columnFamily.getBytes(StandardCharsets.UTF_8),
-                columnQualifier.getBytes(StandardCharsets.UTF_8),
-                content2.getBytes(StandardCharsets.UTF_8)));
-        final PutFlowFile putFlowFile2 = new PutFlowFile(tableName, row2.getBytes(StandardCharsets.UTF_8), columns2, null);
-
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table so we can verify the put operations later
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final HBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // try to put multiple cells with different rows
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        hBaseClientService.put(tableName, Arrays.asList(putFlowFile1, putFlowFile2));
-
-        // verify put was only called once
-        ArgumentCaptor<List> capture = ArgumentCaptor.forClass(List.class);
-        verify(table, times(1)).put(capture.capture());
-
-        // verify there were two puts in the list
-        final List<Put> puts = capture.getValue();
-        assertEquals(2, puts.size());
-    }
-
-    @Test
-    public void testScan() throws InitializationException, IOException {
-        final String tableName = "nifi";
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table for the client service to use
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // stage some results in the mock service...
-        final long now = System.currentTimeMillis();
-
-        final Map<String, String> cells = new LinkedHashMap<>();
-        cells.put("greeting", "hello");
-        cells.put("name", "nifi");
-
-        service.addResult("row0", cells, now - 2);
-        service.addResult("row1", cells, now - 1);
-        service.addResult("row2", cells, now - 1);
-        service.addResult("row3", cells, now);
-
-        // perform a scan and verify the four rows were returned
-        final CollectingResultHandler handler = new CollectingResultHandler();
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        hBaseClientService.scan(tableName, new ArrayList<Column>(), null, now, handler);
-        assertEquals(4, handler.results.size());
-
-        // get row0 using the row id and verify it has 2 cells
-        final ResultCell[] results = handler.results.get("row0");
-        assertNotNull(results);
-        assertEquals(2, results.length);
-
-        verifyResultCell(results[0], COL_FAM, "greeting", "hello");
-        verifyResultCell(results[1], COL_FAM, "name", "nifi");
-    }
-
-    @Test
-    public void testScanWithValidFilter() throws InitializationException, IOException {
-        final String tableName = "nifi";
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table for the client service to use
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // create a handler to collect any scan results
-        final CollectingResultHandler handler = new CollectingResultHandler();
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        // make sure we parse the filter expression without throwing an exception
-        final String filter = "PrefixFilter ('Row') AND PageFilter (1) AND FirstKeyOnlyFilter ()";
-        hBaseClientService.scan(tableName, new ArrayList<Column>(), filter, System.currentTimeMillis(), handler);
-    }
-
-    @Test
-    public void testScanWithInvalidFilter() throws InitializationException {
-        final String tableName = "nifi";
-        final TestRunner runner = TestRunners.newTestRunner(TestProcessor.class);
-
-        // Mock an HBase Table for the client service to use
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(tableName));
-
-        // create the controller service and link it to the test processor
-        final MockHBaseClientService service = configureHBaseClientService(runner, table);
-        runner.assertValid(service);
-
-        // create a handler; the scan should fail before returning any results
-        final CollectingResultHandler handler = new CollectingResultHandler();
-        final HBaseClientService hBaseClientService = runner.getProcessContext().getProperty(TestProcessor.HBASE_CLIENT_SERVICE)
-                .asControllerService(HBaseClientService.class);
-
-        // this should throw IllegalArgumentException
-        final String filter = "this is not a filter";
-        assertThrows(IllegalArgumentException.class,
-                () -> hBaseClientService.scan(tableName, new ArrayList<Column>(), filter, System.currentTimeMillis(), handler));
-    }
-
-    private MockHBaseClientService configureHBaseClientService(final TestRunner runner, final Table table) throws InitializationException {
-        final MockHBaseClientService service = new MockHBaseClientService(table, COL_FAM, kerberosPropsWithFile);
-        runner.addControllerService("hbaseClient", service);
-        runner.setProperty(service, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.enableControllerService(service);
-        runner.setProperty(TestProcessor.HBASE_CLIENT_SERVICE, "hbaseClient");
-        return service;
-    }
-
-    private void verifyResultCell(final ResultCell result, final String cf, final String cq, final String val) {
-        final String colFamily = new String(result.getFamilyArray(), result.getFamilyOffset(), result.getFamilyLength());
-        assertEquals(cf, colFamily);
-
-        final String colQualifier = new String(result.getQualifierArray(), result.getQualifierOffset(), result.getQualifierLength());
-        assertEquals(cq, colQualifier);
-
-        final String value = new String(result.getValueArray(), result.getValueOffset(), result.getValueLength());
-        assertEquals(val, value);
-    }
-
-    private void verifyPut(String row, String columnFamily, String columnQualifier, String content, Put put) {
-        assertEquals(row, new String(put.getRow()));
-
-        NavigableMap<byte [], List<Cell>> familyCells = put.getFamilyCellMap();
-        assertEquals(1, familyCells.size());
-
-        Map.Entry<byte[], List<Cell>> entry = familyCells.firstEntry();
-        assertEquals(columnFamily, new String(entry.getKey()));
-        assertEquals(1, entry.getValue().size());
-
-        Cell cell = entry.getValue().get(0);
-        assertEquals(columnQualifier, new String(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength()));
-        assertEquals(content, new String(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength()));
-    }
-
-    // handler that saves results for verification
-    private static final class CollectingResultHandler implements ResultHandler {
-
-        Map<String,ResultCell[]> results = new LinkedHashMap<>();
-
-        @Override
-        public void handle(byte[] row, ResultCell[] resultCells) {
-            final String rowStr = new String(row, StandardCharsets.UTF_8);
-            results.put(rowStr, resultCells);
-        }
-    }
-
-}
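
One note on the tests above: the raw ArgumentCaptor<List> they construct
compiles with unchecked warnings. A generics-safe sketch using Mockito's
@Captor annotation (assuming JUnit 5 with mockito-junit-jupiter on the
classpath) would be:

    import java.util.List;

    import org.apache.hadoop.hbase.client.Put;
    import org.apache.hadoop.hbase.client.Table;
    import org.junit.jupiter.api.extension.ExtendWith;
    import org.mockito.ArgumentCaptor;
    import org.mockito.Captor;
    import org.mockito.Mock;
    import org.mockito.junit.jupiter.MockitoExtension;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.mockito.Mockito.verify;

    @ExtendWith(MockitoExtension.class)
    class TypedPutCaptureSketch {
        @Mock
        private Table table;

        // Mockito builds a typed captor from the field declaration,
        // avoiding the raw ArgumentCaptor<List> and its unchecked cast.
        @Captor
        private ArgumentCaptor<List<Put>> putCaptor;

        void assertSinglePutCall() throws Exception {
            // verifies Table.put(List<Put>) was invoked exactly once
            verify(table).put(putCaptor.capture());
            assertEquals(1, putCaptor.getValue().size());
        }
    }
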
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ListLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ListLookupService.java
deleted file mode 100644
index 17788c787a..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_ListLookupService.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.when;
-
-public class TestHBase_1_1_2_ListLookupService {
-
-    static final String TABLE_NAME = "guids";
-
-    private TestRunner runner;
-    private HBase_1_1_2_ListLookupService lookupService;
-    private MockHBaseClientService clientService;
-    private NoOpProcessor processor;
-
-    @BeforeEach
-    public void before() throws Exception {
-        processor = new NoOpProcessor();
-        runner = TestRunners.newTestRunner(processor);
-
-        // setup mock HBaseClientService
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(TABLE_NAME));
-
-        final KerberosProperties kerberosProperties = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-        clientService = new MockHBaseClientService(table, "family", kerberosProperties);
-        runner.addControllerService("clientService", clientService);
-        runner.setProperty(clientService, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.enableControllerService(clientService);
-
-        // setup HBase LookupService
-        lookupService = new HBase_1_1_2_ListLookupService();
-        runner.addControllerService("lookupService", lookupService);
-        runner.setProperty(lookupService, HBase_1_1_2_ListLookupService.HBASE_CLIENT_SERVICE, "clientService");
-        runner.setProperty(lookupService, HBase_1_1_2_ListLookupService.TABLE_NAME, TABLE_NAME);
-        runner.enableControllerService(lookupService);
-    }
-
-    private Optional<List> setupAndRun() throws Exception {
-        // setup some staged data in the mock client service
-        final Map<String,String> cells = new HashMap<>();
-        cells.put("cq1", "v1");
-        cells.put("cq2", "v2");
-        clientService.addResult("row1", cells, System.currentTimeMillis());
-
-        Map<String, Object> lookup = new HashMap<>();
-        lookup.put("rowKey", "row1");
-
-        return lookupService.lookup(lookup);
-    }
-
-    @Test
-    public void testLookupKeyList() throws Exception {
-        Optional<List> results = setupAndRun();
-
-        assertTrue(results.isPresent());
-        List result = results.get();
-        assertEquals(2, result.size());
-        assertTrue(result.contains("cq1"));
-        assertTrue(result.contains("cq2"));
-    }
-
-    @Test
-    public void testLookupValueList() throws Exception {
-        runner.disableControllerService(lookupService);
-        runner.setProperty(lookupService, HBase_1_1_2_ListLookupService.RETURN_TYPE, HBase_1_1_2_ListLookupService.VALUE_LIST);
-        runner.enableControllerService(lookupService);
-        Optional<List> results = setupAndRun();
-
-        assertTrue(results.isPresent());
-        List result = results.get();
-        assertEquals(2, result.size());
-        assertTrue(result.contains("v1"));
-        assertTrue(result.contains("v2"));
-    }
-
-    // Processor that does nothing, just so we can create a TestRunner
-    private static class NoOpProcessor extends AbstractProcessor {
-
-        @Override
-        protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-            return Collections.emptyList();
-        }
-
-        @Override
-        public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
-
-        }
-    }
-
-}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_RecordLookupService.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_RecordLookupService.java
deleted file mode 100644
index f187d4da94..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_1_1_2_RecordLookupService.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.client.Table;
-import org.apache.nifi.hadoop.KerberosProperties;
-import org.apache.nifi.serialization.record.Record;
-import org.apache.nifi.util.TestRunner;
-import org.apache.nifi.util.TestRunners;
-import org.junit.jupiter.api.BeforeEach;
-import org.junit.jupiter.api.Test;
-import org.mockito.Mockito;
-
-import java.io.File;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertNotNull;
-import static org.mockito.Mockito.when;
-
-public class TestHBase_1_1_2_RecordLookupService {
-
-    static final String TABLE_NAME = "guids";
-    static final String ROW = "row1";
-    static final String COLS = "cf1:cq1,cf2:cq2";
-
-    private TestRunner runner;
-    private HBase_1_1_2_RecordLookupService lookupService;
-    private MockHBaseClientService clientService;
-    private TestRecordLookupProcessor testLookupProcessor;
-
-    @BeforeEach
-    public void before() throws Exception {
-        testLookupProcessor = new TestRecordLookupProcessor();
-        runner = TestRunners.newTestRunner(testLookupProcessor);
-
-        // setup mock HBaseClientService
-        final Table table = Mockito.mock(Table.class);
-        when(table.getName()).thenReturn(TableName.valueOf(TABLE_NAME));
-
-        final KerberosProperties kerberosProperties = new KerberosProperties(new File("src/test/resources/krb5.conf"));
-        clientService = new MockHBaseClientService(table, "family", kerberosProperties);
-        runner.addControllerService("clientService", clientService);
-        runner.setProperty(clientService, HBase_1_1_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
-        runner.enableControllerService(clientService);
-
-        // setup HBase LookupService
-        lookupService = new HBase_1_1_2_RecordLookupService();
-        runner.addControllerService("lookupService", lookupService);
-        runner.setProperty(lookupService, HBase_1_1_2_RecordLookupService.HBASE_CLIENT_SERVICE, "clientService");
-        runner.setProperty(lookupService, HBase_1_1_2_RecordLookupService.TABLE_NAME, TABLE_NAME);
-        runner.enableControllerService(lookupService);
-
-        // setup test processor
-        runner.setProperty(TestRecordLookupProcessor.HBASE_LOOKUP_SERVICE, "lookupService");
-        runner.setProperty(TestRecordLookupProcessor.HBASE_ROW, ROW);
-    }
-
-    @Test
-    public void testSuccessfulLookupAllColumns() {
-        // setup some staged data in the mock client service
-        final Map<String,String> cells = new HashMap<>();
-        cells.put("cq1", "v1");
-        cells.put("cq2", "v2");
-        clientService.addResult("row1", cells, System.currentTimeMillis());
-
-        // run the processor
-        runner.enqueue("trigger flow file");
-        runner.run();
-        runner.assertAllFlowFilesTransferred(TestRecordLookupProcessor.REL_SUCCESS);
-
-        final List<Record> records = testLookupProcessor.getLookedupRecords();
-        assertNotNull(records);
-        assertEquals(1, records.size());
-
-        final Record record = records.get(0);
-        assertEquals("v1", record.getAsString("cq1"));
-        assertEquals("v2", record.getAsString("cq2"));
-    }
-
-    @Test
-    public void testLookupWithNoResults() {
-        // run the processor
-        runner.enqueue("trigger flow file");
-        runner.run();
-        runner.assertAllFlowFilesTransferred(TestRecordLookupProcessor.REL_FAILURE);
-
-        final List<Record> records = testLookupProcessor.getLookedupRecords();
-        assertNotNull(records);
-        assertEquals(0, records.size());
-    }
-
-    @Test
-    public void testLookupWhenMissingRowKeyCoordinate() {
-        runner.removeProperty(TestRecordLookupProcessor.HBASE_ROW);
-
-        // run the processor
-        runner.enqueue("trigger flow file");
-        runner.run();
-        runner.assertAllFlowFilesTransferred(TestRecordLookupProcessor.REL_FAILURE);
-
-        final List<Record> records = testLookupProcessor.getLookedupRecords();
-        assertNotNull(records);
-        assertEquals(0, records.size());
-    }
-}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
deleted file mode 100644
index bc06d1a831..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestProcessor.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.distributed.cache.client.DistributedMapCacheClient;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.exception.ProcessException;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class TestProcessor extends AbstractProcessor {
-
-    static final PropertyDescriptor HBASE_CLIENT_SERVICE = new PropertyDescriptor.Builder()
-            .name("HBase Client Service")
-            .description("HBaseClientService")
-            .identifiesControllerService(HBaseClientService.class)
-            .required(true)
-            .build();
-
-    static final PropertyDescriptor HBASE_CACHE_SERVICE = new PropertyDescriptor.Builder()
-            .name("HBase Cache Service")
-            .description("HBaseCacheService")
-            .identifiesControllerService(DistributedMapCacheClient.class)
-            .required(true)
-            .build();
-
-    @Override
-    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
-    }
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        List<PropertyDescriptor> propDescs = new ArrayList<>();
-        propDescs.add(HBASE_CLIENT_SERVICE);
-        propDescs.add(HBASE_CACHE_SERVICE);
-        return propDescs;
-    }
-}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestRecordLookupProcessor.java b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestRecordLookupProcessor.java
deleted file mode 100644
index 47d8880751..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/java/org/apache/nifi/hbase/TestRecordLookupProcessor.java
+++ /dev/null
@@ -1,112 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.nifi.hbase;
-
-import org.apache.nifi.components.PropertyDescriptor;
-import org.apache.nifi.flowfile.FlowFile;
-import org.apache.nifi.lookup.LookupFailureException;
-import org.apache.nifi.lookup.LookupService;
-import org.apache.nifi.processor.AbstractProcessor;
-import org.apache.nifi.processor.ProcessContext;
-import org.apache.nifi.processor.ProcessSession;
-import org.apache.nifi.processor.Relationship;
-import org.apache.nifi.processor.exception.ProcessException;
-import org.apache.nifi.processor.util.StandardValidators;
-import org.apache.nifi.serialization.record.Record;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-
-public class TestRecordLookupProcessor extends AbstractProcessor {
-    static final PropertyDescriptor HBASE_LOOKUP_SERVICE = new PropertyDescriptor.Builder()
-            .name("HBase Lookup Service")
-            .description("HBaseLookupService")
-            .identifiesControllerService(LookupService.class)
-            .required(true)
-            .build();
-
-    static final PropertyDescriptor HBASE_ROW = new PropertyDescriptor.Builder()
-            .name("HBase Row Id")
-            .description("The Row Id to Lookup.")
-            .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
-            .build();
-
-    static final Relationship REL_SUCCESS = new Relationship.Builder()
-            .name("success")
-            .description("All success FlowFiles are routed to this relationship")
-            .build();
-    static final Relationship REL_FAILURE = new Relationship.Builder()
-            .name("failure")
-            .description("All failed FlowFiles are routed to this relationship")
-            .build();
-
-    private List<Record> lookedupRecords = new ArrayList<>();
-
-    @Override
-    protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
-        List<PropertyDescriptor> propDescs = new ArrayList<>();
-        propDescs.add(HBASE_LOOKUP_SERVICE);
-        propDescs.add(HBASE_ROW);
-        return propDescs;
-    }
-
-    @Override
-    public Set<Relationship> getRelationships() {
-        Set<Relationship> relationships = new HashSet<>();
-        relationships.add(REL_SUCCESS);
-        relationships.add(REL_FAILURE);
-        return relationships;
-    }
-
-    @Override
-    public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
-        FlowFile flowFile = session.get();
-        if (flowFile == null) {
-            return;
-        }
-
-        final String rowKey = context.getProperty(HBASE_ROW).getValue();
-
-        final Map<String,Object> coordinates = new HashMap<>();
-        coordinates.put(HBase_1_1_2_RecordLookupService.ROW_KEY_KEY, rowKey);
-
-        final LookupService<Record> lookupService = context.getProperty(HBASE_LOOKUP_SERVICE).asControllerService(LookupService.class);
-        try {
-            final Optional<Record> record = lookupService.lookup(coordinates);
-            if (record.isPresent()) {
-                lookedupRecords.add(record.get());
-                session.transfer(flowFile, REL_SUCCESS);
-            } else {
-                session.transfer(flowFile, REL_FAILURE);
-            }
-
-        } catch (LookupFailureException e) {
-            session.transfer(flowFile, REL_FAILURE);
-        }
-
-    }
-
-    public List<Record> getLookedupRecords() {
-        return new ArrayList<>(lookedupRecords);
-    }
-}
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site-security.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site-security.xml
deleted file mode 100644
index 2aca105f9b..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site-security.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-    <property>
-        <name>fs.default.name</name>
-        <value>hdfs://hbase</value>
-    </property>
-    <property>
-        <name>hadoop.security.authentication</name>
-        <value>kerberos</value>
-    </property>
-    <property>
-        <name>hadoop.security.authorization</name>
-        <value>true</value>
-    </property>
-</configuration>
\ No newline at end of file
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
deleted file mode 100644
index c044ee30da..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/core-site.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-    <property>
-        <name>fs.default.name</name>
-        <value>hdfs://hbase</value>
-    </property>
-</configuration>
\ No newline at end of file
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/fake.keytab b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/fake.keytab
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site-security.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site-security.xml
deleted file mode 100644
index 0875ea8d96..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site-security.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-  <property>
-    <name>fs.default.name</name>
-    <value>hdfs://hbase</value>
-  </property>
-  <property>
-    <name>hbase.security.authentication</name>
-    <value>kerberos</value>
-  </property>
-  <property>
-    <name>hbase.security.authorization</name>
-    <value>true</value>
-  </property>
-</configuration>
\ No newline at end of file
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site.xml
deleted file mode 100644
index d022099f6c..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/hbase-site.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-      http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<configuration>
-  <property>
-    <name>fs.default.name</name>
-    <value>hdfs://hbase</value>
-  </property>
-</configuration>
\ No newline at end of file
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/krb5.conf b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/nifi-hbase_1_1_2-client-service/src/test/resources/krb5.conf
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml b/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
deleted file mode 100644
index 1bd516cf59..0000000000
--- a/nifi-nar-bundles/nifi-standard-services/nifi-hbase_1_1_2-client-service-bundle/pom.xml
+++ /dev/null
@@ -1,114 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements. See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License. You may obtain a copy of the License at
-  http://www.apache.org/licenses/LICENSE-2.0
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.nifi</groupId>
-        <artifactId>nifi-standard-services</artifactId>
-        <version>2.0.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>nifi-hbase_1_1_2-client-service-bundle</artifactId>
-    <packaging>pom</packaging>
-    <properties>
-        <hadoop.version>2.7.3</hadoop.version>
-    </properties>
-    <modules>
-        <module>nifi-hbase_1_1_2-client-service</module>
-        <module>nifi-hbase_1_1_2-client-service-nar</module>
-    </modules>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.rat</groupId>
-                <artifactId>apache-rat-plugin</artifactId>
-                <configuration>
-                    <excludes combine.children="append">
-                        <exclude>src/test/resources/fake.keytab</exclude>
-                        <exclude>src/test/resources/krb5.conf</exclude>
-                    </excludes>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-    <dependencyManagement>
-        <dependencies>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-common</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-hdfs</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-yarn-api</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-mapreduce-client-core</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-annotations</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-client</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.hadoop</groupId>
-                <artifactId>hadoop-auth</artifactId>
-                <version>${hadoop.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-                <version>${netty.3.version}</version>
-            </dependency>
-            <!-- Override commons-compress -->
-            <dependency>
-                <groupId>org.apache.commons</groupId>
-                <artifactId>commons-compress</artifactId>
-                <version>1.21</version>
-            </dependency>
-            <!-- Override commons-beanutils -->
-            <dependency>
-                <groupId>commons-beanutils</groupId>
-                <artifactId>commons-beanutils</artifactId>
-                <version>1.9.4</version>
-            </dependency>
-            <!-- Override zookeeper -->
-            <dependency>
-                <groupId>org.apache.zookeeper</groupId>
-                <artifactId>zookeeper</artifactId>
-                <version>${zookeeper.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>ch.qos.logback</groupId>
-                        <artifactId>logback-classic</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
-        </dependencies>
-    </dependencyManagement>
-</project>
diff --git a/nifi-nar-bundles/nifi-standard-services/pom.xml b/nifi-nar-bundles/nifi-standard-services/pom.xml
index 714cfe6baf..03442a4457 100644
--- a/nifi-nar-bundles/nifi-standard-services/pom.xml
+++ b/nifi-nar-bundles/nifi-standard-services/pom.xml
@@ -37,7 +37,6 @@
         <module>nifi-dbcp-service-api</module>
         <module>nifi-dbcp-service-bundle</module>
         <module>nifi-hbase-client-service-api</module>
-        <module>nifi-hbase_1_1_2-client-service-bundle</module>
         <module>nifi-hbase_2-client-service-bundle</module>
         <module>nifi-schema-registry-service-api</module>
         <module>nifi-record-serialization-service-api</module>


[nifi] 02/02: NIFI-11201 Added include-iotdb Maven build profile

Posted by mt...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

mthomsen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/nifi.git

commit 85f2162021d8f8c0049915c96a1d2b236086f65f
Author: exceptionfactory <ex...@apache.org>
AuthorDate: Sat Feb 18 20:38:51 2023 -0600

    NIFI-11201 Added include-iotdb Maven build profile
    
    This closes #6975
    
    - Corrected Fetch Size property evaluation in QueryIoTDBRecord for FlowFile attributes
    
    Signed-off-by: Mike Thomsen <mt...@apache.org>
---
 nifi-assembly/pom.xml                              | 17 +++++++++++++
 nifi-docs/src/main/asciidoc/developer-guide.adoc   | 28 +++++++++++-----------
 .../apache/nifi/processors/QueryIoTDBRecord.java   |  2 +-
 3 files changed, 32 insertions(+), 15 deletions(-)

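Before the hunks, a word on the second bullet in the commit message, which is easy to miss. As a rough sketch of why that one-line change in QueryIoTDBRecord matters (see the last hunk below), consider a minimal, hypothetical processor in which every name -- the class, the descriptor, the default value -- is a stand-in; only the evaluate-then-convert pattern is taken from this commit:

    import java.util.List;

    import org.apache.nifi.components.PropertyDescriptor;
    import org.apache.nifi.expression.ExpressionLanguageScope;
    import org.apache.nifi.flowfile.FlowFile;
    import org.apache.nifi.processor.AbstractProcessor;
    import org.apache.nifi.processor.ProcessContext;
    import org.apache.nifi.processor.ProcessSession;
    import org.apache.nifi.processor.exception.ProcessException;
    import org.apache.nifi.processor.util.StandardValidators;

    public class FetchSizeEvaluationSketch extends AbstractProcessor {

        // Hypothetical stand-in for the descriptor defined in the IoTDB bundle.
        // The key detail is the FLOWFILE_ATTRIBUTES scope, which tells users that
        // values such as ${iotdb.fetch.size} will be resolved per FlowFile.
        static final PropertyDescriptor FETCH_SIZE = new PropertyDescriptor.Builder()
                .name("Fetch Size")
                .description("Rows requested per fetch; supports Expression Language")
                .defaultValue("10000")
                .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR)
                .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
                .build();

        @Override
        protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
            return List.of(FETCH_SIZE);
        }

        @Override
        public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
            final FlowFile flowFile = session.get();
            if (flowFile == null) {
                return;
            }

            // Old code: context.getProperty(FETCH_SIZE).asInteger() would try to
            // parse the unevaluated text "${iotdb.fetch.size}" and fail.
            // Fixed code, matching the hunk below: evaluate first, then convert.
            final int fetchSize = context.getProperty(FETCH_SIZE)
                    .evaluateAttributeExpressions(flowFile)
                    .asInteger();
            getLogger().info("Using fetch size {}", fetchSize);

            // A real processor would query IoTDB and transfer to a relationship;
            // this sketch simply drops the FlowFile.
            session.remove(flowFile);
        }
    }

Once a descriptor declares FLOWFILE_ATTRIBUTES scope, callers are expected to route the value through evaluateAttributeExpressions(flowFile) before conversion; skipping that step, as the old line did, leaves the raw expression text in place, and asInteger() would fail with a NumberFormatException instead of honoring the attribute.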
diff --git a/nifi-assembly/pom.xml b/nifi-assembly/pom.xml
index 5a6f523f0e..1c3e6d9ad5 100644
--- a/nifi-assembly/pom.xml
+++ b/nifi-assembly/pom.xml
@@ -1044,6 +1044,23 @@ language governing permissions and limitations under the License. -->
                 </dependency>
             </dependencies>
         </profile>
+        <profile>
+            <id>include-iotdb</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+                <property>
+                    <name>allProfiles</name>
+                </property>
+            </activation>
+            <dependencies>
+                <dependency>
+                    <groupId>org.apache.nifi</groupId>
+                    <artifactId>nifi-iotdb-nar</artifactId>
+                    <version>2.0.0-SNAPSHOT</version>
+                    <type>nar</type>
+                </dependency>
+            </dependencies>
+        </profile>
         <profile>
             <id>include-media</id>
             <!-- This profile includes the NiFi Media Bundle which is a large package that exposes Apache Tika functionality
diff --git a/nifi-docs/src/main/asciidoc/developer-guide.adoc b/nifi-docs/src/main/asciidoc/developer-guide.adoc
index fe301e41ae..a31a422d1c 100644
--- a/nifi-docs/src/main/asciidoc/developer-guide.adoc
+++ b/nifi-docs/src/main/asciidoc/developer-guide.adoc
@@ -2721,20 +2721,20 @@ deprecationLogger.warn(
 
 [options="header,footer"]
 |==================================================================================================================================================
-| Package                                         | Maven Profile       | Description
-| Apache Accumulo Bundle | include-accumulo   | Adds support for https://accumulo.apache.org[Apache Accumulo].
-| Apache Atlas Bundle | include-atlas | Adds support for the Apache Atlas data governance tool. The functionality from this bundle is based around reporting tasks that integrate with https://atlas.apache.org[Apache Atlas].
-| Apache Hive 1.1 Bundle | include-hive1_1 | Adds support for Apache Hive 1.1.X.
-| Apache Hive 3 Bundle | include-hive3 | Adds support for Apache Hive 3.X
-| Apache Ranger Bundle | include-ranger | Adds support for https://ranger.apache.org[Apache Ranger].
-| ASN1 Support | include-asn1 | Adds support for ASN1
-| Contribution Check | contrib-check | Runs various quality checks that are required to be accepted before a contribution can be accepted into the core NiFi code base.
-| Graph Database Bundle | include-graph     | Adds support for various common graph database scenarios. Support is currently for https://neo4j.com/developer/cypher[Cypher] and https://tinkerpop.apache.org/gremlin.html[Gremlin]-compatible databases such as Neo4J and JanusGraph. Includes controller services that provide driver functionality and a suite of processors for ingestion and querying.
-| GRPC Bundle | include-grpc | **This profile is active in official builds and should be active** Provides support for the GRPC protocol.
-| Media Bundle | include-media | The media bundle provides functionality based on https://tika.apache.org[Apache Tika] for extracting content and metadata from various types of binary formats supported by Apache Tika (ex. PDF, Microsoft Office).
-| Rules Engine Bundle | include-rules | Adds support for creating scripted rules engines that can be integrated into existing flows. These rules engines can provide flexibility to people who need more complex flow logic or are more comfortable with flow decision-making using custom code.
-| Snowflake Bundle | include-snowflake | Adds support for integration with the https://www.snowflake.com[Snowflake platform].
-| SQL Reporting Bundle | include-sql-reporting | Adds reporting tasks that are designed to use SQL to update a RDBMS with metrics and other related data from Apache NiFi.
+| Package                | Maven Profile         | Description
+| Apache Accumulo Bundle | include-accumulo      | Adds support for https://accumulo.apache.org[Apache Accumulo].
+| Apache Atlas Bundle    | include-atlas         | Adds support for the Apache Atlas data governance tool. The functionality from this bundle is based around reporting tasks that integrate with https://atlas.apache.org[Apache Atlas].
+| Apache Hive 3 Bundle   | include-hive3         | Adds support for Apache Hive 3.x.
+| Apache IoTDB Bundle    | include-iotdb         | Adds support for https://iotdb.apache.org[Apache IoTDB].
+| Apache Ranger Bundle   | include-ranger        | Adds support for https://ranger.apache.org[Apache Ranger].
+| ASN.1 Support          | include-asn1          | Adds support for ASN.1.
+| Contribution Check     | contrib-check         | Runs various quality checks that are required to be accepted before a contribution can be accepted into the core NiFi code base.
+| Graph Database Bundle  | include-graph         | Adds support for various common graph database scenarios. Support is currently for https://neo4j.com/developer/cypher[Cypher] and https://tinkerpop.apache.org/gremlin.html[Gremlin]-compatible databases such as Neo4J and JanusGraph. Includes controller services that provide driver functionality and a suite of processors for ingestion and querying.
+| GRPC Bundle            | include-grpc          | **This profile is active in official builds and should remain active.** Provides support for the gRPC protocol.
+| Media Bundle           | include-media         | Provides functionality based on https://tika.apache.org[Apache Tika] for extracting content and metadata from the many binary formats that Tika supports (e.g., PDF, Microsoft Office).
+| Rules Engine Bundle    | include-rules         | Adds support for creating scripted rules engines that can be integrated into existing flows, offering flexibility to users who need more complex flow logic or prefer making flow decisions in custom code.
+| Snowflake Bundle       | include-snowflake     | Adds support for integration with the https://www.snowflake.com[Snowflake platform].
+| SQL Reporting Bundle   | include-sql-reporting | Adds reporting tasks designed to use SQL to update an RDBMS with metrics and other related data from Apache NiFi.
 |==================================================================================================================================================
 
 === Standard Build Instructions
diff --git a/nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/QueryIoTDBRecord.java b/nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/QueryIoTDBRecord.java
index 023b46980c..4946da4b4f 100755
--- a/nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/QueryIoTDBRecord.java
+++ b/nifi-nar-bundles/nifi-iotdb-bundle/nifi-iotdb-processors/src/main/java/org/apache/nifi/processors/QueryIoTDBRecord.java
@@ -109,7 +109,7 @@ public class QueryIoTDBRecord extends AbstractIoTDB {
         }
 
         final String query = context.getProperty(QUERY).evaluateAttributeExpressions(flowFile).getValue();
-        final int fetchSize = context.getProperty(FETCH_SIZE).asInteger();
+        final int fetchSize = context.getProperty(FETCH_SIZE).evaluateAttributeExpressions(flowFile).asInteger();
         final RecordSetWriterFactory recordSetWriterFactory = context.getProperty(RECORD_WRITER_FACTORY).asControllerService(RecordSetWriterFactory.class);
 
         try (