You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@nifi.apache.org by ph...@apache.org on 2018/06/06 14:14:32 UTC

[04/51] [partial] nifi-minifi-cpp git commit: MINIFICPP-512 - upgrade to librdkafka 0.11.4

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/README.md
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/README.md b/thirdparty/librdkafka-0.11.4/packaging/cmake/README.md
new file mode 100644
index 0000000..47ad2cb
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/README.md
@@ -0,0 +1,38 @@
+# Build librdkafka with cmake
+
+The cmake build mode is experimental and not officially supported,
+the community is asked to maintain and support this mode through PRs.
+
+Set up build environment (from top-level librdkafka directory):
+
+    $ cmake -H. -B_cmake_build
+
+On MacOSX and OpenSSL from Homebrew you might need to do:
+
+    $ cmake -H. -B_cmake_build -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl
+
+
+Build the library:
+
+    $ cmake --build _cmake_build
+
+If you want to build static library:
+
+    $ cmake --build _cmake_build -DRDKAFKA_BUILD_STATIC=1
+
+
+Run (local) tests:
+
+    $ (cd _cmake_build && ctest -VV -R RdKafkaTestBrokerLess)
+
+
+Install library:
+
+    $ cmake --build _cmake_build --target install
+
+
+If you use librdkafka as submodule in cmake project and want static link of librdkafka:
+
+      set(RDKAFKA_BUILD_STATIC ON CACHE BOOL "")
+      add_subdirectory(librdkafka)
+      target_link_libraries(your_library_or_executable rdkafka)

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/config.h.in
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/config.h.in b/thirdparty/librdkafka-0.11.4/packaging/cmake/config.h.in
new file mode 100644
index 0000000..5c03b4d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/config.h.in
@@ -0,0 +1,40 @@
+#cmakedefine01 WITHOUT_OPTIMIZATION
+#cmakedefine01 ENABLE_DEVEL
+#cmakedefine01 ENABLE_REFCNT_DEBUG
+#cmakedefine01 ENABLE_SHAREDPTR_DEBUG
+
+#cmakedefine01 HAVE_ATOMICS_32
+#cmakedefine01 HAVE_ATOMICS_32_SYNC
+
+#if (HAVE_ATOMICS_32)
+# if (HAVE_ATOMICS_32_SYNC)
+#  define ATOMIC_OP32(OP1,OP2,PTR,VAL) __sync_ ## OP1 ## _and_ ## OP2(PTR, VAL)
+# else
+#  define ATOMIC_OP32(OP1,OP2,PTR,VAL) __atomic_ ## OP1 ## _ ## OP2(PTR, VAL, __ATOMIC_SEQ_CST)
+# endif
+#endif
+
+#cmakedefine01 HAVE_ATOMICS_64
+#cmakedefine01 HAVE_ATOMICS_64_SYNC
+
+#if (HAVE_ATOMICS_64)
+# if (HAVE_ATOMICS_64_SYNC)
+#  define ATOMIC_OP64(OP1,OP2,PTR,VAL) __sync_ ## OP1 ## _and_ ## OP2(PTR, VAL)
+# else
+#  define ATOMIC_OP64(OP1,OP2,PTR,VAL) __atomic_ ## OP1 ## _ ## OP2(PTR, VAL, __ATOMIC_SEQ_CST)
+# endif
+#endif
+
+
+#cmakedefine01 WITH_ZLIB
+#cmakedefine01 WITH_LIBDL
+#cmakedefine01 WITH_PLUGINS
+#define WITH_SNAPPY 1
+#define WITH_SOCKEM 1
+#cmakedefine01 WITH_SSL
+#cmakedefine01 WITH_SASL
+#cmakedefine01 WITH_SASL_SCRAM
+#cmakedefine01 WITH_SASL_CYRUS
+#cmakedefine01 HAVE_REGEX
+#cmakedefine01 HAVE_STRNDUP
+#define SOLIB_EXT "${CMAKE_SHARED_LIBRARY_SUFFIX}"

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_32_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_32_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_32_test.c
new file mode 100644
index 0000000..de9738a
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_32_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int32_t foo (int32_t i) {
+  return __atomic_add_fetch(&i, 1, __ATOMIC_SEQ_CST);
+}
+
+int main() {
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_64_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_64_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_64_test.c
new file mode 100644
index 0000000..a713c74
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/atomic_64_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int64_t foo (int64_t i) {
+  return __atomic_add_fetch(&i, 1, __ATOMIC_SEQ_CST);
+}
+
+int main() {
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/dlopen_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/dlopen_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/dlopen_test.c
new file mode 100644
index 0000000..61c2504
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/dlopen_test.c
@@ -0,0 +1,11 @@
+#include <string.h>
+#include <dlfcn.h>
+
+int main() {
+        void *h;
+        /* Try loading anything, we don't care if it works */
+        h = dlopen("__nothing_rdkafka.so", RTLD_NOW|RTLD_LOCAL);
+        if (h)
+                dlclose(h);
+        return 0;
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/libsasl2_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/libsasl2_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/libsasl2_test.c
new file mode 100644
index 0000000..3f3ab34
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/libsasl2_test.c
@@ -0,0 +1,7 @@
+#include <string.h>
+#include <sasl/sasl.h>
+
+int main() {
+        sasl_done();
+        return 0;
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/rdkafka_setup.cmake
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/rdkafka_setup.cmake b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/rdkafka_setup.cmake
new file mode 100644
index 0000000..b5a3535
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/rdkafka_setup.cmake
@@ -0,0 +1,76 @@
+try_compile(
+    HAVE_REGEX
+    "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+    "${TRYCOMPILE_SRC_DIR}/regex_test.c"
+)
+
+try_compile(
+    HAVE_STRNDUP
+    "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+    "${TRYCOMPILE_SRC_DIR}/strndup_test.c"
+)
+
+# Atomic 32 tests {
+set(LINK_ATOMIC NO)
+set(HAVE_ATOMICS_32 NO)
+set(HAVE_ATOMICS_32_SYNC NO)
+
+try_compile(
+    _atomics_32
+    "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+    "${TRYCOMPILE_SRC_DIR}/atomic_32_test.c"
+)
+
+if(_atomics_32)
+  set(HAVE_ATOMICS_32 YES)
+else()
+  try_compile(
+      _atomics_32_lib
+      "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+      "${TRYCOMPILE_SRC_DIR}/atomic_32_test.c"
+      LINK_LIBRARIES "-latomic"
+  )
+  if(_atomics_32_lib)
+    set(HAVE_ATOMICS_32 YES)
+    set(LINK_ATOMIC YES)
+  else()
+    try_compile(
+        HAVE_ATOMICS_32_SYNC
+        "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+        "${TRYCOMPILE_SRC_DIR}/sync_32_test.c"
+    )
+  endif()
+endif()
+# }
+
+# Atomic 64 tests {
+set(HAVE_ATOMICS_64 NO)
+set(HAVE_ATOMICS_64_SYNC NO)
+
+try_compile(
+    _atomics_64
+    "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+    "${TRYCOMPILE_SRC_DIR}/atomic_64_test.c"
+)
+
+if(_atomics_64)
+  set(HAVE_ATOMICS_64 YES)
+else()
+  try_compile(
+      _atomics_64_lib
+      "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+      "${TRYCOMPILE_SRC_DIR}/atomic_64_test.c"
+      LINK_LIBRARIES "-latomic"
+  )
+  if(_atomics_64_lib)
+    set(HAVE_ATOMICS_64 YES)
+    set(LINK_ATOMIC YES)
+  else()
+    try_compile(
+        HAVE_ATOMICS_64_SYNC
+        "${CMAKE_CURRENT_BINARY_DIR}/try_compile"
+        "${TRYCOMPILE_SRC_DIR}/sync_64_test.c"
+    )
+  endif()
+endif()
+# }

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/regex_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/regex_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/regex_test.c
new file mode 100644
index 0000000..1d6eeb3
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/regex_test.c
@@ -0,0 +1,10 @@
+#include <stddef.h>
+#include <regex.h>
+
+int main() {
+   regcomp(NULL, NULL, 0);
+   regexec(NULL, NULL, 0, NULL, 0);
+   regerror(0, NULL, NULL, 0);
+   regfree(NULL);
+   return 0;
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/strndup_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/strndup_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/strndup_test.c
new file mode 100644
index 0000000..9b62043
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/strndup_test.c
@@ -0,0 +1,5 @@
+#include <string.h>
+
+int main() {
+   return strndup("hi", 2) ? 0 : 1;
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_32_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_32_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_32_test.c
new file mode 100644
index 0000000..44ba120
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_32_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int32_t foo (int32_t i) {
+  return __sync_add_and_fetch(&i, 1);
+}
+
+int main() {
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_64_test.c
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_64_test.c b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_64_test.c
new file mode 100644
index 0000000..ad06204
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/cmake/try_compile/sync_64_test.c
@@ -0,0 +1,8 @@
+#include <inttypes.h>
+
+int64_t foo (int64_t i) {
+  return __sync_add_and_fetch(&i, 1);
+}
+
+int main() {
+}

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/.gitignore
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/.gitignore b/thirdparty/librdkafka-0.11.4/packaging/debian/.gitignore
new file mode 100644
index 0000000..eb66d4d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/.gitignore
@@ -0,0 +1,6 @@
+*.log
+files
+librdkafka-dev
+librdkafka1-dbg
+librdkafka1
+tmp

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/changelog
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/changelog b/thirdparty/librdkafka-0.11.4/packaging/debian/changelog
new file mode 100644
index 0000000..c50cb5a
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/changelog
@@ -0,0 +1,66 @@
+librdkafka (0.8.6-1) unstable; urgency=medium
+
+  * New upstream release.
+  * Backport upstream commit f6fd0da, adding --disable-silent-rules
+    compatibility support to mklove. (Closes: #788742)
+
+ -- Faidon Liambotis <pa...@debian.org>  Sun, 19 Jul 2015 01:36:18 +0300
+
+librdkafka (0.8.5-2) unstable; urgency=medium
+
+  * Install rdkafka.pc in the right, multiarch location. (Closes: #766759)
+
+ -- Faidon Liambotis <pa...@debian.org>  Sun, 26 Oct 2014 06:47:07 +0200
+
+librdkafka (0.8.5-1) unstable; urgency=medium
+
+  * New upstream release.
+    - Fixes kFreeBSD FTBFS.
+  * Ship rdkafka.pc pkg-config in librdkafka-dev.
+
+ -- Faidon Liambotis <pa...@debian.org>  Fri, 24 Oct 2014 18:03:22 +0300
+
+librdkafka (0.8.4-1) unstable; urgency=medium
+
+  * New upstream release, including a new build system.
+    - Add Build-Depends on perl, required by configure.
+    - Support multiarch library paths.
+    - Better detection of architecture atomic builtins, supporting more
+      architectures. (Closes: #739930)
+    - Various portability bugs fixed. (Closes: #730506)
+    - Update debian/librdkafka1.symbols.
+  * Convert to a multiarch package.
+  * Switch to Architecture: any, because of renewed upstream portability.
+  * Update debian/copyright to add src/ before Files: paths.
+  * Update Standards-Version to 3.9.6, no changes needed.
+  * Ship only the C library for now, not the new C++ library; the latter is
+    still in flux in some ways and will probably be shipped in a separate
+    package in a future release.
+
+ -- Faidon Liambotis <pa...@debian.org>  Wed, 22 Oct 2014 23:57:24 +0300
+
+librdkafka (0.8.3-1) unstable; urgency=medium
+
+  * New upstream release.
+    - Multiple internal symbols hidden; breaks ABI without a SONAME bump, but
+      these were internal and should not break any applications, packaged or
+      not.
+  * Update Standards-Version to 3.9.5, no changes needed.
+
+ -- Faidon Liambotis <pa...@debian.org>  Tue, 18 Feb 2014 02:21:43 +0200
+
+librdkafka (0.8.1-1) unstable; urgency=medium
+
+  * New upstream release.
+    - Multiple fixes to FTBFS on various architectures. (Closes: #730506)
+    - Remove dh_auto_clean override, fixed upstream.
+  * Limit the set of architectures: upstream currently relies on 64-bit atomic
+    operations that several Debian architectures do not support.
+
+ -- Faidon Liambotis <pa...@debian.org>  Thu, 05 Dec 2013 16:53:28 +0200
+
+librdkafka (0.8.0-1) unstable; urgency=low
+
+  * Initial release. (Closes: #710271)
+
+ -- Faidon Liambotis <pa...@debian.org>  Mon, 04 Nov 2013 16:50:07 +0200

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/compat
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/compat b/thirdparty/librdkafka-0.11.4/packaging/debian/compat
new file mode 100644
index 0000000..ec63514
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/compat
@@ -0,0 +1 @@
+9

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/control
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/control b/thirdparty/librdkafka-0.11.4/packaging/debian/control
new file mode 100644
index 0000000..8274798
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/control
@@ -0,0 +1,49 @@
+Source: librdkafka
+Priority: optional
+Maintainer: Faidon Liambotis <pa...@debian.org>
+Build-Depends: debhelper (>= 9), zlib1g-dev, libssl-dev, libsasl2-dev, python
+Standards-Version: 3.9.6
+Section: libs
+Homepage: https://github.com/edenhill/librdkafka
+Vcs-Git: git://github.com/edenhill/librdkafka.git -b debian
+Vcs-Browser: https://github.com/edenhill/librdkafka/tree/debian
+
+Package: librdkafka1
+Architecture: any
+Multi-Arch: same
+Depends: ${shlibs:Depends}, ${misc:Depends}
+Description: library implementing the Apache Kafka protocol
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+
+Package: librdkafka-dev
+Section: libdevel
+Architecture: any
+Multi-Arch: same
+Depends: librdkafka1 (= ${binary:Version}), ${misc:Depends}
+Description: library implementing the Apache Kafka protocol (development headers)
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+ .
+ This package contains the development headers.
+
+Package: librdkafka1-dbg
+Section: debug
+Priority: extra
+Architecture: any
+Multi-Arch: same
+Depends: librdkafka1 (= ${binary:Version}), ${misc:Depends}
+Description: library implementing the Apache Kafka protocol (debugging symbols)
+ librdkafka is a C implementation of the Apache Kafka protocol. It currently
+ implements the 0.8 version of the protocol and can be used to develop both
+ Producers and Consumers.
+ .
+ More information about Apache Kafka can be found at http://kafka.apache.org/
+ .
+ This package contains the debugging symbols.

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/copyright
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/copyright b/thirdparty/librdkafka-0.11.4/packaging/debian/copyright
new file mode 100644
index 0000000..20885d9
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/copyright
@@ -0,0 +1,84 @@
+Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: librdkafka
+Source: https://github.com/edenhill/librdkafka
+
+License: BSD-2-clause
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are met:
+  .
+  1. Redistributions of source code must retain the above copyright notice,
+     this list of conditions and the following disclaimer.
+  2. Redistributions in binary form must reproduce the above copyright notice,
+     this list of conditions and the following disclaimer in the documentation
+     and/or other materials provided with the distribution.
+  .
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+  AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+  ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+  LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+  CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+  SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+  INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+  CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+  ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+  POSSIBILITY OF SUCH DAMAGE.
+
+Files: *
+Copyright: 2012-2015, Magnus Edenhill
+License: BSD-2-clause
+
+Files: src/rdcrc32.c src/rdcrc32.h
+Copyright: 2006-2012, Thomas Pircher <te...@gmx.net>
+License: MIT
+  Permission is hereby granted, free of charge, to any person obtaining a copy
+  of this software and associated documentation files (the "Software"), to deal
+  in the Software without restriction, including without limitation the rights
+  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+  copies of the Software, and to permit persons to whom the Software is
+  furnished to do so, subject to the following conditions:
+  .
+  The above copyright notice and this permission notice shall be included in
+  all copies or substantial portions of the Software.
+  . 
+  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+  THE SOFTWARE.
+
+Files: src/snappy.c src/snappy.h src/snappy_compat.h
+Copyright: 2005, Google Inc.
+           2011, Intel Corporation
+License: BSD-3-clause
+  Redistribution and use in source and binary forms, with or without
+  modification, are permitted provided that the following conditions are
+  met:
+  .
+      * Redistributions of source code must retain the above copyright
+  notice, this list of conditions and the following disclaimer.
+      * Redistributions in binary form must reproduce the above
+  copyright notice, this list of conditions and the following disclaimer
+  in the documentation and/or other materials provided with the
+  distribution.
+      * Neither the name of Google Inc. nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+  .
+  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+Files: debian/*
+Copyright: 2013 Faidon Liambotis <pa...@debian.org>
+License: BSD-2-clause

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/docs
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/docs b/thirdparty/librdkafka-0.11.4/packaging/debian/docs
new file mode 100644
index 0000000..891afcd
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/docs
@@ -0,0 +1,3 @@
+README.md
+INTRODUCTION.md
+CONFIGURATION.md

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/gbp.conf
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/gbp.conf b/thirdparty/librdkafka-0.11.4/packaging/debian/gbp.conf
new file mode 100644
index 0000000..b2a0f02
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/gbp.conf
@@ -0,0 +1,9 @@
+[buildpackage]
+upstream-tree=tag
+upstream-branch=master
+debian-branch=debian
+upstream-tag=%(version)s
+debian-tag=debian/%(version)s
+no-create-orig = True
+tarball-dir = ../tarballs
+export-dir = ../build-area

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.dirs
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.dirs b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.dirs
new file mode 100644
index 0000000..4418816
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.dirs
@@ -0,0 +1,2 @@
+usr/lib
+usr/include

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.examples
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.examples b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.examples
new file mode 100644
index 0000000..b45032e
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.examples
@@ -0,0 +1,2 @@
+examples/rdkafka_example.c
+examples/rdkafka_performance.c

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.install
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.install b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.install
new file mode 100644
index 0000000..478f660
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.install
@@ -0,0 +1,6 @@
+usr/include/*/rdkafka.h
+usr/include/*/rdkafkacpp.h
+usr/lib/*/librdkafka.a
+usr/lib/*/librdkafka.so
+usr/lib/*/librdkafka++.a
+usr/lib/*/librdkafka++.so

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.substvars
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.substvars b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.substvars
new file mode 100644
index 0000000..abd3ebe
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka-dev.substvars
@@ -0,0 +1 @@
+misc:Depends=

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka.dsc
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka.dsc b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka.dsc
new file mode 100644
index 0000000..65826d4
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka.dsc
@@ -0,0 +1,16 @@
+Format: 3.0 (quilt)
+Source: librdkafka
+Binary: librdkafka1, librdkafka-dev, librdkafka1-dbg
+Architecture: any
+Version: 0.9.1-1pre1
+Maintainer: Magnus Edenhill <li...@edenhill.se>
+Homepage: https://github.com/edenhill/librdkafka
+Standards-Version: 3.9.6
+Vcs-Browser: https://github.com/edenhill/librdkafka/tree/master
+Vcs-Git: git://github.com/edenhill/librdkafka.git -b master
+Build-Depends: debhelper (>= 9), zlib1g-dev, libssl-dev, libsasl2-dev, python
+Package-List:
+ librdkafka-dev deb libdevel optional arch=any
+ librdkafka1 deb libs optional arch=any
+ librdkafka1-dbg deb debug extra arch=any
+Original-Maintainer: Faidon Liambotis <pa...@debian.org>

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1-dbg.substvars
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1-dbg.substvars b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1-dbg.substvars
new file mode 100644
index 0000000..abd3ebe
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1-dbg.substvars
@@ -0,0 +1 @@
+misc:Depends=

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.dirs
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.dirs b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.dirs
new file mode 100644
index 0000000..6845771
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.dirs
@@ -0,0 +1 @@
+usr/lib

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.install
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.install b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.install
new file mode 100644
index 0000000..7e86e5f
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.install
@@ -0,0 +1,2 @@
+usr/lib/*/librdkafka.so.*
+usr/lib/*/librdkafka++.so.*

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postinst.debhelper
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postinst.debhelper b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postinst.debhelper
new file mode 100644
index 0000000..3d89d3e
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postinst.debhelper
@@ -0,0 +1,5 @@
+# Automatically added by dh_makeshlibs
+if [ "$1" = "configure" ]; then
+	ldconfig
+fi
+# End automatically added section

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postrm.debhelper
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postrm.debhelper b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postrm.debhelper
new file mode 100644
index 0000000..7f44047
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.postrm.debhelper
@@ -0,0 +1,5 @@
+# Automatically added by dh_makeshlibs
+if [ "$1" = "remove" ]; then
+	ldconfig
+fi
+# End automatically added section

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.symbols
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.symbols b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.symbols
new file mode 100644
index 0000000..0ef576e
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/librdkafka1.symbols
@@ -0,0 +1,64 @@
+librdkafka.so.1 librdkafka1 #MINVER#
+* Build-Depends-Package: librdkafka-dev
+ rd_kafka_brokers_add@Base 0.8.0
+ rd_kafka_conf_destroy@Base 0.8.0
+ rd_kafka_conf_dump@Base 0.8.3
+ rd_kafka_conf_dump_free@Base 0.8.3
+ rd_kafka_conf_dup@Base 0.8.3
+ rd_kafka_conf_new@Base 0.8.0
+ rd_kafka_conf_properties_show@Base 0.8.0
+ rd_kafka_conf_set@Base 0.8.0
+ rd_kafka_conf_set_dr_cb@Base 0.8.0
+ rd_kafka_conf_set_dr_msg_cb@Base 0.8.4
+ rd_kafka_conf_set_error_cb@Base 0.8.0
+ rd_kafka_conf_set_log_cb@Base 0.8.4
+ rd_kafka_conf_set_opaque@Base 0.8.0
+ rd_kafka_conf_set_open_cb@Base 0.8.4
+ rd_kafka_conf_set_socket_cb@Base 0.8.4
+ rd_kafka_conf_set_stats_cb@Base 0.8.0
+ rd_kafka_consume@Base 0.8.0
+ rd_kafka_consume_batch@Base 0.8.0
+ rd_kafka_consume_batch_queue@Base 0.8.4
+ rd_kafka_consume_callback@Base 0.8.0
+ rd_kafka_consume_callback_queue@Base 0.8.4
+ rd_kafka_consume_queue@Base 0.8.4
+ rd_kafka_consume_start@Base 0.8.0
+ rd_kafka_consume_start_queue@Base 0.8.4
+ rd_kafka_consume_stop@Base 0.8.0
+ rd_kafka_destroy@Base 0.8.0
+ rd_kafka_dump@Base 0.8.0
+ rd_kafka_err2str@Base 0.8.0
+ rd_kafka_errno2err@Base 0.8.3
+ rd_kafka_log_print@Base 0.8.0
+ rd_kafka_log_syslog@Base 0.8.0
+ rd_kafka_message_destroy@Base 0.8.0
+ rd_kafka_metadata@Base 0.8.4
+ rd_kafka_metadata_destroy@Base 0.8.4
+ rd_kafka_msg_partitioner_random@Base 0.8.0
+ rd_kafka_name@Base 0.8.0
+ rd_kafka_new@Base 0.8.0
+ rd_kafka_offset_store@Base 0.8.3
+ rd_kafka_opaque@Base 0.8.4
+ rd_kafka_outq_len@Base 0.8.0
+ rd_kafka_poll@Base 0.8.0
+ rd_kafka_produce@Base 0.8.0
+ rd_kafka_produce_batch@Base 0.8.4
+ rd_kafka_queue_destroy@Base 0.8.4
+ rd_kafka_queue_new@Base 0.8.4
+ rd_kafka_set_log_level@Base 0.8.0
+ rd_kafka_set_logger@Base 0.8.0
+ rd_kafka_thread_cnt@Base 0.8.0
+ rd_kafka_topic_conf_destroy@Base 0.8.0
+ rd_kafka_topic_conf_dump@Base 0.8.3
+ rd_kafka_topic_conf_dup@Base 0.8.3
+ rd_kafka_topic_conf_new@Base 0.8.0
+ rd_kafka_topic_conf_set@Base 0.8.0
+ rd_kafka_topic_conf_set_opaque@Base 0.8.0
+ rd_kafka_topic_conf_set_partitioner_cb@Base 0.8.0
+ rd_kafka_topic_destroy@Base 0.8.0
+ rd_kafka_topic_name@Base 0.8.0
+ rd_kafka_topic_new@Base 0.8.0
+ rd_kafka_topic_partition_available@Base 0.8.0
+ rd_kafka_version@Base 0.8.1
+ rd_kafka_version_str@Base 0.8.1
+ rd_kafka_wait_destroyed@Base 0.8.0

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/rules
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/rules b/thirdparty/librdkafka-0.11.4/packaging/debian/rules
new file mode 100755
index 0000000..a18c40d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/rules
@@ -0,0 +1,19 @@
+#!/usr/bin/make -f
+
+# Uncomment this to turn on verbose mode.
+#export DH_VERBOSE=1
+
+%:
+	dh $@
+
+override_dh_strip:
+	dh_strip --dbg-package=librdkafka1-dbg
+
+override_dh_auto_install:
+	dh_auto_install
+	install -D -m 0644 rdkafka.pc \
+		debian/librdkafka-dev/usr/lib/${DEB_HOST_MULTIARCH}/pkgconfig/rdkafka.pc
+	install -D -m 0644 rdkafka-static.pc \
+		debian/librdkafka-dev/usr/lib/${DEB_HOST_MULTIARCH}/pkgconfig/rdkafka-static.pc
+
+.PHONY: override_dh_strip override_dh_auto_install

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/source/format
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/source/format b/thirdparty/librdkafka-0.11.4/packaging/debian/source/format
new file mode 100644
index 0000000..163aaf8
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/source/format
@@ -0,0 +1 @@
+3.0 (quilt)

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/debian/watch
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/debian/watch b/thirdparty/librdkafka-0.11.4/packaging/debian/watch
new file mode 100644
index 0000000..fc9aec8
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/debian/watch
@@ -0,0 +1,2 @@
+version=3
+http://github.com/edenhill/librdkafka/tags .*/(\d[\d\.]*)\.tar\.gz

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/get_version.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/get_version.py b/thirdparty/librdkafka-0.11.4/packaging/get_version.py
new file mode 100755
index 0000000..3d98d21
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/get_version.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python
+
+import sys
+
+if len(sys.argv) != 2:
+    raise Exception('Usage: %s path/to/rdkafka.h' % sys.argv[0])
+
+kafka_h_file = sys.argv[1]
+f = open(kafka_h_file)
+for line in f:
+    if '#define RD_KAFKA_VERSION' in line:
+        version = line.split()[-1]
+        break
+f.close()
+
+major = int(version[2:4], 16)
+minor = int(version[4:6], 16)
+patch = int(version[6:8], 16)
+version = '.'.join(str(item) for item in (major, minor, patch))
+
+print version

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/homebrew/README.md
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/homebrew/README.md b/thirdparty/librdkafka-0.11.4/packaging/homebrew/README.md
new file mode 100644
index 0000000..a23a085
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/homebrew/README.md
@@ -0,0 +1,15 @@
+# Update the Homebrew librdkafka package version
+
+The `./brew-update-pr.sh` script in this directory updates the
+brew formula for librdkafka and pushes a PR to the homebrew-core repository.
+
+You should run it in two steps, first an implicit dry-run mode
+to check that things seem correct, and if that checks out a
+live upload mode which actually pushes the PR.
+
+    # Do a dry-run first, v0.11.0 is the librdkafka tag:
+    $ ./brew-update-pr.sh v0.11.0
+
+    # If everything looks okay, run the live upload mode:
+    $ ./brew-update-pr.sh --upload v0.11.0
+

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/homebrew/brew-update-pr.sh
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/homebrew/brew-update-pr.sh b/thirdparty/librdkafka-0.11.4/packaging/homebrew/brew-update-pr.sh
new file mode 100755
index 0000000..f756159
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/homebrew/brew-update-pr.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Automatically pushes a PR to homebrew-core to update
+# the librdkafka version.
+#
+# Usage:
+#   # Dry-run:
+#   ./brew-update-pr.sh v0.11.0
+#   # if everything looks good:
+#   ./brew-update-pr.sh --upload v0.11.0
+#
+
+
+DRY_RUN="--dry-run"
+if [[ $1 == "--upload" ]]; then
+   DRY_RUN=
+   shift
+fi
+
+TAG=$1
+
+if [[ -z $TAG ]]; then
+    echo "Usage: $0 [--upload] <librdkafka-tag>"
+    exit 1
+fi
+
+set -eu
+
+brew bump-formula-pr $DRY_RUN --strict \
+     --url=https://github.com/edenhill/librdkafka/archive/${TAG}.tar.gz \
+     librdkafka

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/.gitignore
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/.gitignore b/thirdparty/librdkafka-0.11.4/packaging/nuget/.gitignore
new file mode 100644
index 0000000..712f08d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/.gitignore
@@ -0,0 +1,5 @@
+dl-*
+out-*
+*.nupkg
+*.pyc
+__pycache__

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/README.md
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/README.md b/thirdparty/librdkafka-0.11.4/packaging/nuget/README.md
new file mode 100644
index 0000000..720a767
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/README.md
@@ -0,0 +1,50 @@
+# NuGet package assembly
+
+This set of scripts collect CI artifacts from S3 and assembles
+them into a NuGet package structure staging directory.
+The NuGet tool is then run (from within docker) on this staging directory
+to create a proper NuGet package (with all the metadata).
+
+The finalized nuget package may be uploaded manually to NuGet.org
+
+## Requirements
+
+ * Requires Python 2.x (due to Python 3 compat issues with rpmfile)
+ * Requires Docker
+ * Requires private S3 access keys for the librdkafka-ci-packages bucket.
+
+
+
+## Usage
+
+1. Trigger CI builds by creating and pushing a new release (candidate) tag
+   in the librdkafka repo. Make sure the tag is created on the correct branch.
+
+    $ git tag v0.11.0
+    $ git push origin v0.11.0
+
+2. Wait for CI builds to finish, monitor the builds here:
+
+ * https://travis-ci.org/edenhill/librdkafka
+ * https://ci.appveyor.com/project/edenhill/librdkafka
+
+3. On a Linux host, run the release.py script to assemble the NuGet package
+
+    $ cd packaging/nuget
+    # Specify the tag
+    $ ./release.py v0.11.0
+    # Optionally, if the tag was moved and an exact sha is also required:
+    # $ ./release.py --sha <the-full-git-sha> v0.11.0
+
+4. If all artifacts were available the NuGet package will be built
+   and reside in the current directory as librdkafka.redist.<v-less-tag>.nupkg
+
+5. Test the package manually
+
+6. Upload the package to NuGet
+
+ * https://www.nuget.org/packages/manage/upload
+
+
+
+

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/artifact.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/artifact.py b/thirdparty/librdkafka-0.11.4/packaging/nuget/artifact.py
new file mode 100755
index 0000000..61b1d80
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/artifact.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+#
+#
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+#  <token>-[<value>]__   (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+#  p       - project (e.g., "confluent-kafka-python")
+#  bld     - builder (e.g., "travis")
+#  plat    - platform ("osx", "linux", ..)
+#  arch    - arch ("x64", ..)
+#  tag     - git tag
+#  sha     - git sha
+#  bid     - builder's build-id
+#  bldtype - Release, Debug (appveyor)
+#
+# Example:
+#   p-confluent-kafka-python__bld-travis__plat-linux__tag-__sha-112130ce297656ea1c39e7c94c99286f95133a24__bid-271588764__/confluent_kafka-0.11.0-cp35-cp35m-manylinux1_x86_64.whl
+
+
+import re
+import os
+import argparse
+import boto3
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+class Artifact (object):
+    def __init__(self, arts, path, info=None):
+        self.path = path
+        # Remove unexpanded AppVeyor $(..) tokens from filename
+        self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+        slpath = os.path.join(os.path.dirname(path), self.fname)
+        if os.path.isfile(slpath):
+            # Already points to local file in correct location
+            self.lpath = slpath
+        else:
+            # Prepare download location in dlpath
+            self.lpath = os.path.join(arts.dlpath, slpath)
+
+        if info is None:
+            self.info = dict()
+        else:
+            # Assign the map and convert all keys to lower case
+            self.info = {k.lower(): v for k, v in info.items()}
+            # Rename values, e.g., 'plat':'linux' to 'plat':'debian'
+            for k,v in self.info.items():
+                rdict = packaging.rename_vals.get(k, None)
+                if rdict is not None:
+                    self.info[k] = rdict.get(v, v)
+
+        # Score value for sorting
+        self.score = 0
+
+        # AppVeyor symbol builds are of less value
+        if self.fname.find('.symbols.') != -1:
+            self.score -= 10
+
+        self.arts = arts
+        arts.artifacts.append(self)
+
+
+    def __repr__(self):
+        return self.path
+
+    def __lt__ (self, other):
+        return self.score < other.score
+
+    def download(self):
+        """ Download artifact from S3 and store in local directory .lpath.
+            If the artifact is already downloaded nothing is done. """
+        if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+            return
+        print('Downloading %s -> %s' % (self.path, self.lpath))
+        if dry_run:
+            return
+        ldir = os.path.dirname(self.lpath)
+        if not os.path.isdir(ldir):
+            os.makedirs(ldir, 0o755)
+        self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+    def __init__(self, match, dlpath):
+        super(Artifacts, self).__init__()
+        self.match = match
+        self.artifacts = list()
+        # Download directory (make sure it ends with a path separator)
+        if not dlpath.endswith(os.path.sep):
+            dlpath = os.path.join(dlpath, '')
+        self.dlpath = dlpath
+        if not os.path.isdir(self.dlpath):
+            if not dry_run:
+                os.makedirs(self.dlpath, 0o755)
+
+    def collect_single(self, path, req_tag=True):
+        """ Collect single artifact, be it in S3 or locally.
+        :param: path string: S3 or local (relative) path
+        :param: req_tag bool: Require tag to match.
+        """
+
+        print('?  %s' % path)
+
+        # For local files, strip download path.
+        # Also ignore any parent directories.
+        if path.startswith(self.dlpath):
+            folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+        else:
+            folder = os.path.basename(os.path.dirname(path))
+
+        # The folder contains the tokens needed to perform
+        # matching of project, gitref, etc.
+        rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)__', folder)
+        if rinfo is None or len(rinfo) == 0:
+            print('Incorrect folder/file name format for %s' % folder)
+            return None
+
+        info = dict(rinfo)
+
+        # Ignore AppVeyor Debug builds
+        if info.get('bldtype', '').lower() == 'debug':
+            print('Ignoring debug artifact %s' % folder)
+            return None
+
+        tag = info.get('tag', None)
+        if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+            # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+            # with an empty value when not set, it leaves that token
+            # in the string - so translate that to no tag.
+            del info['tag']
+
+        # Match tag or sha to gitref
+        unmatched = list()
+        for m,v in self.match.items():
+            if m not in info or info[m] != v:
+                unmatched.append(m)
+
+        # Make sure all matches were satisfied, unless this is a
+        # common artifact.
+        if info.get('p', '') != 'common' and len(unmatched) > 0:
+            print(info)
+            print('%s: %s did not match %s' % (info.get('p', None), folder, unmatched))
+            return None
+
+        return Artifact(self, path, info)
+
+
+    def collect_s3(self):
+        """ Collect and download build-artifacts from S3 based on git reference """
+        print('Collecting artifacts matching %s from S3 bucket %s' % (self.match, s3_bucket))
+        self.s3 = boto3.resource('s3')
+        self.s3_bucket = self.s3.Bucket(s3_bucket)
+        self.s3_client = boto3.client('s3')
+        for item in self.s3_client.list_objects(Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):
+            self.collect_single(item.get('Key'))
+
+        for a in self.artifacts:
+            a.download()
+
+    def collect_local(self, path, req_tag=True):
+        """ Collect artifacts from a local directory possibly previously
+        collected from s3 """
+        for f in [os.path.join(dp, f) for dp, dn, filenames in os.walk(path) for f in filenames]:
+            if not os.path.isfile(f):
+                continue
+            self.collect_single(f, req_tag)
+
+

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip b/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip
new file mode 100644
index 0000000..8f24c8d
Binary files /dev/null and b/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-win32__bldtype-Release/msvcr120.zip differ

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip b/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip
new file mode 100644
index 0000000..773546c
Binary files /dev/null and b/thirdparty/librdkafka-0.11.4/packaging/nuget/common/p-common__plat-windows__arch-x64__bldtype-Release/msvcr120.zip differ

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/nuget.sh
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/nuget.sh b/thirdparty/librdkafka-0.11.4/packaging/nuget/nuget.sh
new file mode 100755
index 0000000..0323712
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/nuget.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+#
+#
+# Front-end for nuget that runs nuget in a docker image.
+
+set -ex
+
+if [[ -f /.dockerenv ]]; then
+    echo "Inside docker"
+
+    pushd $(dirname $0)
+
+    nuget $*
+
+    popd
+
+else
+    echo "Running docker image"
+    docker run -v $(pwd):/io mono:latest /io/$0 $*
+fi
+

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/packaging.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/packaging.py b/thirdparty/librdkafka-0.11.4/packaging/nuget/packaging.py
new file mode 100755
index 0000000..c8e7479
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/packaging.py
@@ -0,0 +1,421 @@
+#!/usr/bin/env python
+#
+# NuGet packaging script.
+# Assembles a NuGet package using CI artifacts in S3
+# and calls nuget (in docker) to finalize the package.
+#
+
+import sys
+import re
+import os
+import tempfile
+import shutil
+import subprocess
+import urllib
+from string import Template
+from collections import defaultdict
+import boto3
+from zfile import zfile
+
+
+# Rename token values
+rename_vals = {'plat': {'windows': 'win7'},
+               'arch': {'x86_64': 'x64',
+                        'i386': 'x86',
+                        'win32': 'x86'}}
+
+# Collects CI artifacts from S3 storage, downloading them
+# to a local directory, or collecting already downloaded artifacts from
+# local directory.
+#
+# The artifacts' folder in the S3 bucket must have the following token
+# format:
+#  <token>-[<value>]__   (repeat)
+#
+# Recognized tokens (unrecognized tokens are ignored):
+#  p       - project (e.g., "confluent-kafka-python")
+#  bld     - builder (e.g., "travis")
+#  plat    - platform ("osx", "linux", ..)
+#  arch    - arch ("x64", ..)
+#  tag     - git tag
+#  sha     - git sha
+#  bid     - builder's build-id
+#  bldtype - Release, Debug (appveyor)
+#
+# Example:
+#   librdkafka/p-librdkafka__bld-travis__plat-linux__arch-x64__tag-v0.0.62__sha-d051b2c19eb0c118991cd8bc5cf86d8e5e446cde__bid-1562.1/librdkafka.tar.gz
+
+
+s3_bucket = 'librdkafka-ci-packages'
+dry_run = False
+
+class Artifact (object):
+    def __init__(self, arts, path, info=None):
+        self.path = path
+        # Remove unexpanded AppVeyor $(..) tokens from filename
+        self.fname = re.sub(r'\$\([^\)]+\)', '', os.path.basename(path))
+        slpath = os.path.join(os.path.dirname(path), self.fname)
+        if os.path.isfile(slpath):
+            # Already points to local file in correct location
+            self.lpath = slpath
+        else:
+            # Prepare download location in dlpath
+            self.lpath = os.path.join(arts.dlpath, slpath)
+
+        if info is None:
+            self.info = dict()
+        else:
+            # Assign the map and convert all keys to lower case
+            self.info = {k.lower(): v for k, v in info.items()}
+            # Rename values, e.g., 'plat':'linux' to 'plat':'debian'
+            for k,v in self.info.items():
+                rdict = rename_vals.get(k, None)
+                if rdict is not None:
+                    self.info[k] = rdict.get(v, v)
+
+        # Score value for sorting
+        self.score = 0
+
+        # AppVeyor symbol builds are of less value
+        if self.fname.find('.symbols.') != -1:
+            self.score -= 10
+
+        self.arts = arts
+        arts.artifacts.append(self)
+
+
+    def __repr__(self):
+        return self.path
+
+    def __lt__ (self, other):
+        return self.score < other.score
+
+    def download(self):
+        """ Download artifact from S3 and store in local directory .lpath.
+            If the artifact is already downloaded nothing is done. """
+        if os.path.isfile(self.lpath) and os.path.getsize(self.lpath) > 0:
+            return
+        print('Downloading %s' % self.path)
+        if dry_run:
+            return
+        ldir = os.path.dirname(self.lpath)
+        if not os.path.isdir(ldir):
+            os.makedirs(ldir, 0o755)
+        self.arts.s3_bucket.download_file(self.path, self.lpath)
+
+
+class Artifacts (object):
+    def __init__(self, match, dlpath):
+        super(Artifacts, self).__init__()
+        self.match = match
+        self.artifacts = list()
+        # Download directory (make sure it ends with a path separator)
+        if not dlpath.endswith(os.path.sep):
+            dlpath = os.path.join(dlpath, '')
+        self.dlpath = dlpath
+        if not os.path.isdir(self.dlpath):
+            if not dry_run:
+                os.makedirs(self.dlpath, 0o755)
+
+
+    def collect_single(self, path, req_tag=True):
+        """ Collect single artifact, be it in S3 or locally.
+        :param: path string: S3 or local (relative) path
+        :param: req_tag bool: Require tag to match.
+        """
+
+        #print('?  %s' % path)
+
+        # For local files, strip download path.
+        # Also ignore any parent directories.
+        if path.startswith(self.dlpath):
+            folder = os.path.basename(os.path.dirname(path[len(self.dlpath):]))
+        else:
+            folder = os.path.basename(os.path.dirname(path))
+
+        # The folder contains the tokens needed to perform
+        # matching of project, gitref, etc.
+        rinfo = re.findall(r'(?P<tag>[^-]+)-(?P<val>.*?)__', folder)
+        if rinfo is None or len(rinfo) == 0:
+            print('Incorrect folder/file name format for %s' % folder)
+            return None
+
+        info = dict(rinfo)
+
+        # Ignore AppVeyor Debug builds
+        if info.get('bldtype', '').lower() == 'debug':
+            print('Ignoring debug artifact %s' % folder)
+            return None
+
+        tag = info.get('tag', None)
+        if tag is not None and (len(tag) == 0 or tag.startswith('$(')):
+            # AppVeyor doesn't substitute $(APPVEYOR_REPO_TAG_NAME)
+            # with an empty value when not set, it leaves that token
+            # in the string - so translate that to no tag.
+            del info['tag']
+
+        # Perform matching
+        unmatched = list()
+        for m,v in self.match.items():
+            if m not in info or info[m] != v:
+                unmatched.append(m)
+
+        # Make sure all matches were satisfied, unless this is a
+        # common artifact.
+        if info.get('p', '') != 'common' and len(unmatched) > 0:
+            # print('%s: %s did not match %s' % (info.get('p', None), folder, unmatched))
+            return None
+
+        return Artifact(self, path, info)
+
+
+    def collect_s3(self):
+        """ Collect and download build-artifacts from S3 based on git reference """
+        print('Collecting artifacts matching %s from S3 bucket %s' % (self.match, s3_bucket))
+        self.s3 = boto3.resource('s3')
+        self.s3_bucket = self.s3.Bucket(s3_bucket)
+        self.s3_client = boto3.client('s3')
+        for item in self.s3_client.list_objects(Bucket=s3_bucket, Prefix='librdkafka/').get('Contents'):
+            self.collect_single(item.get('Key'))
+
+        for a in self.artifacts:
+            a.download()
+
+    def collect_local(self, path, req_tag=True):
+        """ Collect artifacts from a local directory possibly previously
+        collected from s3 """
+        for f in [os.path.join(dp, f) for dp, dn, filenames in os.walk(path) for f in filenames]:
+            if not os.path.isfile(f):
+                continue
+            self.collect_single(f, req_tag)
+
+
+class Package (object):
+    """ Generic Package class
+        A Package is a working container for one or more output
+        packages for a specific package type (e.g., nuget) """
+
+    def __init__ (self, version, arts, ptype):
+        super(Package, self).__init__()
+        self.version = version
+        self.arts = arts
+        self.ptype = ptype
+        # These may be overwritten by specific sub-classes:
+        self.artifacts = arts.artifacts
+        # Staging path, filled in later.
+        self.stpath = None
+        self.kv = {'version': version}
+        self.files = dict()
+
+    def add_file (self, file):
+        self.files[file] = True
+
+    def build (self):
+        """ Build package output(s), return a list of paths to built packages """
+        raise NotImplementedError
+
+    def cleanup (self):
+        """ Optional cleanup routine for removing temporary files, etc. """
+        pass
+
+    def verify (self, path):
+        """ Optional post-build package verifier """
+        pass
+
+    def render (self, fname, destpath='.'):
+        """ Render template in file fname and save to destpath/fname,
+        where destpath is relative to stpath """
+
+        outf = os.path.join(self.stpath, destpath, fname)
+
+        if not os.path.isdir(os.path.dirname(outf)):
+            os.makedirs(os.path.dirname(outf), 0o0755)
+
+        with open(os.path.join('templates', fname), 'r') as tf:
+            tmpl = Template(tf.read())
+        with open(outf, 'w') as of:
+            of.write(tmpl.substitute(self.kv))
+
+        self.add_file(outf)
+
+
+    def copy_template (self, fname, target_fname=None, destpath='.'):
+        """ Copy template file to destpath/fname
+        where destpath is relative to stpath """
+
+        if target_fname is None:
+            target_fname = fname
+        outf = os.path.join(self.stpath, destpath, target_fname)
+
+        if not os.path.isdir(os.path.dirname(outf)):
+            os.makedirs(os.path.dirname(outf), 0o0755)
+
+        shutil.copy(os.path.join('templates', fname), outf)
+
+        self.add_file(outf)
+
+
+class NugetPackage (Package):
+    """ All platforms, archs, et.al, are bundled into one set of
+        NuGet output packages: "main", redist and symbols """
+    def __init__ (self, version, arts):
+        if version.startswith('v'):
+            version = version[1:] # Strip v prefix
+        super(NugetPackage, self).__init__(version, arts, "nuget")
+
+    def cleanup(self):
+        if os.path.isdir(self.stpath):
+            shutil.rmtree(self.stpath)
+
+    def build (self, buildtype):
+        """ Build single NuGet package for all its artifacts. """
+
+        # NuGet removes the prefixing v from the version.
+        vless_version = self.kv['version']
+        if vless_version[0] == 'v':
+            vless_version = vless_version[1:]
+
+
+        self.stpath = tempfile.mkdtemp(prefix="out-", suffix="-%s" % buildtype,
+                                       dir=".")
+
+        self.render('librdkafka.redist.nuspec')
+        self.copy_template('librdkafka.redist.targets',
+                           destpath=os.path.join('build', 'native'))
+        self.copy_template('librdkafka.redist.props',
+                           destpath='build')
+        for f in ['../../README.md', '../../CONFIGURATION.md', '../../LICENSES.txt']:
+            shutil.copy(f, self.stpath)
+
+        # Generate template tokens for artifacts
+        for a in self.arts.artifacts:
+            if 'bldtype' not in a.info:
+                a.info['bldtype'] = 'release'
+
+            a.info['variant'] = '%s-%s-%s' % (a.info.get('plat'),
+                                              a.info.get('arch'),
+                                              a.info.get('bldtype'))
+            if 'toolset' not in a.info:
+                a.info['toolset'] = 'v120'
+
+        mappings = [
+            [{'arch': 'x64', 'plat': 'linux', 'fname_startswith': 'librdkafka.tar.gz'}, './include/librdkafka/rdkafka.h', 'build/native/include/librdkafka/rdkafka.h'],
+            [{'arch': 'x64', 'plat': 'linux', 'fname_startswith': 'librdkafka.tar.gz'}, './include/librdkafka/rdkafkacpp.h', 'build/native/include/librdkafka/rdkafkacpp.h'],
+
+            [{'arch': 'x64', 'plat': 'osx', 'fname_startswith': 'librdkafka.tar.gz'}, './lib/librdkafka.dylib', 'runtimes/osx-x64/native/librdkafka.dylib'],
+            [{'arch': 'x64', 'plat': 'linux', 'fname_startswith': 'librdkafka-debian9.tgz'}, './lib/librdkafka.so.1', 'runtimes/linux-x64/native/debian9-librdkafka.so'],
+            [{'arch': 'x64', 'plat': 'linux', 'fname_startswith': 'librdkafka.tar.gz'}, './lib/librdkafka.so.1', 'runtimes/linux-x64/native/librdkafka.so'],
+
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'msvcr120.zip'}, 'msvcr120.dll', 'runtimes/win7-x64/native/msvcr120.dll'],
+            # matches librdkafka.redist.{VER}.nupkg
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/x64/Release/librdkafka.dll', 'runtimes/win7-x64/native/librdkafka.dll'],
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/x64/Release/librdkafkacpp.dll', 'runtimes/win7-x64/native/librdkafkacpp.dll'],
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/x64/Release/zlib.dll', 'runtimes/win7-x64/native/zlib.dll'],
+            # matches librdkafka.{VER}.nupkg
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'librdkafka', 'fname_excludes': ['redist', 'symbols']},
+             'build/native/lib/v120/x64/Release/librdkafka.lib', 'build/native/lib/win7/x64/win7-x64-Release/v120/librdkafka.lib'],
+            [{'arch': 'x64', 'plat': 'win7', 'fname_startswith': 'librdkafka', 'fname_excludes': ['redist', 'symbols']},
+             'build/native/lib/v120/x64/Release/librdkafkacpp.lib', 'build/native/lib/win7/x64/win7-x64-Release/v120/librdkafkacpp.lib'],
+
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'msvcr120.zip'}, 'msvcr120.dll', 'runtimes/win7-x86/native/msvcr120.dll'],
+            # matches librdkafka.redist.{VER}.nupkg
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/Win32/Release/librdkafka.dll', 'runtimes/win7-x86/native/librdkafka.dll'],
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/Win32/Release/librdkafkacpp.dll', 'runtimes/win7-x86/native/librdkafkacpp.dll'],
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'librdkafka.redist'}, 'build/native/bin/v120/Win32/Release/zlib.dll', 'runtimes/win7-x86/native/zlib.dll'],
+            # matches librdkafka.{VER}.nupkg
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'librdkafka', 'fname_excludes': ['redist', 'symbols']}, 
+            'build/native/lib/v120/Win32/Release/librdkafka.lib', 'build/native/lib/win7/x86/win7-x86-Release/v120/librdkafka.lib'],
+            [{'arch': 'x86', 'plat': 'win7', 'fname_startswith': 'librdkafka', 'fname_excludes': ['redist', 'symbols']}, 
+            'build/native/lib/v120/Win32/Release/librdkafkacpp.lib', 'build/native/lib/win7/x86/win7-x86-Release/v120/librdkafkacpp.lib']
+        ]
+
+        for m in mappings:
+            attributes = m[0]
+            fname_startswith = attributes['fname_startswith']
+            del attributes['fname_startswith']
+            fname_excludes = []
+            if 'fname_excludes' in attributes:
+                fname_excludes = attributes['fname_excludes']
+                del attributes['fname_excludes']
+
+            artifact = None
+            for a in self.arts.artifacts:
+                found = True
+
+                for attr in attributes:
+                    if a.info[attr] != attributes[attr]:
+                        found = False
+                        break
+
+                if not a.fname.startswith(fname_startswith):
+                    found = False
+
+                for exclude in fname_excludes:
+                    if exclude in a.fname:
+                        found = False
+                        break
+
+                if found:
+                    artifact = a
+                    break
+
+            if artifact is None:
+                raise Exception('unable to find file in archive %s with tags %s that starts with "%s"' % (a.fname, str(attributes), fname_startswith))
+
+            outf = os.path.join(self.stpath, m[2])
+            member = m[1]
+            try:
+                zfile.ZFile.extract(artifact.lpath, member, outf)
+            except KeyError as e:
+                raise Exception('file not found in archive %s: %s. Files in archive are: %s' % (artifact.lpath, e, zfile.ZFile(artifact.lpath).getnames()))
+
+        print('Tree extracted to %s' % self.stpath)
+
+        # After creating a bare-bone nupkg layout containing the artifacts
+        # and some spec and props files, call the 'nuget' utility to
+        # make a proper nupkg of it (with all the metadata files).
+        subprocess.check_call("./nuget.sh pack %s -BasePath '%s' -NonInteractive" %  \
+                              (os.path.join(self.stpath, 'librdkafka.redist.nuspec'),
+                               self.stpath), shell=True)
+                               
+        return 'librdkafka.redist.%s.nupkg' % vless_version
+
+    def verify (self, path):
+        """ Verify package """
+        expect = [
+            "librdkafka.redist.nuspec",
+            "LICENSES.txt",
+            "build/librdkafka.redist.props",
+            "build/native/librdkafka.redist.targets",
+            "build/native/include/librdkafka/rdkafka.h",
+            "build/native/include/librdkafka/rdkafkacpp.h",
+            "build/native/lib/win7/x64/win7-x64-Release/v120/librdkafka.lib",
+            "build/native/lib/win7/x64/win7-x64-Release/v120/librdkafkacpp.lib",
+            "build/native/lib/win7/x86/win7-x86-Release/v120/librdkafka.lib",
+            "build/native/lib/win7/x86/win7-x86-Release/v120/librdkafkacpp.lib",
+            "runtimes/linux-x64/native/debian9-librdkafka.so",
+            "runtimes/linux-x64/native/librdkafka.so",
+            "runtimes/osx-x64/native/librdkafka.dylib",
+            "runtimes/win7-x64/native/librdkafka.dll",
+            "runtimes/win7-x64/native/librdkafkacpp.dll",
+            "runtimes/win7-x64/native/msvcr120.dll",
+            "runtimes/win7-x64/native/zlib.dll",
+            "runtimes/win7-x86/native/librdkafka.dll",
+            "runtimes/win7-x86/native/librdkafkacpp.dll",
+            "runtimes/win7-x86/native/msvcr120.dll",
+            "runtimes/win7-x86/native/zlib.dll"]
+
+        missing = list()		
+        with zfile.ZFile(path, 'r') as zf:		
+            print('Verifying %s:' % path)		
+        
+            # Zipfiles may url-encode filenames, unquote them before matching.		
+            pkgd = [urllib.unquote(x) for x in zf.getnames()]		
+            missing = [x for x in expect if x not in pkgd]		
+        
+        if len(missing) > 0:		
+            print('Missing files in package %s:\n%s' % (path, '\n'.join(missing)))		
+            return False		
+        else:		
+            print('OK - %d expected files found' % len(expect))		
+            return True

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/release.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/release.py b/thirdparty/librdkafka-0.11.4/packaging/nuget/release.py
new file mode 100755
index 0000000..692ee6b
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/release.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+#
+#
+# NuGet release packaging tool.
+# Creates a NuGet package from CI artifacts on S3.
+#
+
+
+import sys
+import argparse
+import packaging
+
+
+dry_run = False
+
+
+
+if __name__ == '__main__':
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--no-s3", help="Don't collect from S3", action="store_true")
+    parser.add_argument("--dry-run",
+                        help="Locate artifacts but don't actually download or do anything",
+                        action="store_true")
+    parser.add_argument("--directory", help="Download directory (default: dl-<tag>)", default=None)
+    parser.add_argument("--no-cleanup", help="Don't clean up temporary folders", action="store_true")
+    parser.add_argument("--sha", help="Also match on this git sha1", default=None)
+    parser.add_argument("--nuget-version", help="The nuget package version (defaults to same as tag)", default=None)
+    parser.add_argument("tag", help="Git tag to collect")
+
+    args = parser.parse_args()
+    dry_run = args.dry_run
+    if not args.directory:
+        args.directory = 'dl-%s' % args.tag
+
+    match = {'tag': args.tag}
+    if args.sha is not None:
+        match['sha'] = args.sha
+
+    arts = packaging.Artifacts(match, args.directory)
+
+    # Collect common local artifacts, such as support files.
+    arts.collect_local('common', req_tag=False)
+
+    if not args.no_s3:
+        arts.collect_s3()
+    else:
+        arts.collect_local(arts.dlpath)
+
+    if len(arts.artifacts) == 0:
+        raise ValueError('No artifacts found for %s' % match)
+
+    print('Collected artifacts:')
+    for a in arts.artifacts:
+        print(' %s' % a.lpath)
+    print('')
+
+    package_version = match['tag']
+    if args.nuget_version is not None:
+        package_version = args.nuget_version
+
+    print('')
+
+    if dry_run:
+        sys.exit(0)
+
+    print('Building packages:')
+
+    p = packaging.NugetPackage(package_version, arts)
+    pkgfile = p.build(buildtype='release')
+
+    if not args.no_cleanup:
+        p.cleanup()
+    else:
+        print(' --no-cleanup: leaving %s' % p.stpath)
+
+    print('')
+
+    if not p.verify(pkgfile):
+        print('Package failed verification.')
+        sys.exit(1)
+    else:
+        print('Created package: %s' % pkgfile)

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/requirements.txt
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/requirements.txt b/thirdparty/librdkafka-0.11.4/packaging/nuget/requirements.txt
new file mode 100644
index 0000000..c892afd
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/requirements.txt
@@ -0,0 +1,2 @@
+boto3
+rpmfile

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.nuspec
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.nuspec b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.nuspec
new file mode 100644
index 0000000..f48e523
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.nuspec
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package xmlns="http://schemas.microsoft.com/packaging/2011/10/nuspec.xsd">
+  <metadata>
+    <id>librdkafka.redist</id>
+    <version>${version}</version>
+    <title>librdkafka - redistributable</title>
+    <authors>Magnus Edenhill, edenhill</authors>
+    <owners>Magnus Edenhill, edenhill</owners>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <licenseUrl>https://github.com/edenhill/librdkafka/blob/master/LICENSES.txt</licenseUrl>
+    <projectUrl>https://github.com/edenhill/librdkafka</projectUrl>
+    <description>The Apache Kafka C/C++ client library - redistributable</description>
+    <summary>The Apache Kafka C/C++ client library</summary>
+    <releaseNotes>Release of librdkafka</releaseNotes>
+    <copyright>Copyright 2012-2017</copyright>
+    <tags>native apache kafka librdkafka C C++ nativepackage</tags>
+  </metadata>
+  <files>
+    <file src="**" />
+  </files>
+</package>

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.props
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.props b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.props
new file mode 100644
index 0000000..f6c0de0
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.props
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<Project ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win7-x86\native\*">
+      <Link>librdkafka\x86\%(Filename)%(Extension)</Link>
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </Content>
+    <Content Include="$(MSBuildThisFileDirectory)..\runtimes\win7-x64\native\*">
+      <Link>librdkafka\x64\%(Filename)%(Extension)</Link>
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </Content>
+  </ItemGroup>
+  <ItemDefinitionGroup>
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+    </ClCompile>
+  </ItemDefinitionGroup>
+</Project>

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.targets
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.targets b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.targets
new file mode 100644
index 0000000..632408d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/templates/librdkafka.redist.targets
@@ -0,0 +1,19 @@
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemDefinitionGroup>
+    <Link>
+      <AdditionalDependencies Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win7\x64\win7-x64-Release\v120\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalDependencies Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win7\x86\win7-x86-Release\v120\librdkafka.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories Condition="'$(Platform)' == 'x64'">$(MSBuildThisFileDirectory)lib\win7\x64\win7-x64-Release\v120;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <AdditionalLibraryDirectories Condition="'$(Platform)' != 'x64'">$(MSBuildThisFileDirectory)lib\win7\x86\win7-x86-Release\v120;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+    </Link>
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+    </ClCompile>
+  </ItemDefinitionGroup>
+  <ItemGroup Condition="'$(Platform)' == 'x64'">
+    <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win7-x64\native\librdkafka.dll" />
+  </ItemGroup>
+  <ItemGroup Condition="'$(Platform)' != 'x64'">
+    <ReferenceCopyLocalPaths Include="$(MSBuildThisFileDirectory)..\..\runtimes\win7-x86\native\librdkafka.dll" />
+  </ItemGroup>
+</Project>

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/__init__.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/__init__.py b/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/zfile.py
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/zfile.py b/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/zfile.py
new file mode 100644
index 0000000..8616078
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/nuget/zfile/zfile.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+import os
+import tarfile
+import zipfile
+import rpmfile
+
+class ZFile (object):
+    def __init__(self, path, mode='r', ext=None):
+        super(ZFile, self).__init__()
+
+        if ext is not None:
+            _ext = ext
+        else:
+            _ext = os.path.splitext(path)[-1]
+        if _ext.startswith('.'):
+            _ext = _ext[1:]
+
+        if zipfile.is_zipfile(path) or _ext == 'zip':
+            self.f = zipfile.ZipFile(path, mode)
+        elif tarfile.is_tarfile(path) or _ext in ('tar', 'tgz', 'gz'):
+            self.f = tarfile.open(path, mode)
+        elif _ext == 'rpm':
+            self.f = rpmfile.open(path, mode + 'b')
+        else:
+            raise ValueError('Unsupported file extension: %s' % path)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *args):
+        if callable(getattr(self.f, 'close', None)):
+            self.f.close()
+
+    def getnames(self):
+        if isinstance(self.f, zipfile.ZipFile):
+            return self.f.namelist()
+        elif isinstance(self.f, tarfile.TarFile):
+            return self.f.getnames()
+        elif isinstance(self.f, rpmfile.RPMFile):
+            return [x.name for x in self.f.getmembers()]
+        else:
+            raise NotImplementedError
+
+    def headers(self):
+        if isinstance(self.f, rpmfile.RPMFile):
+            return self.f.headers
+        else:
+            return dict()
+
+    def extract_to(self, member, path):
+        """ Extract compressed file's \p member to \p path.
+            If \p path is a directory the member's basename will be used as
+            filename, otherwise path is considered the full file path name. """
+
+        if not os.path.isdir(os.path.dirname(path)):
+            os.makedirs(os.path.dirname(path))
+
+        if os.path.isdir(path):
+            path = os.path.join(path, os.path.basename(member))
+
+        with open(path, 'wb') as of:
+            if isinstance(self.f, zipfile.ZipFile):
+                zf = self.f.open(member)
+            else:
+                zf = self.f.extractfile(member)
+
+            while True:
+                b = zf.read(1024*100)
+                if b:
+                    of.write(b)
+                else:
+                    break
+
+            zf.close()
+
+
+    @classmethod
+    def extract (cls, zpath, member, outpath):
+        """
+        Extract file member (full internal path) to output from
+        archive zpath.
+        """
+
+        with ZFile(zpath) as zf:
+            zf.extract_to(member, outpath)
+
+
+    @classmethod
+    def compress (cls, zpath, paths, stripcnt=0, ext=None):
+        """
+        Create new compressed file \p zpath containing files in \p paths
+        """
+
+        with ZFile(zpath, 'w', ext=ext) as zf:
+            for p in paths:
+                outp = os.path.sep.join(p.split(os.path.sep)[stripcnt:])
+                print('zip %s to %s (stripcnt %d)' % (p, outp, stripcnt))
+                zf.f.write(p, outp)
+

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/rpm/.gitignore
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/rpm/.gitignore b/thirdparty/librdkafka-0.11.4/packaging/rpm/.gitignore
new file mode 100644
index 0000000..cf122d0
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/rpm/.gitignore
@@ -0,0 +1,3 @@
+*.log
+available_pkgs
+installed_pkgs

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/rpm/Makefile
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/rpm/Makefile b/thirdparty/librdkafka-0.11.4/packaging/rpm/Makefile
new file mode 100644
index 0000000..24e9ae6
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/rpm/Makefile
@@ -0,0 +1,81 @@
+PACKAGE_NAME?=	librdkafka
+VERSION?=	$(shell ../get_version.py ../../src/rdkafka.h)
+
+# Jenkins CI integration
+BUILD_NUMBER?= 1
+
+MOCK_CONFIG?=default
+
+RESULT_DIR?=pkgs-$(VERSION)-$(BUILD_NUMBER)-$(MOCK_CONFIG)
+
+all: rpm
+
+
+SOURCES:
+	mkdir -p SOURCES
+
+archive: SOURCES
+	cd ../../ && \
+	git archive --prefix=$(PACKAGE_NAME)-$(VERSION)/ \
+		-o packaging/rpm/SOURCES/$(PACKAGE_NAME)-$(VERSION).tar.gz HEAD
+
+
+build_prepare: archive
+	mkdir -p $(RESULT_DIR)
+	rm -f $(RESULT_DIR)/$(PACKAGE_NAME)*.rpm
+
+
+srpm: build_prepare
+	/usr/bin/mock \
+		-r $(MOCK_CONFIG) \
+		--define "__version $(VERSION)" \
+		--define "__release $(BUILD_NUMBER)" \
+		--resultdir=$(RESULT_DIR) \
+		--no-clean --no-cleanup-after \
+		--buildsrpm \
+		--spec=librdkafka.spec \
+		--sources=SOURCES || \
+	(tail -n 100 pkgs-$(VERSION)*/*log ; false)
+	@echo "======= Source RPM now available in $(RESULT_DIR) ======="
+
+rpm: srpm
+	/usr/bin/mock \
+		-r $(MOCK_CONFIG) \
+		--define "__version $(VERSION)"\
+		--define "__release $(BUILD_NUMBER)"\
+		--resultdir=$(RESULT_DIR) \
+		--no-clean --no-cleanup-after \
+		--rebuild $(RESULT_DIR)/$(PACKAGE_NAME)*.src.rpm || \
+	(tail -n 100 pkgs-$(VERSION)*/*log ; false)
+	@echo "======= Binary RPMs now available in $(RESULT_DIR) ======="
+
+copy-artifacts:
+	cp $(RESULT_DIR)/*rpm ../../artifacts/
+
+clean:
+	rm -rf SOURCES
+	/usr/bin/mock -r $(MOCK_CONFIG) --clean
+
+distclean: clean
+	rm -f build.log root.log state.log available_pkgs installed_pkgs \
+		*.rpm *.tar.gz
+
+# Prepare ubuntu 14.04 for building RPMs with mock.
+#  - older versions of mock needs the config file to reside in /etc/mock,
+#    so we copy it there.
+#  - add a mock system group (if not already exists)
+#  - add the current user to the mock group.
+#  - prepare mock environment with some needed packages.
+# NOTE: This target should be run with sudo.
+prepare_ubuntu:
+	apt-get -qq update
+	apt-get install -y -qq mock make git python-lzma
+	cp *.cfg /etc/mock/
+	addgroup --system mock || true
+	adduser $$(whoami) mock
+	/usr/bin/mock -r $(MOCK_CONFIG) --init
+	/usr/bin/mock -r $(MOCK_CONFIG) --no-cleanup-after --install epel-release shadow-utils
+
+prepare_centos:
+	yum install -y -q mock make git
+	cp *.cfg /etc/mock/

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/rpm/el7-x86_64.cfg
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/rpm/el7-x86_64.cfg b/thirdparty/librdkafka-0.11.4/packaging/rpm/el7-x86_64.cfg
new file mode 100644
index 0000000..5022827
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/rpm/el7-x86_64.cfg
@@ -0,0 +1,40 @@
+config_opts['root'] = 'el7-x86_64'
+config_opts['target_arch'] = 'x86_64'
+config_opts['legal_host_arches'] = ('x86_64',)
+config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
+config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
+config_opts['releasever'] = '7'
+config_opts['docker_unshare_warning'] = False
+config_opts['nosync'] = True
+
+config_opts['yum.conf'] = """
+[main]
+keepcache=1
+debuglevel=2
+reposdir=/dev/null
+logfile=/var/log/yum.log
+retries=15
+obsoletes=1
+gpgcheck=0
+assumeyes=1
+syslog_ident=mock
+syslog_device=
+mdpolicy=group:primary
+
+# repos
+[base]
+name=BaseOS
+mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=os
+failovermethod=priority
+
+[updates]
+name=updates
+enabled=1
+mirrorlist=http://mirrorlist.centos.org/?release=7&arch=x86_64&repo=updates
+failovermethod=priority
+
+[epel]
+name=epel
+mirrorlist=http://mirrors.fedoraproject.org/mirrorlist?repo=epel-7&arch=x86_64
+failovermethod=priority
+"""

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/rpm/librdkafka.spec
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/rpm/librdkafka.spec b/thirdparty/librdkafka-0.11.4/packaging/rpm/librdkafka.spec
new file mode 100644
index 0000000..0591a61
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/rpm/librdkafka.spec
@@ -0,0 +1,104 @@
+Name:    librdkafka
+Version: %{__version}
+Release: %{__release}%{?dist}
+%define soname 1
+
+Summary: The Apache Kafka C library
+Group:   Development/Libraries/C and C++
+License: BSD-2-Clause
+URL:     https://github.com/edenhill/librdkafka
+Source:	 librdkafka-%{version}.tar.gz
+
+BuildRequires: zlib-devel libstdc++-devel gcc >= 4.1 gcc-c++ openssl-devel cyrus-sasl-devel lz4-devel python
+BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
+
+%define _source_payload w9.gzdio
+%define _binary_payload w9.gzdio
+
+%description
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+
+%package -n %{name}%{soname}
+Summary: The Apache Kafka C library
+Group:   Development/Libraries/C and C++
+Requires: zlib libstdc++ cyrus-sasl
+# openssl libraries were extracted to openssl-libs in RHEL7
+%if 0%{?rhel} >= 7
+Requires: openssl-libs
+%else
+Requires: openssl
+%endif
+
+%description -n %{name}%{soname}
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+
+%package -n %{name}-devel
+Summary: The Apache Kafka C library (Development Environment)
+Group:   Development/Libraries/C and C++
+Requires: %{name}%{soname} = %{version}
+
+%description -n %{name}-devel
+librdkafka is the C/C++ client library implementation of the Apache Kafka protocol, containing both Producer and Consumer support.
+
+This package contains headers and libraries required to build applications
+using librdkafka.
+
+
+%prep
+%setup -q -n %{name}-%{version}
+
+%configure
+
+%build
+make
+
+%install
+rm -rf %{buildroot}
+DESTDIR=%{buildroot} make install
+
+%clean
+rm -rf %{buildroot}
+
+%post   -n %{name}%{soname} -p /sbin/ldconfig
+%postun -n %{name}%{soname} -p /sbin/ldconfig
+
+%files -n %{name}%{soname}
+%defattr(444,root,root)
+%{_libdir}/librdkafka.so.%{soname}
+%{_libdir}/librdkafka++.so.%{soname}
+%defattr(-,root,root)
+%doc README.md CONFIGURATION.md INTRODUCTION.md
+%doc LICENSE LICENSE.pycrc LICENSE.queue LICENSE.snappy LICENSE.tinycthread LICENSE.wingetopt
+
+%defattr(-,root,root)
+#%{_bindir}/rdkafka_example
+#%{_bindir}/rdkafka_performance
+
+
+%files -n %{name}-devel
+%defattr(-,root,root)
+%{_includedir}/librdkafka
+%defattr(444,root,root)
+%{_libdir}/librdkafka.a
+%{_libdir}/librdkafka.so
+%{_libdir}/librdkafka++.a
+%{_libdir}/librdkafka++.so
+%{_libdir}/pkgconfig/rdkafka++.pc
+%{_libdir}/pkgconfig/rdkafka.pc
+%{_libdir}/pkgconfig/rdkafka-static.pc
+%{_libdir}/pkgconfig/rdkafka++-static.pc
+
+%changelog
+* Thu Apr 09 2015 Eduard Iskandarov <e....@corp.mail.ru> 0.8.6-0
+- 0.8.6 simplify build process
+
+* Fri Oct 24 2014 Magnus Edenhill <rd...@edenhill.se> 0.8.5-0
+- 0.8.5 release
+
+* Mon Aug 18 2014 Magnus Edenhill <rd...@edenhill.se> 0.8.4-0
+- 0.8.4 release
+
+* Mon Mar 17 2014 Magnus Edenhill <vk...@edenhill.se> 0.8.3-0
+- Initial RPM package

http://git-wip-us.apache.org/repos/asf/nifi-minifi-cpp/blob/7528d23e/thirdparty/librdkafka-0.11.4/packaging/tools/build-debian.sh
----------------------------------------------------------------------
diff --git a/thirdparty/librdkafka-0.11.4/packaging/tools/build-debian.sh b/thirdparty/librdkafka-0.11.4/packaging/tools/build-debian.sh
new file mode 100755
index 0000000..ea0108d
--- /dev/null
+++ b/thirdparty/librdkafka-0.11.4/packaging/tools/build-debian.sh
@@ -0,0 +1,53 @@
+#!/bin/bash
+#
+# Build librdkafka on a bare-bones Debian host, such as the microsoft/dotnet:2-sdk
+# Docker image.
+#
+# WITH openssl 1.0, zlib
+# WITHOUT libsasl2, lz4(ext, using builtin instead)
+#
+# Usage (from top-level librdkafka dir):
+#   docker run -it -v $PWD:/v microsoft/dotnet:2-sdk /v/packaging/tools/build-debian.sh /v /v/librdkafka-debian9.tgz
+#
+
+
+set -ex
+
+LRK_DIR=$1
+OUT_TGZ=$2
+
+if [[ ! -f $LRK_DIR/configure.librdkafka || -z $OUT_TGZ ]]; then
+    echo "Usage: $0 <librdkafka-root-directory> <output-tgz>"
+    exit 1
+fi
+
+set -u
+
+apt-get update
+apt-get install -y gcc g++ libssl1.0-dev zlib1g-dev python2.7 git-core make
+
+
+# Copy the librdkafka git archive to a new location to avoid messing
+# up the librdkafka working directory.
+
+BUILD_DIR=$(mktemp -d)
+
+pushd $BUILD_DIR
+
+DEST_DIR=$PWD/dest
+mkdir -p $DEST_DIR
+
+(cd $LRK_DIR ; git archive --format tar HEAD) | tar xf -
+
+./configure --disable-lz4 --prefix $DEST_DIR
+make -j
+make install
+
+# Tar up the output directory
+pushd $DEST_DIR
+tar cvzf $OUT_TGZ .
+popd # $DEST_DIR
+
+popd # $BUILD_DIR
+
+rm -rf "$BUILD_DIR"