Posted to common-commits@hadoop.apache.org by vv...@apache.org on 2015/07/01 12:45:41 UTC

[10/50] hadoop git commit: HADOOP-12036. Consolidate all of the cmake extensions in one directory (alanburlison via cmccabe)

HADOOP-12036. Consolidate all of the cmake extensions in one directory (alanburlison via cmccabe)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/aa07dea3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/aa07dea3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/aa07dea3

Branch: refs/heads/YARN-2139
Commit: aa07dea3577158b92a17651d10da20df73f54561
Parents: 60b858b
Author: Colin Patrick Mccabe <cm...@cloudera.com>
Authored: Fri Jun 26 12:32:31 2015 -0700
Committer: Colin Patrick Mccabe <cm...@cloudera.com>
Committed: Fri Jun 26 12:32:31 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../hadoop-common/HadoopCommon.cmake            | 207 +++++++++++
 .../hadoop-common/HadoopJNI.cmake               |  97 +++++
 .../hadoop-common/src/CMakeLists.txt            | 366 ++++++++-----------
 4 files changed, 457 insertions(+), 216 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa07dea3/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 5901794..92e1bfa 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -679,6 +679,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-11885. hadoop-dist dist-layout-stitching.sh does not work with dash.
     (wang)
 
+    HADOOP-12036. Consolidate all of the cmake extensions in one directory
+    (alanburlison via cmccabe)
+
   BUG FIXES
 
     HADOOP-11802: DomainSocketWatcher thread terminates sometimes after there

http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa07dea3/hadoop-common-project/hadoop-common/HadoopCommon.cmake
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/HadoopCommon.cmake b/hadoop-common-project/hadoop-common/HadoopCommon.cmake
new file mode 100644
index 0000000..5a83f3d
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/HadoopCommon.cmake
@@ -0,0 +1,207 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Common CMake utilities and configuration, shared by all Native components.
+#
+
+#
+# Platform-specific prerequisite checks.
+#
+
+if(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
+    # Only 64-bit Java is supported.
+    if(NOT JVM_ARCH_DATA_MODEL EQUAL 64)
+        message(FATAL_ERROR "Unrecognised JVM_ARCH_DATA_MODEL '${JVM_ARCH_DATA_MODEL}'. "
+          "A 64-bit JVM must be used on Solaris, make sure that one is installed and, "
+          "if necessary, the MAVEN_OPTS environment variable includes '-d64'")
+    endif()
+
+    # Only gcc is supported for now.
+    if(NOT(CMAKE_COMPILER_IS_GNUCC AND CMAKE_COMPILER_IS_GNUCXX))
+        message(FATAL_ERROR "Only gcc is supported on Solaris")
+    endif()
+endif()
+
+#
+# Helper functions and macros.
+#
+
+# Add flags to all the CMake compiler variables
+macro(hadoop_add_compiler_flags FLAGS)
+    set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${FLAGS}")
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAGS}")
+endmacro()
+
+# Add flags to all the CMake linker variables
+macro(hadoop_add_linker_flags FLAGS)
+    set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAGS}")
+    set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAGS}")
+    set(CMAKE_STATIC_LINKER_FLAGS "${CMAKE_STATIC_LINKER_FLAGS} ${FLAGS}")
+endmacro()
+
+# Compile a library with both shared and static variants.
+function(hadoop_add_dual_library LIBNAME)
+    add_library(${LIBNAME} SHARED ${ARGN})
+    add_library(${LIBNAME}_static STATIC ${ARGN})
+    set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
+endfunction()
+
+# Link both a static and a dynamic target against some libraries.
+function(hadoop_target_link_dual_libraries LIBNAME)
+    target_link_libraries(${LIBNAME} ${ARGN})
+    target_link_libraries(${LIBNAME}_static ${ARGN})
+endfunction()
+
+# Set all the output directories to the same place.
+function(hadoop_output_directory TGT DIR)
+    set_target_properties(${TGT} PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+    set_target_properties(${TGT} PROPERTIES ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+    set_target_properties(${TGT} PROPERTIES LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
+endfunction()
+
+# Set the target directories for dynamic and static builds.
+function(hadoop_dual_output_directory TGT DIR)
+    hadoop_output_directory(${TGT} "${DIR}")
+    hadoop_output_directory(${TGT}_static "${DIR}")
+endfunction()
+
+# Alter the behavior of find_package and find_library so that we find only
+# shared libraries with a given version suffix.  You should save
+# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
+# afterwards.  On Windows this macro is a no-op, as Windows does not encode
+# version number information into library path names.
+macro(hadoop_set_find_shared_library_version LVERS)
+    if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+        # Mac OS uses .dylib
+        set(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
+    elseif(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
+        # FreeBSD always installs an unversioned .so.
+        set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
+    elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+        # Windows doesn't support finding shared libraries by version.
+    else()
+        # Most UNIX variants use .so
+        set(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
+    endif()
+endmacro()
+
+# Alter the behavior of find_package and find_library so that we find only
+# shared libraries without any version suffix.  You should save
+# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
+# afterwards.  On Windows this macro is a no-op, as Windows does not encode
+# version number information into library path names.
+macro(hadoop_set_find_shared_library_without_version)
+    if(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
+        # Mac OS uses .dylib
+        set(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
+    elseif(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+        # No effect
+    else()
+        # Most UNIX variants use .so
+        set(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
+    endif()
+endmacro()
+
+#
+# Configuration.
+#
+
+# Initialise the shared gcc/g++ flags if they aren't already defined.
+if(NOT DEFINED GCC_SHARED_FLAGS)
+    set(GCC_SHARED_FLAGS "-g -O2 -Wall -pthread -D_FILE_OFFSET_BITS=64")
+endif()
+
+# Add support for other compilers here if necessary; the current
+# assumption is that GCC or a GCC-compatible compiler is being used.
+
+# Set the shared GCC-compatible compiler and linker flags.
+hadoop_add_compiler_flags("${GCC_SHARED_FLAGS}")
+hadoop_add_linker_flags("${LINKER_SHARED_FLAGS}")
+
+#
+# Linux-specific configuration.
+#
+if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
+    # Make GNU extensions available.
+    hadoop_add_compiler_flags("-D_GNU_SOURCE")
+
+    # If JVM_ARCH_DATA_MODEL is 32, compile all binaries as 32-bit.
+    if(JVM_ARCH_DATA_MODEL EQUAL 32)
+        # Force 32-bit code generation on amd64/x86_64, ppc64, sparc64
+        if(CMAKE_COMPILER_IS_GNUCC AND CMAKE_SYSTEM_PROCESSOR MATCHES ".*64")
+            hadoop_add_compiler_flags("-m32")
+            hadoop_add_linker_flags("-m32")
+        endif()
+        # Set CMAKE_SYSTEM_PROCESSOR to ensure that find_package(JNI) will use 32-bit libraries
+        if(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+            set(CMAKE_SYSTEM_PROCESSOR "i686")
+        endif()
+    endif()
+
+    # Determine float ABI of JVM on ARM.
+    if(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+        find_program(READELF readelf)
+        if(READELF MATCHES "NOTFOUND")
+            message(WARNING "readelf not found; JVM float ABI detection disabled")
+        else()
+            execute_process(
+                COMMAND ${READELF} -A ${JAVA_JVM_LIBRARY}
+                OUTPUT_VARIABLE JVM_ELF_ARCH
+                ERROR_QUIET)
+            if(NOT JVM_ELF_ARCH MATCHES "Tag_ABI_VFP_args: VFP registers")
+                message("Soft-float JVM detected")
+                # Test compilation with -mfloat-abi=softfp using an arbitrary libc function
+                # (typically fails with "fatal error: bits/predefs.h: No such file or directory"
+                # if soft-float dev libraries are not installed)
+                include(CMakePushCheckState)
+                cmake_push_check_state()
+                set(CMAKE_REQUIRED_FLAGS "${CMAKE_REQUIRED_FLAGS} -mfloat-abi=softfp")
+                include(CheckSymbolExists)
+                check_symbol_exists(exit stdlib.h SOFTFP_AVAILABLE)
+                if(NOT SOFTFP_AVAILABLE)
+                    message(FATAL_ERROR "Soft-float dev libraries required (e.g. 'apt-get install libc6-dev-armel' on Debian/Ubuntu)")
+                endif()
+                cmake_pop_check_state()
+                hadoop_add_compiler_flags("-mfloat-abi=softfp")
+            endif()
+        endif()
+    endif()
+
+#
+# Solaris-specific configuration.
+#
+elseif(CMAKE_SYSTEM_NAME STREQUAL "SunOS")
+    # Solaris flags. 64-bit compilation is mandatory, and is checked earlier.
+    hadoop_add_compiler_flags("-m64 -D__EXTENSIONS__ -D_POSIX_PTHREAD_SEMANTICS -D_XOPEN_SOURCE=500")
+    hadoop_add_linker_flags("-m64")
+
+    # CMAKE_SYSTEM_PROCESSOR is set to the output of 'uname -p', which on Solaris is
+    # the 'lowest' ISA supported, i.e. 'i386' or 'sparc'. However, in order for the
+    # standard CMake modules to look in the right places it needs to reflect the required
+    # compilation mode, i.e. 64-bit. We therefore force it to either 'amd64' or 'sparcv9'.
+    if(CMAKE_SYSTEM_PROCESSOR STREQUAL "i386")
+        set(CMAKE_SYSTEM_PROCESSOR "amd64")
+        set(CMAKE_LIBRARY_ARCHITECTURE "amd64")
+    elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "sparc")
+        set(CMAKE_SYSTEM_PROCESSOR "sparcv9")
+        set(CMAKE_LIBRARY_ARCHITECTURE "sparcv9")
+    else()
+        message(FATAL_ERROR "Unrecognised CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR}")
+    endif()
+endif()

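For reference, a consuming CMakeLists.txt pulls these helpers in by adding the
hadoop-common directory to the module path and including the file by name, as
the rewritten src/CMakeLists.txt further down does. A minimal sketch follows;
the "example" target and its source file are hypothetical, not part of this
change:

    list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/..)
    include(HadoopCommon)

    # Find the versioned shared library (e.g. libz.so.1), saving and
    # restoring CMAKE_FIND_LIBRARY_SUFFIXES as the macro's comment requires.
    set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
    hadoop_set_find_shared_library_version("1")
    find_package(ZLIB REQUIRED)
    set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})

    # Build libexample.so and libexample.a from the same source, link both
    # variants, and emit both into the same output directory.
    hadoop_add_dual_library(example main/native/src/example.c)
    hadoop_target_link_dual_libraries(example ${ZLIB_LIBRARIES})
    hadoop_dual_output_directory(example target/usr/local/lib)
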
http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa07dea3/hadoop-common-project/hadoop-common/HadoopJNI.cmake
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/HadoopJNI.cmake b/hadoop-common-project/hadoop-common/HadoopJNI.cmake
new file mode 100644
index 0000000..78d7ffd
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/HadoopJNI.cmake
@@ -0,0 +1,97 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Common JNI detection for CMake, shared by all Native components.
+#
+
+# Check that the JVM_ARCH_DATA_MODEL variable has been set to 32 or 64 by Maven.
+if(NOT DEFINED JVM_ARCH_DATA_MODEL)
+    message(FATAL_ERROR "JVM_ARCH_DATA_MODEL is not defined")
+elseif(NOT (JVM_ARCH_DATA_MODEL EQUAL 32 OR JVM_ARCH_DATA_MODEL EQUAL 64))
+    message(FATAL_ERROR "JVM_ARCH_DATA_MODEL is not 32 or 64")
+endif()
+
+#
+# Linux-specific JNI configuration.
+#
+if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
+    # Locate JNI_INCLUDE_DIRS and JNI_LIBRARIES.
+    # Since we were invoked from Maven, we know that the JAVA_HOME environment
+    # variable is valid.  So we ignore system paths here and just use JAVA_HOME.
+    file(TO_CMAKE_PATH "$ENV{JAVA_HOME}" _java_home)
+    if(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$")
+        set(_java_libarch "i386")
+    elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+        set(_java_libarch "amd64")
+    elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^arm")
+        set(_java_libarch "arm")
+    elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^(powerpc|ppc)64le")
+        if(EXISTS "${_java_home}/jre/lib/ppc64le")
+            set(_java_libarch "ppc64le")
+        else()
+            set(_java_libarch "ppc64")
+        endif()
+    else()
+        set(_java_libarch ${CMAKE_SYSTEM_PROCESSOR})
+    endif()
+    set(_JDK_DIRS "${_java_home}/jre/lib/${_java_libarch}/*"
+                  "${_java_home}/jre/lib/${_java_libarch}"
+                  "${_java_home}/jre/lib/*"
+                  "${_java_home}/jre/lib"
+                  "${_java_home}/lib/*"
+                  "${_java_home}/lib"
+                  "${_java_home}/include/*"
+                  "${_java_home}/include"
+                  "${_java_home}"
+    )
+    find_path(JAVA_INCLUDE_PATH
+        NAMES jni.h
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    # In IBM Java, it's jniport.h instead of jni_md.h.
+    find_path(JAVA_INCLUDE_PATH2
+        NAMES jni_md.h jniport.h
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    set(JNI_INCLUDE_DIRS ${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
+    find_library(JAVA_JVM_LIBRARY
+        NAMES jvm JavaVM
+        PATHS ${_JDK_DIRS}
+        NO_DEFAULT_PATH)
+    set(JNI_LIBRARIES ${JAVA_JVM_LIBRARY})
+    unset(_java_libarch)
+    unset(_java_home)
+
+    message("JAVA_HOME=${JAVA_HOME}, JAVA_JVM_LIBRARY=${JAVA_JVM_LIBRARY}")
+    message("JAVA_INCLUDE_PATH=${JAVA_INCLUDE_PATH}, JAVA_INCLUDE_PATH2=${JAVA_INCLUDE_PATH2}")
+    if(JAVA_JVM_LIBRARY AND JAVA_INCLUDE_PATH AND JAVA_INCLUDE_PATH2)
+        message("Located all JNI components successfully.")
+    else()
+        message(FATAL_ERROR "Failed to find a viable JVM installation under JAVA_HOME.")
+    endif()
+
+    # Run the standard FindJNI module as well; it picks up the cached JAVA_* results located above.
+    find_package(JNI REQUIRED)
+
+#
+# Otherwise, use the standard FindJNI module to locate the JNI components.
+#
+else()
+    find_package(JNI REQUIRED)
+endif()

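HadoopJNI.cmake centralises the JNI lookup: on Linux it seeds the
JAVA_INCLUDE_PATH, JAVA_INCLUDE_PATH2 and JAVA_JVM_LIBRARY cache entries from
$JAVA_HOME before running FindJNI, and on every other platform it defers to
FindJNI entirely. A minimal consumer sketch, assuming Maven has passed
-DJVM_ARCH_DATA_MODEL as usual (the "examplejni" target and its source file
are hypothetical):

    list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/..)
    include(HadoopJNI)  # fails fast unless JVM_ARCH_DATA_MODEL is 32 or 64

    # On success, JNI_INCLUDE_DIRS and JNI_LIBRARIES are populated.
    include_directories(${JNI_INCLUDE_DIRS})
    add_library(examplejni SHARED main/native/src/ExampleJni.c)
    target_link_libraries(examplejni ${JNI_LIBRARIES})
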
http://git-wip-us.apache.org/repos/asf/hadoop/blob/aa07dea3/hadoop-common-project/hadoop-common/src/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/CMakeLists.txt b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
index 7d68fd7..c93bfe7 100644
--- a/hadoop-common-project/hadoop-common/src/CMakeLists.txt
+++ b/hadoop-common-project/hadoop-common/src/CMakeLists.txt
@@ -16,209 +16,149 @@
 # limitations under the License.
 #
 
-cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
-
-# Default to release builds
-set(CMAKE_BUILD_TYPE, Release)
-
-include(JNIFlags.cmake NO_POLICY_SCOPE)
-
-# Compile a library with both shared and static variants
-function(add_dual_library LIBNAME)
-    add_library(${LIBNAME} SHARED ${ARGN})
-    add_library(${LIBNAME}_static STATIC ${ARGN})
-    set_target_properties(${LIBNAME}_static PROPERTIES OUTPUT_NAME ${LIBNAME})
-endfunction(add_dual_library)
+#
+# CMake configuration.
+#
 
-# Link both a static and a dynamic target against some libraries
-function(target_link_dual_libraries LIBNAME)
-    target_link_libraries(${LIBNAME} ${ARGN})
-    target_link_libraries(${LIBNAME}_static ${ARGN})
-endfunction(target_link_dual_libraries)
+cmake_minimum_required(VERSION 2.6 FATAL_ERROR)
 
-function(output_directory TGT DIR)
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-    SET_TARGET_PROPERTIES(${TGT} PROPERTIES
-        LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/${DIR}")
-endfunction(output_directory TGT DIR)
+list(APPEND CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/..)
+include(HadoopCommon)
 
-function(dual_output_directory TGT DIR)
-    output_directory(${TGT} "${DIR}")
-    output_directory(${TGT}_static "${DIR}")
-endfunction(dual_output_directory TGT DIR)
+# Source and test locations.
+set(SRC main/native/src/org/apache/hadoop)
+set(TST main/native/src/test/org/apache/hadoop)
 
 #
-# This macro alters the behavior of find_package and find_library.
-# It does this by setting the CMAKE_FIND_LIBRARY_SUFFIXES global variable. 
-# You should save that variable before calling this function and restore it
-# after you have accomplished your goal.
+# Main configuration.
 #
-# The behavior is altered in two ways:
-# 1. We always find shared libraries, never static;
-# 2. We find shared libraries with the given version number.
-#
-# On Windows this function is a no-op.  Windows does not encode
-# version number information information into library path names.
-#
-macro(set_find_shared_library_version LVERS)
-    IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
-        # Mac OS uses .dylib
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
-        # FreeBSD has always .so installed.
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
-        # Windows doesn't support finding shared libraries by version.
-    ELSE()
-        # Most UNIX variants use .so
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so.${LVERS}")
-    ENDIF()
-endmacro(set_find_shared_library_version LVERS)
 
-#
-# Alter the behavior of find_package and find_library so that we find only
-# shared libraries without any version suffix.  You should save
-# CMAKE_FIND_LIBRARY_SUFFIXES before calling this function and restore it
-# afterwards.
-#
-macro(set_find_shared_library_without_version)
-    IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
-        # Mac OS uses .dylib
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".dylib")
-    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
-        # No effect
-    ELSE()
-        # Most UNIX variants use .so
-        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
-    ENDIF()
-endmacro(set_find_shared_library_without_version)
+# The caller must specify where the generated headers have been placed.
+if(NOT GENERATED_JAVAH)
+    message(FATAL_ERROR "You must set the CMake variable GENERATED_JAVAH")
+endif()
 
-if (NOT GENERATED_JAVAH)
-    # Must identify where the generated headers have been placed
-    MESSAGE(FATAL_ERROR "You must set the cmake variable GENERATED_JAVAH")
-endif (NOT GENERATED_JAVAH)
-find_package(JNI REQUIRED)
+# Configure JNI.
+include(HadoopJNI)
 
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
+# Require zlib.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
 find_package(ZLIB REQUIRED)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -Wall -O2")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_GNU_SOURCE")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64")
-set(D main/native/src/org/apache/hadoop)
-set(T main/native/src/test/org/apache/hadoop)
-
-GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+get_filename_component(HADOOP_ZLIB_LIBRARY ${ZLIB_LIBRARIES} NAME)
 
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
+# Look for bzip2.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
 find_package(BZip2 QUIET)
-if (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
-    GET_FILENAME_COMPONENT(HADOOP_BZIP2_LIBRARY ${BZIP2_LIBRARIES} NAME)
+if(BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
+    get_filename_component(HADOOP_BZIP2_LIBRARY ${BZIP2_LIBRARIES} NAME)
     set(BZIP2_SOURCE_FILES
-          "${D}/io/compress/bzip2/Bzip2Compressor.c"
-          "${D}/io/compress/bzip2/Bzip2Decompressor.c")
-else (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
+          "${SRC}/io/compress/bzip2/Bzip2Compressor.c"
+          "${SRC}/io/compress/bzip2/Bzip2Decompressor.c")
+    set(REQUIRE_BZIP2 ${REQUIRE_BZIP2}) # Stop warning about unused variable.
+else()
     set(BZIP2_SOURCE_FILES "")
     set(BZIP2_INCLUDE_DIR "")
-    IF(REQUIRE_BZIP2)
-        MESSAGE(FATAL_ERROR "Required bzip2 library and/or header files could not be found.")
-    ENDIF(REQUIRE_BZIP2)
-endif (BZIP2_INCLUDE_DIR AND BZIP2_LIBRARIES)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-
-INCLUDE(CheckFunctionExists)
-INCLUDE(CheckCSourceCompiles)
-INCLUDE(CheckLibraryExists)
-CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
-CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
-CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
-
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_version("1")
-find_library(SNAPPY_LIBRARY 
+    if(REQUIRE_BZIP2)
+        message(FATAL_ERROR "Required bzip2 library and/or header files could not be found.")
+    endif()
+endif()
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+
+# Look for snappy.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_version("1")
+find_library(SNAPPY_LIBRARY
     NAMES snappy
     PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/lib
           ${CUSTOM_SNAPPY_PREFIX}/lib64 ${CUSTOM_SNAPPY_LIB})
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-find_path(SNAPPY_INCLUDE_DIR 
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+find_path(SNAPPY_INCLUDE_DIR
     NAMES snappy.h
     PATHS ${CUSTOM_SNAPPY_PREFIX} ${CUSTOM_SNAPPY_PREFIX}/include
           ${CUSTOM_SNAPPY_INCLUDE})
-if (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
-    GET_FILENAME_COMPONENT(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
+if(SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+    get_filename_component(HADOOP_SNAPPY_LIBRARY ${SNAPPY_LIBRARY} NAME)
     set(SNAPPY_SOURCE_FILES
-        "${D}/io/compress/snappy/SnappyCompressor.c"
-        "${D}/io/compress/snappy/SnappyDecompressor.c")
-else (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
+        "${SRC}/io/compress/snappy/SnappyCompressor.c"
+        "${SRC}/io/compress/snappy/SnappyDecompressor.c")
+    set(REQUIRE_SNAPPY ${REQUIRE_SNAPPY}) # Stop warning about unused variable.
+    message(STATUS "Found Snappy: ${SNAPPY_LIBRARY}")
+else()
     set(SNAPPY_INCLUDE_DIR "")
     set(SNAPPY_SOURCE_FILES "")
-    IF(REQUIRE_SNAPPY)
-        MESSAGE(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
-    ENDIF(REQUIRE_SNAPPY)
-endif (SNAPPY_LIBRARY AND SNAPPY_INCLUDE_DIR)
-
-IF (CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
-  set(BULK_CRC_ARCH_SOURCE_FIlE "${D}/util/bulk_crc32_x86.c")
-ELSEIF (CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
-  set(BULK_CRC_ARCH_SOURCE_FIlE "${D}/util/bulk_crc32_aarch64.c")
-ELSE()
-  MESSAGE("No HW CRC acceleration for ${CMAKE_SYSTEM_PROCESSOR}, falling back to SW")
-ENDIF()
-
-# Find the no-suffix version of libcrypto.
-# See HADOOP-11216 for details.
-SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
-set_find_shared_library_without_version()
-SET(OPENSSL_NAME "crypto")
-IF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+    if(REQUIRE_SNAPPY)
+        message(FATAL_ERROR "Required snappy library could not be found.  SNAPPY_LIBRARY=${SNAPPY_LIBRARY}, SNAPPY_INCLUDE_DIR=${SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_INCLUDE_DIR=${CUSTOM_SNAPPY_INCLUDE_DIR}, CUSTOM_SNAPPY_PREFIX=${CUSTOM_SNAPPY_PREFIX}, CUSTOM_SNAPPY_INCLUDE=${CUSTOM_SNAPPY_INCLUDE}")
+    endif()
+endif()
+
+# Build hardware CRC32 acceleration, if supported on the platform.
+if(CMAKE_SYSTEM_PROCESSOR MATCHES "^i.86$" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
+  set(BULK_CRC_ARCH_SOURCE_FIlE "${SRC}/util/bulk_crc32_x86.c")
+elseif(CMAKE_SYSTEM_PROCESSOR STREQUAL "aarch64")
+  set(BULK_CRC_ARCH_SOURCE_FIlE "${SRC}/util/bulk_crc32_aarch64.c")
+else()
+  message("No HW CRC acceleration for ${CMAKE_SYSTEM_PROCESSOR}, falling back to SW")
+endif()
+
+# Find the no-suffix version of libcrypto/openssl. See HADOOP-11216 for details.
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_without_version()
+set(OPENSSL_NAME "crypto")
+if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
     SET(OPENSSL_NAME "eay32")
-ENDIF()
-MESSAGE("CUSTOM_OPENSSL_PREFIX = ${CUSTOM_OPENSSL_PREFIX}")
+endif()
+message("CUSTOM_OPENSSL_PREFIX = ${CUSTOM_OPENSSL_PREFIX}")
 find_library(OPENSSL_LIBRARY
     NAMES ${OPENSSL_NAME}
     PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/lib
           ${CUSTOM_OPENSSL_PREFIX}/lib64 ${CUSTOM_OPENSSL_LIB} NO_DEFAULT_PATH)
 find_library(OPENSSL_LIBRARY NAMES ${OPENSSL_NAME})
-find_path(OPENSSL_INCLUDE_DIR 
+find_path(OPENSSL_INCLUDE_DIR
     NAMES openssl/evp.h
     PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/include
           ${CUSTOM_OPENSSL_INCLUDE} NO_DEFAULT_PATH)
 find_path(OPENSSL_INCLUDE_DIR NAMES openssl/evp.h)
-SET(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
-SET(USABLE_OPENSSL 0)
-if (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
-    INCLUDE(CheckCSourceCompiles)
-    SET(OLD_CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES})
-    SET(CMAKE_REQUIRED_INCLUDES ${OPENSSL_INCLUDE_DIR})
-    CHECK_C_SOURCE_COMPILES("#include \"${OPENSSL_INCLUDE_DIR}/openssl/evp.h\"\nint main(int argc, char **argv) { return !EVP_aes_256_ctr; }" HAS_NEW_ENOUGH_OPENSSL)
-    SET(CMAKE_REQUIRED_INCLUDES ${OLD_CMAKE_REQUIRED_INCLUDES})
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+set(USABLE_OPENSSL 0)
+if(OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+    include(CheckCSourceCompiles)
+    set(OLD_CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES})
+    set(CMAKE_REQUIRED_INCLUDES ${OPENSSL_INCLUDE_DIR})
+    check_c_source_compiles("#include \"${OPENSSL_INCLUDE_DIR}/openssl/evp.h\"\nint main(int argc, char **argv) { return !EVP_aes_256_ctr; }" HAS_NEW_ENOUGH_OPENSSL)
+    set(CMAKE_REQUIRED_INCLUDES ${OLD_CMAKE_REQUIRED_INCLUDES})
     if(NOT HAS_NEW_ENOUGH_OPENSSL)
-        MESSAGE("The OpenSSL library installed at ${OPENSSL_LIBRARY} is too old.  You need a version at least new enough to have EVP_aes_256_ctr.")
-    else(NOT HAS_NEW_ENOUGH_OPENSSL)
+        message("The OpenSSL library installed at ${OPENSSL_LIBRARY} is too old.  You need a version at least new enough to have EVP_aes_256_ctr.")
+    else()
         SET(USABLE_OPENSSL 1)
-    endif(NOT HAS_NEW_ENOUGH_OPENSSL)
-endif (OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
-if (USABLE_OPENSSL)
-    GET_FILENAME_COMPONENT(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
-    SET(OPENSSL_SOURCE_FILES
-        "${D}/crypto/OpensslCipher.c"
-        "${D}/crypto/random/OpensslSecureRandom.c")
-else (USABLE_OPENSSL)
-    MESSAGE("Cannot find a usable OpenSSL library.  OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_LIB=${CUSTOM_OPENSSL_LIB}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
-    IF(REQUIRE_OPENSSL)
-        MESSAGE(FATAL_ERROR "Terminating build because require.openssl was specified.")
-    ENDIF(REQUIRE_OPENSSL)
-    SET(OPENSSL_LIBRARY "")
-    SET(OPENSSL_INCLUDE_DIR "")
-    SET(OPENSSL_SOURCE_FILES "")
-endif (USABLE_OPENSSL)
-
+    endif()
+endif()
+if(USABLE_OPENSSL)
+    get_filename_component(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
+    set(OPENSSL_SOURCE_FILES
+        "${SRC}/crypto/OpensslCipher.c"
+        "${SRC}/crypto/random/OpensslSecureRandom.c")
+    set(REQUIRE_OPENSSL ${REQUIRE_OPENSSL}) # Stop warning about unused variable.
+else()
+    message("Cannot find a usable OpenSSL library. OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_LIB=${CUSTOM_OPENSSL_LIB}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
+    if(REQUIRE_OPENSSL)
+        message(FATAL_ERROR "Terminating build because require.openssl was specified.")
+    endif()
+    set(OPENSSL_LIBRARY "")
+    set(OPENSSL_INCLUDE_DIR "")
+    set(OPENSSL_SOURCE_FILES "")
+endif()
+
+# Check for platform-specific functions and libraries.
+include(CheckFunctionExists)
+include(CheckLibraryExists)
+check_function_exists(sync_file_range HAVE_SYNC_FILE_RANGE)
+check_function_exists(posix_fadvise HAVE_POSIX_FADVISE)
+check_library_exists(dl dlopen "" NEED_LINK_DL)
+
+# Configure the build.
 include_directories(
     ${GENERATED_JAVAH}
     main/native/src
@@ -230,66 +170,60 @@ include_directories(
     ${BZIP2_INCLUDE_DIR}
     ${SNAPPY_INCLUDE_DIR}
     ${OPENSSL_INCLUDE_DIR}
-    ${D}/util
-)
-CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
-
-add_executable(test_bulk_crc32
-    ${D}/util/bulk_crc32.c
-    ${BULK_CRC_ARCH_SOURCE_FIlE}
-    ${T}/util/test_bulk_crc32.c
+    ${SRC}/util
 )
+configure_file(${CMAKE_SOURCE_DIR}/config.h.cmake ${CMAKE_BINARY_DIR}/config.h)
 
-SET(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
-add_dual_library(hadoop
+set(CMAKE_BUILD_WITH_INSTALL_RPATH TRUE)
+hadoop_add_dual_library(hadoop
     main/native/src/exception.c
-    ${D}/io/compress/lz4/Lz4Compressor.c
-    ${D}/io/compress/lz4/Lz4Decompressor.c
-    ${D}/io/compress/lz4/lz4.c
-    ${D}/io/compress/lz4/lz4hc.c
+    ${SRC}/io/compress/lz4/Lz4Compressor.c
+    ${SRC}/io/compress/lz4/Lz4Decompressor.c
+    ${SRC}/io/compress/lz4/lz4.c
+    ${SRC}/io/compress/lz4/lz4hc.c
     ${SNAPPY_SOURCE_FILES}
     ${OPENSSL_SOURCE_FILES}
-    ${D}/io/compress/zlib/ZlibCompressor.c
-    ${D}/io/compress/zlib/ZlibDecompressor.c
+    ${SRC}/io/compress/zlib/ZlibCompressor.c
+    ${SRC}/io/compress/zlib/ZlibDecompressor.c
     ${BZIP2_SOURCE_FILES}
-    ${D}/io/nativeio/NativeIO.c
-    ${D}/io/nativeio/errno_enum.c
-    ${D}/io/nativeio/file_descriptor.c
-    ${D}/io/nativeio/SharedFileDescriptorFactory.c
-    ${D}/net/unix/DomainSocket.c
-    ${D}/net/unix/DomainSocketWatcher.c
-    ${D}/security/JniBasedUnixGroupsMapping.c
-    ${D}/security/JniBasedUnixGroupsNetgroupMapping.c
-    ${D}/security/hadoop_group_info.c
-    ${D}/security/hadoop_user_info.c
-    ${D}/util/NativeCodeLoader.c
-    ${D}/util/NativeCrc32.c
-    ${D}/util/bulk_crc32.c
+    ${SRC}/io/nativeio/NativeIO.c
+    ${SRC}/io/nativeio/errno_enum.c
+    ${SRC}/io/nativeio/file_descriptor.c
+    ${SRC}/io/nativeio/SharedFileDescriptorFactory.c
+    ${SRC}/net/unix/DomainSocket.c
+    ${SRC}/net/unix/DomainSocketWatcher.c
+    ${SRC}/security/JniBasedUnixGroupsMapping.c
+    ${SRC}/security/JniBasedUnixGroupsNetgroupMapping.c
+    ${SRC}/security/hadoop_group_info.c
+    ${SRC}/security/hadoop_user_info.c
+    ${SRC}/util/NativeCodeLoader.c
+    ${SRC}/util/NativeCrc32.c
+    ${SRC}/util/bulk_crc32.c
     ${BULK_CRC_ARCH_SOURCE_FIlE}
 )
-if (NEED_LINK_DL)
+if(NEED_LINK_DL)
    set(LIB_DL dl)
-endif (NEED_LINK_DL)
+endif()
 
-IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
-    #
-    # By embedding '$ORIGIN' into the RPATH of libhadoop.so,
-    # dlopen will look in the directory containing libhadoop.so.
-    # However, $ORIGIN is not supported by all operating systems.
-    #
+hadoop_target_link_dual_libraries(hadoop ${LIB_DL} ${JAVA_JVM_LIBRARY})
+set(LIBHADOOP_VERSION "1.0.0")
+set_target_properties(hadoop PROPERTIES SOVERSION ${LIBHADOOP_VERSION})
+hadoop_dual_output_directory(hadoop target/usr/local/lib)
+
+# By embedding '$ORIGIN' into the RPATH of libhadoop.so, dlopen will look in
+# the directory containing libhadoop.so. However, $ORIGIN is not supported by
+# all operating systems.
+if(${CMAKE_SYSTEM_NAME} MATCHES "Linux|SunOS")
     set(RPATH "\$ORIGIN/")
-    if (EXTRA_LIBHADOOP_RPATH)
+    if(EXTRA_LIBHADOOP_RPATH)
         set(RPATH "${RPATH}:${EXTRA_LIBHADOOP_RPATH}/")
-    endif(EXTRA_LIBHADOOP_RPATH)
-    SET_TARGET_PROPERTIES(hadoop 
-        PROPERTIES INSTALL_RPATH "${RPATH}")
-ENDIF()
+    endif()
+    set_target_properties(hadoop PROPERTIES INSTALL_RPATH "${RPATH}")
+endif()
 
-target_link_dual_libraries(hadoop
-    ${LIB_DL}
-    ${JAVA_JVM_LIBRARY}
+# Build the CRC32 test executable.
+add_executable(test_bulk_crc32
+    ${SRC}/util/bulk_crc32.c
+    ${BULK_CRC_ARCH_SOURCE_FIlE}
+    ${TST}/util/test_bulk_crc32.c
 )
-SET(LIBHADOOP_VERSION "1.0.0")
-SET_TARGET_PROPERTIES(hadoop PROPERTIES
-    SOVERSION ${LIBHADOOP_VERSION})
-dual_output_directory(hadoop target/usr/local/lib)
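
The rewritten src/CMakeLists.txt applies the same optional-dependency pattern
to bzip2, snappy and OpenSSL: probe for the library, compile the corresponding
JNI wrappers only when it is found, and fail the build only when the matching
require.* Maven property was set. The pattern, reduced to a sketch with a
hypothetical "foo" library:

    # Probe for the optional library and its header.
    find_library(FOO_LIBRARY NAMES foo)
    find_path(FOO_INCLUDE_DIR NAMES foo.h)
    if(FOO_LIBRARY AND FOO_INCLUDE_DIR)
        # Found: record the runtime name and compile the wrappers.
        get_filename_component(HADOOP_FOO_LIBRARY ${FOO_LIBRARY} NAME)
        set(FOO_SOURCE_FILES "${SRC}/io/compress/foo/FooCompressor.c")
    else()
        # Not found: build without it, unless the build explicitly
        # demanded it (e.g. via -DREQUIRE_FOO=true).
        set(FOO_SOURCE_FILES "")
        set(FOO_INCLUDE_DIR "")
        if(REQUIRE_FOO)
            message(FATAL_ERROR "Required foo library could not be found.")
        endif()
    endif()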