Posted to common-commits@hadoop.apache.org by sh...@apache.org on 2018/08/26 00:44:22 UTC

[10/50] [abbrv] hadoop git commit: HDFS-13822. speedup libhdfs++ build (enable parallel build). Contributed by Allen Wittenauer and Pradeep Ambati

HDFS-13822. speedup libhdfs++ build (enable parallel build). Contributed by Allen Wittenauer and Pradeep Ambati


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a17eed1b
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a17eed1b
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a17eed1b

Branch: refs/heads/HDFS-12943
Commit: a17eed1b870ede9c6519b260e2dfe721b270bdbb
Parents: d723285
Author: Jason Lowe <jl...@apache.org>
Authored: Fri Aug 17 12:23:18 2018 -0500
Committer: Jason Lowe <jl...@apache.org>
Committed: Fri Aug 17 12:25:36 2018 -0500

----------------------------------------------------------------------
 .../hadoop-common/HadoopJNI.cmake               |   2 +
 .../hadoop-hdfs-native-client/pom.xml           | 105 ++++++++++---------
 .../src/CMakeLists.txt                          |  48 +++++++++
 .../src/main/native/libhdfs/CMakeLists.txt      |   6 +-
 .../main/native/libhdfspp/tests/CMakeLists.txt  |   4 +
 5 files changed, 114 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a17eed1b/hadoop-common-project/hadoop-common/HadoopJNI.cmake
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/HadoopJNI.cmake b/hadoop-common-project/hadoop-common/HadoopJNI.cmake
index 78d7ffd..bf0d73e 100644
--- a/hadoop-common-project/hadoop-common/HadoopJNI.cmake
+++ b/hadoop-common-project/hadoop-common/HadoopJNI.cmake
@@ -93,5 +93,7 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Linux")
 # Otherwise, use the standard FindJNI module to locate the JNI components.
 #
 else()
+    find_package(Java REQUIRED)
+    include(UseJava)
     find_package(JNI REQUIRED)
 endif()
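
For context on the HadoopJNI.cmake hunk above: find_package(Java REQUIRED) plus include(UseJava) pulls in CMake's stock FindJava/UseJava modules (javac location, add_jar(), and so on) alongside the existing FindJNI probe. Below is a minimal standalone sketch of how that trio is typically consumed; demo.c and the jni_demo target are made up for illustration and are not part of this commit.

    cmake_minimum_required(VERSION 3.1)
    project(jni_probe C)

    # FindJava/UseJava locate the JDK tools; FindJNI locates jni.h and libjvm.
    find_package(Java REQUIRED)
    include(UseJava)
    find_package(JNI REQUIRED)

    message(STATUS "javac:        ${Java_JAVAC_EXECUTABLE}")
    message(STATUS "JNI includes: ${JNI_INCLUDE_DIRS}")
    message(STATUS "JNI libs:     ${JNI_LIBRARIES}")

    # Hypothetical consumer target, just to show how the results are wired up.
    add_library(jni_demo SHARED demo.c)
    target_include_directories(jni_demo PRIVATE ${JNI_INCLUDE_DIRS})
    target_link_libraries(jni_demo ${JNI_LIBRARIES})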

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a17eed1b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
index 52d9257..ded1c0d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
@@ -201,26 +201,36 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <build>
         <plugins>
           <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
             <executions>
               <execution>
-                <id>make</id>
+                <id>cmake-compile</id>
                 <phase>compile</phase>
-                <goals><goal>run</goal></goals>
+                <goals><goal>cmake-compile</goal></goals>
                 <configuration>
-                  <target>
-                    <mkdir dir="${project.build.directory}"/>
-                    <exec executable="cmake" dir="${project.build.directory}" failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}  -DHADOOP_BUILD=1 -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
-                      <arg line="${native_cmake_args}"/>
-                    </exec>
-                    <exec executable="make" dir="${project.build.directory}" failonerror="true">
-                      <arg line="${native_make_args}"/>
-                    </exec>
-                  </target>
+                  <source>${basedir}/src</source>
+                  <vars>
+                    <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
+                    <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+                    <REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
+                    <REQUIRE_VALGRIND>${require.valgrind}</REQUIRE_VALGRIND>
+                    <HADOOP_BUILD>1</HADOOP_BUILD>
+                    <REQUIRE_LIBWEBHDFS>${require.libwebhdfs}</REQUIRE_LIBWEBHDFS>
+                    <REQUIRE_OPENSSL>${require.openssl}</REQUIRE_OPENSSL>
+                    <CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
+                    <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
+                    <CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
+                  </vars>
+                  <output>${project.build.directory}</output>
                 </configuration>
               </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
               <execution>
                 <id>native_tests</id>
                 <phase>test</phase>
@@ -236,6 +246,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                       <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                       <!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
                       <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
+                      <env key="DYLD_LIBRARY_PATH" value="${env.DYLD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                     </exec>
                   </target>
                 </configuration>
@@ -246,7 +257,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       </build>
     </profile>
     <profile>
-      <id>test-patch</id>
+      <id>native-clang</id>
       <activation>
         <activeByDefault>false</activeByDefault>
       </activation>
@@ -256,35 +267,40 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <build>
         <plugins>
           <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
             <executions>
               <execution>
-                <id>make_altern</id>
+                <id>cmake-compile-clang</id>
                 <phase>compile</phase>
-                <goals><goal>run</goal></goals>
+                <goals><goal>cmake-compile</goal></goals>
                 <configuration>
-                  <target>
-                    <mkdir dir="${project.build.directory}/altern"/>
-                    <condition property="c_compiler" value="clang" else="gcc">
-                      <contains string="${env.CC}" substring="gcc"/>
-                    </condition>
-                    <condition property="cxx_compiler" value="clang++" else="g++">
-                      <contains string="${env.CXX}" substring="g++"/>
-                    </condition>
-                    <exec executable="cmake" dir="${project.build.directory}/altern" failonerror="true">
-                      <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/altern/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}  -DHADOOP_BUILD=1 -DREQUIRE_LIBWEBHDFS=${require.libwebhdfs} -DREQUIRE_FUSE=${require.fuse} -DREQUIRE_VALGRIND=${require.valgrind} "/>
-                      <arg line="-DCMAKE_C_COMPILER=${c_compiler} -DCMAKE_CXX_COMPILER=${cxx_compiler}"/>
-                      <arg line="${native_cmake_args}"/>
-                    </exec>
-                    <exec executable="make" dir="${project.build.directory}/altern" failonerror="true">
-                      <arg line="${native_make_args}"/>
-                    </exec>
-                  </target>
+                  <source>${basedir}/src</source>
+                  <vars>
+                    <CMAKE_C_COMPILER>clang</CMAKE_C_COMPILER>
+                    <CMAKE_CXX_COMPILER>clang++</CMAKE_CXX_COMPILER>
+                    <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
+                    <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+                    <REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
+                    <REQUIRE_VALGRIND>${require.valgrind}</REQUIRE_VALGRIND>
+                    <HADOOP_BUILD>1</HADOOP_BUILD>
+                    <REQUIRE_LIBWEBHDFS>${require.libwebhdfs}</REQUIRE_LIBWEBHDFS>
+                    <REQUIRE_OPENSSL>${require.openssl}</REQUIRE_OPENSSL>
+                    <CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
+                    <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
+                    <CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
+                  </vars>
+                  <output>${project.build.directory}/clang</output>
                 </configuration>
               </execution>
+            </executions>
+          </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
               <execution>
-                <id>native_tests_altern</id>
+                <id>native_tests_clang</id>
                 <phase>test</phase>
                 <goals><goal>run</goal></goals>
                 <configuration>
@@ -292,26 +308,17 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
                   <target>
                     <property name="compile_classpath" refid="maven.compile.classpath"/>
                     <property name="test_classpath" refid="maven.test.classpath"/>
-                    <exec executable="ctest" failonerror="true" dir="${project.build.directory}/altern">
+                    <exec executable="ctest" failonerror="true" dir="${project.build.directory}/clang">
                       <arg line="--output-on-failure"/>
                       <arg line="${native_ctest_args}"/>
                       <env key="CLASSPATH" value="${test_classpath}:${compile_classpath}"/>
                       <!-- Make sure libhadoop.so is on LD_LIBRARY_PATH. -->
-                      <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/altern/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
+                      <env key="LD_LIBRARY_PATH" value="${env.LD_LIBRARY_PATH}:${project.build.directory}/clang/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
+                      <env key="DYLD_LIBRARY_PATH" value="${env.DYLD_LIBRARY_PATH}:${project.build.directory}/clang/target/usr/local/lib:${hadoop.common.build.dir}/native/target/usr/local/lib"/>
                     </exec>
                   </target>
                 </configuration>
               </execution>
-              <execution>
-                <id>clean_altern</id>
-                <phase>test</phase>
-                <goals><goal>run</goal></goals>
-                <configuration>
-                  <target>
-                    <delete dir="${project.build.directory}/altern" includeemptydirs="true"/>
-                  </target>
-                </configuration>
-              </execution>
             </executions>
           </plugin>
         </plugins>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a17eed1b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
index a3f8f2d..1813ec1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/CMakeLists.txt
@@ -88,6 +88,54 @@ function(link_libhdfs_test NAME LIBRARY)
 target_link_libraries("${NAME}_${LIBRARY}" ${LIBRARY} ${ARGN})
 endfunction()
 
+
+set(STORED_CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES})
+hadoop_set_find_shared_library_without_version()
+set(OPENSSL_NAME "crypto")
+if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
+    SET(OPENSSL_NAME "eay32")
+endif()
+message("CUSTOM_OPENSSL_PREFIX = ${CUSTOM_OPENSSL_PREFIX}")
+find_library(OPENSSL_LIBRARY
+    NAMES ${OPENSSL_NAME}
+    PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/lib
+          ${CUSTOM_OPENSSL_PREFIX}/lib64 ${CUSTOM_OPENSSL_LIB} NO_DEFAULT_PATH)
+find_library(OPENSSL_LIBRARY NAMES ${OPENSSL_NAME})
+find_path(OPENSSL_INCLUDE_DIR
+    NAMES openssl/evp.h
+    PATHS ${CUSTOM_OPENSSL_PREFIX} ${CUSTOM_OPENSSL_PREFIX}/include
+          ${CUSTOM_OPENSSL_INCLUDE} NO_DEFAULT_PATH)
+find_path(OPENSSL_INCLUDE_DIR NAMES openssl/evp.h)
+set(CMAKE_FIND_LIBRARY_SUFFIXES ${STORED_CMAKE_FIND_LIBRARY_SUFFIXES})
+set(USABLE_OPENSSL 0)
+if(OPENSSL_LIBRARY AND OPENSSL_INCLUDE_DIR)
+    include(CheckCSourceCompiles)
+    set(OLD_CMAKE_REQUIRED_INCLUDES ${CMAKE_REQUIRED_INCLUDES})
+    set(CMAKE_REQUIRED_INCLUDES ${OPENSSL_INCLUDE_DIR})
+    check_c_source_compiles("#include \"${OPENSSL_INCLUDE_DIR}/openssl/evp.h\"\nint main(int argc, char **argv) { return !EVP_aes_256_ctr; }" HAS_NEW_ENOUGH_OPENSSL)
+    set(CMAKE_REQUIRED_INCLUDES ${OLD_CMAKE_REQUIRED_INCLUDES})
+    if(NOT HAS_NEW_ENOUGH_OPENSSL)
+        message("The OpenSSL library installed at ${OPENSSL_LIBRARY} is too old.  You need a version at least new enough to have EVP_aes_256_ctr.")
+    else()
+        SET(USABLE_OPENSSL 1)
+    endif()
+endif()
+if(USABLE_OPENSSL)
+    get_filename_component(HADOOP_OPENSSL_LIBRARY ${OPENSSL_LIBRARY} NAME)
+    set(OPENSSL_SOURCE_FILES
+        "${SRC}/crypto/OpensslCipher.c"
+        "${SRC}/crypto/random/OpensslSecureRandom.c")
+    set(REQUIRE_OPENSSL ${REQUIRE_OPENSSL}) # Stop warning about unused variable.
+else()
+    message("Cannot find a usable OpenSSL library. OPENSSL_LIBRARY=${OPENSSL_LIBRARY}, OPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR}, CUSTOM_OPENSSL_LIB=${CUSTOM_OPENSSL_LIB}, CUSTOM_OPENSSL_PREFIX=${CUSTOM_OPENSSL_PREFIX}, CUSTOM_OPENSSL_INCLUDE=${CUSTOM_OPENSSL_INCLUDE}")
+    if(REQUIRE_OPENSSL)
+        message(FATAL_ERROR "Terminating build because require.openssl was specified.")
+    endif()
+    set(OPENSSL_LIBRARY "")
+    set(OPENSSL_INCLUDE_DIR "")
+    set(OPENSSL_SOURCE_FILES "")
+endif()
+
 add_subdirectory(main/native/libhdfs)
 add_subdirectory(main/native/libhdfs-tests)
 add_subdirectory(main/native/libhdfspp)
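
For context on the OpenSSL detection added above: it honors the CUSTOM_OPENSSL_PREFIX/LIB/INCLUDE hints first, falls back to the default search paths, and only accepts a libcrypto new enough to provide EVP_aes_256_ctr. The real consumers of OPENSSL_LIBRARY, OPENSSL_INCLUDE_DIR, and OPENSSL_SOURCE_FILES live in the subdirectories added just below; the fragment that follows is a hypothetical consumer, shown only to illustrate how those variables would typically be attached to a target.

    # Hypothetical consumer of the detection results above; not part of this
    # commit. Assumes USABLE_OPENSSL and the OPENSSL_* variables are set as in
    # the hunk above.
    if(USABLE_OPENSSL)
        add_library(crypto_support STATIC ${OPENSSL_SOURCE_FILES})
        target_include_directories(crypto_support PRIVATE ${OPENSSL_INCLUDE_DIR})
        target_link_libraries(crypto_support ${OPENSSL_LIBRARY})
    endif()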

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a17eed1b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/CMakeLists.txt
index 2883585..cac1335 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfs/CMakeLists.txt
@@ -74,9 +74,11 @@ endif()
 # introducing an abstraction layer over the sys/mman.h functions.
 if(NOT WIN32)
     build_libhdfs_test(test_libhdfs_vecsum hdfs vecsum.c)
+    set(THREADS_PREFER_PTHREAD_FLAG ON)
+    find_package(Threads REQUIRED)
     if(CMAKE_SYSTEM_NAME MATCHES "Darwin")
-        link_libhdfs_test(test_libhdfs_vecsum hdfs pthread)
+        link_libhdfs_test(test_libhdfs_vecsum hdfs ${CMAKE_THREAD_LIBS_INIT})
     else()
-        link_libhdfs_test(test_libhdfs_vecsum hdfs pthread rt)
+        link_libhdfs_test(test_libhdfs_vecsum hdfs ${CMAKE_THREAD_LIBS_INIT} rt)
     endif()
 endif()
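
For context on the libhdfs test change above: THREADS_PREFER_PTHREAD_FLAG ON asks FindThreads to prefer the -pthread compiler flag where the toolchain supports it, and CMAKE_THREAD_LIBS_INIT is the legacy result variable FindThreads fills in (the imported target Threads::Threads is the modern equivalent on CMake 3.1 and later). Below is a minimal standalone sketch; worker.c and the worker target are assumptions for illustration only.

    cmake_minimum_required(VERSION 3.1)
    project(threads_probe C)

    # Prefer -pthread over -lpthread where the compiler supports it.
    set(THREADS_PREFER_PTHREAD_FLAG ON)
    find_package(Threads REQUIRED)

    add_executable(worker worker.c)
    # Legacy variable, as used in the hunk above:
    target_link_libraries(worker ${CMAKE_THREAD_LIBS_INIT})
    # Modern equivalent: target_link_libraries(worker Threads::Threads)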

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a17eed1b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
index 6157902..59fdbf2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
@@ -143,6 +143,10 @@ include_directories (
 add_library(hdfspp_test_shim_static STATIC hdfs_shim.c libhdfs_wrapper.c libhdfspp_wrapper.cc ${LIBHDFSPP_BINDING_C}/hdfs.cc)
 add_library(hdfspp_test_static STATIC ${LIBHDFSPP_BINDING_C}/hdfs.cc)
 
+# Add dependencies
+add_dependencies(hdfspp_test_shim_static proto)
+add_dependencies(hdfspp_test_static proto)
+
 # TODO: get all of the mini dfs library bits here in one place
 # add_library(hdfspp_mini_cluster     native_mini_dfs ${JAVA_JVM_LIBRARY} )
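
For context on the add_dependencies lines above: add_dependencies only orders target builds, it does not link anything; here it ensures the proto target (presumably the protobuf code-generation step in libhdfspp) finishes before the two static test libraries start compiling, which matters once make runs with -jN. Below is a minimal sketch of the same ordering pattern; gen_headers, generated.h, and consumer.cc are made-up names, not taken from the Hadoop tree.

    cmake_minimum_required(VERSION 3.1)
    project(ordering_demo CXX)

    # A custom target standing in for "proto": it produces a generated header.
    add_custom_target(gen_headers
        COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/generated.h)

    add_library(consumer STATIC consumer.cc)
    target_include_directories(consumer PRIVATE ${CMAKE_CURRENT_BINARY_DIR})

    # Without this line a parallel build (make -jN) may compile consumer.cc
    # before generated.h exists; with it, gen_headers always runs first.
    add_dependencies(consumer gen_headers)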
 

