You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ar...@apache.org on 2016/01/16 01:10:53 UTC
[27/43] hadoop git commit: HADOOP-8887. Use a Maven plugin to build
the native code using CMake (cmccabe)
HADOOP-8887. Use a Maven plugin to build the native code using CMake (cmccabe)
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b1ed28fa
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b1ed28fa
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b1ed28fa
Branch: refs/heads/HDFS-1312
Commit: b1ed28fa77cb2fab80c54f9dfeb5d8b7139eca34
Parents: 9d04f26
Author: Colin Patrick Mccabe <cm...@cloudera.com>
Authored: Thu Jan 14 11:02:34 2016 -0800
Committer: Colin Patrick Mccabe <cm...@cloudera.com>
Committed: Thu Jan 14 11:02:34 2016 -0800
----------------------------------------------------------------------
BUILDING.txt | 4 +
hadoop-common-project/hadoop-common/CHANGES.txt | 3 +
hadoop-common-project/hadoop-common/pom.xml | 71 ++--
.../hadoop-hdfs-native-client/pom.xml | 29 +-
.../hadoop-mapreduce-client-nativetask/pom.xml | 48 ++-
.../maven/plugin/cmakebuilder/CompileMojo.java | 269 +++++++++++++
.../maven/plugin/cmakebuilder/TestMojo.java | 383 +++++++++++++++++++
.../apache/hadoop/maven/plugin/util/Exec.java | 41 +-
hadoop-tools/hadoop-pipes/pom.xml | 64 +---
.../hadoop-yarn-server-nodemanager/pom.xml | 45 +--
.../test/test-container-executor.c | 7 +
11 files changed, 818 insertions(+), 146 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/BUILDING.txt
----------------------------------------------------------------------
diff --git a/BUILDING.txt b/BUILDING.txt
index 4399ff0..fc68a0b 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -179,6 +179,10 @@ Maven build goals:
* -Dtest=<TESTCLASSNAME>,<TESTCLASSNAME#METHODNAME>,....
* -Dtest.exclude=<TESTCLASSNAME>
* -Dtest.exclude.pattern=**/<TESTCLASSNAME1>.java,**/<TESTCLASSNAME2>.java
+ * To run all native unit tests, use: mvn test -Pnative -Dtest=allNative
+ * To run a specific native unit test, use: mvn test -Pnative -Dtest=<test>
+ For example, to run test_bulk_crc32, you would use:
+ mvn test -Pnative -Dtest=test_bulk_crc32
Intel ISA-L build options:
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index e3ada10..a117d50 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -666,6 +666,9 @@ Release 2.9.0 - UNRELEASED
HADOOP-12683. Add number of samples in last interval in snapshot of
MutableStat. (Vikram Srivastava via kasha)
+ HADOOP-8887. Use a Maven plugin to build the native code using CMake
+ (cmccabe)
+
BUG FIXES
HADOOP-12655. TestHttpServer.testBindAddress bind port range is wider
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-common-project/hadoop-common/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index d8e4621..108834b 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -636,45 +636,56 @@
</executions>
</plugin>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
- <id>make</id>
+ <id>cmake-compile</id>
<phase>compile</phase>
- <goals><goal>run</goal></goals>
+ <goals><goal>cmake-compile</goal></goals>
<configuration>
- <target>
- <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
- <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_BZIP2=${require.bzip2} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include} -DREQUIRE_ISAL=${require.isal} -DCUSTOM_ISAL_PREFIX=${isal.prefix} -DCUSTOM_ISAL_LIB=${isal.lib} -DREQUIRE_OPENSSL=${require.openssl} -DCUSTOM_OPENSSL_PREFIX=${openssl.prefix} -DCUSTOM_OPENSSL_LIB=${openssl.lib} -DCUSTOM_OPENSSL_INCLUDE=${openssl.include} -DEXTRA_LIBHADOOP_RPATH=${extra.libhadoop.rpath}"/>
- </exec>
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
- <arg line="VERBOSE=1"/>
- </exec>
- <!-- The second make is a workaround for HADOOP-9215. It can
- be removed when version 2.6 of cmake is no longer supported . -->
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
- </target>
+ <source>${basedir}/src</source>
+ <vars>
+ <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
+ <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+ <REQUIRE_BZIP2>${require.bzip2}</REQUIRE_BZIP2>
+ <REQUIRE_SNAPPY>${require.snappy}</REQUIRE_SNAPPY>
+ <CUSTOM_SNAPPY_PREFIX>${snappy.prefix}</CUSTOM_SNAPPY_PREFIX>
+ <CUSTOM_SNAPPY_LIB>${snappy.lib}</CUSTOM_SNAPPY_LIB>
+ <CUSTOM_SNAPPY_INCLUDE>${snappy.include}</CUSTOM_SNAPPY_INCLUDE>
+ <REQUIRE_ISAL>${require.isal}</REQUIRE_ISAL>
+ <CUSTOM_ISAL_PREFIX>${isal.prefix}</CUSTOM_ISAL_PREFIX>
+ <CUSTOM_ISAL_LIB>${isal.lib}</CUSTOM_ISAL_LIB>
+ <REQUIRE_OPENSSL>${require.openssl}</REQUIRE_OPENSSL>
+ <CUSTOM_OPENSSL_PREFIX>${openssl.prefix}</CUSTOM_OPENSSL_PREFIX>
+ <CUSTOM_OPENSSL_LIB>${openssl.lib}</CUSTOM_OPENSSL_LIB>
+ <CUSTOM_OPENSSL_INCLUDE>${openssl.include}</CUSTOM_OPENSSL_INCLUDE>
+ <EXTRA_LIBHADOOP_RPATH>${extra.libhadoop.rpath}</EXTRA_LIBHADOOP_RPATH>
+ </vars>
</configuration>
</execution>
<execution>
- <id>native_tests</id>
+ <id>test_bulk_crc32</id>
+ <goals><goal>cmake-test</goal></goals>
<phase>test</phase>
- <goals><goal>run</goal></goals>
<configuration>
- <target>
- <exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native">
- <arg value="-c"/>
- <arg value="[ x$SKIPTESTS = xtrue ] || ${project.build.directory}/native/test_bulk_crc32"/>
- <env key="SKIPTESTS" value="${skipTests}"/>
- </exec>
- <exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native">
- <arg value="-c"/>
- <arg value="[ ! -f ${project.build.directory}/native/erasure_code_test ] || ${project.build.directory}/native/erasure_code_test"/>
- <env key="SKIPTESTS" value="${skipTests}"/>
- <env key="LD_LIBRARY_PATH" value="${LD_LIBRARY_PATH}:${isal.lib}:${isal.prefix}:/usr/lib"/>
- </exec>
- </target>
+ <binary>${project.build.directory}/native/test_bulk_crc32</binary>
+ <timeout>1200</timeout>
+ <results>${project.build.directory}/native-results</results>
+ </configuration>
+ </execution>
+ <execution>
+ <id>erasure_code_test</id>
+ <goals><goal>cmake-test</goal></goals>
+ <phase>test</phase>
+ <configuration>
+ <binary>${project.build.directory}/native/erasure_code_test</binary>
+ <timeout>300</timeout>
+ <results>${project.build.directory}/native-results</results>
+ <skipIfMissing>true</skipIfMissing>
+ <env>
+ <LD_LIBRARY_PATH>${LD_LIBRARY_PATH}:${isal.lib}:${isal.prefix}:/usr/lib</LD_LIBRARY_PATH>
+ </env>
</configuration>
</execution>
</executions>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
index 9f2c77d..0ce5246 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-native-client/pom.xml
@@ -187,24 +187,29 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
- <id>make</id>
+ <id>cmake-compile</id>
<phase>compile</phase>
- <goals><goal>run</goal></goals>
+ <goals><goal>cmake-compile</goal></goals>
<configuration>
- <target>
- <mkdir dir="${project.build.directory}"/>
- <exec executable="cmake" dir="${project.build.directory}" failonerror="true">
- <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_FUSE=${require.fuse}"/>
- </exec>
- <exec executable="make" dir="${project.build.directory}" failonerror="true">
- </exec>
- </target>
+ <source>${basedir}/src</source>
+ <vars>
+ <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
+ <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+ <REQUIRE_FUSE>${require.fuse}</REQUIRE_FUSE>
+ </vars>
+ <output>${project.build.directory}</output>
</configuration>
</execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
<execution>
<id>native_tests</id>
<phase>test</phase>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
index a5532f1..45ba85e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-nativetask/pom.xml
@@ -178,31 +178,41 @@
overwrite="true">
<fileset dir="${basedir}/src/main/native/testData" />
</copy>
- <exec executable="cmake" dir="${project.build.directory}/native"
- failonerror="true">
- <arg line="${basedir}/src/ -DGENERATED_JAVAH=${project.build.directory}/native/javah -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model} -DREQUIRE_SNAPPY=${require.snappy} -DCUSTOM_SNAPPY_PREFIX=${snappy.prefix} -DCUSTOM_SNAPPY_LIB=${snappy.lib} -DCUSTOM_SNAPPY_INCLUDE=${snappy.include}" />
- </exec>
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
- <arg line="VERBOSE=1" />
- </exec>
- <!-- The second make is a workaround for HADOOP-9215. It can be
- removed when version 2.6 of cmake is no longer supported . -->
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
</target>
</configuration>
</execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
+ <executions>
<execution>
- <id>native_tests</id>
+ <id>cmake-compile</id>
+ <phase>compile</phase>
+ <goals><goal>cmake-compile</goal></goals>
+ <configuration>
+ <source>${basedir}/src</source>
+ <vars>
+ <GENERATED_JAVAH>${project.build.directory}/native/javah</GENERATED_JAVAH>
+ <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+ <REQUIRE_SNAPPY>${require.snappy}</REQUIRE_SNAPPY>
+ <CUSTOM_SNAPPY_PREFIX>${snappy.prefix}</CUSTOM_SNAPPY_PREFIX>
+ <CUSTOM_SNAPPY_LIB>${snappy.lib}</CUSTOM_SNAPPY_LIB>
+ <CUSTOM_SNAPPY_INCLUDE>${snappy.include}</CUSTOM_SNAPPY_INCLUDE>
+ </vars>
+ </configuration>
+ </execution>
+ <execution>
+ <id>nttest</id>
<phase>test</phase>
- <goals><goal>run</goal></goals>
+ <goals><goal>cmake-test</goal></goals>
<configuration>
- <target>
- <exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native/test">
- <arg value="-c"/>
- <arg value="[ x$SKIPTESTS = xtrue ] || sh test.sh"/>
- <env key="SKIPTESTS" value="${skipTests}"/>
- </exec>
- </target>
+ <testName>test-native-task</testName>
+ <source>${basedir}/src</source>
+ <binary>${project.build.directory}/native/test/nttest</binary>
+ <args><arg>--gtest_filter=-Perf.</arg></args>
+ <results>${project.build.directory}/native/test/native-results</results>
</configuration>
</execution>
</executions>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
new file mode 100644
index 0000000..e44bcf8
--- /dev/null
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/CompileMojo.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.maven.plugin.cmakebuilder;
+
+import org.apache.hadoop.maven.plugin.util.Exec.OutputBufferThread;
+import org.apache.hadoop.maven.plugin.util.Exec;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.LifecyclePhase;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Goal which builds the native sources
+ */
+@Mojo(name="cmake-compile", defaultPhase = LifecyclePhase.COMPILE)
+public class CompileMojo extends AbstractMojo {
+ private static int availableProcessors =
+ Runtime.getRuntime().availableProcessors();
+
+ /**
+ * Location of the build products.
+ */
+ @Parameter(defaultValue="${project.build.directory}/native")
+ private File output;
+
+ /**
+ * Location of the source files.
+ * This should be where the sources are checked in.
+ */
+ @Parameter(defaultValue="${basedir}/src/main/native", required=true)
+ private File source;
+
+ /**
+ * CMake build target.
+ */
+ @Parameter
+ private String target;
+
+ /**
+ * Environment variables to pass to CMake.
+ *
+ * Note that it is usually better to use a CMake variable than an environment
+ * variable. To quote the CMake FAQ:
+ *
+ * "One should avoid using environment variables for controlling the flow of
+ * CMake code (such as in IF commands). The build system generated by CMake
+ * may re-run CMake automatically when CMakeLists.txt files change. The
+ * environment in which this is executed is controlled by the build system and
+ * may not match that in which CMake was originally run. If you want to
+ * control build settings on the CMake command line, you need to use cache
+ * variables set with the -D option. The settings will be saved in
+ * CMakeCache.txt so that they don't have to be repeated every time CMake is
+ * run on the same build tree."
+ */
+ @Parameter
+ private Map<String, String> env;
+
+ /**
+ * CMake cached variables to set.
+ */
+ @Parameter
+ private Map<String, String> vars;
+
+ // TODO: support Windows
+ private static void validatePlatform() throws MojoExecutionException {
+ if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
+ throw new MojoExecutionException("CMakeBuilder does not yet support " +
+ "the Windows platform.");
+ }
+ }
+
+ public void execute() throws MojoExecutionException {
+ long start = System.nanoTime();
+ validatePlatform();
+ runCMake();
+ runMake();
+ runMake(); // The second make is a workaround for HADOOP-9215. It can be
+ // removed when cmake 2.6 is no longer supported.
+ long end = System.nanoTime();
+ getLog().info("cmake compilation finished successfully in " +
+ TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
+ " millisecond(s).");
+ }
+
+ /**
+ * Validate that source parameters look sane.
+ */
+ static void validateSourceParams(File source, File output)
+ throws MojoExecutionException {
+ String cOutput = null, cSource = null;
+ try {
+ cOutput = output.getCanonicalPath();
+ } catch (IOException e) {
+ throw new MojoExecutionException("error getting canonical path " +
+ "for output", e);
+ }
+ try {
+ cSource = source.getCanonicalPath();
+ } catch (IOException e) {
+ throw new MojoExecutionException("error getting canonical path " +
+ "for source", e);
+ }
+
+ // This doesn't catch all the bad cases-- we could be following symlinks or
+ // hardlinks, etc. However, this will usually catch a common mistake.
+ if (cSource.startsWith(cOutput)) {
+ throw new MojoExecutionException("The source directory must not be " +
+ "inside the output directory (it would be destroyed by " +
+ "'mvn clean')");
+ }
+ }
+
+ public void runCMake() throws MojoExecutionException {
+ validatePlatform();
+ validateSourceParams(source, output);
+
+ if (output.mkdirs()) {
+ getLog().info("mkdirs '" + output + "'");
+ }
+ List<String> cmd = new LinkedList<String>();
+ cmd.add("cmake");
+ cmd.add(source.getAbsolutePath());
+ for (Map.Entry<String, String> entry : vars.entrySet()) {
+ if ((entry.getValue() != null) && (!entry.getValue().equals(""))) {
+ cmd.add("-D" + entry.getKey() + "=" + entry.getValue());
+ }
+ }
+ cmd.add("-G");
+ cmd.add("Unix Makefiles");
+ String prefix = "";
+ StringBuilder bld = new StringBuilder();
+ for (String c : cmd) {
+ bld.append(prefix).append(c);
+ prefix = " ";
+ }
+ getLog().info("Running " + bld.toString());
+ getLog().info("with extra environment variables " + Exec.envToString(env));
+ ProcessBuilder pb = new ProcessBuilder(cmd);
+ pb.directory(output);
+ pb.redirectErrorStream(true);
+ Exec.addEnvironment(pb, env);
+ Process proc = null;
+ OutputBufferThread outThread = null;
+ int retCode = -1;
+ try {
+ proc = pb.start();
+ outThread = new OutputBufferThread(proc.getInputStream());
+ outThread.start();
+
+ retCode = proc.waitFor();
+ if (retCode != 0) {
+ throw new MojoExecutionException("CMake failed with error code " +
+ retCode);
+ }
+ } catch (IOException e) {
+ throw new MojoExecutionException("Error executing CMake", e);
+ } catch (InterruptedException e) {
+ throw new MojoExecutionException("Interrupted while waiting for " +
+ "CMake process", e);
+ } finally {
+ if (proc != null) {
+ proc.destroy();
+ }
+ if (outThread != null) {
+ try {
+ outThread.interrupt();
+ outThread.join();
+ } catch (InterruptedException e) {
+ getLog().error("Interrupted while joining output thread", e);
+ }
+ if (retCode != 0) {
+ for (String line : outThread.getOutput()) {
+ getLog().warn(line);
+ }
+ }
+ }
+ }
+ }
+
+ public void runMake() throws MojoExecutionException {
+ List<String> cmd = new LinkedList<String>();
+ cmd.add("make");
+ // Run parallel make jobs, one per available processor (see
+ // availableProcessors above).
+ cmd.add("-j");
+ cmd.add(String.valueOf(availableProcessors));
+ cmd.add("VERBOSE=1");
+ if (target != null) {
+ cmd.add(target);
+ }
+ StringBuilder bld = new StringBuilder();
+ String prefix = "";
+ for (String c : cmd) {
+ bld.append(prefix).append(c);
+ prefix = " ";
+ }
+ getLog().info("Running " + bld.toString());
+ ProcessBuilder pb = new ProcessBuilder(cmd);
+ pb.directory(output);
+ Process proc = null;
+ int retCode = -1;
+ OutputBufferThread stdoutThread = null, stderrThread = null;
+ try {
+ proc = pb.start();
+ stdoutThread = new OutputBufferThread(proc.getInputStream());
+ stderrThread = new OutputBufferThread(proc.getErrorStream());
+ stdoutThread.start();
+ stderrThread.start();
+ retCode = proc.waitFor();
+ if (retCode != 0) {
+ throw new MojoExecutionException("make failed with error code " +
+ retCode);
+ }
+ } catch (InterruptedException e) {
+ throw new MojoExecutionException("Interrupted during Process#waitFor", e);
+ } catch (IOException e) {
+ throw new MojoExecutionException("Error executing make", e);
+ } finally {
+ if (stdoutThread != null) {
+ try {
+ stdoutThread.join();
+ } catch (InterruptedException e) {
+ getLog().error("Interrupted while joining stdoutThread", e);
+ }
+ if (retCode != 0) {
+ for (String line: stdoutThread.getOutput()) {
+ getLog().warn(line);
+ }
+ }
+ }
+ if (stderrThread != null) {
+ try {
+ stderrThread.join();
+ } catch (InterruptedException e) {
+ getLog().error("Interrupted while joining stderrThread", e);
+ }
+ // We always print stderr, since it contains the compiler warning
+ // messages. These are interesting even if compilation succeeded.
+ for (String line: stderrThread.getOutput()) {
+ getLog().warn(line);
+ }
+ }
+ if (proc != null) proc.destroy();
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
new file mode 100644
index 0000000..fa7176b
--- /dev/null
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/cmakebuilder/TestMojo.java
@@ -0,0 +1,383 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.maven.plugin.cmakebuilder;
+
+import org.apache.hadoop.maven.plugin.util.Exec;
+import org.apache.maven.execution.MavenSession;
+import org.apache.maven.plugin.AbstractMojo;
+import org.apache.maven.plugin.MojoExecutionException;
+import org.apache.maven.plugins.annotations.LifecyclePhase;
+import org.apache.maven.plugins.annotations.Mojo;
+import org.apache.maven.plugins.annotations.Parameter;
+
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Goal which runs a native unit test.
+ */
+@Mojo(name="cmake-test", defaultPhase = LifecyclePhase.TEST)
+public class TestMojo extends AbstractMojo {
+ /**
+ * A value for -Dtest= that runs all native tests.
+ */
+ private final static String ALL_NATIVE = "allNative";
+
+ /**
+ * Location of the binary to run.
+ */
+ @Parameter(required=true)
+ private File binary;
+
+ /**
+ * Name of this test.
+ *
+ * Defaults to the basename of the binary. So if your binary is /foo/bar/baz,
+ * this will default to 'baz.'
+ */
+ @Parameter
+ private String testName;
+
+ /**
+ * Environment variables to pass to the binary.
+ *
+ */
+ @Parameter
+ private Map<String, String> env;
+
+ /**
+ * Arguments to pass to the binary.
+ */
+ @Parameter
+ private List<String> args = new LinkedList<String>();
+
+ /**
+ * Number of seconds to wait before declaring the test failed.
+ *
+ */
+ @Parameter(defaultValue="600")
+ private int timeout;
+
+ /**
+ * Path to results directory.
+ */
+ @Parameter(defaultValue="native-results")
+ private File results;
+
+ /**
+ * A list of preconditions which must be true for this test to be run.
+ */
+ @Parameter
+ private Map<String, String> preconditions = new HashMap<String, String>();
+
+ /**
+ * If true, pass over the test without an error if the binary is missing.
+ */
+ @Parameter(defaultValue="false")
+ private boolean skipIfMissing;
+
+ /**
+ * What result to expect from the test
+ *
+ * Can be either "success", "failure", or "any".
+ */
+ @Parameter(defaultValue="success")
+ private String expectedResult;
+
+ /**
+ * The Maven Session Object
+ */
+ @Parameter(defaultValue="${session}", readonly=true, required=true)
+ private MavenSession session;
+
+ // TODO: support Windows
+ private static void validatePlatform() throws MojoExecutionException {
+ if (System.getProperty("os.name").toLowerCase().startsWith("windows")) {
+ throw new MojoExecutionException("CMakeBuilder does not yet support " +
+ "the Windows platform.");
+ }
+ }
+
+ /**
+ * The test thread waits for the process to terminate.
+ *
+ * Since Process#waitFor doesn't take a timeout argument, we simulate one by
+ * interrupting this thread after a certain amount of time has elapsed.
+ */
+ private static class TestThread extends Thread {
+ private Process proc;
+ private int retCode = -1;
+
+ public TestThread(Process proc) {
+ this.proc = proc;
+ }
+
+ public void run() {
+ try {
+ retCode = proc.waitFor();
+ } catch (InterruptedException e) {
+ retCode = -1;
+ }
+ }
+
+ public int retCode() {
+ return retCode;
+ }
+ }
+
+ /**
+ * Write to the status file.
+ *
+ * The status file will contain a string describing the exit status of the
+ * test. It will be SUCCESS if the test returned success (return code 0), a
+ * numerical code if it returned a non-zero status, or IN_PROGRESS or
+ * TIMED_OUT.
+ */
+ private void writeStatusFile(String status) throws IOException {
+ FileOutputStream fos = new FileOutputStream(new File(results,
+ testName + ".pstatus"));
+ BufferedWriter out = null;
+ try {
+ out = new BufferedWriter(new OutputStreamWriter(fos, "UTF8"));
+ out.write(status + "\n");
+ } finally {
+ if (out != null) {
+ out.close();
+ } else {
+ fos.close();
+ }
+ }
+ }
+
+ private static boolean isTruthy(String str) {
+ if (str == null)
+ return false;
+ if (str.equalsIgnoreCase(""))
+ return false;
+ if (str.equalsIgnoreCase("false"))
+ return false;
+ if (str.equalsIgnoreCase("no"))
+ return false;
+ if (str.equalsIgnoreCase("off"))
+ return false;
+ if (str.equalsIgnoreCase("disable"))
+ return false;
+ return true;
+ }
+
+
+ final static private String VALID_PRECONDITION_TYPES_STR =
+ "Valid precondition types are \"and\", \"andNot\"";
+
+ /**
+ * Validate the parameters that the user has passed.
+ * @throws MojoExecutionException
+ */
+ private void validateParameters() throws MojoExecutionException {
+ if (!(expectedResult.equals("success") ||
+ expectedResult.equals("failure") ||
+ expectedResult.equals("any"))) {
+ throw new MojoExecutionException("expectedResult must be either " +
+ "success, failure, or any");
+ }
+ }
+
+ private boolean shouldRunTest() throws MojoExecutionException {
+ // Were we told to skip all tests?
+ String skipTests = session.
+ getExecutionProperties().getProperty("skipTests");
+ if (isTruthy(skipTests)) {
+ getLog().info("skipTests is in effect for test " + testName);
+ return false;
+ }
+ // Does the binary exist? If not, we shouldn't try to run it.
+ if (!binary.exists()) {
+ if (skipIfMissing) {
+ getLog().info("Skipping missing test " + testName);
+ return false;
+ } else {
+ throw new MojoExecutionException("Test " + binary +
+ " was not built! (File does not exist.)");
+ }
+ }
+ // If there is an explicit list of tests to run, it should include this
+ // test.
+ String testProp = session.
+ getExecutionProperties().getProperty("test");
+ if (testProp != null) {
+ String testPropArr[] = testProp.split(",");
+ boolean found = false;
+ for (String test : testPropArr) {
+ if (test.equals(ALL_NATIVE)) {
+ found = true;
+ break;
+ }
+ if (test.equals(testName)) {
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ getLog().debug("did not find test '" + testName + "' in "
+ + "list " + testProp);
+ return false;
+ }
+ }
+ // Are all the preconditions satisfied?
+ if (preconditions != null) {
+ int idx = 1;
+ for (Map.Entry<String, String> entry : preconditions.entrySet()) {
+ String key = entry.getKey();
+ String val = entry.getValue();
+ if (key == null) {
+ throw new MojoExecutionException("NULL is not a valid " +
+ "precondition type. " + VALID_PRECONDITION_TYPES_STR);
+ } else if (key.equals("and")) {
+ if (!isTruthy(val)) {
+ getLog().info("Skipping test " + testName +
+ " because precondition number " + idx + " was not met.");
+ return false;
+ }
+ } else if (key.equals("andNot")) {
+ if (isTruthy(val)) {
+ getLog().info("Skipping test " + testName +
+ " because negative precondition number " + idx +
+ " was met.");
+ return false;
+ }
+ } else {
+ throw new MojoExecutionException(key + " is not a valid " +
+ "precondition type. " + VALID_PRECONDITION_TYPES_STR);
+ }
+ idx++;
+ }
+ }
+ // OK, we should run this.
+ return true;
+ }
+
+ public void execute() throws MojoExecutionException {
+ if (testName == null) {
+ testName = binary.getName();
+ }
+ validatePlatform();
+ validateParameters();
+ if (!shouldRunTest()) {
+ return;
+ }
+ if (!results.isDirectory()) {
+ if (!results.mkdirs()) {
+ throw new MojoExecutionException("Failed to create " +
+ "output directory '" + results + "'!");
+ }
+ }
+ List<String> cmd = new LinkedList<String>();
+ cmd.add(binary.getAbsolutePath());
+
+ getLog().info("-------------------------------------------------------");
+ getLog().info(" C M A K E B U I L D E R T E S T");
+ getLog().info("-------------------------------------------------------");
+ StringBuilder bld = new StringBuilder();
+ bld.append(testName).append(": running ");
+ bld.append(binary.getAbsolutePath());
+ for (String entry : args) {
+ cmd.add(entry);
+ bld.append(" ").append(entry);
+ }
+ getLog().info(bld.toString());
+ ProcessBuilder pb = new ProcessBuilder(cmd);
+ Exec.addEnvironment(pb, env);
+ pb.redirectError(new File(results, testName + ".stderr"));
+ pb.redirectOutput(new File(results, testName + ".stdout"));
+ getLog().info("with extra environment variables " + Exec.envToString(env));
+ Process proc = null;
+ TestThread testThread = null;
+ int retCode = -1;
+ String status = "IN_PROGRESS";
+ try {
+ writeStatusFile(status);
+ } catch (IOException e) {
+ throw new MojoExecutionException("Error writing the status file", e);
+ }
+ long start = System.nanoTime();
+ try {
+ proc = pb.start();
+ testThread = new TestThread(proc);
+ testThread.start();
+ testThread.join(timeout * 1000);
+ if (!testThread.isAlive()) {
+ retCode = testThread.retCode();
+ testThread = null;
+ proc = null;
+ }
+ } catch (IOException e) {
+ throw new MojoExecutionException("IOException while executing the test " +
+ testName, e);
+ } catch (InterruptedException e) {
+ throw new MojoExecutionException("Interrupted while executing " +
+ "the test " + testName, e);
+ } finally {
+ if (testThread != null) {
+ // If the test thread didn't exit yet, that means the timeout expired.
+ testThread.interrupt();
+ try {
+ testThread.join();
+ } catch (InterruptedException e) {
+ getLog().error("Interrupted while waiting for testThread", e);
+ }
+ status = "TIMED_OUT";
+ } else if (retCode == 0) {
+ status = "SUCCESS";
+ } else {
+ status = "ERROR CODE " + String.valueOf(retCode);
+ }
+ try {
+ writeStatusFile(status);
+ } catch (Exception e) {
+ getLog().error("failed to write status file!", e);
+ }
+ if (proc != null) {
+ proc.destroy();
+ }
+ }
+ long end = System.nanoTime();
+ getLog().info("STATUS: " + status + " after " +
+ TimeUnit.MILLISECONDS.convert(end - start, TimeUnit.NANOSECONDS) +
+ " millisecond(s).");
+ getLog().info("-------------------------------------------------------");
+ if (status.equals("TIMED_OUT")) {
+ if (expectedResult.equals("success")) {
+ throw new MojoExecutionException("Test " + binary +
+ " timed out after " + timeout + " seconds!");
+ }
+ } else if (!status.equals("SUCCESS")) {
+ if (expectedResult.equals("success")) {
+ throw new MojoExecutionException("Test " + binary +
+ " returned " + status);
+ }
+ } else if (expectedResult.equals("failure")) {
+ throw new MojoExecutionException("Test " + binary +
+ " succeeded, but we expected failure!");
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
----------------------------------------------------------------------
diff --git a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
index ce3543c..b0fa3ab 100644
--- a/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
+++ b/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
@@ -22,6 +22,7 @@ import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.List;
+import java.util.Map;
/**
* Exec is a helper class for executing an external process from a mojo.
@@ -93,7 +94,7 @@ public class Exec {
* OutputBufferThread is a background thread for consuming and storing output
* of the external process.
*/
- private static class OutputBufferThread extends Thread {
+ public static class OutputBufferThread extends Thread {
private List<String> output;
private BufferedReader reader;
@@ -134,4 +135,42 @@ public class Exec {
return output;
}
}
+
+ /**
+ * Add environment variables to a ProcessBuilder.
+ */
+ public static void addEnvironment(ProcessBuilder pb,
+ Map<String, String> env) {
+ if (env == null) {
+ return;
+ }
+ Map<String, String> processEnv = pb.environment();
+ for (Map.Entry<String, String> entry : env.entrySet()) {
+ String val = entry.getValue();
+ if (val == null) {
+ val = "";
+ }
+ processEnv.put(entry.getKey(), val);
+ }
+ }
+
+ /**
+ * Pretty-print the environment to a StringBuilder.
+ */
+ public static String envToString(Map<String, String> env) {
+ StringBuilder bld = new StringBuilder();
+ bld.append("{");
+ if (env != null) {
+ for (Map.Entry<String, String> entry : env.entrySet()) {
+ String val = entry.getValue();
+ if (val == null) {
+ val = "";
+ }
+ bld.append("\n ").append(entry.getKey()).
+ append(" = '").append(val).append("'\n");
+ }
+ }
+ bld.append("}");
+ return bld.toString();
+ }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-tools/hadoop-pipes/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-pipes/pom.xml b/hadoop-tools/hadoop-pipes/pom.xml
index e463cb5..c229187 100644
--- a/hadoop-tools/hadoop-pipes/pom.xml
+++ b/hadoop-tools/hadoop-pipes/pom.xml
@@ -43,72 +43,24 @@
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
- <id>make</id>
+ <id>cmake-compile</id>
<phase>compile</phase>
- <goals><goal>run</goal></goals>
+ <goals><goal>cmake-compile</goal></goals>
<configuration>
- <target>
- <mkdir dir="${project.build.directory}/native"/>
- <exec executable="cmake" dir="${project.build.directory}/native"
- failonerror="true">
- <arg line="${basedir}/src/ -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
- </exec>
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
- <arg line="VERBOSE=1"/>
- </exec>
- <!-- The second make is a workaround for HADOOP-9215. It can
- be removed when version 2.6 of cmake is no longer supported . -->
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
- </target>
+ <source>${basedir}/src</source>
+ <vars>
+ <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+ </vars>
</configuration>
</execution>
- <!-- TODO wire here native testcases
- <execution>
- <id>test</id>
- <phase>test</phase>
- <goals>
- <goal>test</goal>
- </goals>
- <configuration>
- <destDir>${project.build.directory}/native/target</destDir>
- </configuration>
- </execution>
- -->
</executions>
</plugin>
</plugins>
</build>
</profile>
</profiles>
-
-<!--
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-antrun-plugin</artifactId>
- <executions>
- <execution>
- <id>compile</id>
- <phase>generate-sources</phase>
- <goals>
- <goal>run</goal>
- </goals>
- <configuration>
- <target>
- <mkdir dir="${basedir}/../target/native"/>
- <copy toDir="${basedir}/../target/native">
- <fileset dir="${basedir}/src/main/native"/>
- </copy>
- </target>
- </configuration>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
--->
</project>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
index babed8e..051bb4e 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/pom.xml
@@ -172,43 +172,32 @@
<build>
<plugins>
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-antrun-plugin</artifactId>
- <configuration>
- <skipTests>false</skipTests>
- </configuration>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-maven-plugins</artifactId>
<executions>
<execution>
- <id>make</id>
+ <id>cmake-compile</id>
<phase>compile</phase>
- <goals><goal>run</goal></goals>
+ <goals><goal>cmake-compile</goal></goals>
<configuration>
- <target>
- <mkdir dir="${project.build.directory}/native/target"/>
- <exec executable="cmake" dir="${project.build.directory}/native" failonerror="true">
- <arg line="${basedir}/src/ -DHADOOP_CONF_DIR=${container-executor.conf.dir} -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
- <env key="CFLAGS" value="${container-executor.additional_cflags}"/>
- </exec>
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
- <arg line="VERBOSE=1"/>
- </exec>
- <!-- The second make is a workaround for HADOOP-9215. It can
- be removed when version 2.6 of cmake is no longer supported . -->
- <exec executable="make" dir="${project.build.directory}/native" failonerror="true"></exec>
- </target>
+ <source>${basedir}/src</source>
+ <vars>
+ <HADOOP_CONF_DIR>${container-executor.conf.dir}</HADOOP_CONF_DIR>
+ <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+ </vars>
+ <env>
+ <CFLAGS>${container-executor.additional_cflags}</CFLAGS>
+ </env>
</configuration>
</execution>
<execution>
- <id>native_tests</id>
+ <id>test-container-executor</id>
+ <goals><goal>cmake-test</goal></goals>
<phase>test</phase>
<configuration>
- <target>
- <exec executable="${shell-executable}" failonerror="true" dir="${project.build.directory}/native">
- <arg value="-c"/>
- <arg value="[ x$SKIPTESTS = xtrue ] || test-container-executor"/>
- <env key="SKIPTESTS" value="${skipTests}"/>
- </exec>
- </target>
+ <binary>${project.build.directory}/native/target/usr/local/bin/test-container-executor</binary>
+ <timeout>300</timeout>
+ <results>${project.build.directory}/native-results</results>
</configuration>
</execution>
</executions>
http://git-wip-us.apache.org/repos/asf/hadoop/blob/b1ed28fa/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
index 3db75ea..6d10509 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/native/container-executor/test/test-container-executor.c
@@ -470,6 +470,13 @@ void run_test_in_child(const char* test_name, void (*func)()) {
}
void test_signal_container() {
+ sigset_t set;
+
+ // unblock SIGQUIT
+ sigemptyset(&set);
+ sigaddset(&set, SIGQUIT);
+ sigprocmask(SIG_UNBLOCK, &set, NULL);
+
printf("\nTesting signal_container\n");
fflush(stdout);
fflush(stderr);