Posted to common-commits@hadoop.apache.org by wa...@apache.org on 2013/08/15 02:15:13 UTC
svn commit: r1514105 - in /hadoop/common/branches/HDFS-4949: ./ dev-support/ hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/ hadoop-project-dist/ hadoop-p...
Author: wang
Date: Thu Aug 15 00:15:11 2013
New Revision: 1514105
URL: http://svn.apache.org/r1514105
Log:
merge trunk into HDFS-4949 branch
Modified:
hadoop/common/branches/HDFS-4949/ (props changed)
hadoop/common/branches/HDFS-4949/BUILDING.txt
hadoop/common/branches/HDFS-4949/dev-support/test-patch.sh
hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
hadoop/common/branches/HDFS-4949/hadoop-project-dist/pom.xml
hadoop/common/branches/HDFS-4949/hadoop-project/pom.xml
Propchange: hadoop/common/branches/HDFS-4949/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk:r1512448-1514104
Modified: hadoop/common/branches/HDFS-4949/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/BUILDING.txt?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-4949/BUILDING.txt Thu Aug 15 00:15:11 2013
@@ -7,7 +7,7 @@ Requirements:
* JDK 1.6
* Maven 3.0
* Findbugs 1.3.9 (if running findbugs)
-* ProtocolBuffer 2.4.1+ (for MapReduce and HDFS)
+* ProtocolBuffer 2.5.0
* CMake 2.6 or newer (if compiling native code)
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)
@@ -100,6 +100,16 @@ time out after a while, using the Maven
to update SNAPSHOTs from external repos.
----------------------------------------------------------------------------------
+Protocol Buffer compiler
+
+The version of the Protocol Buffer compiler, protoc, must match the version of
+the protobuf JAR.
+
+If you have multiple versions of protoc on your system, set the
+HADOOP_PROTOC_PATH environment variable in your build shell to point to the one
+you want to use for the Hadoop build. If this environment variable is not
+defined, protoc is looked up in the PATH.
+----------------------------------------------------------------------------------
Importing projects to eclipse
When you import the project to eclipse, install hadoop-maven-plugins first.
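For example, with several protoc versions installed, a build shell might select
the 2.5.0 binary like this (the install path is illustrative, not part of this
commit):

    # Point the Hadoop build at a specific protoc binary (example path).
    export HADOOP_PROTOC_PATH=/usr/local/protobuf-2.5.0/bin/protoc
    # Sanity-check that it reports the version the build now requires.
    "$HADOOP_PROTOC_PATH" --version    # expected output: libprotoc 2.5.0
    mvn clean install -DskipTests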
Modified: hadoop/common/branches/HDFS-4949/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/dev-support/test-patch.sh?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-4949/dev-support/test-patch.sh Thu Aug 15 00:15:11 2013
@@ -426,7 +426,8 @@ checkJavadocWarnings () {
echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
#There are 11 warnings that are caused by using sun internal APIs.
- OK_JAVADOC_WARNINGS=11;
+ #There are 2 warnings that are caused by the Apache DS Dn class used in MiniKdc.
+ OK_JAVADOC_WARNINGS=13;
### if current warnings greater than OK_JAVADOC_WARNINGS
if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
JIRA_COMMENT="$JIRA_COMMENT
@@ -731,32 +732,62 @@ of hadoop-common prior to running the un
fi
fi
fi
+ failed_test_builds=""
+ test_timeouts=""
for module in $ordered_modules; do
cd $module
+ module_suffix=`basename ${module}`
+ test_logfile=$PATCH_DIR/testrun_${module_suffix}.txt
echo " Running tests in $module"
echo " $MVN clean install -fn $NATIVE_PROFILE $REQUIRE_TEST_LIB_HADOOP -D${PROJECT_NAME}PatchProcess"
- $MVN clean install -fn $NATIVE_PROFILE $REQUIRE_TEST_LIB_HADOOP -D${PROJECT_NAME}PatchProcess
+ $MVN clean install -fae $NATIVE_PROFILE $REQUIRE_TEST_LIB_HADOOP -D${PROJECT_NAME}PatchProcess > $test_logfile 2>&1
+ test_build_result=$?
+ cat $test_logfile
+ module_test_timeouts=`$AWK '/^Running / { if (last) { print last } last=$2 } /^Tests run: / { last="" }' $test_logfile`
+ if [[ -n "$module_test_timeouts" ]] ; then
+ test_timeouts="$test_timeouts
+$module_test_timeouts"
+ fi
module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-| |g" | sed -e "s|\.xml||g"`
- # With -fn mvn always exits with a 0 exit code. Because of this we need to
- # find the errors instead of using the exit code. We assume that if the build
- # failed a -1 is already given for that case
if [[ -n "$module_failed_tests" ]] ; then
failed_tests="${failed_tests}
${module_failed_tests}"
fi
+ if [[ $test_build_result != 0 && -z "$module_failed_tests" && -z "$module_test_timeouts" ]] ; then
+ failed_test_builds="$module $failed_test_builds"
+ fi
cd -
done
+ result=0
+ comment_prefix=" {color:red}-1 core tests{color}."
if [[ -n "$failed_tests" ]] ; then
JIRA_COMMENT="$JIRA_COMMENT
- {color:red}-1 core tests{color}. The patch failed these unit tests in $modules:
+$comment_prefix The patch failed these unit tests in $modules:
$failed_tests"
- return 1
+ comment_prefix=" "
+ result=1
fi
- JIRA_COMMENT="$JIRA_COMMENT
+ if [[ -n "$test_timeouts" ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
+
+$comment_prefix The following test timeouts occurred in $modules:
+$test_timeouts"
+ comment_prefix=" "
+ result=1
+ fi
+ if [[ -n "$failed_test_builds" ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
+
+$comment_prefix The test build failed in $failed_test_builds"
+ result=1
+ fi
+ if [[ $result == 0 ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
{color:green}+1 core tests{color}. The patch passed unit tests in $modules."
- return 0
+ fi
+ return $result
}
###############################################################################
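The timeout detection added above keys off the surefire log format: a
"Running <class>" line that is never followed by a "Tests run: ..." summary
means the class hung or crashed. A standalone sketch of the same awk filter
over fabricated log lines (the class names are made up):

    # TestB never prints a summary line, so it is flagged.
    printf 'Running org.example.TestA\nTests run: 3\nRunning org.example.TestB\nRunning org.example.TestC\nTests run: 1\n' |
      awk '/^Running / { if (last) { print last } last=$2 } /^Tests run: / { last="" }'
    # prints: org.example.TestB

One caveat, as written: because the filter has no END clause, a hang in the
last test class of a module's log is not reported.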
Modified: hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/protoc/ProtocMojo.java Thu Aug 15 00:15:11 2013
@@ -45,19 +45,45 @@ public class ProtocMojo extends Abstract
@Parameter(required=true)
private FileSet source;
- @Parameter(defaultValue="protoc")
+ @Parameter
private String protocCommand;
+ @Parameter(required=true)
+ private String protocVersion;
public void execute() throws MojoExecutionException {
try {
+ if (protocCommand == null || protocCommand.trim().isEmpty()) {
+ protocCommand = "protoc";
+ }
+ List<String> command = new ArrayList<String>();
+ command.add(protocCommand);
+ command.add("--version");
+ Exec exec = new Exec(this);
+ List<String> out = new ArrayList<String>();
+ if (exec.run(command, out) == 127) {
+ getLog().error("protoc not found at: " + protocCommand);
+ throw new MojoExecutionException("protoc failure");
+ } else {
+ if (out.isEmpty()) {
+ getLog().error("stdout: " + out);
+ throw new MojoExecutionException(
+ "'protoc --version' did not return a version");
+ } else {
+ if (!out.get(0).endsWith(protocVersion)) {
+ throw new MojoExecutionException(
+ "protoc version is '" + out.get(0) + "', expected version is '"
+ + protocVersion + "'");
+ }
+ }
+ }
if (!output.mkdirs()) {
if (!output.exists()) {
throw new MojoExecutionException("Could not create directory: " +
output);
}
}
- List<String> command = new ArrayList<String>();
+ command = new ArrayList<String>();
command.add(protocCommand);
command.add("--java_out=" + output.getCanonicalPath());
if (imports != null) {
@@ -68,8 +94,8 @@ public class ProtocMojo extends Abstract
for (File f : FileSetUtils.convertFileSetToFiles(source)) {
command.add(f.getCanonicalPath());
}
- Exec exec = new Exec(this);
- List<String> out = new ArrayList<String>();
+ exec = new Exec(this);
+ out = new ArrayList<String>();
if (exec.run(command, out) != 0) {
getLog().error("protoc compiler error");
for (String s : out) {
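The new preflight matches on the tail of the "protoc --version" output, so the
stock output format satisfies a protocVersion of 2.5.0:

    $ protoc --version
    libprotoc 2.5.0

Note this is an endsWith comparison, not an equality check, so any first line
ending in the configured version string is accepted; that is a reading of the
code above, not documented protoc behavior.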
Modified: hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-maven-plugins/src/main/java/org/apache/hadoop/maven/plugin/util/Exec.java Thu Aug 15 00:15:11 2013
@@ -63,11 +63,10 @@ public class Exec {
for (String s : stdErr.getOutput()) {
mojo.getLog().debug(s);
}
- } else {
- stdOut.join();
- stdErr.join();
- output.addAll(stdOut.getOutput());
}
+ stdOut.join();
+ stdErr.join();
+ output.addAll(stdOut.getOutput());
} catch (Exception ex) {
mojo.getLog().warn(command + " failed: " + ex.toString());
}
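With this reordering, Exec returns the captured stdout even when the command
exits nonzero, which the version preflight in ProtocMojo depends on. The 127
that preflight tests for is the shell convention for "command not found", e.g.:

    # 127 is the conventional "command not found" exit status.
    /no/such/protoc --version; echo "exit=$?"    # prints: exit=127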
Modified: hadoop/common/branches/HDFS-4949/hadoop-project-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-project-dist/pom.xml?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-project-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-project-dist/pom.xml Thu Aug 15 00:15:11 2013
@@ -40,6 +40,7 @@
<hadoop.component>UNDEF</hadoop.component>
<bundle.snappy>false</bundle.snappy>
+ <bundle.snappy.in.bin>false</bundle.snappy.in.bin>
</properties>
<dependencies>
@@ -355,6 +356,12 @@
mkdir -p $${TARGET_BIN_DIR}
cd $${BIN_DIR}
$$TAR * | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
+ if [ "${bundle.snappy.in.bin}" = "true" ] ; then
+ if [ "${bundle.snappy}" = "true" ] ; then
+ cd ${snappy.lib}
+ $$TAR *snappy* | (cd $${TARGET_BIN_DIR}/; $$UNTAR)
+ fi
+ fi
fi
</echo>
<exec executable="sh" dir="${project.build.directory}" failonerror="true">
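The added step copies the snappy libraries next to the Hadoop binaries using
the same tar-pipe idiom as the surrounding script; a standalone sketch of that
idiom (the directory variables are illustrative):

    # Copy every file matching *snappy* into the target bin dir via a tar pipe.
    cd "$SNAPPY_LIB_DIR" && tar cf - *snappy* | (cd "$TARGET_BIN_DIR" && tar xf -)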
Modified: hadoop/common/branches/HDFS-4949/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-4949/hadoop-project/pom.xml?rev=1514105&r1=1514104&r2=1514105&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-4949/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-4949/hadoop-project/pom.xml Thu Aug 15 00:15:11 2013
@@ -57,6 +57,12 @@
<!-- Used for building path to native library loaded by tests. Projects -->
<!-- at different nesting levels in the source tree may need to override. -->
<hadoop.common.build.dir>${basedir}/../../hadoop-common-project/hadoop-common/target</hadoop.common.build.dir>
+ <java.security.egd>file:///dev/urandom</java.security.egd>
+
+ <!-- ProtocolBuffer version, used to verify the protoc version and -->
+ <!-- define the protobuf JAR version -->
+ <protobuf.version>2.5.0</protobuf.version>
+ <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
</properties>
<dependencyManagement>
@@ -289,6 +295,12 @@
</dependency>
<dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-minikdc</artifactId>
+ <version>${project.version}</version>
+ </dependency>
+
+ <dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
<version>11.0.2</version>
@@ -608,7 +620,7 @@
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
- <version>2.4.0a</version>
+ <version>${protobuf.version}</version>
</dependency>
<dependency>
<groupId>commons-daemon</groupId>
@@ -831,7 +843,7 @@
<java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
<java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
- <java.security.egd>file:///dev/urandom</java.security.egd>
+ <java.security.egd>${java.security.egd}</java.security.egd>
<require.test.libhadoop>${require.test.libhadoop}</require.test.libhadoop>
</systemPropertyVariables>
<includes>
@@ -885,6 +897,14 @@
<family>Windows</family>
</os>
</activation>
+ <properties>
+ <!-- We must use this exact string for egd on Windows, because the -->
+ <!-- JVM will check for an exact string match on this. If found, it -->
+ <!-- will use a native entropy provider. This will not really -->
+ <!-- attempt to open a file at this path. -->
+ <java.security.egd>file:/dev/urandom</java.security.egd>
+ <bundle.snappy.in.bin>true</bundle.snappy.in.bin>
+ </properties>
<build>
<plugins>
<plugin>
@@ -893,7 +913,7 @@
<configuration>
<environmentVariables>
<!-- Specify where to look for the native DLL on Windows -->
- <PATH>${env.PATH};${hadoop.common.build.dir}/bin</PATH>
+ <PATH>${env.PATH};${hadoop.common.build.dir}/bin;${snappy.lib}</PATH>
</environmentVariables>
</configuration>
</plugin>
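Since java.security.egd is now an overridable POM property (with the Windows
profile supplying its own exact-match value), a non-default entropy source can
also be passed on the command line; an illustrative run:

    # Override the test JVM's entropy source for a single build.
    mvn test -Djava.security.egd=file:///dev/urandom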