Posted to commits@hbase.apache.org by zg...@apache.org on 2019/05/31 13:44:17 UTC

[hbase] branch branch-2 updated (6641f11 -> 7172128)

This is an automated email from the ASF dual-hosted git repository.

zghao pushed a change to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git.


    from 6641f11  HBASE-22502 Purge the logs when we reach the EOF for the last wal file when replication
     new 1ce4fcc  HBASE-22490 Nightly client integration test fails with hadoop-3.0.3
     new 7172128  HBASE-22490 (addendum) Nightly client integration test fails with hadoop-3

The 2 revisions listed above as "new" are entirely new to this
repository; each is described below in its own commit message.
Revisions listed as "add" (none in this update) were already present
in the repository and have only been added to this reference.


Summary of changes:
 dev-support/Jenkinsfile                              |  3 +++
 dev-support/hbase_nightly_pseudo-distributed-test.sh | 19 +++++++++++++++----
 2 files changed, 18 insertions(+), 4 deletions(-)


[hbase] 01/02: HBASE-22490 Nightly client integration test fails with hadoop-3.0.3

Posted by zg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

zghao pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit 1ce4fcc6eeb6c0e8c2a06aa2b5c6815f32d43ca1
Author: Guanghao <zg...@apache.org>
AuthorDate: Thu May 30 10:29:40 2019 +0800

    HBASE-22490 Nightly client integration test fails with hadoop-3.0.3
---
 dev-support/hbase_nightly_pseudo-distributed-test.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index cc2dd5e..e98c8d9 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -276,7 +276,7 @@ trap cleanup EXIT SIGQUIT
 
 echo "Starting up Hadoop"
 
-HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
 echo "$!" > "${working_dir}/hadoop.pid"
 
 sleep_time=2
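
The fix itself is a single flag: the minicluster launch gains "-format",
which, as far as the change suggests, has the embedded HDFS formatted
before the cluster starts instead of assuming an already-formatted
namenode. For readability only, here is the same "+" line re-wrapped
across continuations; the variables are the ones the script resolves
earlier at runtime:

    # identical to the changed line above, just wrapped for reading
    HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" \
      "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format \
        -writeConfig "${working_dir}/hbase-conf/core-site.xml" \
        -writeDetails "${working_dir}/hadoop_cluster_info.json" \
        >"${working_dir}/hadoop_cluster_command.out" \
        2>"${working_dir}/hadoop_cluster_command.err" &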


[hbase] 02/02: HBASE-22490 (addendum) Nightly client integration test fails with hadoop-3

Posted by zg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

zghao pushed a commit to branch branch-2
in repository https://gitbox.apache.org/repos/asf/hbase.git

commit 7172128a13ea5713ddb27bf1c700c612ffaff1b9
Author: Guanghao Zhang <zg...@apache.org>
AuthorDate: Fri May 31 09:39:04 2019 +0800

    HBASE-22490 (addendum) Nightly client integration test fails with hadoop-3
---
 dev-support/Jenkinsfile                              |  3 +++
 dev-support/hbase_nightly_pseudo-distributed-test.sh | 19 +++++++++++++++----
 2 files changed, 18 insertions(+), 4 deletions(-)
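
The addendum threads the path to the mapred executable through as a new
fifth positional argument: the Jenkinsfile passes hadoop-N/bin/mapred,
and the test script validates it the same way it already validates the
hadoop executable. Assuming the layout the Jenkinsfile uses (the HBase
install path below is an illustrative placeholder, not taken from this
change), an invocation now looks roughly like:

    ./dev-support/hbase_nightly_pseudo-distributed-test.sh \
        /path/to/hbase-bin-install \
        hadoop-3/bin/hadoop \
        hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
        hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
        hadoop-3/bin/mapred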

diff --git a/dev-support/Jenkinsfile b/dev-support/Jenkinsfile
index 9540a0d..ad79a74 100644
--- a/dev-support/Jenkinsfile
+++ b/dev-support/Jenkinsfile
@@ -572,6 +572,7 @@ pipeline {
                   "hadoop-2/bin/hadoop" \
                   hadoop-2/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                   hadoop-2/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-2/bin/mapred \
                   >output-integration/hadoop-2.log 2>&1 ; then
                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 2. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-2.log]. (note that this means we didn't run on Hadoop 3)" >output-integration/commentfile
                 exit 2
@@ -590,6 +591,7 @@ pipeline {
                   hadoop-3/bin/hadoop \
                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-3/bin/mapred \
                   >output-integration/hadoop-3.log 2>&1 ; then
                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3.log]. (note that this means we didn't check the Hadoop 3 shaded client)" >output-integration/commentfile
                 exit 2
@@ -604,6 +606,7 @@ pipeline {
                   hadoop-3/bin/hadoop \
                   hadoop-3/share/hadoop/yarn/test/hadoop-yarn-server-tests-*-tests.jar \
                   hadoop-3/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-*-tests.jar \
+                  hadoop-3/bin/mapred \
                   >output-integration/hadoop-3-shaded.log 2>&1 ; then
                 echo "(x) {color:red}-1 client integration test{color}\n--Failed when running client tests on top of Hadoop 3 using Hadoop's shaded client. [see log for details|${BUILD_URL}/artifact/output-integration/hadoop-3-shaded.log]." >output-integration/commentfile
                 exit 2
diff --git a/dev-support/hbase_nightly_pseudo-distributed-test.sh b/dev-support/hbase_nightly_pseudo-distributed-test.sh
index e98c8d9..11ad609 100755
--- a/dev-support/hbase_nightly_pseudo-distributed-test.sh
+++ b/dev-support/hbase_nightly_pseudo-distributed-test.sh
@@ -18,7 +18,7 @@
 
 set -e
 function usage {
-  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar"
+  echo "Usage: ${0} [options] /path/to/component/bin-install /path/to/hadoop/executable /path/to/hadoop/hadoop-yarn-server-tests-tests.jar /path/to/hadoop/hadoop-mapreduce-client-jobclient-tests.jar /path/to/mapred/executable"
   echo ""
   echo "    --zookeeper-data /path/to/use                                     Where the embedded zookeeper instance should write its data."
   echo "                                                                      defaults to 'zk-data' in the working-dir."
@@ -33,7 +33,7 @@ function usage {
   exit 1
 }
 # if no args specified, show usage
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
   usage
 fi
 
@@ -62,19 +62,25 @@ do
 done
 
 # should still have where component checkout is.
-if [ $# -lt 4 ]; then
+if [ $# -lt 5 ]; then
   usage
 fi
 component_install="$(cd "$(dirname "$1")"; pwd)/$(basename "$1")"
 hadoop_exec="$(cd "$(dirname "$2")"; pwd)/$(basename "$2")"
 yarn_server_tests_test_jar="$(cd "$(dirname "$3")"; pwd)/$(basename "$3")"
 mapred_jobclient_test_jar="$(cd "$(dirname "$4")"; pwd)/$(basename "$4")"
+mapred_exec="$(cd "$(dirname "$5")"; pwd)/$(basename "$5")"
 
 if [ ! -x "${hadoop_exec}" ]; then
   echo "hadoop cli does not appear to be executable." >&2
   exit 1
 fi
 
+if [ ! -x "${mapred_exec}" ]; then
+  echo "mapred cli does not appear to be executable." >&2
+  exit 1
+fi
+
 if [ ! -d "${component_install}" ]; then
   echo "Path to HBase binary install should be a directory." >&2
   exit 1
@@ -276,7 +282,12 @@ trap cleanup EXIT SIGQUIT
 
 echo "Starting up Hadoop"
 
-HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+if [ "${hadoop_version%.*.*}" -gt 2 ]; then
+  "${mapred_exec}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+else
+  HADOOP_CLASSPATH="${yarn_server_tests_test_jar}" "${hadoop_exec}" jar "${mapred_jobclient_test_jar}" minicluster -format -writeConfig "${working_dir}/hbase-conf/core-site.xml" -writeDetails "${working_dir}/hadoop_cluster_info.json" >"${working_dir}/hadoop_cluster_command.out" 2>"${working_dir}/hadoop_cluster_command.err" &
+fi
+
 echo "$!" > "${working_dir}/hadoop.pid"
 
 sleep_time=2
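
The new branch keys off the Hadoop major version using bash suffix
stripping: "${hadoop_version%.*.*}" removes the trailing ".minor.patch"
components. A minimal sketch of that logic, assuming hadoop_version is
populated earlier in the script (not shown in this hunk) and using an
illustrative value:

    # illustrative only: hadoop_version really comes from the script, not a literal
    hadoop_version="3.1.2"
    echo "${hadoop_version%.*.*}"   # prints "3": the shortest suffix matching ".*.*" is stripped
    if [ "${hadoop_version%.*.*}" -gt 2 ]; then
      # Hadoop 3+: drive the minicluster directly through bin/mapred
      echo "using: mapred minicluster -format ..."
    else
      # Hadoop 2: keep the original 'hadoop jar <jobclient tests jar> minicluster' path
      echo "using: hadoop jar ... minicluster -format ..."
    fi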