You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by bu...@apache.org on 2017/04/28 16:29:07 UTC
[2/4] hbase git commit: HBASE-17920 TestFSHDFSUtils always fails
against hadoop 3.0.0-alpha2
HBASE-17920 TestFSHDFSUtils always fails against hadoop 3.0.0-alpha2
Signed-off-by: Sean Busbey <bu...@apache.org>
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/43f3fccb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/43f3fccb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/43f3fccb
Branch: refs/heads/master
Commit: 43f3fccb7b24d1433434d983e8e60914d8905f8d
Parents: 635c9db
Author: Jonathan M Hsieh <jm...@apache.org>
Authored: Fri Apr 14 10:49:45 2017 -0700
Committer: Sean Busbey <bu...@apache.org>
Committed: Fri Apr 28 11:25:14 2017 -0500
----------------------------------------------------------------------
.../hadoop/hbase/util/TestFSHDFSUtils.java | 27 +++++++++++++++-----
1 file changed, 20 insertions(+), 7 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/43f3fccb/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
index ea19ea7..5899971 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestFSHDFSUtils.java
@@ -100,8 +100,7 @@ public class TestFSHDFSUtils {
Mockito.verify(dfs, Mockito.times(1)).isFileClosed(FILE);
}
- @Test
- public void testIsSameHdfs() throws IOException {
+ void testIsSameHdfs(int nnport) throws IOException {
try {
Class dfsUtilClazz = Class.forName("org.apache.hadoop.hdfs.DFSUtil");
dfsUtilClazz.getMethod("getNNServiceRpcAddresses", Configuration.class);
@@ -111,7 +110,7 @@ public class TestFSHDFSUtils {
}
Configuration conf = HBaseConfiguration.create();
- Path srcPath = new Path("hdfs://localhost:8020/");
+ Path srcPath = new Path("hdfs://localhost:" + nnport + "/");
Path desPath = new Path("hdfs://127.0.0.1/");
FileSystem srcFs = srcPath.getFileSystem(conf);
FileSystem desFs = desPath.getFileSystem(conf);
@@ -122,7 +121,7 @@ public class TestFSHDFSUtils {
desFs = desPath.getFileSystem(conf);
assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
- desPath = new Path("hdfs://127.0.1.1:8020/");
+ desPath = new Path("hdfs://127.0.1.1:" + nnport + "/");
desFs = desPath.getFileSystem(conf);
assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
@@ -130,21 +129,35 @@ public class TestFSHDFSUtils {
conf.set("dfs.nameservices", "haosong-hadoop");
conf.set("dfs.ha.namenodes.haosong-hadoop", "nn1,nn2");
conf.set("dfs.client.failover.proxy.provider.haosong-hadoop",
- "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
+ "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
- conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:8020");
+ conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.0.0.1:"+ nnport);
conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.10.2.1:8000");
desPath = new Path("/");
desFs = desPath.getFileSystem(conf);
assertTrue(FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
- conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:8020");
+ conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn1", "127.10.2.1:"+nnport);
conf.set("dfs.namenode.rpc-address.haosong-hadoop.nn2", "127.0.0.1:8000");
desPath = new Path("/");
desFs = desPath.getFileSystem(conf);
assertTrue(!FSHDFSUtils.isSameHdfs(conf, srcFs, desFs));
}
+ @Test
+ public void testIsSameHdfs() throws IOException {
+ String hadoopVersion = org.apache.hadoop.util.VersionInfo.getVersion();
+ LOG.info("hadoop version is: " + hadoopVersion);
+ boolean isHadoop3 = hadoopVersion.startsWith("3.");
+ if (isHadoop3) {
+ // Hadoop 3.0.0-alpha1+ changed the default NN port to 9820. See HDFS-9427
+ testIsSameHdfs(9820);
+ } else {
+ // Pre-Hadoop-3.0.0 defaults to port 8020
+ testIsSameHdfs(8020);
+ }
+ }
+
/**
* Version of DFS that has HDFS-4525 in it.
*/