Posted to commits@hbase.apache.org by md...@apache.org on 2018/07/20 19:50:59 UTC

[07/29] hbase git commit: HBASE-20859 Backup and incremental load could fail in secure clusters.

HBASE-20859 Backup and incremental load could fail in secure clusters.

Signed-off-by: tedyu <yu...@gmail.com>
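
The root cause: HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY is built from the
"user.name" JVM system property, i.e. the local OS account. In a Kerberized
cluster the authenticated (Kerberos) user can differ from the OS user, so the
default staging path may point at a home directory the caller cannot write to.
A minimal sketch of the mismatch (user names are illustrative, not from the
patch; conf is a Hadoop Configuration in scope):

    // Before the fix: resolved from the local OS account at class-load time.
    String oldDefault = "/user/" + System.getProperty("user.name") + "/hbase-staging";
    // e.g. "/user/root/hbase-staging" when the process runs as root but
    // authenticates to HDFS with a keytab for the "hbase" principal.

    // After the fix: resolved by the FileSystem for the authenticated user.
    FileSystem fs = FileSystem.get(conf);
    String newDefault = fs.getHomeDirectory() + "/hbase-staging";
    // e.g. "hdfs://nn:8020/user/hbase/hbase-staging"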


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/6ee0fedb
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/6ee0fedb
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/6ee0fedb

Branch: refs/heads/HBASE-20749
Commit: 6ee0fedb3df2fdfbe737ed5718931cbc90bcc746
Parents: 0d33caa
Author: Wei-Chiu Chuang <we...@cloudera.com>
Authored: Fri Jul 6 20:07:37 2018 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Wed Jul 11 16:56:39 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hbase/backup/util/BackupUtils.java   |  2 +-
 .../hadoop/hbase/backup/TestBackupUtils.java    | 80 ++++++++++++++++++++
 .../org/apache/hadoop/hbase/HConstants.java     |  4 +
 .../hbase/mapreduce/HFileOutputFormat2.java     |  2 +-
 .../hbase/mapreduce/TestHFileOutputFormat2.java | 54 +++++++++++++
 5 files changed, 140 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/6ee0fedb/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
index e01849a..12f2f2f 100644
--- a/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
+++ b/hbase-backup/src/main/java/org/apache/hadoop/hbase/backup/util/BackupUtils.java
@@ -695,7 +695,7 @@ public final class BackupUtils {
       throws IOException {
     FileSystem fs = FileSystem.get(conf);
     String tmp = conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
-            HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);
+            fs.getHomeDirectory() + "/hbase-staging");
     Path path =
         new Path(tmp + Path.SEPARATOR + "bulk_output-" + tableName + "-"
             + EnvironmentEdgeManager.currentTime());
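
Note the configured value still takes precedence; only the fallback default
changes. If "hbase.fs.tmp.dir" (HConstants.TEMPORARY_FS_DIRECTORY_KEY) is set,
backups keep using it. A short sketch, with an illustrative override path:

    Configuration conf = HBaseConfiguration.create();
    // Explicit override: takes precedence over the per-user default.
    conf.set(HConstants.TEMPORARY_FS_DIRECTORY_KEY, "/tmp/hbase-staging");
    // If left unset, the fallback is now the authenticated user's home
    // directory, e.g. hdfs://nn:8020/user/alice/hbase-staging.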

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ee0fedb/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
----------------------------------------------------------------------
diff --git a/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
new file mode 100644
index 0000000..01481a0
--- /dev/null
+++ b/hbase-backup/src/test/java/org/apache/hadoop/hbase/backup/TestBackupUtils.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.backup;
+
+import java.io.IOException;
+import java.security.PrivilegedAction;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.backup.util.BackupUtils;
+import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Category(SmallTests.class)
+public class TestBackupUtils {
+  @ClassRule
+  public static final HBaseClassTestRule CLASS_RULE =
+      HBaseClassTestRule.forClass(TestBackupUtils.class);
+  private static final Logger LOG = LoggerFactory.getLogger(TestBackupUtils.class);
+
+  protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+  protected static Configuration conf = TEST_UTIL.getConfiguration();
+
+  @Test
+  public void testGetBulkOutputDir() {
+    // Create a user who is not the current user
+    String fooUserName = "foo1234";
+    String fooGroupName = "group1";
+    UserGroupInformation
+        ugi = UserGroupInformation.createUserForTesting(fooUserName, new String[]{fooGroupName});
+    // Get user's home directory
+    Path fooHomeDirectory = ugi.doAs(new PrivilegedAction<Path>() {
+      @Override public Path run() {
+        try (FileSystem fs = FileSystem.get(conf)) {
+          return fs.getHomeDirectory();
+        } catch (IOException ioe) {
+          LOG.error("Failed to get foo's home directory", ioe);
+        }
+        return null;
+      }
+    });
+
+    Path bulkOutputDir = ugi.doAs(new PrivilegedAction<Path>() {
+      @Override public Path run() {
+        try {
+          return BackupUtils.getBulkOutputDir("test", conf, false);
+        } catch (IOException ioe) {
+          LOG.error("Failed to get bulk output dir path", ioe);
+        }
+        return null;
+      }
+    });
+    // Make sure the directory is in foo1234's home directory
+    Assert.assertTrue(bulkOutputDir.toString().startsWith(fooHomeDirectory.toString()));
+  }
+}

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ee0fedb/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
----------------------------------------------------------------------
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
index fa1a772..2e23484 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/HConstants.java
@@ -1352,6 +1352,10 @@ public final class HConstants {
 
   /** Config key for hbase temporary directory in hdfs */
   public static final String TEMPORARY_FS_DIRECTORY_KEY = "hbase.fs.tmp.dir";
+
+  /** Don't use it! This'll get you the wrong path in a secure cluster.
+   * Use FileSystem.getHomeDirectory() or
+   * "/user/" + UserGroupInformation.getCurrentUser().getShortUserName()  */
   public static final String DEFAULT_TEMPORARY_HDFS_DIRECTORY = "/user/"
       + System.getProperty("user.name") + "/hbase-staging";
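
Per the new comment, callers should derive the staging directory from the
authenticated user rather than from the JVM property. Both suggested
alternatives, as a sketch (each can throw IOException; variable names are
illustrative):

    FileSystem fs = FileSystem.get(conf);
    String viaFileSystem = fs.getHomeDirectory() + "/hbase-staging";

    String viaUgi = "/user/"
        + UserGroupInformation.getCurrentUser().getShortUserName()
        + "/hbase-staging";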
 

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ee0fedb/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index a403455..0063c56 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -820,7 +820,7 @@ public class HFileOutputFormat2
     FileSystem fs = FileSystem.get(conf);
     String hbaseTmpFsDir =
         conf.get(HConstants.TEMPORARY_FS_DIRECTORY_KEY,
-          HConstants.DEFAULT_TEMPORARY_HDFS_DIRECTORY);
+            fs.getHomeDirectory() + "/hbase-staging");
     Path partitionsPath = new Path(hbaseTmpFsDir, "partitions_" + UUID.randomUUID());
     fs.makeQualified(partitionsPath);
     writePartitions(conf, partitionsPath, splitPoints, writeMultipleTables);
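
The same one-line fix applies here: the partitions file that
configurePartitioner() writes for TotalOrderPartitioner now defaults to the
authenticated user's home directory, so tasks running as that user can read
it. The test below exercises this; in sketch form (the config key is the one
the test checks):

    // After configurePartitioner(job, splitPoints, false), the partitions
    // file location is recorded in the job configuration:
    String partitionsFile = conf.get("mapreduce.totalorderpartitioner.path");
    // With the fix it starts with the submitting user's home directory.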

http://git-wip-us.apache.org/repos/asf/hbase/blob/6ee0fedb/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 09444ac..2866103 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -24,12 +24,15 @@ import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertNotSame;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
+import static org.mockito.Mockito.verify;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
+import java.security.PrivilegedAction;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
@@ -100,6 +103,9 @@ import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.mapreduce.RecordWriter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+import org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.junit.Assert;
 import org.junit.ClassRule;
 import org.junit.Ignore;
 import org.junit.Test;
@@ -1494,5 +1500,53 @@ public class TestHFileOutputFormat2  {
 
     return null;
   }
+
+  @Test
+  public void testConfigurePartitioner() throws IOException {
+    Configuration conf = util.getConfiguration();
+    // Create a user who is not the current user
+    String fooUserName = "foo1234";
+    String fooGroupName = "group1";
+    UserGroupInformation
+        ugi = UserGroupInformation.createUserForTesting(fooUserName, new String[]{fooGroupName});
+    // Get user's home directory
+    Path fooHomeDirectory = ugi.doAs(new PrivilegedAction<Path>() {
+      @Override public Path run() {
+        try (FileSystem fs = FileSystem.get(conf)) {
+          return fs.makeQualified(fs.getHomeDirectory());
+        } catch (IOException ioe) {
+          LOG.error("Failed to get foo's home directory", ioe);
+        }
+        return null;
+      }
+    });
+
+    Job job = Mockito.mock(Job.class);
+    Mockito.doReturn(conf).when(job).getConfiguration();
+    ImmutableBytesWritable writable = new ImmutableBytesWritable();
+    List<ImmutableBytesWritable> splitPoints = new LinkedList<ImmutableBytesWritable>();
+    splitPoints.add(writable);
+
+    ugi.doAs(new PrivilegedAction<Void>() {
+      @Override public Void run() {
+        try {
+          HFileOutputFormat2.configurePartitioner(job, splitPoints, false);
+        } catch (IOException ioe) {
+          LOG.error("Failed to configure partitioner", ioe);
+        }
+        return null;
+      }
+    });
+    FileSystem fs = FileSystem.get(conf);
+    // verify that the job uses TotalOrderPartitioner
+    verify(job).setPartitionerClass(TotalOrderPartitioner.class);
+    // verify that TotalOrderPartitioner.setPartitionFile() was called, via the path it stores in the conf
+    String partitionPathString = conf.get("mapreduce.totalorderpartitioner.path");
+    Assert.assertNotNull(partitionPathString);
+    // Make sure the partition file is in foo1234's home directory, and that
+    // the file exists.
+    Assert.assertTrue(partitionPathString.startsWith(fooHomeDirectory.toString()));
+    Assert.assertTrue(fs.exists(new Path(partitionPathString)));
+  }
 }