You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by te...@apache.org on 2015/06/04 20:24:46 UTC

hbase git commit: HBASE-13833 LoadIncrementalHFiles.doBulkLoad(Path, HTable) doesn't handle unmanaged connections when using SecureBulkLoad (Nick Dimiduk)

Repository: hbase
Updated Branches:
  refs/heads/branch-1.1 205ed40f1 -> 5e403cb3d


HBASE-13833 LoadIncrementalHFiles.doBulkLoad(Path, HTable) doesn't handle unmanaged connections when using SecureBulkLoad (Nick Dimiduk)


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/5e403cb3
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/5e403cb3
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/5e403cb3

Branch: refs/heads/branch-1.1
Commit: 5e403cb3d92133b15faf955d16c6dbafed960c6f
Parents: 205ed40
Author: tedyu <yu...@gmail.com>
Authored: Thu Jun 4 11:24:35 2015 -0700
Committer: tedyu <yu...@gmail.com>
Committed: Thu Jun 4 11:24:35 2015 -0700

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java  | 28 +++++++++++++++-----
 1 file changed, 22 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/5e403cb3/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 827699b..a8d13b2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -45,6 +45,7 @@ import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -291,15 +292,30 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     throws TableNotFoundException, IOException
   {
     Admin admin = null;
+    Table t = table;
+    Connection conn = table.getConnection();
+    boolean closeConnWhenFinished = false;
     try {
-      try {
-        admin = table.getConnection().getAdmin();
-      } catch (NeedUnmanagedConnectionException ex) {
-        admin = new HBaseAdmin(table.getConfiguration());
+      if (conn instanceof ClusterConnection && ((ClusterConnection) conn).isManaged()) {
+        LOG.warn("managed connection cannot be used for bulkload. Creating unmanaged connection.");
+        // can only use unmanaged connections from here on out.
+        conn = ConnectionFactory.createConnection(table.getConfiguration());
+        t = conn.getTable(table.getName());
+        closeConnWhenFinished = true;
+        if (conn instanceof ClusterConnection && ((ClusterConnection) conn).isManaged()) {
+          throw new RuntimeException("Failed to create unmanaged connection.");
+        }
+        admin = conn.getAdmin();
+      } else {
+        admin = conn.getAdmin();
       }
-      doBulkLoad(hfofDir, admin, table, table.getRegionLocator());
+      doBulkLoad(hfofDir, admin, t, conn.getRegionLocator(t.getName()));
     } finally {
-      admin.close();
+      if (admin != null) admin.close();
+      if (closeConnWhenFinished) {
+        t.close();
+        conn.close();
+      }
     }
   }