You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by nd...@apache.org on 2015/06/15 21:26:01 UTC

[2/2] hbase git commit: Revert "HBASE-13833 LoadIncrementalHFile.doBulkLoad(Path, HTable) doesn't handle unmanaged connections when using SecureBulkLoad (Nick Dimiduk)"

Revert "HBASE-13833 LoadIncrementalHFile.doBulkLoad(Path, HTable) doesn't handle unmanaged connections when using SecureBulkLoad (Nick Dimiduk)"

This reverts commit 5e403cb3d92133b15faf955d16c6dbafed960c6f.


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/521f6a97
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/521f6a97
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/521f6a97

Branch: refs/heads/branch-1.1
Commit: 521f6a9789e30e472f96810541caffeb5de9a06f
Parents: c27bcd2
Author: Nick Dimiduk <nd...@apache.org>
Authored: Sun Jun 14 15:08:57 2015 -0700
Committer: Nick Dimiduk <nd...@apache.org>
Committed: Mon Jun 15 12:21:09 2015 -0700

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java  | 28 +++++---------------
 1 file changed, 6 insertions(+), 22 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/521f6a97/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index a8d13b2..827699b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -45,7 +45,6 @@ import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.ClusterConnection;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
@@ -292,30 +291,15 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
     throws TableNotFoundException, IOException
   {
     Admin admin = null;
-    Table t = table;
-    Connection conn = table.getConnection();
-    boolean closeConnWhenFinished = false;
     try {
-      if (conn instanceof ClusterConnection && ((ClusterConnection) conn).isManaged()) {
-        LOG.warn("managed connection cannot be used for bulkload. Creating unmanaged connection.");
-        // can only use unmanaged connections from here on out.
-        conn = ConnectionFactory.createConnection(table.getConfiguration());
-        t = conn.getTable(table.getName());
-        closeConnWhenFinished = true;
-        if (conn instanceof ClusterConnection && ((ClusterConnection) conn).isManaged()) {
-          throw new RuntimeException("Failed to create unmanaged connection.");
-        }
-        admin = conn.getAdmin();
-      } else {
-        admin = conn.getAdmin();
+      try {
+        admin = table.getConnection().getAdmin();
+      } catch (NeedUnmanagedConnectionException ex) {
+        admin = new HBaseAdmin(table.getConfiguration());
       }
-      doBulkLoad(hfofDir, admin, t, conn.getRegionLocator(t.getName()));
+      doBulkLoad(hfofDir, admin, table, table.getRegionLocator());
     } finally {
-      if (admin != null) admin.close();
-      if (closeConnWhenFinished) {
-        t.close();
-        conn.close();
-      }
+      admin.close();
     }
   }