You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jy...@apache.org on 2014/01/25 02:20:36 UTC
svn commit: r1561243 - in /hbase/branches/0.94:
security/src/main/java/org/apache/hadoop/hbase/security/access/
security/src/test/java/org/apache/hadoop/hbase/mapreduce/
src/main/java/org/apache/hadoop/hbase/coprocessor/
src/main/java/org/apache/hadoop...
Author: jyates
Date: Sat Jan 25 01:20:35 2014
New Revision: 1561243
URL: http://svn.apache.org/r1561243
Log:
HBASE-10383 Secure Bulk Load for 'completebulkload' fails for version 0.94.15
Removed:
hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/HadoopSecurityEnabledUserProviderForTesting.java
Modified:
hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java
hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java
hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
Modified: hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java (original)
+++ hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java Sat Jan 25 01:20:35 2014
@@ -150,7 +150,7 @@ public class SecureBulkLoadEndpoint exte
@Override
public boolean bulkLoadHFiles(final List<Pair<byte[], String>> familyPaths,
- final Token<?> userToken, final String bulkToken) throws IOException {
+ final Token<?> userToken, final String bulkToken, boolean assignSeqNum) throws IOException {
User user = getActiveUser();
final UserGroupInformation ugi = user.getUGI();
if(userToken != null) {
@@ -167,6 +167,7 @@ public class SecureBulkLoadEndpoint exte
bypass = region.getCoprocessorHost().preBulkLoadHFile(familyPaths);
}
boolean loaded = false;
+ final IOException[] es = new IOException[1];
if (!bypass) {
loaded = ugi.doAs(new PrivilegedAction<Boolean>() {
@Override
@@ -189,13 +190,22 @@ public class SecureBulkLoadEndpoint exte
//To enable access prior to staging
return env.getRegion().bulkLoadHFiles(familyPaths,
new SecureBulkLoadListener(fs, bulkToken));
- } catch (Exception e) {
+ }
+ catch(DoNotRetryIOException e){
+ es[0] = e;
+ }
+ catch (Exception e) {
LOG.error("Failed to complete bulk load", e);
}
return false;
}
});
}
+
+ if (es[0] != null) {
+ throw es[0];
+ }
+
if (region.getCoprocessorHost() != null) {
loaded = region.getCoprocessorHost().postBulkLoadHFile(familyPaths, loaded);
}
Modified: hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java (original)
+++ hbase/branches/0.94/security/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadProtocol.java Sat Jan 25 01:20:35 2014
@@ -58,10 +58,11 @@ public interface SecureBulkLoadProtocol
* @param familyPaths column family to HFile path pairs
* @param userToken requesting user's HDFS delegation token
* @param bulkToken
+ * @param assignSeqNum
* @return
* @throws IOException
*/
boolean bulkLoadHFiles(List<Pair<byte[], String>> familyPaths,
- Token<?> userToken, String bulkToken) throws IOException;
+ Token<?> userToken, String bulkToken, boolean assignSeqNum) throws IOException;
}
Modified: hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java (original)
+++ hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFiles.java Sat Jan 25 01:20:35 2014
@@ -20,10 +20,8 @@
package org.apache.hadoop.hbase.mapreduce;
import org.apache.hadoop.hbase.LargeTests;
-import org.apache.hadoop.hbase.client.UserProvider;
import org.apache.hadoop.hbase.security.access.AccessControlLists;
import org.apache.hadoop.hbase.security.access.SecureTestUtil;
-
import org.junit.BeforeClass;
import org.junit.experimental.categories.Category;
@@ -43,10 +41,9 @@ public class TestSecureLoadIncrementalHF
@BeforeClass
public static void setUpBeforeClass() throws Exception {
+ useSecureHBaseOverride = true;
// setup configuration
SecureTestUtil.enableSecurity(util.getConfiguration());
- UserProvider.setUserProviderForTesting(util.getConfiguration(),
- HadoopSecurityEnabledUserProviderForTesting.class);
util.startMiniCluster();
Modified: hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java (original)
+++ hbase/branches/0.94/security/src/test/java/org/apache/hadoop/hbase/mapreduce/TestSecureLoadIncrementalHFilesSplitRecovery.java Sat Jan 25 01:20:35 2014
@@ -47,11 +47,10 @@ public class TestSecureLoadIncrementalHF
//make sure they are in sync
@BeforeClass
public static void setupCluster() throws Exception {
+ useSecureHBaseOverride = true;
util = new HBaseTestingUtility();
// setup configuration
SecureTestUtil.enableSecurity(util.getConfiguration());
- UserProvider.setUserProviderForTesting(util.getConfiguration(),
- HadoopSecurityEnabledUserProviderForTesting.class);
util.startMiniCluster();
// Wait for the ACL table to become available
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/coprocessor/SecureBulkLoadClient.java Sat Jan 25 01:20:35 2014
@@ -79,7 +79,7 @@ public class SecureBulkLoadClient {
String bulkToken, boolean assignSeqNum) throws IOException {
try {
return (Boolean) Methods.call(protocolClazz, proxy, "bulkLoadHFiles", new Class[] {
- List.class, Token.class, String.class, Boolean.class },
+ List.class, Token.class, String.class, boolean.class },
new Object[] { familyPaths, userToken, bulkToken, assignSeqNum });
} catch (Exception e) {
throw new IOException("Failed to bulkLoadHFiles", e);
Modified: hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java (original)
+++ hbase/branches/0.94/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java Sat Jan 25 01:20:35 2014
@@ -75,6 +75,7 @@ import org.apache.hadoop.hbase.io.hfile.
import org.apache.hadoop.hbase.regionserver.Store;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.security.token.Token;
@@ -107,14 +108,17 @@ public class LoadIncrementalHFiles exten
private final boolean assignSeqIds;
private UserProvider userProvider;
- //package private for testing
public LoadIncrementalHFiles(Configuration conf) throws Exception {
+ this(conf, false);
+ }
+
+ public LoadIncrementalHFiles(Configuration conf, boolean useSecureHBaseOverride) throws Exception {
super(conf);
this.cfg = conf;
this.hbAdmin = new HBaseAdmin(conf);
//added simple for testing
this.userProvider = UserProvider.instantiate(conf);
- this.useSecure = userProvider.isHBaseSecurityEnabled();
+ this.useSecure = useSecureHBaseOverride || userProvider.isHBaseSecurityEnabled();
this.assignSeqIds = conf.getBoolean(ASSIGN_SEQ_IDS, false);
}
@@ -571,7 +575,14 @@ public class LoadIncrementalHFiles exten
try {
List<LoadQueueItem> toRetry = new ArrayList<LoadQueueItem>();
- boolean success = svrCallable.withRetries();
+ boolean success;
+ // secure client wraps the result in another layer of callables, which does its own retrying -
+ // we shouldn't retry again here as well.
+ if (useSecure) {
+ success = svrCallable.withoutRetries();
+ } else {
+ success = svrCallable.withRetries();
+ }
if (!success) {
LOG.warn("Attempt to bulk load region containing "
+ Bytes.toStringBinary(first) + " into table "
Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Sat Jan 25 01:20:35 2014
@@ -58,6 +58,7 @@ import org.junit.experimental.categories
*/
@Category(LargeTests.class)
public class TestLoadIncrementalHFiles {
+ protected static boolean useSecureHBaseOverride = false;
private static final byte[] QUALIFIER = Bytes.toBytes("myqual");
private static final byte[] FAMILY = Bytes.toBytes("myfam");
@@ -160,7 +161,8 @@ public class TestLoadIncrementalHFiles {
HTable table = new HTable(util.getConfiguration(), TABLE);
util.waitTableAvailable(TABLE, 30000);
- LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
+ LoadIncrementalHFiles loader =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride);
loader.doBulkLoad(dir, table);
assertEquals(expectedRows, util.countRows(table));
@@ -192,7 +194,8 @@ public class TestLoadIncrementalHFiles {
HTable table = new HTable(util.getConfiguration(), TABLE);
util.waitTableAvailable(TABLE, 30000);
- LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
+ LoadIncrementalHFiles loader =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride);
// Do a dummy put to increase the hlog sequence number
Put put = new Put(Bytes.toBytes("row"));
@@ -247,7 +250,8 @@ public class TestLoadIncrementalHFiles {
util.waitTableAvailable(TABLE, 30000);
// make sure we go back to the usual user provider
UserProvider.setUserProviderForTesting(util.getConfiguration(), UserProvider.class);
- LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
+ LoadIncrementalHFiles loader =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride);
try {
loader.doBulkLoad(dir, table);
assertTrue("Loading into table with non-existent family should have failed", false);
Modified: hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java?rev=1561243&r1=1561242&r2=1561243&view=diff
==============================================================================
--- hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java (original)
+++ hbase/branches/0.94/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFilesSplitRecovery.java Sat Jan 25 01:20:35 2014
@@ -72,6 +72,8 @@ public class TestLoadIncrementalHFilesSp
final static int NUM_CFS = 10;
final static byte[] QUAL = Bytes.toBytes("qual");
final static int ROWCOUNT = 100;
+
+ protected static boolean useSecureHBaseOverride = false;
private final static byte[][] families = new byte[NUM_CFS][];
static {
@@ -134,7 +136,8 @@ public class TestLoadIncrementalHFilesSp
*/
private void populateTable(String table, int value) throws Exception {
// create HFiles for different column families
- LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration());
+ LoadIncrementalHFiles lih =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride);
Path bulk1 = buildBulkFiles(table, value);
HTable t = new HTable(util.getConfiguration(), Bytes.toBytes(table));
lih.doBulkLoad(bulk1, t);
@@ -226,7 +229,7 @@ public class TestLoadIncrementalHFilesSp
final AtomicInteger attmptedCalls = new AtomicInteger();
final AtomicInteger failedCalls = new AtomicInteger();
LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
- util.getConfiguration()) {
+ util.getConfiguration(), useSecureHBaseOverride) {
protected List<LoadQueueItem> tryAtomicRegionLoad(final HConnection conn,
byte[] tableName, final byte[] first, Collection<LoadQueueItem> lqis)
@@ -293,8 +296,8 @@ public class TestLoadIncrementalHFilesSp
// Now let's cause trouble. This will occur after checks and cause bulk
// files to fail when attempt to atomically import. This is recoverable.
final AtomicInteger attemptedCalls = new AtomicInteger();
- LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles(
- util.getConfiguration()) {
+ LoadIncrementalHFiles lih2 =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride) {
protected void bulkLoadPhase(final HTable htable, final HConnection conn,
ExecutorService pool, Deque<LoadQueueItem> queue,
@@ -334,8 +337,8 @@ public class TestLoadIncrementalHFilesSp
forceSplit(table);
final AtomicInteger countedLqis= new AtomicInteger();
- LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
- util.getConfiguration()) {
+ LoadIncrementalHFiles lih =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride) {
protected List<LoadQueueItem> groupOrSplit(
Multimap<ByteBuffer, LoadQueueItem> regionGroups,
final LoadQueueItem item, final HTable htable,
@@ -366,8 +369,8 @@ public class TestLoadIncrementalHFilesSp
String table = "groupOrSplitFailure";
setupTable(table, 10);
- LoadIncrementalHFiles lih = new LoadIncrementalHFiles(
- util.getConfiguration()) {
+ LoadIncrementalHFiles lih =
+ new LoadIncrementalHFiles(util.getConfiguration(), useSecureHBaseOverride) {
int i = 0;
protected List<LoadQueueItem> groupOrSplit(