You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by jm...@apache.org on 2013/02/13 21:58:32 UTC
svn commit: r1445918 [13/29] - in /hbase/branches/hbase-7290: ./ bin/ conf/
dev-support/ hbase-client/ hbase-common/
hbase-common/src/main/java/org/apache/hadoop/hbase/
hbase-common/src/main/java/org/apache/hadoop/hbase/io/compress/
hbase-common/src/ma...
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessController.java Wed Feb 13 20:58:23 2013
@@ -18,7 +18,9 @@ import java.io.IOException;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.Collection;
+import java.util.Collections;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -47,8 +49,6 @@ import org.apache.hadoop.hbase.coprocess
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
-import org.apache.hadoop.hbase.ipc.HBaseRPC;
-import org.apache.hadoop.hbase.ipc.ProtocolSignature;
import org.apache.hadoop.hbase.ipc.RequestContext;
import org.apache.hadoop.hbase.master.RegionPlan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -59,6 +59,7 @@ import org.apache.hadoop.hbase.regionser
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.RegionScanner;
import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.ScanType;
import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.hbase.security.AccessDeniedException;
@@ -66,7 +67,9 @@ import org.apache.hadoop.hbase.security.
import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
+import org.apache.hadoop.hbase.util.Pair;
+import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.common.collect.MapMaker;
@@ -95,93 +98,26 @@ import static org.apache.hadoop.hbase.pr
*
* <p>
* To perform authorization checks, {@code AccessController} relies on the
- * {@link org.apache.hadoop.hbase.ipc.SecureRpcEngine} being loaded to provide
+ * {@link org.apache.hadoop.hbase.ipc.RpcServerEngine} being loaded to provide
* the user identities for remote requests.
* </p>
*
* <p>
* The access control lists used for authorization can be manipulated via the
- * exposed {@link AccessControllerProtocol} implementation, and the associated
+ * exposed {@link AccessControlService} interface implementation, and the associated
* {@code grant}, {@code revoke}, and {@code user_permission} HBase shell
* commands.
* </p>
*/
public class AccessController extends BaseRegionObserver
- implements MasterObserver, RegionServerObserver, AccessControllerProtocol,
- AccessControlService.Interface, CoprocessorService {
- /**
- * Represents the result of an authorization check for logging and error
- * reporting.
- */
- private static class AuthResult {
- private final boolean allowed;
- private final byte[] table;
- private final byte[] family;
- private final byte[] qualifier;
- private final Permission.Action action;
- private final String reason;
- private final User user;
-
- public AuthResult(boolean allowed, String reason, User user,
- Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
- this.allowed = allowed;
- this.reason = reason;
- this.user = user;
- this.table = table;
- this.family = family;
- this.qualifier = qualifier;
- this.action = action;
- }
-
- public boolean isAllowed() { return allowed; }
-
- public User getUser() { return user; }
-
- public String getReason() { return reason; }
-
- public String toContextString() {
- return "(user=" + (user != null ? user.getName() : "UNKNOWN") + ", " +
- "scope=" + (table == null ? "GLOBAL" : Bytes.toString(table)) + ", " +
- "family=" + (family != null ? Bytes.toString(family) : "") + ", " +
- "qualifer=" + (qualifier != null ? Bytes.toString(qualifier) : "") + ", " +
- "action=" + (action != null ? action.toString() : "") + ")";
- }
-
- public String toString() {
- return new StringBuilder("AuthResult")
- .append(toContextString()).toString();
- }
-
- public static AuthResult allow(String reason, User user, Permission.Action action,
- byte[] table, byte[] family, byte[] qualifier) {
- return new AuthResult(true, reason, user, action, table, family, qualifier);
- }
-
- public static AuthResult allow(String reason, User user, Permission.Action action, byte[] table) {
- return new AuthResult(true, reason, user, action, table, null, null);
- }
-
- public static AuthResult deny(String reason, User user,
- Permission.Action action, byte[] table) {
- return new AuthResult(false, reason, user, action, table, null, null);
- }
-
- public static AuthResult deny(String reason, User user,
- Permission.Action action, byte[] table, byte[] family, byte[] qualifier) {
- return new AuthResult(false, reason, user, action, table, family, qualifier);
- }
- }
+ implements MasterObserver, RegionServerObserver,
+ AccessControlService.Interface, CoprocessorService {
public static final Log LOG = LogFactory.getLog(AccessController.class);
private static final Log AUDITLOG =
LogFactory.getLog("SecurityLogger."+AccessController.class.getName());
- /**
- * Version number for AccessControllerProtocol
- */
- private static final long PROTOCOL_VERSION = 1L;
-
TableAuthManager authManager = null;
// flags if we are running on a region of the _acl_ table
@@ -239,7 +175,7 @@ public class AccessController extends Ba
byte[] serialized = AccessControlLists.writePermissionsAsBytes(perms, conf);
zkw.writeToZookeeper(tableName, serialized);
} catch (IOException ex) {
- LOG.error("Failed updating permissions mirror for '" + tableName + "'", ex);
+ LOG.error("Failed updating permissions mirror for '" + Bytes.toString(tableName) + "'", ex);
}
}
}
@@ -258,7 +194,7 @@ public class AccessController extends Ba
* the request
* @return
*/
- AuthResult permissionGranted(User user, Permission.Action permRequest,
+ AuthResult permissionGranted(String request, User user, Permission.Action permRequest,
RegionCoprocessorEnvironment e,
Map<byte [], ? extends Collection<?>> families) {
HRegionInfo hri = e.getRegion().getRegionInfo();
@@ -268,12 +204,14 @@ public class AccessController extends Ba
// this is a very common operation, so deal with it quickly.
if (hri.isRootRegion() || hri.isMetaRegion()) {
if (permRequest == Permission.Action.READ) {
- return AuthResult.allow("All users allowed", user, permRequest, tableName);
+ return AuthResult.allow(request, "All users allowed", user,
+ permRequest, tableName, families);
}
}
if (user == null) {
- return AuthResult.deny("No user associated with request!", null, permRequest, tableName);
+ return AuthResult.deny(request, "No user associated with request!", null,
+ permRequest, tableName, families);
}
// Users with CREATE/ADMIN rights need to modify .META. and _acl_ table
@@ -287,12 +225,14 @@ public class AccessController extends Ba
(authManager.authorize(user, Permission.Action.CREATE) ||
authManager.authorize(user, Permission.Action.ADMIN)))
{
- return AuthResult.allow("Table permission granted", user, permRequest, tableName);
+ return AuthResult.allow(request, "Table permission granted", user,
+ permRequest, tableName, families);
}
// 2. check for the table-level, if successful we can short-circuit
if (authManager.authorize(user, tableName, (byte[])null, permRequest)) {
- return AuthResult.allow("Table permission granted", user, permRequest, tableName);
+ return AuthResult.allow(request, "Table permission granted", user,
+ permRequest, tableName, families);
}
// 3. check permissions against the requested families
@@ -313,8 +253,8 @@ public class AccessController extends Ba
for (byte[] qualifier : familySet) {
if (!authManager.authorize(user, tableName, family.getKey(),
qualifier, permRequest)) {
- return AuthResult.deny("Failed qualifier check", user,
- permRequest, tableName, family.getKey(), qualifier);
+ return AuthResult.deny(request, "Failed qualifier check", user,
+ permRequest, tableName, makeFamilyMap(family.getKey(), qualifier));
}
}
} else if (family.getValue() instanceof List) { // List<KeyValue>
@@ -322,32 +262,32 @@ public class AccessController extends Ba
for (KeyValue kv : kvList) {
if (!authManager.authorize(user, tableName, family.getKey(),
kv.getQualifier(), permRequest)) {
- return AuthResult.deny("Failed qualifier check", user,
- permRequest, tableName, family.getKey(), kv.getQualifier());
+ return AuthResult.deny(request, "Failed qualifier check", user,
+ permRequest, tableName, makeFamilyMap(family.getKey(), kv.getQualifier()));
}
}
}
} else {
// no qualifiers and family-level check already failed
- return AuthResult.deny("Failed family check", user, permRequest,
- tableName, family.getKey(), null);
+ return AuthResult.deny(request, "Failed family check", user, permRequest,
+ tableName, makeFamilyMap(family.getKey(), null));
}
}
// all family checks passed
- return AuthResult.allow("All family checks passed", user, permRequest,
- tableName);
+ return AuthResult.allow(request, "All family checks passed", user, permRequest,
+ tableName, families);
}
// 4. no families to check and table level access failed
- return AuthResult.deny("No families to check and table permission failed",
- user, permRequest, tableName);
+ return AuthResult.deny(request, "No families to check and table permission failed",
+ user, permRequest, tableName, families);
}
private void logResult(AuthResult result) {
if (AUDITLOG.isTraceEnabled()) {
- InetAddress remoteAddr = null;
RequestContext ctx = RequestContext.get();
+ InetAddress remoteAddr = null;
if (ctx != null) {
remoteAddr = ctx.getRemoteAddress();
}
@@ -355,6 +295,7 @@ public class AccessController extends Ba
" for user " + (result.getUser() != null ? result.getUser().getShortName() : "UNKNOWN") +
"; reason: " + result.getReason() +
"; remote address: " + (remoteAddr != null ? remoteAddr : "") +
+ "; request: " + result.getRequest() +
"; context: " + result.toContextString());
}
}
@@ -382,18 +323,20 @@ public class AccessController extends Ba
* @throws IOException if obtaining the current user fails
* @throws AccessDeniedException if user has no authorization
*/
- private void requirePermission(byte[] tableName, byte[] family, byte[] qualifier,
+ private void requirePermission(String request, byte[] tableName, byte[] family, byte[] qualifier,
Action... permissions) throws IOException {
User user = getActiveUser();
AuthResult result = null;
for (Action permission : permissions) {
if (authManager.authorize(user, tableName, family, qualifier, permission)) {
- result = AuthResult.allow("Table permission granted", user, permission, tableName, family, qualifier);
+ result = AuthResult.allow(request, "Table permission granted", user,
+ permission, tableName, family, qualifier);
break;
} else {
// rest of the world
- result = AuthResult.deny("Insufficient permissions", user, permission, tableName, family, qualifier);
+ result = AuthResult.deny(request, "Insufficient permissions", user,
+ permission, tableName, family, qualifier);
}
}
logResult(result);
@@ -408,35 +351,8 @@ public class AccessController extends Ba
* @throws IOException if obtaining the current user fails
* @throws AccessDeniedException if authorization is denied
*/
- private void requirePermission(Permission.Action perm) throws IOException {
- User user = getActiveUser();
- if (authManager.authorize(user, perm)) {
- logResult(AuthResult.allow("Global check allowed", user, perm, null));
- } else {
- logResult(AuthResult.deny("Global check failed", user, perm, null));
- throw new AccessDeniedException("Insufficient permissions for user '" +
- (user != null ? user.getShortName() : "null") +"' (global, action=" +
- perm.toString() + ")");
- }
- }
-
- /**
- * Authorizes that the current user has permission to perform the given
- * action on the set of table column families.
- * @param perm Action that is required
- * @param env The current coprocessor environment
- * @param families The set of column families present/required in the request
- * @throws AccessDeniedException if the authorization check failed
- */
- private void requirePermission(Permission.Action perm,
- RegionCoprocessorEnvironment env, Collection<byte[]> families)
- throws IOException {
- // create a map of family-qualifier
- HashMap<byte[], Set<byte[]>> familyMap = new HashMap<byte[], Set<byte[]>>();
- for (byte[] family : families) {
- familyMap.put(family, null);
- }
- requirePermission(perm, env, familyMap);
+ private void requirePermission(String request, Permission.Action perm) throws IOException {
+ requireGlobalPermission(request, perm, null, null);
}
/**
@@ -447,33 +363,45 @@ public class AccessController extends Ba
* @param families The map of column families-qualifiers.
* @throws AccessDeniedException if the authorization check failed
*/
- private void requirePermission(Permission.Action perm,
+ private void requirePermission(String request, Permission.Action perm,
RegionCoprocessorEnvironment env,
Map<byte[], ? extends Collection<?>> families)
throws IOException {
User user = getActiveUser();
- AuthResult result = permissionGranted(user, perm, env, families);
+ AuthResult result = permissionGranted(request, user, perm, env, families);
logResult(result);
if (!result.isAllowed()) {
- StringBuffer sb = new StringBuffer("");
- if ((families != null && families.size() > 0)) {
- for (byte[] familyName : families.keySet()) {
- if (sb.length() != 0) {
- sb.append(", ");
- }
- sb.append(Bytes.toString(familyName));
- }
- }
throw new AccessDeniedException("Insufficient permissions (table=" +
env.getRegion().getTableDesc().getNameAsString()+
((families != null && families.size() > 0) ? ", family: " +
- sb.toString() : "") + ", action=" +
+ result.toFamilyString() : "") + ", action=" +
perm.toString() + ")");
}
}
/**
+ * Checks that the user has the given global permission. The generated
+ * audit log message will contain context information for the operation
+ * being authorized, based on the given parameters.
+ * @param perm Action being requested
+ * @param tableName Affected table name.
+ * @param familyMap Affected column families.
+ */
+ private void requireGlobalPermission(String request, Permission.Action perm, byte[] tableName,
+ Map<byte[], ? extends Collection<byte[]>> familyMap) throws IOException {
+ User user = getActiveUser();
+ if (authManager.authorize(user, perm)) {
+ logResult(AuthResult.allow(request, "Global check allowed", user, perm, tableName, familyMap));
+ } else {
+ logResult(AuthResult.deny(request, "Global check failed", user, perm, tableName, familyMap));
+ throw new AccessDeniedException("Insufficient permissions for user '" +
+ (user != null ? user.getShortName() : "null") +"' (global, action=" +
+ perm.toString() + ")");
+ }
+ }
+
+ /**
* Returns <code>true</code> if the current user is allowed the given action
* over at least one of the column qualifiers in the given column families.
*/
@@ -551,7 +479,12 @@ public class AccessController extends Ba
@Override
public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> c,
HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
- requirePermission(Permission.Action.CREATE);
+ Set<byte[]> families = desc.getFamiliesKeys();
+ HashMap<byte[], Set<byte[]>> familyMap = Maps.newHashMapWithExpectedSize(families.size());
+ for (byte[] family: families) {
+ familyMap.put(family, null);
+ }
+ requireGlobalPermission("createTable", Permission.Action.CREATE, desc.getName(), familyMap);
}
@Override
@@ -578,7 +511,7 @@ public class AccessController extends Ba
@Override
public void preDeleteTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("deleteTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -596,7 +529,7 @@ public class AccessController extends Ba
@Override
public void preModifyTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HTableDescriptor htd) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("modifyTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -622,7 +555,7 @@ public class AccessController extends Ba
@Override
public void preAddColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HColumnDescriptor column) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("addColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -638,7 +571,7 @@ public class AccessController extends Ba
@Override
public void preModifyColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
HColumnDescriptor descriptor) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("modifyColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -655,7 +588,7 @@ public class AccessController extends Ba
@Override
public void preDeleteColumn(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName,
byte[] col) throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("deleteColumn", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -674,7 +607,7 @@ public class AccessController extends Ba
@Override
public void preEnableTable(ObserverContext<MasterCoprocessorEnvironment> c, byte[] tableName)
throws IOException {
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("enableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -694,7 +627,7 @@ public class AccessController extends Ba
throw new AccessDeniedException("Not allowed to disable "
+ AccessControlLists.ACL_TABLE_NAME_STR + " table.");
}
- requirePermission(tableName, null, null, Action.ADMIN, Action.CREATE);
+ requirePermission("disableTable", tableName, null, null, Action.ADMIN, Action.CREATE);
}
@Override
@@ -710,7 +643,7 @@ public class AccessController extends Ba
@Override
public void preMove(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo region,
ServerName srcServer, ServerName destServer) throws IOException {
- requirePermission(region.getTableName(), null, null, Action.ADMIN);
+ requirePermission("move", region.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -721,7 +654,7 @@ public class AccessController extends Ba
@Override
public void preAssign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo)
throws IOException {
- requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
+ requirePermission("assign", regionInfo.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -731,7 +664,7 @@ public class AccessController extends Ba
@Override
public void preUnassign(ObserverContext<MasterCoprocessorEnvironment> c, HRegionInfo regionInfo,
boolean force) throws IOException {
- requirePermission(regionInfo.getTableName(), null, null, Action.ADMIN);
+ requirePermission("unassign", regionInfo.getTableName(), null, null, Action.ADMIN);
}
@Override
@@ -741,7 +674,7 @@ public class AccessController extends Ba
@Override
public void preBalance(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("balance", Permission.Action.ADMIN);
}
@Override
public void postBalance(ObserverContext<MasterCoprocessorEnvironment> c, List<RegionPlan> plans)
@@ -750,7 +683,7 @@ public class AccessController extends Ba
@Override
public boolean preBalanceSwitch(ObserverContext<MasterCoprocessorEnvironment> c,
boolean newValue) throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("balanceSwitch", Permission.Action.ADMIN);
return newValue;
}
@Override
@@ -760,13 +693,13 @@ public class AccessController extends Ba
@Override
public void preShutdown(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("shutdown", Permission.Action.ADMIN);
}
@Override
public void preStopMaster(ObserverContext<MasterCoprocessorEnvironment> c)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("stopMaster", Permission.Action.ADMIN);
}
@Override
@@ -780,7 +713,7 @@ public class AccessController extends Ba
public void preSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("snapshot", Permission.Action.ADMIN);
}
@Override
@@ -793,7 +726,7 @@ public class AccessController extends Ba
public void preCloneSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("clone", Permission.Action.ADMIN);
}
@Override
@@ -806,7 +739,7 @@ public class AccessController extends Ba
public void preRestoreSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
final SnapshotDescription snapshot, final HTableDescriptor hTableDescriptor)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("restore", Permission.Action.ADMIN);
}
@Override
@@ -818,7 +751,7 @@ public class AccessController extends Ba
@Override
public void preDeleteSnapshot(final ObserverContext<MasterCoprocessorEnvironment> ctx,
final SnapshotDescription snapshot) throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("deleteSnapshot", Permission.Action.ADMIN);
}
@Override
@@ -835,13 +768,12 @@ public class AccessController extends Ba
final HRegion region = env.getRegion();
if (region == null) {
LOG.error("NULL region from RegionCoprocessorEnvironment in preOpen()");
- return;
} else {
HRegionInfo regionInfo = region.getRegionInfo();
if (isSpecialTable(regionInfo)) {
isSystemOrSuperUser(regionEnv.getConfiguration());
} else {
- requirePermission(Action.ADMIN);
+ requirePermission("preOpen", Action.ADMIN);
}
}
}
@@ -868,39 +800,42 @@ public class AccessController extends Ba
@Override
public void preFlush(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("flush", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("split", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public void preSplit(ObserverContext<RegionCoprocessorEnvironment> e,
byte[] splitRow) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("split", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e,
- final HStore store, final InternalScanner scanner) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ final HStore store, final InternalScanner scanner, final ScanType scanType)
+ throws IOException {
+ requirePermission("compact", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
return scanner;
}
@Override
public void preCompactSelection(final ObserverContext<RegionCoprocessorEnvironment> e,
final HStore store, final List<StoreFile> candidates) throws IOException {
- requirePermission(getTableName(e.getEnvironment()), null, null, Action.ADMIN);
+ requirePermission("compact", getTableName(e.getEnvironment()), null, null, Action.ADMIN);
}
@Override
public void preGetClosestRowBefore(final ObserverContext<RegionCoprocessorEnvironment> c,
final byte [] row, final byte [] family, final Result result)
throws IOException {
- requirePermission(Permission.Action.READ, c.getEnvironment(),
- (family != null ? Lists.newArrayList(family) : null));
+ assert family != null;
+ //noinspection PrimitiveArrayArgumentToVariableArgMethod
+ requirePermission("getClosestRowBefore", Permission.Action.READ, c.getEnvironment(),
+ makeFamilyMap(family, null));
}
@Override
@@ -912,7 +847,7 @@ public class AccessController extends Ba
*/
RegionCoprocessorEnvironment e = c.getEnvironment();
User requestUser = getActiveUser();
- AuthResult authResult = permissionGranted(requestUser,
+ AuthResult authResult = permissionGranted("get", requestUser,
Permission.Action.READ, e, get.getFamilyMap());
if (!authResult.isAllowed()) {
if (hasFamilyQualifierPermission(requestUser,
@@ -929,8 +864,8 @@ public class AccessController extends Ba
} else {
get.setFilter(filter);
}
- logResult(AuthResult.allow("Access allowed with filter", requestUser,
- Permission.Action.READ, authResult.table));
+ logResult(AuthResult.allow("get", "Access allowed with filter", requestUser,
+ Permission.Action.READ, authResult.getTable(), get.getFamilyMap()));
} else {
logResult(authResult);
throw new AccessDeniedException("Insufficient permissions (table=" +
@@ -945,8 +880,8 @@ public class AccessController extends Ba
@Override
public boolean preExists(final ObserverContext<RegionCoprocessorEnvironment> c,
final Get get, final boolean exists) throws IOException {
- requirePermission(Permission.Action.READ, c.getEnvironment(),
- get.familySet());
+ requirePermission("exists", Permission.Action.READ, c.getEnvironment(),
+ get.getFamilyMap());
return exists;
}
@@ -954,7 +889,7 @@ public class AccessController extends Ba
public void prePut(final ObserverContext<RegionCoprocessorEnvironment> c,
final Put put, final WALEdit edit, final boolean writeToWAL)
throws IOException {
- requirePermission(Permission.Action.WRITE, c.getEnvironment(),
+ requirePermission("put", Permission.Action.WRITE, c.getEnvironment(),
put.getFamilyMap());
}
@@ -970,7 +905,7 @@ public class AccessController extends Ba
public void preDelete(final ObserverContext<RegionCoprocessorEnvironment> c,
final Delete delete, final WALEdit edit, final boolean writeToWAL)
throws IOException {
- requirePermission(Permission.Action.WRITE, c.getEnvironment(),
+ requirePermission("delete", Permission.Action.WRITE, c.getEnvironment(),
delete.getFamilyMap());
}
@@ -989,9 +924,9 @@ public class AccessController extends Ba
final CompareFilter.CompareOp compareOp,
final ByteArrayComparable comparator, final Put put,
final boolean result) throws IOException {
- Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
- requirePermission(Permission.Action.READ, c.getEnvironment(), familyMap);
- requirePermission(Permission.Action.WRITE, c.getEnvironment(), familyMap);
+ Map<byte[], ? extends Collection<byte[]>> familyMap = makeFamilyMap(family, qualifier);
+ requirePermission("checkAndPut", Permission.Action.READ, c.getEnvironment(), familyMap);
+ requirePermission("checkAndPut", Permission.Action.WRITE, c.getEnvironment(), familyMap);
return result;
}
@@ -1001,9 +936,9 @@ public class AccessController extends Ba
final CompareFilter.CompareOp compareOp,
final ByteArrayComparable comparator, final Delete delete,
final boolean result) throws IOException {
- Collection<byte[]> familyMap = Arrays.asList(new byte[][]{family});
- requirePermission(Permission.Action.READ, c.getEnvironment(), familyMap);
- requirePermission(Permission.Action.WRITE, c.getEnvironment(), familyMap);
+ Map<byte[], ? extends Collection<byte[]>> familyMap = makeFamilyMap(family, qualifier);
+ requirePermission("checkAndDelete", Permission.Action.READ, c.getEnvironment(), familyMap);
+ requirePermission("checkAndDelete", Permission.Action.WRITE, c.getEnvironment(), familyMap);
return result;
}
@@ -1012,15 +947,15 @@ public class AccessController extends Ba
final byte [] row, final byte [] family, final byte [] qualifier,
final long amount, final boolean writeToWAL)
throws IOException {
- requirePermission(Permission.Action.WRITE, c.getEnvironment(),
- Arrays.asList(new byte[][]{family}));
+ Map<byte[], ? extends Collection<byte[]>> familyMap = makeFamilyMap(family, qualifier);
+ requirePermission("incrementColumnValue", Permission.Action.WRITE, c.getEnvironment(), familyMap);
return -1;
}
@Override
public Result preAppend(ObserverContext<RegionCoprocessorEnvironment> c, Append append)
throws IOException {
- requirePermission(Permission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
+ requirePermission("append", Permission.Action.WRITE, c.getEnvironment(), append.getFamilyMap());
return null;
}
@@ -1028,8 +963,11 @@ public class AccessController extends Ba
public Result preIncrement(final ObserverContext<RegionCoprocessorEnvironment> c,
final Increment increment)
throws IOException {
- requirePermission(Permission.Action.WRITE, c.getEnvironment(),
- increment.getFamilyMap().keySet());
+ Map<byte[], Set<byte[]>> familyMap = Maps.newHashMap();
+ for (Map.Entry<byte[], ? extends Map<byte[], Long>> entry : increment.getFamilyMap().entrySet()) {
+ familyMap.put(entry.getKey(), entry.getValue().keySet());
+ }
+ requirePermission("increment", Permission.Action.WRITE, c.getEnvironment(), familyMap);
return null;
}
@@ -1042,7 +980,7 @@ public class AccessController extends Ba
*/
RegionCoprocessorEnvironment e = c.getEnvironment();
User user = getActiveUser();
- AuthResult authResult = permissionGranted(user, Permission.Action.READ, e,
+ AuthResult authResult = permissionGranted("scannerOpen", user, Permission.Action.READ, e,
scan.getFamilyMap());
if (!authResult.isAllowed()) {
if (hasFamilyQualifierPermission(user, Permission.Action.READ, e,
@@ -1059,8 +997,8 @@ public class AccessController extends Ba
} else {
scan.setFilter(filter);
}
- logResult(AuthResult.allow("Access allowed with filter", user,
- Permission.Action.READ, authResult.table));
+ logResult(AuthResult.allow("scannerOpen", "Access allowed with filter", user,
+ Permission.Action.READ, authResult.getTable(), scan.getFamilyMap()));
} else {
// no table/family level perms and no qualifier level perms, reject
logResult(authResult);
@@ -1122,142 +1060,76 @@ public class AccessController extends Ba
}
}
- /* ---- AccessControllerProtocol implementation ---- */
- /*
- * These methods are only allowed to be called against the _acl_ region(s).
- * This will be restricted by both client side and endpoint implementations.
+ /**
+ * Verifies user has WRITE privileges on
+ * the Column Families involved in the bulkLoadHFile
+ * request. Specific Column Write privileges are presently
+ * ignored.
*/
- @Deprecated
- @Override
- public void grant(UserPermission perm) throws IOException {
- // verify it's only running at .acl.
- if (aclRegion) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Received request to grant access permission " + perm.toString());
- }
-
- requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
-
- AccessControlLists.addUserPermission(regionEnv.getConfiguration(), perm);
- if (AUDITLOG.isTraceEnabled()) {
- // audit log should store permission changes in addition to auth results
- AUDITLOG.trace("Granted permission " + perm.toString());
- }
- } else {
- throw new CoprocessorException(AccessController.class, "This method "
- + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
- }
- }
-
@Override
- @Deprecated
- public void grant(byte[] user, TablePermission permission)
- throws IOException {
- grant(new UserPermission(user, permission.getTable(),
- permission.getFamily(), permission.getQualifier(),
- permission.getActions()));
- }
-
- @Deprecated
- @Override
- public void revoke(UserPermission perm) throws IOException {
- // only allowed to be called on _acl_ region
- if (aclRegion) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Received request to revoke access permission " + perm.toString());
- }
-
- requirePermission(perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
-
- AccessControlLists.removeUserPermission(regionEnv.getConfiguration(), perm);
- if (AUDITLOG.isTraceEnabled()) {
- // audit log should record all permission changes
- AUDITLOG.trace("Revoked permission " + perm.toString());
- }
- } else {
- throw new CoprocessorException(AccessController.class, "This method "
- + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
+ public void preBulkLoadHFile(ObserverContext<RegionCoprocessorEnvironment> ctx,
+ List<Pair<byte[], String>> familyPaths) throws IOException {
+ List<byte[]> cfs = new LinkedList<byte[]>();
+ for(Pair<byte[],String> el : familyPaths) {
+ requirePermission("preBulkLoadHFile",
+ ctx.getEnvironment().getRegion().getTableDesc().getName(),
+ el.getFirst(),
+ null,
+ Permission.Action.WRITE);
}
}
- @Override
- @Deprecated
- public void revoke(byte[] user, TablePermission permission)
- throws IOException {
- revoke(new UserPermission(user, permission.getTable(),
- permission.getFamily(), permission.getQualifier(),
- permission.getActions()));
- }
-
- @Deprecated
- @Override
- public List<UserPermission> getUserPermissions(final byte[] tableName) throws IOException {
- // only allowed to be called on _acl_ region
- if (aclRegion) {
- requirePermission(tableName, null, null, Action.ADMIN);
-
- List<UserPermission> perms = AccessControlLists.getUserPermissions(
- regionEnv.getConfiguration(), tableName);
- return perms;
- } else {
- throw new CoprocessorException(AccessController.class, "This method "
- + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
- }
- }
-
- @Deprecated
- @Override
- public void checkPermissions(Permission[] permissions) throws IOException {
- byte[] tableName = regionEnv.getRegion().getTableDesc().getName();
- for (Permission permission : permissions) {
- if (permission instanceof TablePermission) {
- TablePermission tperm = (TablePermission) permission;
- for (Permission.Action action : permission.getActions()) {
- if (!Arrays.equals(tperm.getTable(), tableName)) {
- throw new CoprocessorException(AccessController.class, String.format("This method "
- + "can only execute at the table specified in TablePermission. " +
- "Table of the region:%s , requested table:%s", Bytes.toString(tableName),
- Bytes.toString(tperm.getTable())));
- }
-
- HashMap<byte[], Set<byte[]>> familyMap = Maps.newHashMapWithExpectedSize(1);
- if (tperm.getFamily() != null) {
- if (tperm.getQualifier() != null) {
- familyMap.put(tperm.getFamily(), Sets.newHashSet(tperm.getQualifier()));
- } else {
- familyMap.put(tperm.getFamily(), null);
- }
+ private AuthResult hasSomeAccess(RegionCoprocessorEnvironment e, String method, Action action) throws IOException {
+ User requestUser = getActiveUser();
+ byte[] tableName = e.getRegion().getTableDesc().getName();
+ AuthResult authResult = permissionGranted(method, requestUser,
+ action, e, Collections.EMPTY_MAP);
+ if (!authResult.isAllowed()) {
+ for(UserPermission userPerm:
+ AccessControlLists.getUserPermissions(regionEnv.getConfiguration(), tableName)) {
+ for(Permission.Action userAction: userPerm.getActions()) {
+ if(userAction.equals(action)) {
+ return AuthResult.allow(method, "Access allowed", requestUser,
+ action, tableName, null, null);
}
-
- requirePermission(action, regionEnv, familyMap);
- }
-
- } else {
- for (Permission.Action action : permission.getActions()) {
- requirePermission(action);
}
}
}
+ return authResult;
}
- @Deprecated
- @Override
- public long getProtocolVersion(String protocol, long clientVersion) throws IOException {
- return PROTOCOL_VERSION;
+ /**
+ * Authorization check for
+ * SecureBulkLoadProtocol.prepareBulkLoad()
+ * @param e
+ * @throws IOException
+ */
+ //TODO this should end up as a coprocessor hook
+ public void prePrepareBulkLoad(RegionCoprocessorEnvironment e) throws IOException {
+ AuthResult authResult = hasSomeAccess(e, "prePrepareBulkLoad", Action.WRITE);
+ logResult(authResult);
+ if (!authResult.isAllowed()) {
+ throw new AccessDeniedException("Insufficient permissions (table=" +
+ e.getRegion().getTableDesc().getNameAsString() + ", action=WRITE)");
+ }
}
- @Deprecated
- @Override
- public ProtocolSignature getProtocolSignature(String protocol,
- long clientVersion, int clientMethodsHash) throws IOException {
- if (AccessControllerProtocol.class.getName().equals(protocol)) {
- return new ProtocolSignature(PROTOCOL_VERSION, null);
+ /**
+ * Authorization security check for
+ * SecureBulkLoadProtocol.cleanupBulkLoad()
+ * @param e
+ * @throws IOException
+ */
+ //TODO this should end up as a coprocessor hook
+ public void preCleanupBulkLoad(RegionCoprocessorEnvironment e) throws IOException {
+ AuthResult authResult = hasSomeAccess(e, "preCleanupBulkLoad", Action.WRITE);
+ logResult(authResult);
+ if (!authResult.isAllowed()) {
+ throw new AccessDeniedException("Insufficient permissions (table=" +
+ e.getRegion().getTableDesc().getNameAsString() + ", action=WRITE)");
}
- throw new HBaseRPC.UnknownProtocolException(
- "Unexpected protocol requested: "+protocol);
}
-
/* ---- Protobuf AccessControlService implementation ---- */
@Override
public void grant(RpcController controller,
@@ -1266,7 +1138,23 @@ public class AccessController extends Ba
UserPermission perm = ProtobufUtil.toUserPermission(request.getPermission());
AccessControlProtos.GrantResponse response = null;
try {
- grant(perm);
+ // verify it's only running at .acl.
+ if (aclRegion) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Received request to grant access permission " + perm.toString());
+ }
+
+ requirePermission("grant", perm.getTable(), perm.getFamily(), perm.getQualifier(), Action.ADMIN);
+
+ AccessControlLists.addUserPermission(regionEnv.getConfiguration(), perm);
+ if (AUDITLOG.isTraceEnabled()) {
+ // audit log should store permission changes in addition to auth results
+ AUDITLOG.trace("Granted permission " + perm.toString());
+ }
+ } else {
+ throw new CoprocessorException(AccessController.class, "This method "
+ + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
+ }
response = AccessControlProtos.GrantResponse.getDefaultInstance();
} catch (IOException ioe) {
// pass exception back up
@@ -1282,7 +1170,24 @@ public class AccessController extends Ba
UserPermission perm = ProtobufUtil.toUserPermission(request.getPermission());
AccessControlProtos.RevokeResponse response = null;
try {
- revoke(perm);
+ // only allowed to be called on _acl_ region
+ if (aclRegion) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Received request to revoke access permission " + perm.toString());
+ }
+
+ requirePermission("revoke", perm.getTable(), perm.getFamily(),
+ perm.getQualifier(), Action.ADMIN);
+
+ AccessControlLists.removeUserPermission(regionEnv.getConfiguration(), perm);
+ if (AUDITLOG.isTraceEnabled()) {
+ // audit log should record all permission changes
+ AUDITLOG.trace("Revoked permission " + perm.toString());
+ }
+ } else {
+ throw new CoprocessorException(AccessController.class, "This method "
+ + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
+ }
response = AccessControlProtos.RevokeResponse.getDefaultInstance();
} catch (IOException ioe) {
// pass exception back up
@@ -1295,11 +1200,23 @@ public class AccessController extends Ba
public void getUserPermissions(RpcController controller,
AccessControlProtos.UserPermissionsRequest request,
RpcCallback<AccessControlProtos.UserPermissionsResponse> done) {
- byte[] table = request.getTable().toByteArray();
AccessControlProtos.UserPermissionsResponse response = null;
+ byte[] table = null;
+ if (request.hasTable()) {
+ table = request.getTable().toByteArray();
+ }
try {
- List<UserPermission> perms = getUserPermissions(table);
- response = ResponseConverter.buildUserPermissionsResponse(perms);
+ // only allowed to be called on _acl_ region
+ if (aclRegion) {
+ requirePermission("userPermissions", table, null, null, Action.ADMIN);
+
+ List<UserPermission> perms = AccessControlLists.getUserPermissions(
+ regionEnv.getConfiguration(), table);
+ response = ResponseConverter.buildUserPermissionsResponse(perms);
+ } else {
+ throw new CoprocessorException(AccessController.class, "This method "
+ + "can only execute at " + Bytes.toString(AccessControlLists.ACL_TABLE_NAME) + " table.");
+ }
} catch (IOException ioe) {
// pass exception back up
ResponseConverter.setControllerException(controller, ioe);
@@ -1311,13 +1228,42 @@ public class AccessController extends Ba
public void checkPermissions(RpcController controller,
AccessControlProtos.CheckPermissionsRequest request,
RpcCallback<AccessControlProtos.CheckPermissionsResponse> done) {
- Permission[] perms = new Permission[request.getPermissionCount()];
+ Permission[] permissions = new Permission[request.getPermissionCount()];
for (int i=0; i < request.getPermissionCount(); i++) {
- perms[i] = ProtobufUtil.toPermission(request.getPermission(i));
+ permissions[i] = ProtobufUtil.toPermission(request.getPermission(i));
}
AccessControlProtos.CheckPermissionsResponse response = null;
try {
- checkPermissions(perms);
+ byte[] tableName = regionEnv.getRegion().getTableDesc().getName();
+ for (Permission permission : permissions) {
+ if (permission instanceof TablePermission) {
+ TablePermission tperm = (TablePermission) permission;
+ for (Permission.Action action : permission.getActions()) {
+ if (!Arrays.equals(tperm.getTable(), tableName)) {
+ throw new CoprocessorException(AccessController.class, String.format("This method "
+ + "can only execute at the table specified in TablePermission. " +
+ "Table of the region:%s , requested table:%s", Bytes.toString(tableName),
+ Bytes.toString(tperm.getTable())));
+ }
+
+ HashMap<byte[], Set<byte[]>> familyMap = Maps.newHashMapWithExpectedSize(1);
+ if (tperm.getFamily() != null) {
+ if (tperm.getQualifier() != null) {
+ familyMap.put(tperm.getFamily(), Sets.newHashSet(tperm.getQualifier()));
+ } else {
+ familyMap.put(tperm.getFamily(), null);
+ }
+ }
+
+ requirePermission("checkPermissions", action, regionEnv, familyMap);
+ }
+
+ } else {
+ for (Permission.Action action : permission.getActions()) {
+ requirePermission("checkPermissions", action);
+ }
+ }
+ }
response = AccessControlProtos.CheckPermissionsResponse.getDefaultInstance();
} catch (IOException ioe) {
ResponseConverter.setControllerException(controller, ioe);
@@ -1347,7 +1293,7 @@ public class AccessController extends Ba
@Override
public void preClose(ObserverContext<RegionCoprocessorEnvironment> e, boolean abortRequested)
throws IOException {
- requirePermission(Action.ADMIN);
+ requirePermission("preClose", Action.ADMIN);
}
private void isSystemOrSuperUser(Configuration conf) throws IOException {
@@ -1370,15 +1316,26 @@ public class AccessController extends Ba
private boolean isSpecialTable(HRegionInfo regionInfo) {
byte[] tableName = regionInfo.getTableName();
- return tableName.equals(AccessControlLists.ACL_TABLE_NAME)
- || tableName.equals(Bytes.toBytes("-ROOT-"))
- || tableName.equals(Bytes.toBytes(".META."));
+ return Arrays.equals(tableName, AccessControlLists.ACL_TABLE_NAME)
+ || Arrays.equals(tableName, Bytes.toBytes("-ROOT-"))
+ || Arrays.equals(tableName, Bytes.toBytes(".META."));
}
@Override
public void preStopRegionServer(
ObserverContext<RegionServerCoprocessorEnvironment> env)
throws IOException {
- requirePermission(Permission.Action.ADMIN);
+ requirePermission("preStopRegionServer", Permission.Action.ADMIN);
+ }
+
+ private Map<byte[], ? extends Collection<byte[]>> makeFamilyMap(byte[] family,
+ byte[] qualifier) {
+ if (family == null) {
+ return null;
+ }
+
+ Map<byte[], Collection<byte[]>> familyMap = Maps.newHashMapWithExpectedSize(1);
+ familyMap.put(family, qualifier != null ? ImmutableSet.of(qualifier) : null);
+ return familyMap;
}
}
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/Permission.java Wed Feb 13 20:58:23 2013
@@ -49,8 +49,8 @@ public class Permission extends Versione
public byte code() { return code; }
}
- private static Log LOG = LogFactory.getLog(Permission.class);
- protected static Map<Byte,Action> ACTION_BY_CODE = Maps.newHashMap();
+ private static final Log LOG = LogFactory.getLog(Permission.class);
+ protected static final Map<Byte,Action> ACTION_BY_CODE = Maps.newHashMap();
protected Action[] actions;
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/TableAuthManager.java Wed Feb 13 20:58:23 2013
@@ -244,7 +244,7 @@ public class TableAuthManager {
* user's groups.
* @param user
* @param action
- * @return
+ * @return true if known and authorized, false otherwise
*/
public boolean authorize(User user, Permission.Action action) {
if (user == null) {
@@ -339,7 +339,7 @@ public class TableAuthManager {
* @param table
* @param family
* @param action
- * @return
+ * @return true if known and authorized, false otherwise
*/
public boolean authorizeUser(String username, byte[] table, byte[] family,
Permission.Action action) {
@@ -352,6 +352,7 @@ public class TableAuthManager {
if (authorizeUser(username, action)) {
return true;
}
+ if (table == null) table = AccessControlLists.ACL_TABLE_NAME;
return authorize(getTablePermissions(table).getUser(username), table, family,
qualifier, action);
}
@@ -372,7 +373,7 @@ public class TableAuthManager {
* @param table
* @param family
* @param action
- * @return
+ * @return true if known and authorized, false otherwise
*/
public boolean authorizeGroup(String groupName, byte[] table, byte[] family,
Permission.Action action) {
@@ -380,6 +381,7 @@ public class TableAuthManager {
if (authorizeGroup(groupName, action)) {
return true;
}
+ if (table == null) table = AccessControlLists.ACL_TABLE_NAME;
return authorize(getTablePermissions(table).getGroup(groupName), table, family, action);
}
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/ZKPermissionWatcher.java Wed Feb 13 20:58:23 2013
@@ -132,8 +132,7 @@ public class ZKPermissionWatcher extends
LOG.debug("Updating permissions cache from node "+table+" with data: "+
Bytes.toStringBinary(nodeData));
}
- authManager.refreshCacheFromWritable(Bytes.toBytes(table),
- nodeData);
+ authManager.refreshCacheFromWritable(Bytes.toBytes(table), nodeData);
} catch (IOException ioe) {
LOG.error("Failed parsing permissions for table '" + table +
"' from zk", ioe);
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenIdentifier.java Wed Feb 13 20:58:23 2013
@@ -22,6 +22,8 @@ import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import com.google.protobuf.ByteString;
+import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.security.UserGroupInformation;
@@ -31,7 +33,6 @@ import org.apache.hadoop.security.token.
* Represents the identity information stored in an HBase authentication token.
*/
public class AuthenticationTokenIdentifier extends TokenIdentifier {
- public static final byte VERSION = 1;
public static final Text AUTH_TOKEN_TYPE = new Text("HBASE_AUTH_TOKEN");
protected String username;
@@ -108,28 +109,56 @@ public class AuthenticationTokenIdentifi
this.sequenceNumber = seq;
}
+ public byte[] toBytes() {
+ AuthenticationProtos.TokenIdentifier.Builder builder =
+ AuthenticationProtos.TokenIdentifier.newBuilder();
+ builder.setKind(AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN);
+ if (username != null) {
+ builder.setUsername(ByteString.copyFromUtf8(username));
+ }
+ builder.setIssueDate(issueDate)
+ .setExpirationDate(expirationDate)
+ .setKeyId(keyId)
+ .setSequenceNumber(sequenceNumber);
+ return builder.build().toByteArray();
+ }
+
@Override
public void write(DataOutput out) throws IOException {
- out.writeByte(VERSION);
- WritableUtils.writeString(out, username);
- WritableUtils.writeVInt(out, keyId);
- WritableUtils.writeVLong(out, issueDate);
- WritableUtils.writeVLong(out, expirationDate);
- WritableUtils.writeVLong(out, sequenceNumber);
+ byte[] pbBytes = toBytes();
+ out.writeInt(pbBytes.length);
+ out.write(pbBytes);
}
@Override
public void readFields(DataInput in) throws IOException {
- byte version = in.readByte();
- if (version != VERSION) {
- throw new IOException("Version mismatch in deserialization: " +
- "expected="+VERSION+", got="+version);
- }
- username = WritableUtils.readString(in);
- keyId = WritableUtils.readVInt(in);
- issueDate = WritableUtils.readVLong(in);
- expirationDate = WritableUtils.readVLong(in);
- sequenceNumber = WritableUtils.readVLong(in);
+ int len = in.readInt();
+ byte[] inBytes = new byte[len];
+ in.readFully(inBytes);
+ AuthenticationProtos.TokenIdentifier identifier =
+ AuthenticationProtos.TokenIdentifier.newBuilder().mergeFrom(inBytes).build();
+ // sanity check on type
+ if (!identifier.hasKind() ||
+ identifier.getKind() != AuthenticationProtos.TokenIdentifier.Kind.HBASE_AUTH_TOKEN) {
+ throw new IOException("Invalid TokenIdentifier kind from input "+identifier.getKind());
+ }
+
+ // copy the field values
+ if (identifier.hasUsername()) {
+ username = identifier.getUsername().toStringUtf8();
+ }
+ if (identifier.hasKeyId()) {
+ keyId = identifier.getKeyId();
+ }
+ if (identifier.hasIssueDate()) {
+ issueDate = identifier.getIssueDate();
+ }
+ if (identifier.hasExpirationDate()) {
+ expirationDate = identifier.getExpirationDate();
+ }
+ if (identifier.hasSequenceNumber()) {
+ sequenceNumber = identifier.getSequenceNumber();
+ }
}
@Override
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/AuthenticationTokenSelector.java Wed Feb 13 20:58:23 2013
@@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.security
import java.util.Collection;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -27,6 +29,7 @@ import org.apache.hadoop.security.token.
public class AuthenticationTokenSelector
implements TokenSelector<AuthenticationTokenIdentifier> {
+ private static Log LOG = LogFactory.getLog(AuthenticationTokenSelector.class);
public AuthenticationTokenSelector() {
}
@@ -38,10 +41,14 @@ public class AuthenticationTokenSelector
for (Token ident : tokens) {
if (serviceName.equals(ident.getService()) &&
AuthenticationTokenIdentifier.AUTH_TOKEN_TYPE.equals(ident.getKind())) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Returning token "+ident);
+ }
return (Token<AuthenticationTokenIdentifier>)ident;
}
}
}
+ LOG.debug("No matching token found");
return null;
}
}
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java Wed Feb 13 20:58:23 2013
@@ -20,14 +20,21 @@ package org.apache.hadoop.hbase.security
import java.io.IOException;
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.coprocessor.BaseEndpointCoprocessor;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorService;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.ipc.HBaseServer;
import org.apache.hadoop.hbase.ipc.RequestContext;
import org.apache.hadoop.hbase.ipc.RpcServer;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.ResponseConverter;
+import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.hbase.security.AccessDeniedException;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.security.UserGroupInformation;
@@ -37,12 +44,11 @@ import org.apache.hadoop.security.token.
/**
* Provides a service for obtaining authentication tokens via the
- * {@link AuthenticationProtocol} coprocessor protocol.
+ * {@link AuthenticationProtos} AuthenticationService coprocessor service.
*/
-public class TokenProvider extends BaseEndpointCoprocessor
- implements AuthenticationProtocol {
+public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface,
+ Coprocessor, CoprocessorService {
- public static final long VERSION = 0L;
private static Log LOG = LogFactory.getLog(TokenProvider.class);
private AuthenticationTokenSecretManager secretManager;
@@ -50,8 +56,6 @@ public class TokenProvider extends BaseE
@Override
public void start(CoprocessorEnvironment env) {
- super.start(env);
-
// if running at region
if (env instanceof RegionCoprocessorEnvironment) {
RegionCoprocessorEnvironment regionEnv =
@@ -65,32 +69,11 @@ public class TokenProvider extends BaseE
}
@Override
- public Token<AuthenticationTokenIdentifier> getAuthenticationToken()
- throws IOException {
- if (secretManager == null) {
- throw new IOException(
- "No secret manager configured for token authentication");
- }
-
- User currentUser = RequestContext.getRequestUser();
- UserGroupInformation ugi = null;
- if (currentUser != null) {
- ugi = currentUser.getUGI();
- }
- if (currentUser == null) {
- throw new AccessDeniedException("No authenticated user for request!");
- } else if (!isAllowedDelegationTokenOp(ugi)) {
- LOG.warn("Token generation denied for user="+currentUser.getName()
- +", authMethod="+ugi.getAuthenticationMethod());
- throw new AccessDeniedException(
- "Token generation only allowed for Kerberos authenticated clients");
- }
-
- return secretManager.generateToken(currentUser.getName());
+ public void stop(CoprocessorEnvironment env) throws IOException {
}
/**
- * @param ugi
+ * @param ugi A user group information.
* @return true if delegation token operation is allowed
*/
private boolean isAllowedDelegationTokenOp(UserGroupInformation ugi) throws IOException {
@@ -106,18 +89,62 @@ public class TokenProvider extends BaseE
return true;
}
+ // AuthenticationService implementation
+
@Override
- public String whoami() {
- return RequestContext.getRequestUserName();
+ public Service getService() {
+ return AuthenticationProtos.AuthenticationService.newReflectiveService(this);
}
@Override
- public long getProtocolVersion(String protocol, long clientVersion)
- throws IOException {
- if (AuthenticationProtocol.class.getName().equals(protocol)) {
- return TokenProvider.VERSION;
+ public void getAuthenticationToken(RpcController controller,
+ AuthenticationProtos.TokenRequest request,
+ RpcCallback<AuthenticationProtos.TokenResponse> done) {
+ AuthenticationProtos.TokenResponse.Builder response =
+ AuthenticationProtos.TokenResponse.newBuilder();
+
+ try {
+ if (secretManager == null) {
+ throw new IOException(
+ "No secret manager configured for token authentication");
+ }
+
+ User currentUser = RequestContext.getRequestUser();
+ UserGroupInformation ugi = null;
+ if (currentUser != null) {
+ ugi = currentUser.getUGI();
+ }
+ if (currentUser == null) {
+ throw new AccessDeniedException("No authenticated user for request!");
+ } else if (!isAllowedDelegationTokenOp(ugi)) {
+ LOG.warn("Token generation denied for user="+currentUser.getName()
+ +", authMethod="+ugi.getAuthenticationMethod());
+ throw new AccessDeniedException(
+ "Token generation only allowed for Kerberos authenticated clients");
+ }
+
+ Token<AuthenticationTokenIdentifier> token =
+ secretManager.generateToken(currentUser.getName());
+ response.setToken(ProtobufUtil.toToken(token)).build();
+ } catch (IOException ioe) {
+ ResponseConverter.setControllerException(controller, ioe);
+ }
+ done.run(response.build());
+ }
+
+ @Override
+ public void whoami(RpcController controller, AuthenticationProtos.WhoAmIRequest request,
+ RpcCallback<AuthenticationProtos.WhoAmIResponse> done) {
+ User requestUser = RequestContext.getRequestUser();
+ AuthenticationProtos.WhoAmIResponse.Builder response =
+ AuthenticationProtos.WhoAmIResponse.newBuilder();
+ if (requestUser != null) {
+ response.setUsername(requestUser.getShortName());
+ AuthenticationMethod method = requestUser.getUGI().getAuthenticationMethod();
+ if (method != null) {
+ response.setAuthMethod(method.name());
+ }
}
- LOG.warn("Unknown protocol requested: "+protocol);
- return -1;
+ done.run(response.build());
}
}
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenUtil.java Wed Feb 13 20:58:23 2013
@@ -22,11 +22,15 @@ import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.security.PrivilegedExceptionAction;
+import com.google.protobuf.ServiceException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
+import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.protobuf.generated.AuthenticationProtos;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
@@ -49,14 +53,22 @@ public class TokenUtil {
HTable meta = null;
try {
meta = new HTable(conf, ".META.");
- AuthenticationProtocol prot = meta.coprocessorProxy(
- AuthenticationProtocol.class, HConstants.EMPTY_START_ROW);
- return prot.getAuthenticationToken();
+ CoprocessorRpcChannel rpcChannel = meta.coprocessorService(HConstants.EMPTY_START_ROW);
+ AuthenticationProtos.AuthenticationService.BlockingInterface service =
+ AuthenticationProtos.AuthenticationService.newBlockingStub(rpcChannel);
+ AuthenticationProtos.TokenResponse response = service.getAuthenticationToken(null,
+ AuthenticationProtos.TokenRequest.getDefaultInstance());
+
+ return ProtobufUtil.toToken(response.getToken());
+ } catch (ServiceException se) {
+ ProtobufUtil.toIOException(se);
} finally {
if (meta != null) {
meta.close();
}
}
+ // dummy return for ServiceException catch block
+ return null;
}
private static Text getClusterId(Token<AuthenticationTokenIdentifier> token)
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java Wed Feb 13 20:58:23 2013
@@ -903,7 +903,7 @@ public class ThriftServerRunner implemen
Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
try {
HTable table = getTable(tableName);
- Delete delete = new Delete(getBytes(row), timestamp, null);
+ Delete delete = new Delete(getBytes(row), timestamp);
addAttributes(delete, attributes);
table.delete(delete);
} catch (IOException e) {
@@ -969,7 +969,7 @@ public class ThriftServerRunner implemen
HTable table = null;
try {
table = getTable(tableName);
- Put put = new Put(getBytes(row), timestamp, null);
+ Put put = new Put(getBytes(row), timestamp);
addAttributes(put, attributes);
Delete delete = new Delete(getBytes(row));
@@ -1034,7 +1034,7 @@ public class ThriftServerRunner implemen
List<Mutation> mutations = batch.mutations;
Delete delete = new Delete(row);
addAttributes(delete, attributes);
- Put put = new Put(row, timestamp, null);
+ Put put = new Put(row, timestamp);
addAttributes(put, attributes);
for (Mutation m : mutations) {
byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java Wed Feb 13 20:58:23 2013
@@ -29,8 +29,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.compress.Compression;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
import org.apache.hadoop.hbase.thrift.generated.TCell;
@@ -54,7 +53,7 @@ public class ThriftUtilities {
throws IllegalArgument {
Compression.Algorithm comp =
Compression.getCompressionAlgorithmByName(in.compression.toLowerCase());
- StoreFile.BloomType bt =
+ BloomType bt =
BloomType.valueOf(in.bloomFilterType);
if (in.name == null || !in.name.hasRemaining()) {
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/thrift2/ThriftUtilities.java Wed Feb 13 20:58:23 2013
@@ -148,7 +148,7 @@ public class ThriftUtilities {
Put out;
if (in.isSetTimestamp()) {
- out = new Put(in.getRow(), in.getTimestamp(), null);
+ out = new Put(in.getRow(), in.getTimestamp());
} else {
out = new Put(in.getRow());
}
@@ -222,7 +222,7 @@ public class ThriftUtilities {
}
} else {
if (in.isSetTimestamp()) {
- out = new Delete(in.getRow(), in.getTimestamp(), null);
+ out = new Delete(in.getRow(), in.getTimestamp());
} else {
out = new Delete(in.getRow());
}
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/tool/Canary.java Wed Feb 13 20:58:23 2013
@@ -173,7 +173,7 @@ public final class Canary implements Too
}
private void printUsageAndExit() {
- System.err.printf("Usage: bin/hbase %s [opts] [table 1 [table 2...]]\n", getClass().getName());
+ System.err.printf("Usage: bin/hbase %s [opts] [table 1 [table 2...]]%n", getClass().getName());
System.err.println(" where [opts] are:");
System.err.println(" -help Show this help and exit.");
System.err.println(" -daemon Continuous check at defined intervals.");
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/BloomFilterFactory.java Wed Feb 13 20:58:23 2013
@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.StoreFile;
-import org.apache.hadoop.hbase.regionserver.StoreFile.BloomType;
+import org.apache.hadoop.hbase.regionserver.BloomType;
/**
* Handles Bloom filter initialization based on configuration and serialized
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/FSUtils.java Wed Feb 13 20:58:23 2013
@@ -24,6 +24,7 @@ import java.io.EOFException;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
+import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
@@ -231,7 +232,7 @@ public abstract class FSUtils {
try {
fs.close();
} catch (Exception e) {
- LOG.error("file system close failed: ", e);
+ LOG.error("file system close failed: ", e);
}
IOException io = new IOException("File system is not available");
io.initCause(exception);
@@ -239,6 +240,31 @@ public abstract class FSUtils {
}
/**
+ * We use reflection because the two-argument
+ * {@link DistributedFileSystem#setSafeMode(FSConstants.SafeModeAction, boolean)}
+ * overload is not available in hadoop 1.1
+ *
+ * @param dfs
+ * @return whether we're in safe mode
+ * @throws IOException
+ */
+ private static boolean isInSafeMode(DistributedFileSystem dfs) throws IOException {
+ boolean inSafeMode = false;
+ try {
+ Method m = DistributedFileSystem.class.getMethod("setSafeMode", new Class<?> []{
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.class, boolean.class});
+ inSafeMode = (Boolean) m.invoke(dfs,
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET, true);
+ } catch (Exception e) {
+ if (e instanceof IOException) throw (IOException) e;
+
+ // Check whether dfs is on safemode.
+ inSafeMode = dfs.setSafeMode(
+ org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
+ }
+ return inSafeMode;
+ }
+
+ /**
* Check whether dfs is in safemode.
* @param conf
* @throws IOException
@@ -249,8 +275,7 @@ public abstract class FSUtils {
FileSystem fs = FileSystem.get(conf);
if (fs instanceof DistributedFileSystem) {
DistributedFileSystem dfs = (DistributedFileSystem)fs;
- // Check whether dfs is on safemode.
- isInSafeMode = dfs.setSafeMode(org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET);
+ isInSafeMode = isInSafeMode(dfs);
}
if (isInSafeMode) {
throw new IOException("File system is in safemode, it can't be written now");
@@ -622,7 +647,7 @@ public abstract class FSUtils {
if (!(fs instanceof DistributedFileSystem)) return;
DistributedFileSystem dfs = (DistributedFileSystem)fs;
// Make sure dfs is not in safe mode
- while (dfs.setSafeMode(org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction.SAFEMODE_GET)) {
+ while (isInSafeMode(dfs)) {
LOG.info("Waiting for dfs to exit safe mode...");
try {
Thread.sleep(wait);
@@ -1231,11 +1256,11 @@ public abstract class FSUtils {
// presumes any directory under hbase.rootdir is a table
FileStatus [] tableDirs = fs.listStatus(hbaseRootDir, df);
for (FileStatus tableDir : tableDirs) {
- // Skip the .log directory. All others should be tables. Inside a table,
- // there are compaction.dir directories to skip. Otherwise, all else
+ // Skip the .log and other non-table directories. All others should be tables.
+ // Inside a table, there are compaction.dir directories to skip. Otherwise, all else
// should be regions.
Path d = tableDir.getPath();
- if (d.getName().equals(HConstants.HREGION_LOGDIR_NAME)) {
+ if (HConstants.HBASE_NON_TABLE_DIRS.contains(d.getName())) {
continue;
}
FileStatus[] regionDirs = fs.listStatus(d, df);
@@ -1278,7 +1303,7 @@ public abstract class FSUtils {
status = filter == null ? fs.listStatus(dir) : fs.listStatus(dir, filter);
} catch (FileNotFoundException fnfe) {
// if directory doesn't exist, return null
- LOG.info(dir + " doesn't exist");
+ LOG.debug(dir + " doesn't exist");
}
if (status == null || status.length < 1) return null;
return status;
@@ -1302,7 +1327,7 @@ public abstract class FSUtils {
* @param fs
* @param path
* @param recursive
- * @return
+ * @return the value returned by the fs.delete()
* @throws IOException
*/
public static boolean delete(final FileSystem fs, final Path path, final boolean recursive)
@@ -1315,7 +1340,7 @@ public abstract class FSUtils {
*
* @param fs
* @param path
- * @return
+ * @return the value returned by fs.exists()
* @throws IOException
*/
public static boolean isExists(final FileSystem fs, final Path path) throws IOException {
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java Wed Feb 13 20:58:23 2013
@@ -85,6 +85,7 @@ import org.apache.hadoop.hbase.io.hfile.
import org.apache.hadoop.hbase.master.MasterFileSystem;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
@@ -191,6 +192,7 @@ public class HBaseFsck extends Configure
private boolean fixTableOrphans = false; // fix fs holes (missing .tableinfo)
private boolean fixVersionFile = false; // fix missing hbase.version file in hdfs
private boolean fixSplitParents = false; // fix lingering split parents
+ private boolean fixReferenceFiles = false; // fix lingering reference store file
// limit checking/fixes to listed tables, if empty attempt to check/fix all
// -ROOT- and .META. are always checked
@@ -442,6 +444,8 @@ public class HBaseFsck extends Configure
admin.setBalancerRunning(oldBalancer, false);
}
+ offlineReferenceFileRepair();
+
// Print table summary
printTableSummary(tablesInfo);
return errors.summarize();
@@ -478,7 +482,7 @@ public class HBaseFsck extends Configure
String tableName = Bytes.toString(hi.getTableName());
TableInfo tableInfo = tablesInfo.get(tableName);
- Preconditions.checkNotNull("Table " + tableName + "' not present!", tableInfo);
+ Preconditions.checkNotNull(tableInfo, "Table '" + tableName + "' not present!");
HTableDescriptor template = tableInfo.getHTD();
// find min and max key values
@@ -598,6 +602,67 @@ public class HBaseFsck extends Configure
}
/**
+ * Scan all the store file names to find any lingering reference files,
+ * which refer to some non-existing files. If the "fix" option is enabled,
+ * any lingering reference file will be sidelined if found.
+ * <p>
+ * Lingering reference file prevents a region from opening. It has to
+ * be fixed before a cluster can start properly.
+ */
+ private void offlineReferenceFileRepair() throws IOException {
+ Configuration conf = getConf();
+ Path hbaseRoot = FSUtils.getRootDir(conf);
+ FileSystem fs = hbaseRoot.getFileSystem(conf);
+ Map<String, Path> allFiles = FSUtils.getTableStoreFilePathMap(fs, hbaseRoot);
+ for (Path path: allFiles.values()) {
+ boolean isReference = false;
+ try {
+ isReference = StoreFile.isReference(path);
+ } catch (Throwable t) {
+ // Ignore. Some files may not be store files at all.
+ // For example, files under .oldlogs folder in .META.
+ // Warning message is already logged by
+ // StoreFile#isReference.
+ }
+ if (!isReference) continue;
+
+ Path referredToFile = StoreFile.getReferredToFile(path);
+ if (fs.exists(referredToFile)) continue; // good, expected
+
+ // Found a lingering reference file
+ errors.reportError(ERROR_CODE.LINGERING_REFERENCE_HFILE,
+ "Found lingering reference file " + path);
+ if (!shouldFixReferenceFiles()) continue;
+
+ // Now, trying to fix it since requested
+ boolean success = false;
+ String pathStr = path.toString();
+
+ // A reference file path should be like
+ // ${hbase.rootdir}/table_name/region_id/family_name/referred_file.region_name
+ // Up 3 directories to get the table folder.
+ // So the file will be sidelined to a similar folder structure.
+ int index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR);
+ for (int i = 0; index > 0 && i < 3; i++) {
+ index = pathStr.lastIndexOf(Path.SEPARATOR_CHAR, index);
+ }
+ if (index > 0) {
+ Path rootDir = getSidelineDir();
+ Path dst = new Path(rootDir, pathStr.substring(index));
+ fs.mkdirs(dst.getParent());
+ LOG.info("Trying to sideline reference file "
+ + path + " to " + dst);
+ setShouldRerun();
+
+ success = fs.rename(path, dst);
+ }
+ if (!success) {
+ LOG.error("Failed to sideline reference file " + path);
+ }
+ }
+ }
+
+ /**
* TODO -- need to add tests for this.
*/
private void reportEmptyMetaCells() {
@@ -2771,7 +2836,7 @@ public class HBaseFsck extends Configure
MULTI_DEPLOYED, SHOULD_NOT_BE_DEPLOYED, MULTI_META_REGION, RS_CONNECT_FAILURE,
FIRST_REGION_STARTKEY_NOT_EMPTY, LAST_REGION_ENDKEY_NOT_EMPTY, DUPE_STARTKEYS,
HOLE_IN_REGION_CHAIN, OVERLAP_IN_REGION_CHAIN, REGION_CYCLE, DEGENERATE_REGION,
- ORPHAN_HDFS_REGION, LINGERING_SPLIT_PARENT, NO_TABLEINFO_FILE
+ ORPHAN_HDFS_REGION, LINGERING_SPLIT_PARENT, NO_TABLEINFO_FILE, LINGERING_REFERENCE_HFILE
}
public void clear();
public void report(String message);
@@ -3204,6 +3269,14 @@ public class HBaseFsck extends Configure
return fixSplitParents;
}
+ public void setFixReferenceFiles(boolean shouldFix) {
+ fixReferenceFiles = shouldFix;
+ }
+
+ boolean shouldFixReferenceFiles() {
+ return fixReferenceFiles;
+ }
+
public boolean shouldIgnorePreCheckPermission() {
return ignorePreCheckPermission;
}
@@ -3315,6 +3388,7 @@ public class HBaseFsck extends Configure
System.err.println(" -maxOverlapsToSideline <n> When fixing region overlaps, allow at most <n> regions to sideline per group. (n=" + DEFAULT_OVERLAPS_TO_SIDELINE +" by default)");
System.err.println(" -fixSplitParents Try to force offline split parents to be online.");
System.err.println(" -ignorePreCheckPermission ignore filesystem permission pre-check");
+ System.err.println(" -fixReferenceFiles Try to offline lingering reference store files");
System.err.println("");
System.err.println(" Datafile Repair options: (expert features, use with caution!)");
@@ -3324,7 +3398,7 @@ public class HBaseFsck extends Configure
System.err.println("");
System.err.println(" Metadata Repair shortcuts");
System.err.println(" -repair Shortcut for -fixAssignments -fixMeta -fixHdfsHoles " +
- "-fixHdfsOrphans -fixHdfsOverlaps -fixVersionFile -sidelineBigOverlaps");
+ "-fixHdfsOrphans -fixHdfsOverlaps -fixVersionFile -sidelineBigOverlaps -fixReferenceFiles");
System.err.println(" -repairHoles Shortcut for -fixAssignments -fixMeta -fixHdfsHoles");
setRetCode(-2);
@@ -3431,6 +3505,8 @@ public class HBaseFsck extends Configure
checkCorruptHFiles = true;
} else if (cmd.equals("-sidelineCorruptHFiles")) {
sidelineCorruptHFiles = true;
+ } else if (cmd.equals("-fixReferenceFiles")) {
+ setFixReferenceFiles(true);
} else if (cmd.equals("-repair")) {
// this attempts to merge overlapping hdfs regions, needs testing
// under load
@@ -3443,6 +3519,7 @@ public class HBaseFsck extends Configure
setSidelineBigOverlaps(true);
setFixSplitParents(false);
setCheckHdfs(true);
+ setFixReferenceFiles(true);
} else if (cmd.equals("-repairHoles")) {
// this will make all missing hdfs regions available but may lose data
setFixHdfsHoles(true);
Modified: hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java
URL: http://svn.apache.org/viewvc/hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java?rev=1445918&r1=1445917&r2=1445918&view=diff
==============================================================================
--- hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java (original)
+++ hbase/branches/hbase-7290/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HMerge.java Wed Feb 13 20:58:23 2013
@@ -415,7 +415,7 @@ class HMerge {
HConstants.SPLITA_QUALIFIER);
delete.deleteColumns(HConstants.CATALOG_FAMILY,
HConstants.SPLITB_QUALIFIER);
- root.delete(delete, null, true);
+ root.delete(delete, true);
if(LOG.isDebugEnabled()) {
LOG.debug("updated columns in row: " + Bytes.toStringBinary(regionsToDelete[r]));