Posted to commits@knox.apache.org by mo...@apache.org on 2017/09/01 13:17:03 UTC

[05/64] [partial] knox git commit: KNOX-998 - Refactoring save 1

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealm.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealm.java b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealm.java
new file mode 100644
index 0000000..8c99358
--- /dev/null
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealm.java
@@ -0,0 +1,768 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import javax.naming.AuthenticationException;
+import javax.naming.Context;
+import javax.naming.NamingEnumeration;
+import javax.naming.NamingException;
+import javax.naming.PartialResultException;
+import javax.naming.SizeLimitExceededException;
+import javax.naming.directory.Attribute;
+import javax.naming.directory.SearchControls;
+import javax.naming.directory.SearchResult;
+import javax.naming.ldap.Control;
+import javax.naming.ldap.LdapContext;
+import javax.naming.ldap.LdapName;
+import javax.naming.ldap.PagedResultsControl;
+import javax.naming.ldap.PagedResultsResponseControl;
+
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.audit.api.Action;
+import org.apache.knox.gateway.audit.api.ActionOutcome;
+import org.apache.knox.gateway.audit.api.AuditService;
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.audit.api.Auditor;
+import org.apache.knox.gateway.audit.api.ResourceType;
+import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.shirorealm.impl.i18n.KnoxShiroMessages;
+import org.apache.shiro.SecurityUtils;
+import org.apache.shiro.authc.AuthenticationInfo;
+import org.apache.shiro.authc.AuthenticationToken;
+import org.apache.shiro.authc.SimpleAuthenticationInfo;
+import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
+import org.apache.shiro.authz.AuthorizationInfo;
+import org.apache.shiro.authz.SimpleAuthorizationInfo;
+import org.apache.shiro.crypto.hash.DefaultHashService;
+import org.apache.shiro.crypto.hash.Hash;
+import org.apache.shiro.crypto.hash.HashRequest;
+import org.apache.shiro.crypto.hash.HashService;
+import org.apache.shiro.realm.ldap.JndiLdapRealm;
+import org.apache.shiro.realm.ldap.LdapContextFactory;
+import org.apache.shiro.realm.ldap.LdapUtils;
+import org.apache.shiro.subject.MutablePrincipalCollection;
+import org.apache.shiro.subject.PrincipalCollection;
+import org.apache.shiro.util.StringUtils;
+
+/**
+ * Implementation of {@link org.apache.shiro.realm.ldap.JndiLdapRealm} that also
+ * returns each user's groups.
+ * This implementation is heavily based on org.apache.isis.security.shiro.IsisLdapRealm.
+ * 
+ * This implementation saves the looked-up LDAP groups in the Shiro session so
+ * that they can easily be retrieved outside of this object.
+ * 
+ * <p>
+ * Sample config for <tt>shiro.ini</tt>:
+ * <pre>
+ * [main]
+ * ldapRealm=KnoxLdapRealm
+ * ldapGroupContextFactory=KnoxLdapContextFactory
+ * ldapRealm.contextFactory=$ldapGroupContextFactory
+ * ldapRealm.contextFactory.authenticationMechanism=simple
+ * ldapRealm.contextFactory.url=ldap://localhost:33389
+ * ldapRealm.userDnTemplate=uid={0},ou=people,dc=hadoop,dc=apache,dc=org
+ * ldapRealm.authorizationEnabled=true
+ * ldapRealm.contextFactory.systemAuthenticationMechanism=simple
+ * ldapRealm.searchBase=ou=groups,dc=hadoop,dc=apache,dc=org
+ * ldapRealm.groupObjectClass=groupofnames
+ * ldapRealm.memberAttribute=member
+ * ldapRealm.memberAttributeValueTemplate=cn={0},ou=people,dc=hadoop,dc=apache,dc=org
+ * ldapRealm.contextFactory.systemUsername=uid=guest,ou=people,dc=hadoop,dc=apache,dc=org
+ * ldapRealm.contextFactory.clusterName=sandbox
+ * ldapRealm.contextFactory.systemPassword=${ALIAS=ldcSystemPassword}
+ * [urls]
+ * **=authcBasic
+ *
+ * # optional mapping from physical groups to logical application roles
+ * ldapRealm.rolesByGroup = \
+ *    LDN_USERS: user_role,\
+ *    NYK_USERS: user_role,\
+ *    HKG_USERS: user_role,\
+ *    GLOBAL_ADMIN: admin_role,\
+ *    DEMOS: self-install_role
+ * 
+ * ldapRealm.permissionsByRole=\
+ *    user_role = *:ToDoItemsJdo:*:*,\
+ *                *:ToDoItem:*:*; \
+ *    self-install_role = *:ToDoItemsFixturesService:install:* ; \
+ *    admin_role = *
+ * 
+ * securityManager.realms = $ldapRealm
+ * 
+ * </pre>
+ */
+public class KnoxLdapRealm extends JndiLdapRealm {
+
+    private static GatewayMessages LOG = MessagesFactory.get( GatewayMessages.class );
+    KnoxShiroMessages ShiroLog = MessagesFactory.get( KnoxShiroMessages.class );
+    private static AuditService auditService = AuditServiceFactory.getAuditService();
+    private static Auditor auditor = auditService.getAuditor(
+        AuditConstants.DEFAULT_AUDITOR_NAME, AuditConstants.KNOX_SERVICE_NAME,
+        AuditConstants.KNOX_COMPONENT_NAME );
+
+    private static Pattern TEMPLATE_PATTERN = Pattern.compile( "\\{(\\d+?)\\}" );
+    private static String DEFAULT_PRINCIPAL_REGEX = "(.*)";
+    private static final String MEMBER_SUBSTITUTION_TOKEN = "{0}";
+
+    private static final SearchControls SUBTREE_SCOPE = new SearchControls();
+    private static final SearchControls ONELEVEL_SCOPE = new SearchControls();
+    private static final SearchControls OBJECT_SCOPE = new SearchControls();
+
+    private static final String  SUBJECT_USER_ROLES = "subject.userRoles";
+    private static final String  SUBJECT_USER_GROUPS = "subject.userGroups";
+
+    private static final String  MEMBER_URL = "memberUrl";
+
+    private static final String POSIX_GROUP = "posixGroup";
+
+    private static final String HASHING_ALGORITHM = "SHA-256";
+
+    static {
+          SUBTREE_SCOPE.setSearchScope(SearchControls.SUBTREE_SCOPE);
+          ONELEVEL_SCOPE.setSearchScope(SearchControls.ONELEVEL_SCOPE);
+          OBJECT_SCOPE.setSearchScope( SearchControls.OBJECT_SCOPE );
+      }
+
+ 
+    private String searchBase;
+    private String userSearchBase;
+    private String principalRegex = DEFAULT_PRINCIPAL_REGEX;
+    private Pattern principalPattern = Pattern.compile( DEFAULT_PRINCIPAL_REGEX );
+    private String userDnTemplate = "{0}";
+    private String userSearchFilter = null;
+    private String userSearchAttributeTemplate = "{0}";
+    private String userSearchScope = "subtree";
+
+    private String groupSearchBase;
+
+    private String groupObjectClass = "groupOfNames";
+    
+    //  typical values: member, uniqueMember, memberUrl
+    private String memberAttribute = "member";
+
+    private String groupIdAttribute = "cn";
+    
+    private String memberAttributeValuePrefix = "uid={0}";
+    private String memberAttributeValueSuffix = "";
+    
+    private final Map<String,String> rolesByGroup = new LinkedHashMap<String, String>();
+    private final Map<String,List<String>> permissionsByRole = new LinkedHashMap<String, List<String>>();
+    
+    private boolean authorizationEnabled;
+
+    private String userSearchAttributeName;
+    private String userObjectClass = "person";
+
+    private HashService hashService = new DefaultHashService();
+
+    public KnoxLdapRealm() {
+      HashedCredentialsMatcher credentialsMatcher = new HashedCredentialsMatcher(HASHING_ALGORITHM);
+      setCredentialsMatcher(credentialsMatcher);
+    }
+
+  @Override
+  //KNOX-534 overriding this method to be able to audit authentication exceptions
+  protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws org.apache.shiro.authc.AuthenticationException {
+    try {
+      return super.doGetAuthenticationInfo(token);
+    } catch ( org.apache.shiro.authc.AuthenticationException e ) {
+      auditor.audit( Action.AUTHENTICATION , token.getPrincipal().toString(), ResourceType.PRINCIPAL, ActionOutcome.FAILURE, e.getMessage() );
+      ShiroLog.failedLoginInfo(token);
+      ShiroLog.failedLoginStackTrace(e);
+      ShiroLog.failedLoginAttempt(e.getCause());
+
+      throw e;
+    }
+  }
+
+  /**
+     * Get groups from LDAP.
+     * 
+     * @param principals
+     *            the principals of the Subject whose AuthorizationInfo should
+     *            be queried from the LDAP server.
+     * @param ldapContextFactory
+     *            factory used to retrieve LDAP connections.
+     * @return an {@link AuthorizationInfo} instance containing information
+     *         retrieved from the LDAP server.
+     * @throws NamingException
+     *             if any LDAP errors occur during the search.
+     */
+    @Override
+    protected AuthorizationInfo queryForAuthorizationInfo(final PrincipalCollection principals, 
+        final LdapContextFactory ldapContextFactory) throws NamingException {
+      if (!isAuthorizationEnabled()) {
+        return null;
+      }
+      final Set<String> roleNames = getRoles(principals, ldapContextFactory);
+        SimpleAuthorizationInfo simpleAuthorizationInfo = new SimpleAuthorizationInfo(roleNames);
+        Set<String> stringPermissions = permsFor(roleNames);
+        simpleAuthorizationInfo.setStringPermissions(stringPermissions);
+        return simpleAuthorizationInfo;
+    }
+
+    private Set<String> getRoles(PrincipalCollection principals,
+        final LdapContextFactory ldapContextFactory) throws NamingException {
+        final String username = (String) getAvailablePrincipal(principals);
+
+        LdapContext systemLdapCtx = null;
+        try {
+            systemLdapCtx = ldapContextFactory.getSystemLdapContext();
+            return rolesFor(principals, username, systemLdapCtx, ldapContextFactory);
+        } catch (AuthenticationException e) {
+          LOG.failedToGetSystemLdapConnection(e);
+          return Collections.emptySet();
+        } finally {
+            LdapUtils.closeContext(systemLdapCtx);
+        }
+    }
+
+    private Set<String> rolesFor(PrincipalCollection principals, final String userName, final LdapContext ldapCtx,
+        final LdapContextFactory ldapContextFactory) throws NamingException {
+      final Set<String> roleNames = new HashSet<>();
+      final Set<String> groupNames = new HashSet<>();
+
+      String userDn;
+      if (userSearchAttributeName == null || userSearchAttributeName.isEmpty()) {
+        // memberAttributeValuePrefix and memberAttributeValueSuffix were computed from memberAttributeValueTemplate
+        userDn = memberAttributeValuePrefix + userName + memberAttributeValueSuffix;
+      } else {
+        userDn = getUserDn(userName);
+      }
+
+      // Activate paged results
+      int pageSize = 100;
+      int numResults = 0;
+      byte[] cookie = null;
+      try {
+        ldapCtx.addToEnvironment(Context.REFERRAL, "ignore");
+
+        ldapCtx.setRequestControls(new Control[]{new PagedResultsControl(pageSize, Control.NONCRITICAL)});
+
+        do {
+          // ldapsearch -h localhost -p 33389 -D uid=guest,ou=people,dc=hadoop,dc=apache,dc=org -w  guest-password
+          //       -b dc=hadoop,dc=apache,dc=org -s sub '(objectclass=*)'
+
+          NamingEnumeration<SearchResult> searchResultEnum = null;
+          try {
+            searchResultEnum = ldapCtx.search(
+                getGroupSearchBase(),
+                "objectClass=" + groupObjectClass,
+                SUBTREE_SCOPE);
+
+            while (searchResultEnum != null && searchResultEnum.hasMore()) { // searchResults contains all the groups in search scope
+              numResults++;
+              final SearchResult group = searchResultEnum.next();
+              addRoleIfMember(userDn, group, roleNames, groupNames, ldapContextFactory);
+            }
+          } catch (PartialResultException e) {
+            LOG.ignoringPartialResultException();
+          } finally {
+            if (searchResultEnum != null) {
+              searchResultEnum.close();
+            }
+          }
+
+          // Examine the paged results control response
+          Control[] controls = ldapCtx.getResponseControls();
+          if (controls != null) {
+            for (Control control : controls) {
+              if (control instanceof PagedResultsResponseControl) {
+                PagedResultsResponseControl prrc = (PagedResultsResponseControl) control;
+                cookie = prrc.getCookie();
+              }
+            }
+          }
+
+          // Re-activate paged results
+          ldapCtx.setRequestControls(new Control[]{new PagedResultsControl(pageSize, cookie, Control.CRITICAL)});
+        } while (cookie != null);
+      } catch (SizeLimitExceededException e) {
+        LOG.sizeLimitExceededOnlyRetrieved(numResults);
+//        System.out.println("Only retrieved first " + numResults + " groups due to SizeLimitExceededException.");
+      } catch(IOException e) {
+        LOG.unableToSetupPagedResults();
+//        System.out.println("Unable to set up paged results");
+      }
+
+      // save role names and group names in session so that they can be easily looked up outside of this object
+      SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_ROLES, roleNames);
+      SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_GROUPS, groupNames);
+      if (!groupNames.isEmpty() && (principals instanceof MutablePrincipalCollection)) {
+        ((MutablePrincipalCollection)principals).addAll(groupNames, getName());
+      }
+      LOG.lookedUpUserRoles(roleNames, userName);
+
+      return roleNames;
+    }
+
+  private void addRoleIfMember(final String userDn, final SearchResult group,
+      final Set<String> roleNames, final Set<String> groupNames,
+      final LdapContextFactory ldapContextFactory) throws NamingException {
+
+    NamingEnumeration<? extends Attribute> attributeEnum = null;
+    NamingEnumeration<?> e = null;
+    try {
+      LdapName userLdapDn = new LdapName(userDn);
+      Attribute attribute = group.getAttributes().get(getGroupIdAttribute());
+      String groupName = attribute.get().toString();
+      
+      attributeEnum = group
+          .getAttributes().getAll();
+      while (attributeEnum.hasMore()) {
+        final Attribute attr = attributeEnum.next();
+        if (!memberAttribute.equalsIgnoreCase(attr.getID())) {
+          continue;
+        }
+        e = attr.getAll();
+        while (e.hasMore()) {
+          String attrValue = e.next().toString();
+          if (memberAttribute.equalsIgnoreCase(MEMBER_URL)) {
+            boolean dynamicGroupMember = isUserMemberOfDynamicGroup(userLdapDn,
+                attrValue, // memberUrl value
+                ldapContextFactory);
+            if (dynamicGroupMember) {
+              groupNames.add(groupName);
+              String roleName = roleNameFor(groupName);
+              if (roleName != null) {
+                roleNames.add(roleName);
+              } else {
+                roleNames.add(groupName);
+              }
+            }
+          } else {
+            if (groupObjectClass.equalsIgnoreCase(POSIX_GROUP)){
+              attrValue = memberAttributeValuePrefix + attrValue + memberAttributeValueSuffix;
+            }
+            if (userLdapDn.equals(new LdapName(attrValue))) {
+              groupNames.add(groupName);
+              String roleName = roleNameFor(groupName);
+              if (roleName != null) {
+                roleNames.add(roleName);
+              } else {
+                roleNames.add(groupName);
+              }
+              break;
+            }
+          }
+        }
+      }
+    }
+    finally {
+      try {
+        if (attributeEnum != null) {
+          attributeEnum.close();
+        }
+      }
+      finally {
+        if (e != null) {
+          e.close();
+        }
+      }
+    }
+  }
+
+    private String roleNameFor(String groupName) {
+        return !rolesByGroup.isEmpty() ? rolesByGroup.get(groupName) : groupName;
+    }
+
+
+    private Set<String> permsFor(Set<String> roleNames) {
+        Set<String> perms = new LinkedHashSet<String>(); // preserve order
+        for(String role: roleNames) {
+            List<String> permsForRole = permissionsByRole.get(role);
+            if(permsForRole != null) {
+                perms.addAll(permsForRole);
+            }
+        }
+        return perms;
+    }
+
+    public String getSearchBase() {
+        return searchBase;
+    }
+
+    public void setSearchBase(String searchBase) {
+      this.searchBase = searchBase;
+    }
+
+    public String getUserSearchBase() {
+      return  (userSearchBase != null && !userSearchBase.isEmpty()) ? 
+          userSearchBase : searchBase;
+    }
+
+    public void setUserSearchBase(String userSearchBase) {
+      this.userSearchBase = userSearchBase;
+    }
+
+    public String getGroupSearchBase() {
+      return (groupSearchBase != null && !groupSearchBase.isEmpty()) ? 
+          groupSearchBase : searchBase;
+    }
+
+    public void setGroupSearchBase(String groupSearchBase) {
+      this.groupSearchBase = groupSearchBase;
+    }
+
+    public String getGroupObjectClass() {
+      return groupObjectClass;
+    }
+    
+    public void setGroupObjectClass(String groupObjectClassAttribute) {
+        this.groupObjectClass = groupObjectClassAttribute;
+    }
+
+    public String getMemberAttribute() {
+      return memberAttribute;
+    }
+    
+    public void setMemberAttribute(String memberAttribute) {
+        this.memberAttribute = memberAttribute;
+    }
+    
+    public String getGroupIdAttribute() {
+      return groupIdAttribute;
+    }
+    
+    public void setGroupIdAttribute(String groupIdAttribute) {
+        this.groupIdAttribute = groupIdAttribute;
+    }
+    
+    public void setMemberAttributeValueTemplate(String template) {
+        if (!StringUtils.hasText(template)) {
+            String msg = "Member attribute value template cannot be null or empty.";
+            throw new IllegalArgumentException(msg);
+        }
+        int index = template.indexOf(MEMBER_SUBSTITUTION_TOKEN);
+        if (index < 0) {
+            String msg = "Member attribute value template must contain the '" +
+                    MEMBER_SUBSTITUTION_TOKEN + "' replacement token to understand how to " +
+                    "parse the group members.";
+            throw new IllegalArgumentException(msg);
+        }
+        String prefix = template.substring(0, index);
+        String suffix = template.substring(prefix.length() + MEMBER_SUBSTITUTION_TOKEN.length());
+        this.memberAttributeValuePrefix = prefix;
+        this.memberAttributeValueSuffix = suffix;
+    }
+
+    public void setRolesByGroup(Map<String, String> rolesByGroup) {
+        this.rolesByGroup.putAll(rolesByGroup);
+    }
+
+    public void setPermissionsByRole(String permissionsByRoleStr) {
+        permissionsByRole.putAll(parsePermissionByRoleString(permissionsByRoleStr));
+    }
+    
+    public boolean isAuthorizationEnabled() {
+      return authorizationEnabled;
+    }
+
+    public void setAuthorizationEnabled(boolean authorizationEnabled) {
+      this.authorizationEnabled = authorizationEnabled;
+    }
+
+    public String getUserSearchAttributeName() {
+        return userSearchAttributeName;
+    }
+
+    public void setUserSearchAttributeName(String userSearchAttributeName) {
+      if (userSearchAttributeName != null) {
+        userSearchAttributeName = userSearchAttributeName.trim();
+      }
+      this.userSearchAttributeName = userSearchAttributeName;
+    }
+
+    public String getUserObjectClass() {
+      return userObjectClass;
+    }
+    
+    public void setUserObjectClass(String userObjectClass) {
+        this.userObjectClass = userObjectClass;
+    }
+
+    private Map<String, List<String>> parsePermissionByRoleString(String permissionsByRoleStr) {
+      Map<String,List<String>> perms = new HashMap<>();
+   
+      // split by semicolon ';', then by equals '=', then by comma ','
+      StringTokenizer stSem = new StringTokenizer(permissionsByRoleStr, ";");
+      while (stSem.hasMoreTokens()) {
+        String roleAndPerm = stSem.nextToken();
+        StringTokenizer stEq = new StringTokenizer(roleAndPerm, "=");
+        if (stEq.countTokens() != 2) {
+          continue;
+        }
+        String role = stEq.nextToken().trim();
+        String perm = stEq.nextToken().trim();
+        StringTokenizer stCom = new StringTokenizer(perm, ",");
+        List<String> permList = new ArrayList<String>();
+        while (stCom.hasMoreTokens()) {
+          permList.add(stCom.nextToken().trim());
+        }
+        perms.put(role,  permList);
+      }
+      return perms;
+  }
+
+  boolean isUserMemberOfDynamicGroup(LdapName userLdapDn, String memberUrl,
+      final LdapContextFactory ldapContextFactory) throws NamingException {
+
+    // ldap://host:port/dn?attributes?scope?filter?extensions
+
+    boolean member = false;
+
+    if (memberUrl == null) {
+      return false;
+    }
+    String[] tokens = memberUrl.split("\\?");
+    if (tokens.length < 4) {
+      return false;
+    }
+
+    String searchBaseString = tokens[0]
+        .substring(tokens[0].lastIndexOf("/") + 1);
+    String searchScope = tokens[2];
+    String searchFilter = tokens[3];
+
+    LdapName searchBaseDn = new LdapName(searchBaseString);
+
+    // do scope test
+    if (searchScope.equalsIgnoreCase("base")) {
+      return false;
+    }
+    if (!userLdapDn.toString().endsWith(searchBaseDn.toString())) {
+      return false;
+    }
+    if (searchScope.equalsIgnoreCase("one")
+        && (userLdapDn.size() != searchBaseDn.size() - 1)) {
+      return false;
+    }
+    // search for the filter, substituting base with userDn
+    // search for base_dn=userDn, scope=base, filter=filter
+    LdapContext systemLdapCtx = null;
+    systemLdapCtx = ldapContextFactory.getSystemLdapContext();
+    NamingEnumeration<SearchResult> searchResultEnum = null;
+    try {
+      searchResultEnum = systemLdapCtx
+        .search(userLdapDn, searchFilter,
+            searchScope.equalsIgnoreCase("sub") ? SUBTREE_SCOPE
+                : ONELEVEL_SCOPE);
+      if (searchResultEnum.hasMore()) {
+        return true;
+      }
+    }
+    finally {
+        try {
+          if (searchResultEnum != null) {
+            searchResultEnum.close();
+          }
+        }
+        finally {
+          LdapUtils.closeContext(systemLdapCtx);
+        }
+    }
+    return member;
+  }
+
+  public String getPrincipalRegex() {
+    return principalRegex;
+  }
+
+  public void setPrincipalRegex( String regex ) {
+    if( regex == null || regex.trim().isEmpty() ) {
+      principalPattern = Pattern.compile( DEFAULT_PRINCIPAL_REGEX );
+      principalRegex = DEFAULT_PRINCIPAL_REGEX;
+    } else {
+      regex = regex.trim();
+      Pattern pattern = Pattern.compile( regex );
+      principalPattern = pattern;
+      principalRegex = regex;
+    }
+  }
+
+  public String getUserSearchAttributeTemplate() {
+    return userSearchAttributeTemplate;
+  }
+
+  public void setUserSearchAttributeTemplate( final String template ) {
+    this.userSearchAttributeTemplate = ( template == null ? null : template.trim() );
+  }
+
+  public String getUserSearchFilter() {
+    return userSearchFilter;
+  }
+
+  public void setUserSearchFilter( final String filter ) {
+    this.userSearchFilter = ( filter == null ? null : filter.trim() );
+  }
+
+  public String getUserSearchScope() {
+    return userSearchScope;
+  }
+
+  public void setUserSearchScope( final String scope ) {
+    this.userSearchScope = ( scope == null ? null : scope.trim().toLowerCase() );
+  }
+
+  private SearchControls getUserSearchControls() {
+    SearchControls searchControls = SUBTREE_SCOPE;
+    if ( "onelevel".equalsIgnoreCase( userSearchScope ) ) {
+      searchControls = ONELEVEL_SCOPE;
+    } else if ( "object".equalsIgnoreCase( userSearchScope ) ) {
+      searchControls = OBJECT_SCOPE;
+    }
+    return searchControls;
+  }
+
+  @Override
+  public void setUserDnTemplate( final String template ) throws IllegalArgumentException {
+    userDnTemplate = template;
+  }
+
+  private Matcher matchPrincipal( final String principal ) {
+    Matcher matchedPrincipal = principalPattern.matcher( principal );
+    if( !matchedPrincipal.matches() ) {
+      throw new IllegalArgumentException( "Principal " + principal + " does not match " + principalRegex );
+    }
+    return matchedPrincipal;
+  }
+
+  /**
+     * Returns the LDAP User Distinguished Name (DN) to use when acquiring an
+     * {@link javax.naming.ldap.LdapContext LdapContext} from the {@link LdapContextFactory}.
+     * <p/>
+     * If the {@link #getUserDnTemplate() userDnTemplate} property has been set, this implementation will construct
+     * the User DN by substituting the specified {@code principal} into the configured template.  If the
+     * {@link #getUserDnTemplate() userDnTemplate} has not been set, the method argument will be returned directly
+     * (indicating that the submitted authentication token principal <em>is</em> the User DN).
+     *
+     * @param principal the principal to substitute into the configured {@link #getUserDnTemplate() userDnTemplate}.
+     * @return the constructed User DN to use at runtime when acquiring an {@link javax.naming.ldap.LdapContext}.
+     * @throws IllegalArgumentException if the method argument is null or empty
+     * @throws IllegalStateException    if the {@link #getUserDnTemplate userDnTemplate} has not been set.
+     * @see LdapContextFactory#getLdapContext(Object, Object)
+     */
+    @Override
+    protected String getUserDn( final String principal ) throws IllegalArgumentException, IllegalStateException {
+      String userDn;
+      Matcher matchedPrincipal = matchPrincipal( principal );
+      String userSearchBase = getUserSearchBase();
+      String userSearchAttributeName = getUserSearchAttributeName();
+
+      // If not searching use the userDnTemplate and return.
+      if ( ( userSearchBase == null || userSearchBase.isEmpty() ) ||
+          ( userSearchAttributeName == null &&
+              userSearchFilter == null &&
+              !"object".equalsIgnoreCase( userSearchScope ) ) ) {
+        userDn = expandTemplate( userDnTemplate, matchedPrincipal );
+        LOG.computedUserDn( userDn, principal );
+        return userDn;
+      }
+
+      // Create the searchBase and searchFilter from config.
+      String searchBase = expandTemplate( getUserSearchBase(), matchedPrincipal );
+      String searchFilter = null;
+      if ( userSearchFilter == null ) {
+        if ( userSearchAttributeName == null ) {
+          searchFilter = String.format( "(objectclass=%1$s)", getUserObjectClass() );
+        } else {
+          searchFilter = String.format(
+              "(&(objectclass=%1$s)(%2$s=%3$s))",
+              getUserObjectClass(),
+              userSearchAttributeName,
+              expandTemplate( getUserSearchAttributeTemplate(), matchedPrincipal ) );
+        }
+      } else {
+        searchFilter = expandTemplate( userSearchFilter, matchedPrincipal );
+      }
+      SearchControls searchControls = getUserSearchControls();
+
+      // Search for userDn and return.
+      LdapContext systemLdapCtx = null;
+      NamingEnumeration<SearchResult> searchResultEnum = null;
+      try {
+        systemLdapCtx = getContextFactory().getSystemLdapContext();
+        LOG.searchBaseFilterScope(searchBase, searchFilter, userSearchScope);
+        searchResultEnum = systemLdapCtx.search( searchBase, searchFilter, searchControls );
+        // SearchResults contains all the entries in search scope
+        if (searchResultEnum.hasMore()) {
+          SearchResult searchResult = searchResultEnum.next();
+          userDn = searchResult.getNameInNamespace();
+          LOG.searchedAndFoundUserDn(userDn, principal);
+          return userDn;
+        } else {
+          throw new IllegalArgumentException("Illegal principal name: " + principal);
+        }
+      } catch (AuthenticationException e) {
+        LOG.failedToGetSystemLdapConnection(e);
+        throw new IllegalArgumentException("Illegal principal name: " + principal);
+      } catch (NamingException e) {
+        throw new IllegalArgumentException("Hit NamingException: " + e.getMessage());
+      } finally {
+        try {
+          if (searchResultEnum != null) {
+            searchResultEnum.close();
+          }
+        } catch (NamingException e) {
+          // Ignore exception on close.
+        }
+        finally {
+          LdapUtils.closeContext(systemLdapCtx);
+        }
+      }
+    }
+
+    @Override
+    protected AuthenticationInfo createAuthenticationInfo(AuthenticationToken token, Object ldapPrincipal, Object ldapCredentials, LdapContext ldapContext) throws NamingException {
+      HashRequest.Builder builder = new HashRequest.Builder();
+      Hash credentialsHash = hashService.computeHash(builder.setSource(token.getCredentials()).setAlgorithmName(HASHING_ALGORITHM).build());
+      return new SimpleAuthenticationInfo(token.getPrincipal(), credentialsHash.toHex(), credentialsHash.getSalt(), getName());
+    }
+
+  private static final String expandTemplate( final String template, final Matcher input ) {
+    String output = template;
+    Matcher matcher = TEMPLATE_PATTERN.matcher( output );
+    while( matcher.find() ) {
+      String lookupStr = matcher.group( 1 );
+      int lookupIndex = Integer.parseInt( lookupStr );
+      String lookupValue = input.group( lookupIndex );
+      output = matcher.replaceFirst( lookupValue == null ? "" : lookupValue );
+      matcher = TEMPLATE_PATTERN.matcher( output );
+    }
+    return output;
+  }
+
+}
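
For reference, the permissionsByRole value shown in the KnoxLdapRealm class javadoc
above is parsed by parsePermissionByRoleString into a role-to-permissions map:
entries are separated by ';', each entry is split on '=' into a role and its
permission list, and the permissions themselves are split on ','. A minimal,
self-contained sketch of that parsing (illustrative only; the class name and the
main method below are hypothetical and not part of this commit):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;

public class PermissionsByRoleSketch {

  // Mirrors KnoxLdapRealm.parsePermissionByRoleString: ';' separates entries,
  // '=' separates the role from its permissions, ',' separates individual permissions.
  static Map<String, List<String>> parse(String permissionsByRoleStr) {
    Map<String, List<String>> perms = new HashMap<>();
    StringTokenizer stSem = new StringTokenizer(permissionsByRoleStr, ";");
    while (stSem.hasMoreTokens()) {
      StringTokenizer stEq = new StringTokenizer(stSem.nextToken(), "=");
      if (stEq.countTokens() != 2) {
        continue; // skip entries that are not of the form role = perm[,perm...]
      }
      String role = stEq.nextToken().trim();
      List<String> permList = new ArrayList<>();
      StringTokenizer stCom = new StringTokenizer(stEq.nextToken(), ",");
      while (stCom.hasMoreTokens()) {
        permList.add(stCom.nextToken().trim());
      }
      perms.put(role, permList);
    }
    return perms;
  }

  public static void main(String[] args) {
    String config = "user_role = *:ToDoItemsJdo:*:*,*:ToDoItem:*:*; "
        + "self-install_role = *:ToDoItemsFixturesService:install:*; "
        + "admin_role = *";
    // Expected mapping:
    //   user_role         -> [*:ToDoItemsJdo:*:*, *:ToDoItem:*:*]
    //   self-install_role -> [*:ToDoItemsFixturesService:install:*]
    //   admin_role        -> [*]
    System.out.println(parse(config));
  }
}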

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxPamRealm.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxPamRealm.java b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxPamRealm.java
new file mode 100644
index 0000000..4e1deb3
--- /dev/null
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/KnoxPamRealm.java
@@ -0,0 +1,163 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm;
+
+import java.util.LinkedHashSet;
+import java.util.Set;
+import org.apache.knox.gateway.GatewayMessages;
+import org.apache.knox.gateway.audit.api.Action;
+import org.apache.knox.gateway.audit.api.ActionOutcome;
+import org.apache.knox.gateway.audit.api.ResourceType;
+import org.apache.knox.gateway.audit.api.AuditService;
+import org.apache.knox.gateway.audit.api.AuditServiceFactory;
+import org.apache.knox.gateway.audit.api.Auditor;
+import org.apache.knox.gateway.audit.log4j.audit.AuditConstants;
+import org.apache.knox.gateway.i18n.messages.MessagesFactory;
+import org.apache.knox.gateway.shirorealm.impl.i18n.KnoxShiroMessages;
+import org.apache.shiro.SecurityUtils;
+import org.apache.shiro.authc.AuthenticationException;
+
+import org.apache.shiro.authc.AuthenticationInfo;
+import org.apache.shiro.authc.AuthenticationToken;
+import org.apache.shiro.authc.SimpleAuthenticationInfo;
+import org.apache.shiro.authc.UsernamePasswordToken;
+import org.apache.shiro.authz.AuthorizationInfo;
+import org.apache.shiro.authz.SimpleAuthorizationInfo;
+import org.apache.shiro.realm.AuthorizingRealm;
+import org.apache.shiro.subject.PrincipalCollection;
+import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
+import org.apache.shiro.crypto.hash.*;
+import org.jvnet.libpam.PAM;
+import org.jvnet.libpam.PAMException;
+import org.jvnet.libpam.UnixUser;
+
+/**
+ * A Unix-style
+ * <a href="http://www.kernel.org/pub/linux/libs/pam/index.html">PAM</a>
+ * {@link org.apache.shiro.realm.Realm Realm} that uses
+ * <a href="https://github.com/kohsuke/libpam4j">libpam4j</a> to interface with
+ * the PAM system libraries.
+ * <p>
+ * This is a single Shiro {@code Realm} that interfaces with the OS's
+ * {@code PAM} subsystem, which itself can be connected to several authentication
+ * methods (unix-crypt, Samba, LDAP, etc.).
+ * <p>
+ * This {@code Realm} can also take part in Shiro's Pluggable Realms concept.
+ * <p>
+ * Using a {@code KnoxPamRealm} requires a PAM {@code service} name. This is the
+ * name of the file under {@code /etc/pam.d} that is used to initialise and
+ * configure the PAM subsystem. Normally, this file reflects the application
+ * using it, for example {@code gdm}, {@code su}, etc. There is no default value
+ * for this property.
+ * <p>
+ * For example, defining this realm in Shiro .ini:
+ *
+ * <pre>
+ * [main]
+ * pamRealm = org.apache.knox.gateway.shirorealm.KnoxPamRealm
+ * pamRealm.service = [ knox-pam-ldap-service | knox-pam-os-service | knox-pam-winbind-service ]
+ * [urls]
+ * **=authcBasic
+ * </pre>
+ *
+ */
+
+public class KnoxPamRealm extends AuthorizingRealm {
+  private static final String HASHING_ALGORITHM = "SHA-256";
+  private static final String SUBJECT_USER_ROLES = "subject.userRoles";
+  private static final String SUBJECT_USER_GROUPS = "subject.userGroups";
+  private HashService hashService = new DefaultHashService();
+  KnoxShiroMessages ShiroLog = MessagesFactory.get(KnoxShiroMessages.class);
+  GatewayMessages GatewayLog = MessagesFactory.get(GatewayMessages.class);
+  private static AuditService auditService = AuditServiceFactory.getAuditService();
+  private static Auditor auditor = auditService.getAuditor(AuditConstants.DEFAULT_AUDITOR_NAME,
+      AuditConstants.KNOX_SERVICE_NAME, AuditConstants.KNOX_COMPONENT_NAME);
+
+  private String service;
+
+  public KnoxPamRealm() {
+    HashedCredentialsMatcher credentialsMatcher = new HashedCredentialsMatcher(HASHING_ALGORITHM);
+    setCredentialsMatcher(credentialsMatcher);
+  }
+
+  public void setService(String service) {
+    this.service = service;
+  }
+
+  public String getService() {
+    return this.service;
+  }
+
+  @Override
+  protected AuthorizationInfo doGetAuthorizationInfo(PrincipalCollection principals) {
+    Set<String> roles = new LinkedHashSet<String>();
+
+    UnixUserPrincipal user = principals.oneByType(UnixUserPrincipal.class);
+    if (user != null) {
+      roles.addAll(user.getUnixUser().getGroups());
+    }
+    SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_ROLES, roles);
+    SecurityUtils.getSubject().getSession().setAttribute(SUBJECT_USER_GROUPS, roles);
+
+    /* Coverity Scan CID 1361682 */
+    String userName = null;
+
+    if (user != null) {
+      userName = user.getName();
+    }
+
+    GatewayLog.lookedUpUserRoles(roles, userName);
+    return new SimpleAuthorizationInfo(roles);
+  }
+
+  @Override
+  protected AuthenticationInfo doGetAuthenticationInfo(AuthenticationToken token) throws AuthenticationException {
+    UsernamePasswordToken upToken = (UsernamePasswordToken) token;
+    UnixUser user = null;
+    try {
+      user = (new PAM(this.getService())).authenticate(upToken.getUsername(), new String(upToken.getPassword()));
+    } catch (PAMException e) {
+      handleAuthFailure(token, e.getMessage(), e);
+    }
+    HashRequest.Builder builder = new HashRequest.Builder();
+    Hash credentialsHash = hashService
+        .computeHash(builder.setSource(token.getCredentials()).setAlgorithmName(HASHING_ALGORITHM).build());
+    /* Coverity Scan CID 1361684 */
+    if (credentialsHash == null) {
+      handleAuthFailure(token, "Failed to compute hash", null);
+    }
+    return new SimpleAuthenticationInfo(new UnixUserPrincipal(user), credentialsHash.toHex(), credentialsHash.getSalt(),
+        getName());
+  }
+
+  private void handleAuthFailure(AuthenticationToken token, String errorMessage, Exception e) {
+    auditor.audit(Action.AUTHENTICATION, token.getPrincipal().toString(), ResourceType.PRINCIPAL, ActionOutcome.FAILURE,
+        errorMessage);
+    ShiroLog.failedLoginInfo(token);
+
+    if (e != null) {
+      ShiroLog.failedLoginAttempt(e.getCause());
+      throw new AuthenticationException(e);
+    }
+
+    throw new AuthenticationException(errorMessage);
+  }
+
+}
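
A condensed usage sketch of this realm, modeled on the removed KnoxPamRealmTest
further down in this patch (the "sshd" service name, the PAMUSER/PAMPASS environment
variables, and the sketch class itself are assumptions, not part of the commit; a
working PAM installation with libpam4j is required at runtime):

package org.apache.knox.gateway.shirorealm; // same package, so the protected
                                            // doGetAuthenticationInfo(...) is visible

import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.UsernamePasswordToken;

public class KnoxPamRealmSketch {
  public static void main(String[] args) {
    KnoxPamRealm realm = new KnoxPamRealm();
    realm.setService("sshd"); // PAM stack read from /etc/pam.d/sshd

    // Credentials come from the environment, as in the removed test.
    UsernamePasswordToken token = new UsernamePasswordToken(
        System.getenv("PAMUSER"), System.getenv("PAMPASS"));

    // Throws org.apache.shiro.authc.AuthenticationException if PAM rejects the login.
    AuthenticationInfo info = realm.doGetAuthenticationInfo(token);
    System.out.println("Authenticated: " + info.getPrincipals().getPrimaryPrincipal());
  }
}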

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/UnixUserPrincipal.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/UnixUserPrincipal.java b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/UnixUserPrincipal.java
new file mode 100644
index 0000000..247cae6
--- /dev/null
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/UnixUserPrincipal.java
@@ -0,0 +1,46 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm;
+
+import java.security.Principal;
+import org.jvnet.libpam.UnixUser;
+
+public class UnixUserPrincipal implements Principal {
+	private final UnixUser userName;
+
+	public UnixUserPrincipal(UnixUser userName) {
+		this.userName = userName;
+	}
+
+	@Override
+	public String getName() {
+		return userName.getUserName();
+	}
+
+	public UnixUser getUnixUser() {
+		return userName;
+	}
+
+	@Override
+	public String toString() {
+		return String.valueOf(userName.getUserName());
+	}
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/impl/i18n/KnoxShiroMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/impl/i18n/KnoxShiroMessages.java b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/impl/i18n/KnoxShiroMessages.java
new file mode 100644
index 0000000..736a544
--- /dev/null
+++ b/gateway-provider-security-shiro/src/main/java/org/apache/knox/gateway/shirorealm/impl/i18n/KnoxShiroMessages.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm.impl.i18n;
+
+import org.apache.knox.gateway.i18n.messages.Message;
+import org.apache.knox.gateway.i18n.messages.MessageLevel;
+import org.apache.knox.gateway.i18n.messages.Messages;
+
+import org.apache.knox.gateway.i18n.messages.StackTrace;
+import org.apache.shiro.authc.AuthenticationToken;
+import org.apache.shiro.subject.Subject;
+
+@Messages(logger = "org.apache.hadoop.gateway")
+public interface KnoxShiroMessages {
+
+  @Message(level = MessageLevel.ERROR, text = "Shiro unable to login: {0}")
+  void failedLoginAttempt(Throwable e);
+
+  @Message(level = MessageLevel.INFO, text = "Could not log in: {0}")
+  void failedLoginInfo(AuthenticationToken token);
+
+  @Message( level = MessageLevel.DEBUG, text = "Failed to Authenticate with LDAP server: {1}" )
+  void failedLoginStackTrace( @StackTrace( level = MessageLevel.DEBUG ) Exception e );
+
+  @Message(level = MessageLevel.INFO, text = "Successfully logged in: {0}, {1}")
+  void successfulLoginAttempt(Subject subject, AuthenticationToken authToken);
+
+}
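
The annotated interface above is consumed through MessagesFactory, exactly as
KnoxLdapRealm and KnoxPamRealm do earlier in this patch; a minimal sketch follows
(the sketch class and the simulated exception are hypothetical, not part of the commit):

import org.apache.knox.gateway.i18n.messages.MessagesFactory;
import org.apache.knox.gateway.shirorealm.impl.i18n.KnoxShiroMessages;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.UsernamePasswordToken;

public class ShiroMessagesSketch {
  // MessagesFactory generates an implementation of the interface that writes to
  // the "org.apache.hadoop.gateway" logger declared in the @Messages annotation.
  private static final KnoxShiroMessages LOG = MessagesFactory.get(KnoxShiroMessages.class);

  static void reportFailure(AuthenticationToken token, Exception e) {
    LOG.failedLoginInfo(token);           // INFO: which token failed
    LOG.failedLoginStackTrace(e);         // DEBUG: message plus stack trace
    LOG.failedLoginAttempt(e.getCause()); // ERROR: underlying cause
  }

  public static void main(String[] args) {
    reportFailure(new UsernamePasswordToken("guest", "wrong-password"),
        new RuntimeException("simulated login failure",
            new IllegalStateException("simulated root cause")));
  }
}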

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
deleted file mode 100644
index 3ee0582..0000000
--- a/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor
+++ /dev/null
@@ -1,19 +0,0 @@
-##########################################################################
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##########################################################################
-
-org.apache.hadoop.gateway.deploy.impl.ShiroDeploymentContributor
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor b/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
new file mode 100644
index 0000000..8ded76f
--- /dev/null
+++ b/gateway-provider-security-shiro/src/main/resources/META-INF/services/org.apache.knox.gateway.deploy.ProviderDeploymentContributor
@@ -0,0 +1,19 @@
+##########################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+##########################################################################
+
+org.apache.knox.gateway.deploy.impl.ShiroDeploymentContributor
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/deploy/impl/ShiroDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/deploy/impl/ShiroDeploymentContributorTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/deploy/impl/ShiroDeploymentContributorTest.java
deleted file mode 100644
index 63f8e9f..0000000
--- a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/deploy/impl/ShiroDeploymentContributorTest.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.deploy.impl;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributor;
-import org.apache.hadoop.gateway.services.GatewayServices;
-import org.apache.hadoop.gateway.services.security.AliasService;
-import org.apache.hadoop.gateway.services.security.CryptoService;
-import org.apache.hadoop.gateway.services.security.impl.DefaultCryptoService;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Topology;
-import org.easymock.EasyMock;
-import org.jboss.shrinkwrap.api.ShrinkWrap;
-import org.jboss.shrinkwrap.api.asset.StringAsset;
-import org.jboss.shrinkwrap.api.spec.WebArchive;
-import org.jboss.shrinkwrap.descriptor.api.Descriptors;
-import org.jboss.shrinkwrap.descriptor.api.webapp30.WebAppDescriptor;
-import org.junit.Test;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.ServiceLoader;
-
-import static org.hamcrest.CoreMatchers.containsString;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.junit.Assert.fail;
-
-public class ShiroDeploymentContributorTest {
-
-  @Test
-  public void testServiceLoader() throws Exception {
-    ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
-    Iterator iterator = loader.iterator();
-    assertThat( "Service iterator empty.", iterator.hasNext() );
-    while( iterator.hasNext() ) {
-      Object object = iterator.next();
-      if( object instanceof ShiroDeploymentContributor ) {
-        return;
-      }
-    }
-    fail( "Failed to find " + ShiroDeploymentContributor.class.getName() + " via service loader." );
-  }
-
-  @Test
-  public void testDeployment() throws IOException {
-    WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
-
-    Map<String,String> providerParams = new HashMap<>();
-    Provider provider = new Provider();
-    provider.setEnabled( true );
-    provider.setName( "shiro" );
-    provider.setParams(  providerParams );
-
-    Topology topology = new Topology();
-    topology.setName( "Sample" );
-
-    DeploymentContext context = EasyMock.createNiceMock( DeploymentContext.class );
-    EasyMock.expect( context.getWebArchive() ).andReturn( webArchive ).anyTimes();
-    EasyMock.expect( context.getWebAppDescriptor() ).andReturn( Descriptors.create( WebAppDescriptor.class ) ).anyTimes();
-    EasyMock.expect( context.getTopology() ).andReturn( topology ).anyTimes();
-    EasyMock.replay( context );
-
-    AliasService as = EasyMock.createNiceMock( AliasService.class );
-    CryptoService cryptoService = new DefaultCryptoService();
-    ((DefaultCryptoService)cryptoService).setAliasService( as );
-
-    GatewayServices gatewayServices = EasyMock.createNiceMock( GatewayServices.class );
-    EasyMock.expect( gatewayServices.getService( GatewayServices.CRYPTO_SERVICE ) ).andReturn( cryptoService ).anyTimes();
-
-    ShiroDeploymentContributor contributor = new ShiroDeploymentContributor();
-
-    assertThat( contributor.getRole(), is( "authentication" ) );
-    assertThat( contributor.getName(), is( "ShiroProvider" ) );
-
-    // Just make sure it doesn't blow up.
-    contributor.initializeContribution( context );
-
-    contributor.contributeProvider( context, provider );
-
-    // Just make sure it doesn't blow up.
-    contributor.finalizeContribution( context );
-
-    assertThat( context.getWebAppDescriptor().getOrCreateSessionConfig().getOrCreateCookieConfig().isHttpOnly(), is( true ) );
-    assertThat( context.getWebAppDescriptor().getOrCreateSessionConfig().getOrCreateCookieConfig().isSecure(), is( true ) );
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealmTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealmTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealmTest.java
deleted file mode 100644
index cea8cca..0000000
--- a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxLdapRealmTest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *        http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- */
-
-package org.apache.hadoop.gateway.shirorealm;
-
-import org.junit.Test;
-import static org.junit.Assert.*;
-
-public class KnoxLdapRealmTest {
-  
-  @Test
-  public void setGetSearchBase() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setSearchBase("dc=hadoop,dc=apache,dc=org");
-    assertEquals(realm.getSearchBase(), "dc=hadoop,dc=apache,dc=org");
-  }
-  
-  @Test
-  public void setGetGroupObjectClass() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setGroupObjectClass("groupOfMembers");
-    assertEquals(realm.getGroupObjectClass(), "groupOfMembers");
-  }  
-  
-  @Test
-  public void setGetUniqueMemberAttribute() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setMemberAttribute("member");
-    assertEquals(realm.getMemberAttribute(), "member");
-  }
-  
-  @Test
-  public void setGetUserSearchAttributeName() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setUserSearchAttributeName("uid");
-    assertEquals(realm.getUserSearchAttributeName(), "uid");
-  }
-  
-  @Test
-  public void setGetUserObjectClass() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setUserObjectClass("inetuser");
-    assertEquals(realm.getUserObjectClass(), "inetuser");
-  }
-  
-  @Test
-  public void setGetUserSearchBase() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setSearchBase("dc=example,dc=com");
-    realm.setUserSearchBase("dc=knox,dc=example,dc=com");
-    assertEquals(realm.getUserSearchBase(), "dc=knox,dc=example,dc=com");
-  }
-  
-  @Test
-  public void setGetGroupSearchBase() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setSearchBase("dc=example,dc=com");
-    realm.setGroupSearchBase("dc=knox,dc=example,dc=com");
-    assertEquals(realm.getGroupSearchBase(), "dc=knox,dc=example,dc=com");
-  }
-  
-  @Test
-  public void verifyDefaultUserSearchAttributeName() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    assertNull(realm.getUserSearchAttributeName());
-  }
-  
-  @Test
-  public void verifyDefaultGetUserObjectClass() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    assertEquals(realm.getUserObjectClass(), "person");
-  }
-  
-  @Test
-  public void verifyDefaultUserSearchBase() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setSearchBase("dc=knox,dc=example,dc=com");
-    assertEquals(realm.getUserSearchBase(), "dc=knox,dc=example,dc=com");
-  }
-  
-  @Test
-  public void verifyDefaultGroupSearchBase() {
-    KnoxLdapRealm realm = new KnoxLdapRealm();
-    realm.setSearchBase("dc=knox,dc=example,dc=com");
-    assertEquals(realm.getGroupSearchBase(), "dc=knox,dc=example,dc=com");
-  }
-  
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealmTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealmTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealmTest.java
deleted file mode 100644
index 1ada3c6..0000000
--- a/gateway-provider-security-shiro/src/test/java/org/apache/hadoop/gateway/shirorealm/KnoxPamRealmTest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- *  Licensed to the Apache Software Foundation (ASF) under one
- *  or more contributor license agreements.  See the NOTICE file
- *  distributed with this work for additional information
- *  regarding copyright ownership.  The ASF licenses this file
- *  to you under the Apache License, Version 2.0 (the
- *  "License"); you may not use this file except in compliance
- *  with the License.  You may obtain a copy of the License at
- *
- *        http://www.apache.org/licenses/LICENSE-2.0
- *
- *  Unless required by applicable law or agreed to in writing,
- *  software distributed under the License is distributed on an
- *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- *  KIND, either express or implied.  See the License for the
- *  specific language governing permissions and limitations
- *  under the License.
- */
-
-package org.apache.hadoop.gateway.shirorealm;
-
-import java.util.Scanner;
-
-import org.junit.Test;
-
-import org.apache.shiro.authc.AuthenticationInfo;
-import org.apache.shiro.authc.UsernamePasswordToken;
-
-import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
-import static org.easymock.EasyMock.*;
-
-public class KnoxPamRealmTest {
-  @Test
-  public void setService() {
-    KnoxPamRealm realm = new KnoxPamRealm();
-    realm.setService("knox-pam-os-service");
-    //assertEquals(realm.getService(), "knox-pam-os-service");
-  }
-
-  @Test
-  public void testDoGetAuthenticationInfo() {
-    KnoxPamRealm realm = new KnoxPamRealm();
-    realm.setService("sshd");  // pam settings being used: /etc/pam.d/sshd
-
-    // use environment variables and skip the test if not set.
-    String pamuser = System.getenv("PAMUSER");
-    String pampass = System.getenv("PAMPASS");
-    assumeTrue(pamuser != null);
-    assumeTrue(pampass != null);
-
-    // mock shiro auth token
-    UsernamePasswordToken authToken = createMock(UsernamePasswordToken.class);
-    expect(authToken.getUsername()).andReturn(pamuser);
-    expect(authToken.getPassword()).andReturn(pampass.toCharArray());
-    expect(authToken.getCredentials()).andReturn(pampass);
-    replay(authToken);
-
-    // login
-    AuthenticationInfo authInfo = realm.doGetAuthenticationInfo(authToken);
-
-    // verify success
-    assertTrue(authInfo.getCredentials() != null);
-  }
-
-  public static void main(String[] args) throws Exception {
-    KnoxPamRealmTest pamTest = new KnoxPamRealmTest();
-    pamTest.testDoGetAuthenticationInfo();
-  }
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/deploy/impl/ShiroDeploymentContributorTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/deploy/impl/ShiroDeploymentContributorTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/deploy/impl/ShiroDeploymentContributorTest.java
new file mode 100644
index 0000000..307d97e
--- /dev/null
+++ b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/deploy/impl/ShiroDeploymentContributorTest.java
@@ -0,0 +1,104 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.deploy.impl;
+
+import org.apache.knox.gateway.deploy.DeploymentContext;
+import org.apache.knox.gateway.deploy.ProviderDeploymentContributor;
+import org.apache.knox.gateway.services.GatewayServices;
+import org.apache.knox.gateway.services.security.AliasService;
+import org.apache.knox.gateway.services.security.CryptoService;
+import org.apache.knox.gateway.services.security.impl.DefaultCryptoService;
+import org.apache.knox.gateway.topology.Provider;
+import org.apache.knox.gateway.topology.Topology;
+import org.easymock.EasyMock;
+import org.jboss.shrinkwrap.api.ShrinkWrap;
+import org.jboss.shrinkwrap.api.spec.WebArchive;
+import org.jboss.shrinkwrap.descriptor.api.Descriptors;
+import org.jboss.shrinkwrap.descriptor.api.webapp30.WebAppDescriptor;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.ServiceLoader;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.junit.Assert.fail;
+
+public class ShiroDeploymentContributorTest {
+
+  @Test
+  public void testServiceLoader() throws Exception {
+    ServiceLoader loader = ServiceLoader.load( ProviderDeploymentContributor.class );
+    Iterator iterator = loader.iterator();
+    assertThat( "Service iterator empty.", iterator.hasNext() );
+    while( iterator.hasNext() ) {
+      Object object = iterator.next();
+      if( object instanceof ShiroDeploymentContributor ) {
+        return;
+      }
+    }
+    fail( "Failed to find " + ShiroDeploymentContributor.class.getName() + " via service loader." );
+  }
+
+  @Test
+  public void testDeployment() throws IOException {
+    WebArchive webArchive = ShrinkWrap.create( WebArchive.class, "test-archive" );
+
+    Map<String,String> providerParams = new HashMap<>();
+    Provider provider = new Provider();
+    provider.setEnabled( true );
+    provider.setName( "shiro" );
+    provider.setParams(  providerParams );
+
+    Topology topology = new Topology();
+    topology.setName( "Sample" );
+
+    DeploymentContext context = EasyMock.createNiceMock( DeploymentContext.class );
+    EasyMock.expect( context.getWebArchive() ).andReturn( webArchive ).anyTimes();
+    EasyMock.expect( context.getWebAppDescriptor() ).andReturn( Descriptors.create( WebAppDescriptor.class ) ).anyTimes();
+    EasyMock.expect( context.getTopology() ).andReturn( topology ).anyTimes();
+    EasyMock.replay( context );
+
+    AliasService as = EasyMock.createNiceMock( AliasService.class );
+    CryptoService cryptoService = new DefaultCryptoService();
+    ((DefaultCryptoService)cryptoService).setAliasService( as );
+
+    GatewayServices gatewayServices = EasyMock.createNiceMock( GatewayServices.class );
+    EasyMock.expect( gatewayServices.getService( GatewayServices.CRYPTO_SERVICE ) ).andReturn( cryptoService ).anyTimes();
+
+    ShiroDeploymentContributor contributor = new ShiroDeploymentContributor();
+
+    assertThat( contributor.getRole(), is( "authentication" ) );
+    assertThat( contributor.getName(), is( "ShiroProvider" ) );
+
+    // Just make sure it doesn't blow up.
+    contributor.initializeContribution( context );
+
+    contributor.contributeProvider( context, provider );
+
+    // Just make sure it doesn't blow up.
+    contributor.finalizeContribution( context );
+
+    assertThat( context.getWebAppDescriptor().getOrCreateSessionConfig().getOrCreateCookieConfig().isHttpOnly(), is( true ) );
+    assertThat( context.getWebAppDescriptor().getOrCreateSessionConfig().getOrCreateCookieConfig().isSecure(), is( true ) );
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealmTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealmTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealmTest.java
new file mode 100644
index 0000000..591dd7e
--- /dev/null
+++ b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxLdapRealmTest.java
@@ -0,0 +1,104 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm;
+
+import org.junit.Test;
+import static org.junit.Assert.*;
+
+public class KnoxLdapRealmTest {
+  
+  @Test
+  public void setGetSearchBase() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setSearchBase("dc=hadoop,dc=apache,dc=org");
+    assertEquals(realm.getSearchBase(), "dc=hadoop,dc=apache,dc=org");
+  }
+  
+  @Test
+  public void setGetGroupObjectClass() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setGroupObjectClass("groupOfMembers");
+    assertEquals(realm.getGroupObjectClass(), "groupOfMembers");
+  }  
+  
+  @Test
+  public void setGetUniqueMemberAttribute() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setMemberAttribute("member");
+    assertEquals(realm.getMemberAttribute(), "member");
+  }
+  
+  @Test
+  public void setGetUserSearchAttributeName() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setUserSearchAttributeName("uid");
+    assertEquals(realm.getUserSearchAttributeName(), "uid");
+  }
+  
+  @Test
+  public void setGetUserObjectClass() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setUserObjectClass("inetuser");
+    assertEquals(realm.getUserObjectClass(), "inetuser");
+  }
+  
+  @Test
+  public void setGetUserSearchBase() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setSearchBase("dc=example,dc=com");
+    realm.setUserSearchBase("dc=knox,dc=example,dc=com");
+    assertEquals(realm.getUserSearchBase(), "dc=knox,dc=example,dc=com");
+  }
+  
+  @Test
+  public void setGetGroupSearchBase() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setSearchBase("dc=example,dc=com");
+    realm.setGroupSearchBase("dc=knox,dc=example,dc=com");
+    assertEquals(realm.getGroupSearchBase(), "dc=knox,dc=example,dc=com");
+  }
+  
+  @Test
+  public void verifyDefaultUserSearchAttributeName() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    assertNull(realm.getUserSearchAttributeName());
+  }
+  
+  @Test
+  public void verifyDefaultGetUserObjectClass() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    assertEquals(realm.getUserObjectClass(), "person");
+  }
+  
+  @Test
+  public void verifyDefaultUserSearchBase() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setSearchBase("dc=knox,dc=example,dc=com");
+    assertEquals(realm.getUserSearchBase(), "dc=knox,dc=example,dc=com");
+  }
+  
+  @Test
+  public void verifyDefaultGroupSearchBase() {
+    KnoxLdapRealm realm = new KnoxLdapRealm();
+    realm.setSearchBase("dc=knox,dc=example,dc=com");
+    assertEquals(realm.getGroupSearchBase(), "dc=knox,dc=example,dc=com");
+  }
+  
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxPamRealmTest.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxPamRealmTest.java b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxPamRealmTest.java
new file mode 100644
index 0000000..340bbc4
--- /dev/null
+++ b/gateway-provider-security-shiro/src/test/java/org/apache/knox/gateway/shirorealm/KnoxPamRealmTest.java
@@ -0,0 +1,68 @@
+/*
+ *  Licensed to the Apache Software Foundation (ASF) under one
+ *  or more contributor license agreements.  See the NOTICE file
+ *  distributed with this work for additional information
+ *  regarding copyright ownership.  The ASF licenses this file
+ *  to you under the Apache License, Version 2.0 (the
+ *  "License"); you may not use this file except in compliance
+ *  with the License.  You may obtain a copy of the License at
+ *
+ *        http://www.apache.org/licenses/LICENSE-2.0
+ *
+ *  Unless required by applicable law or agreed to in writing,
+ *  software distributed under the License is distributed on an
+ *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ *  KIND, either express or implied.  See the License for the
+ *  specific language governing permissions and limitations
+ *  under the License.
+ */
+
+package org.apache.knox.gateway.shirorealm;
+
+import org.junit.Test;
+
+import org.apache.shiro.authc.AuthenticationInfo;
+import org.apache.shiro.authc.UsernamePasswordToken;
+
+import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
+import static org.easymock.EasyMock.*;
+
+public class KnoxPamRealmTest {
+  @Test
+  public void setService() {
+    KnoxPamRealm realm = new KnoxPamRealm();
+    realm.setService("knox-pam-os-service");
+    //assertEquals(realm.getService(), "knox-pam-os-service");
+  }
+
+  @Test
+  public void testDoGetAuthenticationInfo() {
+    KnoxPamRealm realm = new KnoxPamRealm();
+    realm.setService("sshd");  // pam settings being used: /etc/pam.d/sshd
+
+    // use environment variables and skip the test if not set.
+    String pamuser = System.getenv("PAMUSER");
+    String pampass = System.getenv("PAMPASS");
+    assumeTrue(pamuser != null);
+    assumeTrue(pampass != null);
+
+    // mock shiro auth token
+    UsernamePasswordToken authToken = createMock(UsernamePasswordToken.class);
+    expect(authToken.getUsername()).andReturn(pamuser);
+    expect(authToken.getPassword()).andReturn(pampass.toCharArray());
+    expect(authToken.getCredentials()).andReturn(pampass);
+    replay(authToken);
+
+    // login
+    AuthenticationInfo authInfo = realm.doGetAuthenticationInfo(authToken);
+
+    // verify success
+    assertTrue(authInfo.getCredentials() != null);
+  }
+
+  public static void main(String[] args) throws Exception {
+    KnoxPamRealmTest pamTest = new KnoxPamRealmTest();
+    pamTest.testDoGetAuthenticationInfo();
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/WebAppSecMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/WebAppSecMessages.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/WebAppSecMessages.java
deleted file mode 100644
index 7cf68f4..0000000
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/WebAppSecMessages.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec;
-
-import org.apache.hadoop.gateway.i18n.messages.Message;
-import org.apache.hadoop.gateway.i18n.messages.MessageLevel;
-import org.apache.hadoop.gateway.i18n.messages.Messages;
-
-@Messages(logger="org.apache.hadoop.gateway.provider.global.csrf")
-public interface WebAppSecMessages {
-}

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
deleted file mode 100644
index 50a6767..0000000
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/deploy/WebAppSecContributor.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec.deploy;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.hadoop.gateway.deploy.DeploymentContext;
-import org.apache.hadoop.gateway.deploy.ProviderDeploymentContributorBase;
-import org.apache.hadoop.gateway.descriptor.FilterParamDescriptor;
-import org.apache.hadoop.gateway.descriptor.ResourceDescriptor;
-import org.apache.hadoop.gateway.topology.Provider;
-import org.apache.hadoop.gateway.topology.Service;
-
-public class WebAppSecContributor extends
-    ProviderDeploymentContributorBase {
-  private static final String ROLE = "webappsec";
-  private static final String NAME = "WebAppSec";
-  private static final String CSRF_SUFFIX = "_CSRF";
-  private static final String CSRF_FILTER_CLASSNAME = "org.apache.hadoop.gateway.webappsec.filter.CSRFPreventionFilter";
-  private static final String CSRF_ENABLED = "csrf.enabled";
-  private static final String CORS_SUFFIX = "_CORS";
-  private static final String CORS_FILTER_CLASSNAME = "com.thetransactioncompany.cors.CORSFilter";
-  private static final String CORS_ENABLED = "cors.enabled";
-  private static final String XFRAME_OPTIONS_SUFFIX = "_XFRAMEOPTIONS";
-  private static final String XFRAME_OPTIONS_FILTER_CLASSNAME = "org.apache.hadoop.gateway.webappsec.filter.XFrameOptionsFilter";
-  private static final String XFRAME_OPTIONS_ENABLED = "xframe.options.enabled";
-
-  @Override
-  public String getRole() {
-    return ROLE;
-  }
-
-  @Override
-  public String getName() {
-    return NAME;
-  }
-
-  @Override
-  public void initializeContribution(DeploymentContext context) {
-    super.initializeContribution(context);
-  }
-
-  @Override
-  public void contributeFilter(DeploymentContext context, Provider provider, Service service, 
-      ResourceDescriptor resource, List<FilterParamDescriptor> params) {
-    
-    Provider webappsec = context.getTopology().getProvider(ROLE, NAME);
-    if (webappsec != null && webappsec.isEnabled()) {
-      Map<String,String> map = provider.getParams();
-      if (params == null) {
-        params = new ArrayList<FilterParamDescriptor>();
-      }
-
-      Map<String, String> providerParams = provider.getParams();
-      // CORS support
-      String corsEnabled = map.get(CORS_ENABLED);
-      if ( corsEnabled != null && corsEnabled.equals("true")) {
-        provisionConfig(resource, providerParams, params, "cors.");
-        resource.addFilter().name( getName() + CORS_SUFFIX ).role( getRole() ).impl( CORS_FILTER_CLASSNAME ).params( params );
-      }
-
-      // CRSF
-      params = new ArrayList<FilterParamDescriptor>();
-      String csrfEnabled = map.get(CSRF_ENABLED);
-      if ( csrfEnabled != null && csrfEnabled.equals("true")) {
-        provisionConfig(resource, providerParams, params, "csrf.");
-        resource.addFilter().name( getName() + CSRF_SUFFIX ).role( getRole() ).impl( CSRF_FILTER_CLASSNAME ).params( params );
-      }
-
-      // X-Frame-Options - clickjacking protection
-      params = new ArrayList<FilterParamDescriptor>();
-      String xframeOptionsEnabled = map.get(XFRAME_OPTIONS_ENABLED);
-      if ( xframeOptionsEnabled != null && xframeOptionsEnabled.equals("true")) {
-        provisionConfig(resource, providerParams, params, "xframe.");
-        resource.addFilter().name( getName() + XFRAME_OPTIONS_SUFFIX ).role( getRole() ).impl( XFRAME_OPTIONS_FILTER_CLASSNAME ).params( params );
-      }
-    }
-  }
-
-  private void provisionConfig(ResourceDescriptor resource, Map<String,String> providerParams,
-      List<FilterParamDescriptor> params, String prefix) {
-    for(Entry<String, String> entry : providerParams.entrySet()) {
-      if (entry.getKey().startsWith(prefix)) {
-        params.add( resource.createFilterParam().name( entry.getKey().toLowerCase() ).value( entry.getValue() ) );
-      }
-    }
-  }
-}
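
Note on the contributor removed above: it only adds a filter when the matching *.enabled provider param is "true" (cors.enabled, csrf.enabled, xframe.options.enabled), and provisionConfig() copies every provider param that shares the filter's prefix (cors., csrf., xframe.) into that filter's init params. Below is a minimal sketch of the corresponding provider params, written as a JUnit test in the style of the other tests in this commit. The sketch class name is illustrative only, and it assumes the contributor is relocated to org.apache.knox.gateway.webappsec.deploy in line with the other package moves in this commit.

    package org.apache.knox.gateway.webappsec.deploy; // assumed post-refactoring package

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.knox.gateway.topology.Provider;
    import org.junit.Test;

    import static org.junit.Assert.assertEquals;

    public class WebAppSecContributorSketchTest {

      @Test
      public void csrfProviderParamsSketch() {
        // csrf.enabled gates the CSRF filter; every param with the "csrf."
        // prefix is copied into the filter's init params by provisionConfig().
        Map<String, String> params = new HashMap<>();
        params.put("csrf.enabled", "true");
        params.put("csrf.customheader", "X-XSRF-Header");
        params.put("csrf.methodstoignore", "GET,OPTIONS,HEAD");

        Provider webappsec = new Provider();
        webappsec.setEnabled(true);
        webappsec.setName("WebAppSec");
        webappsec.setParams(params);

        // Role and name come straight from the constants in the contributor.
        WebAppSecContributor contributor = new WebAppSecContributor();
        assertEquals("webappsec", contributor.getRole());
        assertEquals("WebAppSec", contributor.getName());
      }
    }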

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/CSRFPreventionFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/CSRFPreventionFilter.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/CSRFPreventionFilter.java
deleted file mode 100644
index 9b07662..0000000
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/CSRFPreventionFilter.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec.filter;
-
-import java.io.IOException;
-import java.util.HashSet;
-import java.util.Set;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-
-public class CSRFPreventionFilter implements Filter {
-  private static final String CUSTOM_HEADER_PARAM = "csrf.customheader";
-  private static final String CUSTOM_METHODS_TO_IGNORE_PARAM = "csrf.methodstoignore";
-  private String  headerName = "X-XSRF-Header";
-  private String  mti = "GET,OPTIONS,HEAD";
-  private Set<String> methodsToIgnore = null;
-  
-  @Override
-  public void init( FilterConfig filterConfig ) throws ServletException {
-    String customHeader = filterConfig.getInitParameter(CUSTOM_HEADER_PARAM);
-    if (customHeader != null) {
-      headerName = customHeader;
-    }
-    String customMTI = filterConfig.getInitParameter(CUSTOM_METHODS_TO_IGNORE_PARAM);
-    if (customMTI != null) {
-      mti = customMTI;
-    }
-    String[] methods = mti.split(",");
-    methodsToIgnore = new HashSet<>();
-    for (int i = 0; i < methods.length; i++) {
-      methodsToIgnore.add(methods[i]);
-    }
-  }
-  
-  @Override
-  public void doFilter(ServletRequest request, ServletResponse response,
-      FilterChain chain) throws IOException, ServletException {
-    HttpServletRequest httpRequest = (HttpServletRequest)request;
-    if ( methodsToIgnore.contains( httpRequest.getMethod() ) || httpRequest.getHeader(headerName) != null ) {
-      chain.doFilter(request, response);
-    } else {
-      ((HttpServletResponse)response).sendError(HttpServletResponse.SC_BAD_REQUEST, "Missing Required Header for Vulnerability Protection");
-    }
-  }
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#destroy()
-   */
-  @Override
-  public void destroy() {
-    // TODO Auto-generated method stub
-    
-  }
-}
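
Note on the filter removed above: with no init params it requires an X-XSRF-Header on every request whose method is outside GET,OPTIONS,HEAD and answers HTTP 400 otherwise. A minimal EasyMock sketch of that default path follows; the test class name is illustrative, and the package is assumed to follow the org.apache.hadoop to org.apache.knox move seen elsewhere in this commit.

    package org.apache.knox.gateway.webappsec.filter; // assumed post-refactoring package

    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.junit.Test;

    import static org.easymock.EasyMock.createMock;
    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.expectLastCall;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;

    public class CSRFPreventionFilterSketchTest {

      @Test
      public void postWithoutHeaderIsRejected() throws Exception {
        // No init params: defaults are X-XSRF-Header and GET,OPTIONS,HEAD.
        FilterConfig config = createNiceMock(FilterConfig.class);
        replay(config);

        CSRFPreventionFilter filter = new CSRFPreventionFilter();
        filter.init(config);

        // POST with no X-XSRF-Header present (nice mock returns null headers).
        HttpServletRequest request = createNiceMock(HttpServletRequest.class);
        expect(request.getMethod()).andReturn("POST").anyTimes();
        replay(request);

        // The filter is expected to short-circuit with a 400.
        HttpServletResponse response = createMock(HttpServletResponse.class);
        response.sendError(HttpServletResponse.SC_BAD_REQUEST,
            "Missing Required Header for Vulnerability Protection");
        expectLastCall();
        replay(response);

        FilterChain chain = createNiceMock(FilterChain.class);
        replay(chain);

        filter.doFilter(request, response, chain);
        verify(response);
      }
    }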

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/XFrameOptionsFilter.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/XFrameOptionsFilter.java b/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/XFrameOptionsFilter.java
deleted file mode 100644
index 9ec14a6..0000000
--- a/gateway-provider-security-webappsec/src/main/java/org/apache/hadoop/gateway/webappsec/filter/XFrameOptionsFilter.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.webappsec.filter;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.List;
-
-import javax.servlet.Filter;
-import javax.servlet.FilterChain;
-import javax.servlet.FilterConfig;
-import javax.servlet.ServletException;
-import javax.servlet.ServletRequest;
-import javax.servlet.ServletResponse;
-import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpServletResponseWrapper;
-
-/**
- * This filter protects proxied webapps from clickjacking attacks that
- * are possible through use of Frames to contain the proxied resources.
- */
-public class XFrameOptionsFilter implements Filter {
-  private static final String X_FRAME_OPTIONS = "X-Frame-Options";
-  private static final String CUSTOM_HEADER_PARAM = "xframe.options";
-
-  private String option = "DENY";
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#destroy()
-   */
-  @Override
-  public void destroy() {
-  }
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#doFilter(javax.servlet.ServletRequest, javax.servlet.ServletResponse, javax.servlet.FilterChain)
-   */
-  @Override
-  public void doFilter(ServletRequest req, ServletResponse res,
-      FilterChain chain) throws IOException, ServletException {
-    ((HttpServletResponse) res).setHeader(X_FRAME_OPTIONS, option);
-    chain.doFilter(req, new XFrameOptionsResponseWrapper((HttpServletResponse) res));
-  }
-
-  /* (non-Javadoc)
-   * @see javax.servlet.Filter#init(javax.servlet.FilterConfig)
-   */
-  @Override
-  public void init(FilterConfig config) throws ServletException {
-    String customOption = config.getInitParameter(CUSTOM_HEADER_PARAM);
-    if (customOption != null) {
-      option = customOption;
-    }
-  }
-
-  public class XFrameOptionsResponseWrapper extends HttpServletResponseWrapper {
-    @Override
-    public void addHeader(String name, String value) {
-      // don't allow additional values to be added to
-      // the configured options value in topology
-      if (!name.equals(X_FRAME_OPTIONS)) {
-        super.addHeader(name, value);
-      }
-    }
-
-    @Override
-    public void setHeader(String name, String value) {
-      // don't allow overwriting of configured value
-      if (!name.equals(X_FRAME_OPTIONS)) {
-        super.setHeader(name, value);
-      }
-    }
-
-    /**
-     * Construct a wrapper for this response.
-     *
-     * @param response the response to wrap
-     */
-    public XFrameOptionsResponseWrapper(HttpServletResponse response) {
-        super(response);
-    }
-
-    @Override
-    public String getHeader(String name) {
-        String headerValue = null;
-        if (name.equals(X_FRAME_OPTIONS)) {
-            headerValue = option;
-        }
-        else {
-          headerValue = super.getHeader(name);
-        }
-        return headerValue;
-    }
-
-    /**
-     * get the Header names
-     */
-    @Override
-    public Collection<String> getHeaderNames() {
-        List<String> names = (List<String>) super.getHeaderNames();
-        if (names == null) {
-          names = new ArrayList<String>();
-        }
-        names.add(X_FRAME_OPTIONS);
-        return names;
-    }
-
-    @Override
-    public Collection<String> getHeaders(String name) {
-        List<String> values = (List<String>) super.getHeaders(name);
-        if (name.equals(X_FRAME_OPTIONS)) {
-          if (values == null) {
-            values = new ArrayList<String>();
-          }
-          values.add(option);
-        }
-        return values;
-    }
-  }
-
-}
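
Note on the filter removed above: it stamps the configured X-Frame-Options value (DENY unless the xframe.options init param overrides it) onto the response before delegating, and the response wrapper keeps downstream code from replacing it. A minimal EasyMock sketch of the init-param override; the class name is illustrative and the package is assumed as above.

    package org.apache.knox.gateway.webappsec.filter; // assumed post-refactoring package

    import javax.servlet.FilterChain;
    import javax.servlet.FilterConfig;
    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    import org.junit.Test;

    import static org.easymock.EasyMock.createMock;
    import static org.easymock.EasyMock.createNiceMock;
    import static org.easymock.EasyMock.expect;
    import static org.easymock.EasyMock.expectLastCall;
    import static org.easymock.EasyMock.replay;
    import static org.easymock.EasyMock.verify;

    public class XFrameOptionsFilterSketchTest {

      @Test
      public void configuredOptionIsApplied() throws Exception {
        // Override the default DENY via the xframe.options init param.
        FilterConfig config = createNiceMock(FilterConfig.class);
        expect(config.getInitParameter("xframe.options")).andReturn("SAMEORIGIN");
        replay(config);

        XFrameOptionsFilter filter = new XFrameOptionsFilter();
        filter.init(config);

        HttpServletRequest request = createNiceMock(HttpServletRequest.class);
        replay(request);

        // The filter must set the header before delegating down the chain.
        HttpServletResponse response = createMock(HttpServletResponse.class);
        response.setHeader("X-Frame-Options", "SAMEORIGIN");
        expectLastCall();
        replay(response);

        FilterChain chain = createNiceMock(FilterChain.class);
        replay(chain);

        filter.doFilter(request, response, chain);
        verify(response);
      }
    }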

http://git-wip-us.apache.org/repos/asf/knox/blob/af9b0c3d/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/WebAppSecMessages.java
----------------------------------------------------------------------
diff --git a/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/WebAppSecMessages.java b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/WebAppSecMessages.java
new file mode 100644
index 0000000..8ea2467
--- /dev/null
+++ b/gateway-provider-security-webappsec/src/main/java/org/apache/knox/gateway/webappsec/WebAppSecMessages.java
@@ -0,0 +1,24 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.knox.gateway.webappsec;
+
+import org.apache.knox.gateway.i18n.messages.Messages;
+
+@Messages(logger="org.apache.hadoop.gateway.provider.global.csrf")
+public interface WebAppSecMessages {
+}