Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2013/06/21 08:37:39 UTC

svn commit: r1495297 [40/46] - in /hadoop/common/branches/branch-1-win: ./ bin/ conf/ ivy/ lib/jdiff/ src/c++/libhdfs/docs/ src/c++/libhdfs/tests/conf/ src/contrib/capacity-scheduler/ivy/ src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred...

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopology.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopology.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopology.java Fri Jun 21 06:37:27 2013
@@ -58,6 +58,25 @@ public class TestNetworkTopology extends
     assertEquals(cluster.getNumOfLeaves(), dataNodes.length);
   }
 
+  public void testCreateInvalidTopology() throws Exception {
+    NetworkTopology invalCluster = new NetworkTopology();
+    DatanodeDescriptor invalDataNodes[] = new DatanodeDescriptor[] {
+      new DatanodeDescriptor(new DatanodeID("h1:5020"), "/d1/r1"),
+      new DatanodeDescriptor(new DatanodeID("h2:5020"), "/d1/r1"),
+      new DatanodeDescriptor(new DatanodeID("h3:5020"), "/d1")
+    };
+    invalCluster.add(invalDataNodes[0]);
+    invalCluster.add(invalDataNodes[1]);
+    try {
+      invalCluster.add(invalDataNodes[2]);
+      fail("expected InvalidTopologyException");
+    } catch (NetworkTopology.InvalidTopologyException e) {
+      assertEquals(e.getMessage(), "Invalid network topology. " +
+          "You cannot have a rack and a non-rack node at the same " +
+          "level of the network topology.");
+    }
+  }
+
   public void testRacks() throws Exception {
     assertEquals(cluster.getNumOfRacks(), 3);
     assertTrue(cluster.isOnSameRack(dataNodes[0], dataNodes[1]));

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/net/TestNetworkTopologyWithNodeGroup.java Fri Jun 21 06:37:27 2013
@@ -39,6 +39,8 @@ public class TestNetworkTopologyWithNode
   };
 
   private final static NodeBase computeNode = new NodeBase("/d1/r1/n1/h9");
+  
+  private final static NodeBase rackOnlyNode = new NodeBase("h10", "/r1");
 
   static {
     for(int i=0; i<dataNodes.length; i++) {
@@ -159,5 +161,21 @@ public class TestNetworkTopologyWithNode
       assertTrue(frequency.get(key) > 0 || key == dataNodes[0]);
     }
   }
+  /**
+   * This test checks that adding a node with an invalid topology fails
+   * with an exception indicating that the topology is invalid.
+   */
+  public void testAddNodeWithInvalidTopology() {
+    // rackOnlyNode has an invalid network location for this topology
+    try {
+      cluster.add(rackOnlyNode);
+      fail("Exception should be thrown, so we should not have reached here.");
+    } catch (Exception e) {
+      if (!(e instanceof IllegalArgumentException)) {
+        fail("Expecting IllegalArgumentException, but caught:" + e);
+      }
+      assertTrue(e.getMessage().contains("illegal network location"));
+    }
+  }
 
 }

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/ManualTestKeytabLogins.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/ManualTestKeytabLogins.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/ManualTestKeytabLogins.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/ManualTestKeytabLogins.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,57 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.security;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import static org.junit.Assert.assertTrue;
+
+/**
+ * Regression test for HADOOP-6947, which can be run manually in
+ * a Kerberos environment.
+ *
+ * To run this test, set up two keytabs, each with a different principal.
+ * Then run something like:
+ *  <code>
+ *  HADOOP_CLASSPATH=build/test/classes bin/hadoop \
+ *     org.apache.hadoop.security.ManualTestKeytabLogins \
+ *     usera/test@REALM  /path/to/usera-keytab \
+ *     userb/test@REALM  /path/to/userb-keytab
+ *  </code>
+ */
+public class ManualTestKeytabLogins {
+
+  public static void main(String []args) throws Exception {
+    if (args.length != 4) {
+      System.err.println(
+        "usage: ManualTestKeytabLogins <principal 1> <keytab 1> <principal 2> <keytab 2>");
+      System.exit(1);
+    }
+
+    UserGroupInformation ugi1 =
+      UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        args[0], args[1]);
+    System.out.println("UGI 1 = " + ugi1);
+    assertTrue(ugi1.getUserName().equals(args[0]));
+    
+    UserGroupInformation ugi2 =
+      UserGroupInformation.loginUserFromKeytabAndReturnUGI(
+        args[2], args[3]);
+    System.out.println("UGI 2 = " + ugi2);
+    assertTrue(ugi2.getUserName().equals(args[2]));
+  }
+}

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestGroupFallback.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestGroupFallback.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestGroupFallback.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestGroupFallback.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.junit.Test;
+
+public class TestGroupFallback {
+  public static final Log LOG = LogFactory.getLog(TestGroupFallback.class);
+
+  @Test
+  public void testGroupShell() throws Exception {
+    Logger.getRootLogger().setLevel(Level.DEBUG);
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        "org.apache.hadoop.security.ShellBasedUnixGroupsMapping");
+
+    Groups groups = new Groups(conf);
+
+    String username = System.getProperty("user.name");
+    List<String> groupList = groups.getGroups(username);
+
+    LOG.info(username + " has GROUPS: " + groupList.toString());
+    assertTrue(groupList.size() > 0);
+  }
+
+  @Test
+  public void testNetgroupShell() throws Exception {
+    Logger.getRootLogger().setLevel(Level.DEBUG);
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        "org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping");
+
+    Groups groups = new Groups(conf);
+
+    String username = System.getProperty("user.name");
+    List<String> groupList = groups.getGroups(username);
+
+    LOG.info(username + " has GROUPS: " + groupList.toString());
+    assertTrue(groupList.size() > 0);
+  }
+
+  @Test
+  public void testGroupWithFallback() throws Exception {
+    Logger.getRootLogger().setLevel(Level.DEBUG);
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        "org.apache.hadoop.security.JniBasedUnixGroupsMappingWithFallback");
+
+    Groups groups = new Groups(conf);
+
+    String username = System.getProperty("user.name");
+    List<String> groupList = groups.getGroups(username);
+
+    LOG.info(username + " has GROUPS: " + groupList.toString());
+    assertTrue(groupList.size() > 0);
+  }
+
+  @Test
+  public void testNetgroupWithFallback() throws Exception {
+    Logger.getRootLogger().setLevel(Level.DEBUG);
+    Configuration conf = new Configuration();
+    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING,
+        "org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMappingWithFallback");
+
+    Groups groups = new Groups(conf);
+
+    String username = System.getProperty("user.name");
+    List<String> groupList = groups.getGroups(username);
+
+    LOG.info(username + " has GROUPS: " + groupList.toString());
+    assertTrue(groupList.size() > 0);
+  }
+
+}

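[The four tests above differ only in the group-mapping implementation they configure. A hypothetical shared helper (not part of this commit, assuming the same imports and fields as TestGroupFallback) would capture the common body:

    // Hypothetical helper: runs the common body of the tests above for a
    // given group-mapping implementation class name.
    private void verifyGroupsFor(String mappingClass) throws Exception {
      Configuration conf = new Configuration();
      conf.set(CommonConfigurationKeys.HADOOP_SECURITY_GROUP_MAPPING, mappingClass);

      Groups groups = new Groups(conf);
      String username = System.getProperty("user.name");
      List<String> groupList = groups.getGroups(username);

      LOG.info(username + " has GROUPS: " + groupList);
      assertTrue(groupList.size() > 0);
    }

    // e.g. verifyGroupsFor("org.apache.hadoop.security.ShellBasedUnixGroupsMapping");
]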
Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestKerberosUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestKerberosUtil.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestKerberosUtil.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestKerberosUtil.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.security.authentication.util.KerberosUtil;
+import org.junit.Test;
+
+public class TestKerberosUtil {
+  public static final Log LOG = LogFactory.getLog(TestKerberosUtil.class);
+
+  @Test
+  public void testGetServerPrincipal() throws IOException {
+    String service = "TestKerberosUtil";
+    String localHostname = SecurityUtil.getLocalHostName();
+    String testHost = "FooBar";
+    
+    // send null hostname
+    assertEquals("When no hostname is sent",
+        service + "/" + localHostname.toLowerCase(),
+        KerberosUtil.getServicePrincipal(service, null));
+    // send empty hostname
+    assertEquals("When empty hostname is sent",
+        service + "/" + localHostname.toLowerCase(),
+        KerberosUtil.getServicePrincipal(service, ""));
+    // send 0.0.0.0 hostname
+    assertEquals("When 0.0.0.0 hostname is sent",
+        service + "/" + localHostname.toLowerCase(),
+        KerberosUtil.getServicePrincipal(service, "0.0.0.0"));
+    // send uppercase hostname
+    assertEquals("When uppercase hostname is sent",
+        service + "/" + testHost.toLowerCase(),
+        KerberosUtil.getServicePrincipal(service, testHost));
+    // send lowercase hostname
+    assertEquals("When lowercase hostname is sent",
+        service + "/" + testHost.toLowerCase(),
+        KerberosUtil.getServicePrincipal(service, testHost.toLowerCase()));
+  }
+}
\ No newline at end of file

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestProxyUserFromEnv.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.security;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.junit.Test;
+
+public class TestProxyUserFromEnv {
+  /** Test HADOOP_PROXY_USER for impersonation */
+  @Test
+  public void testProxyUserFromEnvironment() throws IOException {
+    String proxyUser = "foo.bar";
+    System.setProperty(UserGroupInformation.HADOOP_PROXY_USER, proxyUser);
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    assertEquals(proxyUser, ugi.getUserName());
+
+    UserGroupInformation realUgi = ugi.getRealUser();
+    assertNotNull(realUgi);
+    // get the expected real user name
+    Process pp = Runtime.getRuntime().exec("whoami");
+    BufferedReader br = new BufferedReader
+                          (new InputStreamReader(pp.getInputStream()));
+    String realUser = br.readLine().trim();
+    assertEquals(realUser, realUgi.getUserName());
+  }
+}

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestUserGroupInformation.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestUserGroupInformation.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestUserGroupInformation.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/TestUserGroupInformation.java Fri Jun 21 06:37:27 2013
@@ -16,22 +16,18 @@
  */
 package org.apache.hadoop.security;
 
-import static org.junit.Assert.assertArrayEquals;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
+import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStreamReader;
 import java.security.PrivilegedExceptionAction;
-import java.util.ArrayList;
 import java.util.Collection;
-import java.util.List;
+import java.util.LinkedHashSet;
+import java.util.Set;
 
+import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 
 import junit.framework.Assert;
@@ -130,7 +126,7 @@ public class TestUserGroupInformation {
 
     System.out.println(userName + ":" + line);
    
-    List<String> groups = new ArrayList<String> ();
+    Set<String> groups = new LinkedHashSet<String> ();    
     String[] tokens = line.split(Shell.TOKEN_SEPARATOR_REGEX);
     for(String s: tokens) {
       groups.add(s);
@@ -147,7 +143,7 @@ public class TestUserGroupInformation {
     String[] gi = login.getGroupNames();
     assertEquals(groups.size(), gi.length);
     for(int i=0; i < gi.length; i++) {
-    	assertEquals(groups.get(i), gi[i]);
+      assertTrue(groups.contains(gi[i]));
     }
     
     final UserGroupInformation fakeUser = 
@@ -359,4 +355,22 @@ public class TestUserGroupInformation {
       assertGaugeGt("loginFailure_avg_time", 0, rb);
     }
   }
+
+  /**
+   * Test for the case that UserGroupInformation.getCurrentUser()
+   * is called when the AccessControlContext has a Subject associated
+   * with it, but that Subject was not created by Hadoop (ie it has no
+   * associated User principal)
+   */
+  @Test
+  public void testUGIUnderNonHadoopContext() throws Exception {
+    Subject nonHadoopSubject = new Subject();
+    Subject.doAs(nonHadoopSubject, new PrivilegedExceptionAction<Void>() {
+        public Void run() throws IOException {
+          UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+          assertNotNull(ugi);
+          return null;
+        }
+      });
+  }
 }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/client/TestAuthenticatedURL.java Fri Jun 21 06:37:27 2013
@@ -13,6 +13,7 @@
  */
 package org.apache.hadoop.security.authentication.client;
 
+import junit.framework.Assert;
 import junit.framework.TestCase;
 import org.mockito.Mockito;
 
@@ -100,11 +101,14 @@ public class TestAuthenticatedURL extend
     headers.put("Set-Cookie", cookies);
     Mockito.when(conn.getHeaderFields()).thenReturn(headers);
 
+    AuthenticatedURL.Token token = new AuthenticatedURL.Token();
+    token.set("bar");
     try {
-      AuthenticatedURL.extractToken(conn, new AuthenticatedURL.Token());
+      AuthenticatedURL.extractToken(conn, token);
       fail();
     } catch (AuthenticationException ex) {
       // Expected
+      Assert.assertFalse(token.isSet());
     } catch (Exception ex) {
       fail();
     }

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authentication/server/TestAuthenticationFilter.java Fri Jun 21 06:37:27 2013
@@ -349,7 +349,7 @@ public class TestAuthenticationFilter ex
     }
   }
 
-  private void _testDoFilterAuthentication(boolean withDomainPath) throws Exception {
+  private void _testDoFilterAuthentication(boolean withDomainPath, boolean invalidToken) throws Exception {
     AuthenticationFilter filter = new AuthenticationFilter();
     try {
       FilterConfig config = Mockito.mock(FilterConfig.class);
@@ -380,6 +380,12 @@ public class TestAuthenticationFilter ex
       Mockito.when(request.getRequestURL()).thenReturn(new StringBuffer("http://foo:8080/bar"));
       Mockito.when(request.getQueryString()).thenReturn("authenticated=true");
 
+      if (invalidToken) {
+        Mockito.when(request.getCookies()).thenReturn(
+          new Cookie[] { new Cookie(AuthenticatedURL.AUTH_COOKIE, "foo")}
+        );
+      }
+
       HttpServletResponse response = Mockito.mock(HttpServletResponse.class);
 
       FilterChain chain = Mockito.mock(FilterChain.class);
@@ -437,11 +443,15 @@ public class TestAuthenticationFilter ex
   }
 
   public void testDoFilterAuthentication() throws Exception {
-    _testDoFilterAuthentication(false);
+    _testDoFilterAuthentication(false, false);
+  }
+
+  public void testDoFilterAuthenticationWithInvalidToken() throws Exception {
+    _testDoFilterAuthentication(false, true);
   }
 
   public void testDoFilterAuthenticationWithDomainPath() throws Exception {
-    _testDoFilterAuthentication(true);
+    _testDoFilterAuthentication(true, false);
   }
 
   public void testDoFilterAuthenticated() throws Exception {

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authorize/TestProxyUsers.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authorize/TestProxyUsers.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/authorize/TestProxyUsers.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.security.authorize;
+
+import java.util.Arrays;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import junit.framework.TestCase;
+
+public class TestProxyUsers extends TestCase {
+  private static final String REAL_USER_NAME = "proxier";
+  private static final String PROXY_USER_NAME = "proxied_user";
+  private static final String[] GROUP_NAMES =
+    new String[] { "foo_group" };
+  private static final String[] OTHER_GROUP_NAMES =
+    new String[] { "bar_group" };
+  private static final String PROXY_IP = "1.2.3.4";
+  
+  public void testProxyUsers() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+
+    // First try proxying a group that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now try proxying a group that's not allowed
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
+    
+    // From good IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
+
+  public void testWildcardGroup() {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      "*");
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      PROXY_IP);
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+    // First try proxying a group that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now try proxying a different group (just to make sure we aren't getting spill over
+    // from the other test case!)
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
+    
+    // From good IP
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    // From bad IP
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
+
+  public void testWildcardIP() {
+    Configuration conf = new Configuration();
+    conf.set(
+      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
+      StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
+    conf.set(
+      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
+      "*");
+    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
+
+    // First try proxying a group that's allowed
+    UserGroupInformation realUserUgi = UserGroupInformation
+        .createRemoteUser(REAL_USER_NAME);
+    UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
+
+    // From either IP should be fine
+    assertAuthorized(proxyUserUgi, "1.2.3.4");
+    assertAuthorized(proxyUserUgi, "1.2.3.5");
+
+    // Now set up a disallowed group
+    realUserUgi = UserGroupInformation.createRemoteUser(REAL_USER_NAME);
+    proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
+        PROXY_USER_NAME, realUserUgi, OTHER_GROUP_NAMES);
+    
+    // Neither IP should be OK
+    assertNotAuthorized(proxyUserUgi, "1.2.3.4");
+    assertNotAuthorized(proxyUserUgi, "1.2.3.5");
+  }
+
+  private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
+    try {
+      ProxyUsers.authorize(proxyUgi, host, null);
+      fail("Allowed authorization of " + proxyUgi + " from " + host);
+    } catch (AuthorizationException e) {
+      // Expected
+    }
+  }
+  
+  private void assertAuthorized(UserGroupInformation proxyUgi, String host) {
+    try {
+      ProxyUsers.authorize(proxyUgi, host, null);
+    } catch (AuthorizationException e) {
+      fail("Did not allowed authorization of " + proxyUgi + " from " + host);
+    }
+  }
+}

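[For orientation, the two conf-key helpers used above expand to per-user proxy settings. A minimal literal sketch, assuming the conventional hadoop.proxyuser.<user>.groups / hadoop.proxyuser.<user>.hosts key names (not shown in this diff):

    // Assumed key format; equivalent to the getProxySuperuserGroupConfKey /
    // getProxySuperuserIpConfKey calls in TestProxyUsers above.
    Configuration conf = new Configuration();
    conf.set("hadoop.proxyuser.proxier.groups", "foo_group");
    conf.set("hadoop.proxyuser.proxier.hosts", "1.2.3.4");
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
]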
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/security/token/delegation/TestDelegationToken.java Fri Jun 21 06:37:27 2013
@@ -39,6 +39,8 @@ import org.apache.hadoop.io.DataOutputBu
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.token.SecretManager;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
@@ -170,6 +172,52 @@ public class TestDelegationToken {
   }
 
   @Test
+  public void testGetUserNullOwner() {
+    TestDelegationTokenIdentifier ident =
+        new TestDelegationTokenIdentifier(null, null, null);
+    UserGroupInformation ugi = ident.getUser();
+    assertNull(ugi);
+  }
+  
+  @Test
+  public void testGetUserWithOwner() {
+    TestDelegationTokenIdentifier ident =
+        new TestDelegationTokenIdentifier(new Text("owner"), null, null);
+    UserGroupInformation ugi = ident.getUser();
+    assertNull(ugi.getRealUser());
+    assertEquals("owner", ugi.getUserName());
+    assertEquals(AuthenticationMethod.TOKEN, ugi.getAuthenticationMethod());
+  }
+
+  @Test
+  public void testGetUserWithOwnerEqualsReal() {
+    Text owner = new Text("owner");
+    TestDelegationTokenIdentifier ident =
+        new TestDelegationTokenIdentifier(owner, null, owner);
+    UserGroupInformation ugi = ident.getUser();
+    assertNull(ugi.getRealUser());
+    assertEquals("owner", ugi.getUserName());
+    assertEquals(AuthenticationMethod.TOKEN, ugi.getAuthenticationMethod());
+  }
+
+  @Test
+  public void testGetUserWithOwnerAndReal() {
+    Text owner = new Text("owner");
+    Text realUser = new Text("realUser");
+    TestDelegationTokenIdentifier ident =
+        new TestDelegationTokenIdentifier(owner, null, realUser);
+    UserGroupInformation ugi = ident.getUser();
+    assertNotNull(ugi.getRealUser());
+    assertNull(ugi.getRealUser().getRealUser());
+    assertEquals("owner", ugi.getUserName());
+    assertEquals("realUser", ugi.getRealUser().getUserName());
+    assertEquals(AuthenticationMethod.PROXY,
+                 ugi.getAuthenticationMethod());
+    assertEquals(AuthenticationMethod.TOKEN,
+                 ugi.getRealUser().getAuthenticationMethod());
+  }
+
+  @Test
   public void testDelegationTokenSecretManager() throws Exception {
     final TestDelegationTokenSecretManager dtSecretManager = 
       new TestDelegationTokenSecretManager(24*60*60*1000,

Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/AllTestDriver.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/AllTestDriver.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/AllTestDriver.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/AllTestDriver.java Fri Jun 21 06:37:27 2013
@@ -78,6 +78,9 @@ public class AllTestDriver {
       pgd.addClass("MRReliabilityTest", ReliabilityTest.class,
           "A program that tests the reliability of the MR framework by " +
           "injecting faults/failures");
+      pgd.addClass("minicluster", MiniHadoopClusterManager.class,
+          "Single process HDFS and MR cluster.");
+
       pgd.driver(argv);
     } catch(Throwable e) {
       e.printStackTrace();

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/GenericTestUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/GenericTestUtils.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/GenericTestUtils.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/GenericTestUtils.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.apache.hadoop.util.StringUtils;
+import org.junit.Assert;
+
+/**
+ * Provides generic helpers that can be used across tests.
+ */
+public abstract class GenericTestUtils {
+
+  public static void assertExceptionContains(String string, Throwable t) {
+    String msg = t.getMessage();
+    Assert.assertTrue(
+        "Expected to find '" + string + "' but got unexpected exception:"
+        + StringUtils.stringifyException(t), msg.contains(string));
+  }  
+}

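[A hypothetical usage of the new assertExceptionContains helper (illustrative only, not part of this commit), reusing the cluster and rackOnlyNode fields from the TestNetworkTopologyWithNodeGroup change above:

    // Illustrative sketch: the expected text matches the "illegal network
    // location" assertion in TestNetworkTopologyWithNodeGroup.
    try {
      cluster.add(rackOnlyNode);
      Assert.fail("expected an exception for the invalid network location");
    } catch (IllegalArgumentException e) {
      GenericTestUtils.assertExceptionContains("illegal network location", e);
    }
]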
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MetricsAsserts.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MetricsAsserts.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MetricsAsserts.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MetricsAsserts.java Fri Jun 21 06:37:27 2013
@@ -21,6 +21,9 @@ package org.apache.hadoop.test;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import static org.mockito.Mockito.*;
+
+import org.junit.Assert;
+import org.mockito.ArgumentCaptor;
 import org.mockito.stubbing.Answer;
 import org.mockito.invocation.InvocationOnMock;
 import static org.mockito.AdditionalMatchers.*;
@@ -174,6 +177,29 @@ public class MetricsAsserts {
   }
 
   /**
+   * Check that this metric was captured exactly once.
+   */
+  private static void checkCaptured(ArgumentCaptor<?> captor, String name) {
+    Assert.assertEquals("Expected exactly one metric for name " + name,
+        1, captor.getAllValues().size());
+  }
+
+  /**
+   * Look up the value of a long gauge metric by name. Throws an exception
+   * if the metric cannot be found.
+   *
+   * @param name of the metric.
+   * @param rb  the record builder mock used to getMetrics
+   * @return the long value of the metric if found.
+   */
+  public static long getLongGauge(String name, MetricsRecordBuilder rb) {
+    ArgumentCaptor<Long> captor = ArgumentCaptor.forClass(Long.class);
+    verify(rb, atLeast(0)).addGauge(eq(name), anyString(), captor.capture());
+    checkCaptured(captor, name);
+    return captor.getValue();
+  }
+
+  /**
    * Assert that a double gauge metric is greater than a value
    * @param name  of the metric
    * @param greater value of the metric should be greater than this

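[A hypothetical call site for the new getLongGauge helper (the metric name and the mocked builder are illustrative, not taken from this commit; assumes the same static Mockito and JUnit imports as MetricsAsserts):

    // Illustrative sketch: record a gauge on a mocked MetricsRecordBuilder,
    // then read it back through the new helper. "FilesTotal" is a placeholder.
    MetricsRecordBuilder rb = mock(MetricsRecordBuilder.class);
    rb.addGauge("FilesTotal", "placeholder description", 42L);
    Assert.assertEquals(42L, getLongGauge("FilesTotal", rb));
]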
Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MiniHadoopClusterManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MiniHadoopClusterManager.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MiniHadoopClusterManager.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/test/MiniHadoopClusterManager.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,311 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.net.UnknownHostException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRClientCluster;
+import org.apache.hadoop.mapred.MiniMRClientClusterFactory;
+import org.mortbay.util.ajax.JSON;
+
+/**
+ * This class drives the creation of a mini-cluster on the local machine. By
+ * default, a MiniDFSCluster and MiniMRCluster are spawned on the first
+ * available ports that are found.
+ *
+ * A series of command line flags controls the startup cluster options.
+ *
+ * This class can dump a Hadoop configuration and some basic metadata (in JSON)
+ * into a text file.
+ *
+ * To shut down the cluster, kill the process.
+ */
+public class MiniHadoopClusterManager {
+  private static final Log LOG = LogFactory
+      .getLog(MiniHadoopClusterManager.class);
+
+  private MiniMRClientCluster mr;
+  private MiniDFSCluster dfs;
+  private String writeDetails;
+  private int numTaskTrackers;
+  private int numDataNodes;
+  private int nnPort;
+  private int jtPort;
+  private StartupOption dfsOpts;
+  private boolean noDFS;
+  private boolean noMR;
+  private String fs;
+  private String writeConfig;
+  private JobConf conf;
+
+  /**
+   * Creates configuration options object.
+   */
+  @SuppressWarnings("static-access")
+  private Options makeOptions() {
+    Options options = new Options();
+    options
+        .addOption("nodfs", false, "Don't start a mini DFS cluster")
+        .addOption("nomr", false, "Don't start a mini MR cluster")
+        .addOption("tasktrackers", true,
+            "How many tasktrackers to start (default 1)")
+        .addOption("datanodes", true, "How many datanodes to start (default 1)")
+        .addOption("format", false, "Format the DFS (default false)")
+        .addOption("nnport", true, "NameNode port (default 0--we choose)")
+        .addOption(
+            "namenode",
+            true,
+            "URL of the namenode (default "
+                + "is either the DFS cluster or a temporary dir)")
+        .addOption("jtport", true, "JobTracker port (default 0--we choose)")
+        .addOption(
+            OptionBuilder.hasArgs().withArgName("property=value")
+                .withDescription("Options to pass into configuration object")
+                .create("D"))
+        .addOption(
+            OptionBuilder.hasArg().withArgName("path").withDescription(
+                "Save configuration to this XML file.").create("writeConfig"))
+        .addOption(
+            OptionBuilder.hasArg().withArgName("path").withDescription(
+                "Write basic information to this JSON file.").create(
+                "writeDetails"))
+        .addOption(
+            OptionBuilder.withDescription("Prints option help.").create("help"));
+    return options;
+  }
+
+  /**
+   * Main entry-point.
+   *
+   * @throws URISyntaxException
+   */
+  public void run(String[] args) throws IOException, URISyntaxException {
+    if (!parseArguments(args)) {
+      return;
+    }
+    start();
+    sleepForever();
+  }
+
+  private void sleepForever() {
+    while (true) {
+      try {
+        Thread.sleep(1000 * 60);
+      } catch (InterruptedException _) {
+        // nothing
+      }
+    }
+  }
+
+  /**
+   * Starts DFS and MR clusters, as specified in member-variable options. Also
+   * writes out configuration and details, if requested.
+   *
+   * @throws IOException
+   * @throws FileNotFoundException
+   * @throws URISyntaxException
+   */
+  public void start() throws IOException, FileNotFoundException,
+      URISyntaxException {
+    if (!noDFS) {
+      dfs = new MiniDFSCluster(nnPort, conf, numDataNodes, true, true, dfsOpts,
+          null, null);
+      LOG.info("Started MiniDFSCluster -- namenode on port "
+          + dfs.getNameNodePort());
+    }
+    if (!noMR) {
+      if (fs == null && dfs != null) {
+        fs = dfs.getFileSystem().getUri().toString();
+      } else if (fs == null) {
+        fs = "file:///tmp/minimr-" + System.nanoTime();
+      }
+      FileSystem.setDefaultUri(conf, new URI(fs));
+      conf.set("mapred.job.tracker", getHostname() + ":" + this.jtPort);
+      mr = MiniMRClientClusterFactory.create(this.getClass(), numTaskTrackers,
+          conf);
+      LOG.info("Started MiniMRCluster");
+    }
+
+    if (writeConfig != null) {
+      FileOutputStream fos = new FileOutputStream(new File(writeConfig));
+      conf.writeXml(fos);
+      fos.close();
+    }
+
+    if (writeDetails != null) {
+      Map<String, Object> map = new TreeMap<String, Object>();
+      if (dfs != null) {
+        map.put("namenode_port", dfs.getNameNodePort());
+      }
+      if (mr != null) {
+        map.put("jobtracker_port", mr.getConfig().get("mapred.job.tracker")
+            .split(":")[1]);
+      }
+      FileWriter fw = new FileWriter(new File(writeDetails));
+      fw.write(new JSON().toJSON(map));
+      fw.close();
+    }
+  }
+
+  /**
+   * Shuts down in-process clusters.
+   *
+   * @throws IOException
+   */
+  public void stop() throws IOException {
+    if (mr != null) {
+      mr.stop();
+    }
+    if (dfs != null) {
+      dfs.shutdown();
+    }
+  }
+
+  /**
+   * Parses arguments and fills out the member variables.
+   *
+   * @param args
+   *          Command-line arguments.
+   * @return true on successful parse; false to indicate that the program should
+   *         exit.
+   */
+  private boolean parseArguments(String[] args) {
+    Options options = makeOptions();
+    CommandLine cli;
+    try {
+      CommandLineParser parser = new GnuParser();
+      cli = parser.parse(options, args);
+    } catch (ParseException e) {
+      LOG.warn("options parsing failed:  " + e.getMessage());
+      new HelpFormatter().printHelp("...", options);
+      return false;
+    }
+
+    if (cli.hasOption("help")) {
+      new HelpFormatter().printHelp("...", options);
+      return false;
+    }
+    if (cli.getArgs().length > 0) {
+      for (String arg : cli.getArgs()) {
+        System.err.println("Unrecognized option: " + arg);
+        new HelpFormatter().printHelp("...", options);
+        return false;
+      }
+    }
+
+    // MR
+    noMR = cli.hasOption("nomr");
+    numTaskTrackers = intArgument(cli, "tasktrackers", 1);
+    jtPort = intArgument(cli, "jtport", 0);
+    fs = cli.getOptionValue("namenode");
+
+    // HDFS
+    noDFS = cli.hasOption("nodfs");
+    numDataNodes = intArgument(cli, "datanodes", 1);
+    nnPort = intArgument(cli, "nnport", 0);
+    dfsOpts = cli.hasOption("format") ? StartupOption.FORMAT
+        : StartupOption.REGULAR;
+
+    // Runner
+    writeDetails = cli.getOptionValue("writeDetails");
+    writeConfig = cli.getOptionValue("writeConfig");
+
+    // General
+    conf = new JobConf();
+    updateConfiguration(conf, cli.getOptionValues("D"));
+
+    return true;
+  }
+
+  /**
+   * Updates configuration based on what's given on the command line.
+   *
+   * @param conf
+   *          The configuration object
+   * @param keyvalues
+   *          An array of interleaved key value pairs.
+   */
+  private void updateConfiguration(JobConf conf, String[] keyvalues) {
+    int num_confs_updated = 0;
+    if (keyvalues != null) {
+      for (String prop : keyvalues) {
+        String[] keyval = prop.split("=", 2);
+        if (keyval.length == 2) {
+          conf.set(keyval[0], keyval[1]);
+          num_confs_updated++;
+        } else {
+          LOG.warn("Ignoring -D option " + prop);
+        }
+      }
+    }
+    LOG.info("Updated " + num_confs_updated
+        + " configuration settings from command line.");
+  }
+
+  /**
+   * Extracts an integer argument with specified default value.
+   */
+  private int intArgument(CommandLine cli, String argName, int default_) {
+    String o = cli.getOptionValue(argName);
+    if (o == null) {
+      return default_;
+    } else {
+      return Integer.parseInt(o);
+    }
+  }
+
+  /**
+   * Starts a MiniHadoopCluster.
+   *
+   * @throws URISyntaxException
+   */
+  public static void main(String[] args) throws IOException, URISyntaxException {
+    new MiniHadoopClusterManager().run(args);
+  }
+
+  public static String getHostname() {
+    try {
+      return InetAddress.getLocalHost().getHostName();
+    } catch (UnknownHostException ex) {
+      throw new RuntimeException(ex);
+    }
+  }
+}

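[With the AllTestDriver "minicluster" entry added earlier in this commit, the manager can also be launched directly from the test classpath. A hypothetical invocation (paths and option values are illustrative), in the same style as the ManualTestKeytabLogins example above:

    HADOOP_CLASSPATH=build/test/classes bin/hadoop \
        org.apache.hadoop.test.MiniHadoopClusterManager \
        -datanodes 2 -tasktrackers 2 \
        -writeConfig /tmp/minicluster-site.xml -writeDetails /tmp/minicluster.json
]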
Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/StubContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/StubContext.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/StubContext.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/StubContext.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,143 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools.distcp2;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.Counter;
+import org.apache.hadoop.mapreduce.Counters;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.RecordReader;
+import org.apache.hadoop.mapreduce.RecordWriter;
+import org.apache.hadoop.mapreduce.StatusReporter;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+
+public class StubContext {
+
+  private StubStatusReporter reporter = new StubStatusReporter();
+  private RecordReader<Text, FileStatus> reader;
+  private StubInMemoryWriter writer = new StubInMemoryWriter();
+  private Mapper<Text, FileStatus, Text, Text>.Context mapperContext;
+
+  public StubContext(Configuration conf, RecordReader<Text, FileStatus> reader,
+                     int taskId) throws IOException, InterruptedException {
+
+    Mapper<Text, FileStatus, Text, Text> mapper
+            = new Mapper<Text, FileStatus, Text, Text>();
+
+    Mapper<Text, FileStatus, Text, Text>.Context contextImpl
+            = mapper.new Context(conf,
+            getTaskAttemptID(taskId), reader, writer,
+            null, reporter, null);
+
+    this.reader = reader;
+    this.mapperContext = contextImpl;
+  }
+
+  public Mapper<Text, FileStatus, Text, Text>.Context getContext() {
+    return mapperContext;
+  }
+
+  public StatusReporter getReporter() {
+    return reporter;
+  }
+
+  public RecordReader<Text, FileStatus> getReader() {
+    return reader;
+  }
+
+  public StubInMemoryWriter getWriter() {
+    return writer;
+  }
+
+  public static class StubStatusReporter extends StatusReporter {
+
+    private Counters counters = new Counters();
+
+    public StubStatusReporter() {
+	    /*
+      final CounterGroup counterGroup
+              = new CounterGroup("FileInputFormatCounters",
+                                 "FileInputFormatCounters");
+      counterGroup.addCounter(new Counter("BYTES_READ",
+                                          "BYTES_READ",
+                                          0));
+      counters.addGroup(counterGroup);
+      */
+    }
+
+    @Override
+    public Counter getCounter(Enum<?> name) {
+      return counters.findCounter(name);
+    }
+
+    @Override
+    public Counter getCounter(String group, String name) {
+      return counters.findCounter(group, name);
+    }
+
+    @Override
+    public void progress() {}
+
+    @Override
+    public float getProgress() {
+      return 0F;
+    }
+
+    @Override
+    public void setStatus(String status) {}
+  }
+
+
+  public static class StubInMemoryWriter extends RecordWriter<Text, Text> {
+
+    List<Text> keys = new ArrayList<Text>();
+
+    List<Text> values = new ArrayList<Text>();
+
+    @Override
+    public void write(Text key, Text value) throws IOException, InterruptedException {
+      keys.add(key);
+      values.add(value);
+    }
+
+    @Override
+    public void close(TaskAttemptContext context) throws IOException, InterruptedException {
+    }
+
+    public List<Text> keys() {
+      return keys;
+    }
+
+    public List<Text> values() {
+      return values;
+    }
+
+  }
+
+  public static TaskAttemptID getTaskAttemptID(int taskId) {
+    return new TaskAttemptID("", 0, true, taskId, 0);
+  }
+}

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestCopyListing.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestCopyListing.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestCopyListing.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,284 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools.distcp2;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.tools.distcp2.util.TestDistCpUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+public class TestCopyListing extends SimpleCopyListing {
+  private static final Log LOG = LogFactory.getLog(TestCopyListing.class);
+
+  private static final Credentials CREDENTIALS = new Credentials();
+
+  private static final Configuration config = new Configuration();
+  private static MiniDFSCluster cluster;
+
+  @BeforeClass
+  public static void create() throws IOException {
+    cluster = new MiniDFSCluster(config, 1, true, null);
+  }
+
+  @AfterClass
+  public static void destroy() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+  
+  public TestCopyListing() {
+    super(config, CREDENTIALS);
+  }
+
+  protected TestCopyListing(Configuration configuration) {
+    super(configuration, CREDENTIALS);
+  }
+
+  @Override
+  protected long getBytesToCopy() {
+    return 0;
+  }
+
+  @Override
+  protected long getNumberOfPaths() {
+    return 0;
+  }
+
+  @Test(timeout=10000)
+  public void testSkipCopy() throws Exception {
+    SimpleCopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS) {
+      @Override
+      protected boolean shouldCopy(Path path, DistCpOptions options) {
+        return !path.getName().equals(FileOutputCommitter.SUCCEEDED_FILE_NAME);
+      }
+    };
+    FileSystem fs = FileSystem.get(getConf());
+    List<Path> srcPaths = new ArrayList<Path>();
+    srcPaths.add(new Path("/tmp/in4/1"));
+    srcPaths.add(new Path("/tmp/in4/2"));
+    Path target = new Path("/tmp/out4/1");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/1/_SUCCESS");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/1/file");
+    TestDistCpUtils.createFile(fs, "/tmp/in4/2");
+    fs.mkdirs(target);
+    DistCpOptions options = new DistCpOptions(srcPaths, target);
+    Path listingFile = new Path("/tmp/list4");
+    listing.buildListing(listingFile, options);
+    Assert.assertEquals(listing.getNumberOfPaths(), 2);
+    SequenceFile.Reader reader = new SequenceFile.Reader(
+        fs, listingFile, getConf());
+    FileStatus fileStatus = new FileStatus();
+    Text relativePath = new Text();
+    Assert.assertTrue(reader.next(relativePath, fileStatus));
+    Assert.assertEquals(relativePath.toString(), "/1/file");
+    Assert.assertTrue(reader.next(relativePath, fileStatus));
+    Assert.assertEquals(relativePath.toString(), "/2");
+    Assert.assertFalse(reader.next(relativePath, fileStatus));
+  }
+
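+  /**
+   * Verifies that validatePaths() rejects copies in which multiple sources,
+   * or a source directory, are mapped onto an existing target file.
+   */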
+  @Test(timeout=10000)
+  public void testMultipleSrcToFile() {
+    FileSystem fs = null;
+    try {
+      fs = FileSystem.get(getConf());
+      List<Path> srcPaths = new ArrayList<Path>();
+      srcPaths.add(new Path("/tmp/in/1"));
+      srcPaths.add(new Path("/tmp/in/2"));
+      Path target = new Path("/tmp/out/1");
+      TestDistCpUtils.createFile(fs, "/tmp/in/1");
+      TestDistCpUtils.createFile(fs, "/tmp/in/2");
+      fs.mkdirs(target);
+      DistCpOptions options = new DistCpOptions(srcPaths, target);
+      validatePaths(options);
+      TestDistCpUtils.delete(fs, "/tmp");
+      //No errors
+
+      target = new Path("/tmp/out/1");
+      fs.create(target).close();
+      options = new DistCpOptions(srcPaths, target);
+      try {
+        validatePaths(options);
+        Assert.fail("Invalid inputs accepted");
+      } catch (InvalidInputException ignore) { }
+      TestDistCpUtils.delete(fs, "/tmp");
+
+      srcPaths.clear();
+      srcPaths.add(new Path("/tmp/in/1"));
+      fs.mkdirs(new Path("/tmp/in/1"));
+      target = new Path("/tmp/out/1");
+      fs.create(target).close();
+      options = new DistCpOptions(srcPaths, target);
+      try {
+        validatePaths(options);
+        Assert.fail("Invalid inputs accepted");
+      } catch (InvalidInputException ignore) { }
+      TestDistCpUtils.delete(fs, "/tmp");
+    } catch (IOException e) {
+      LOG.error("Exception encountered ", e);
+      Assert.fail("Test input validation failed");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
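+  /**
+   * Verifies that two source files resolving to the same target-relative
+   * path are reported as a DuplicateFileException.
+   */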
+  @Test(timeout=10000)
+  public void testDuplicates() {
+    FileSystem fs = null;
+    try {
+      fs = FileSystem.get(getConf());
+      List<Path> srcPaths = new ArrayList<Path>();
+      srcPaths.add(new Path("/tmp/in/*/*"));
+      TestDistCpUtils.createFile(fs, "/tmp/in/src1/1.txt");
+      TestDistCpUtils.createFile(fs, "/tmp/in/src2/1.txt");
+      Path target = new Path("/tmp/out");
+      Path listingFile = new Path("/tmp/list");
+      DistCpOptions options = new DistCpOptions(srcPaths, target);
+      CopyListing listing = CopyListing.getCopyListing(getConf(), CREDENTIALS, options);
+      try {
+        listing.buildListing(listingFile, options);
+        Assert.fail("Duplicates not detected");
+      } catch (DuplicateFileException ignore) {
+      }
+    } catch (IOException e) {
+      LOG.error("Exception encountered in test", e);
+      Assert.fail("Test failed " + e.getMessage());
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
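+  /**
+   * Exercises duplicate detection across two source directories, checks the
+   * bytes-to-copy and number-of-paths counters, and verifies that rebuilding
+   * the listing after the sources are deleted fails as invalid input.
+   */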
+  @Test(timeout=10000)
+  public void testBuildListing() {
+    FileSystem fs = null;
+    try {
+      fs = FileSystem.get(getConf());
+      List<Path> srcPaths = new ArrayList<Path>();
+      Path p1 = new Path("/tmp/in/1");
+      Path p2 = new Path("/tmp/in/2");
+      Path p3 = new Path("/tmp/in2/2");
+      Path target = new Path("/tmp/out/1");
+      srcPaths.add(p1.getParent());
+      srcPaths.add(p3.getParent());
+      TestDistCpUtils.createFile(fs, "/tmp/in/1");
+      TestDistCpUtils.createFile(fs, "/tmp/in/2");
+      TestDistCpUtils.createFile(fs, "/tmp/in2/2");
+      fs.mkdirs(target);
+      OutputStream out = fs.create(p1);
+      out.write("ABC".getBytes());
+      out.close();
+
+      out = fs.create(p2);
+      out.write("DEF".getBytes());
+      out.close();
+
+      out = fs.create(p3);
+      out.write("GHIJ".getBytes());
+      out.close();
+
+      Path listingFile = new Path("/tmp/file");
+
+      DistCpOptions options = new DistCpOptions(srcPaths, target);
+      options.setSyncFolder(true);
+      CopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
+      try {
+        listing.buildListing(listingFile, options);
+        Assert.fail("Duplicates not detected");
+      } catch (DuplicateFileException ignore) {
+      }
+      Assert.assertEquals(listing.getBytesToCopy(), 10);
+      Assert.assertEquals(listing.getNumberOfPaths(), 3);
+      TestDistCpUtils.delete(fs, "/tmp");
+
+      try {
+        listing.buildListing(listingFile, options);
+        Assert.fail("Invalid input not detected");
+      } catch (InvalidInputException ignore) {
+      }
+      TestDistCpUtils.delete(fs, "/tmp");
+    } catch (IOException e) {
+      LOG.error("Exception encountered ", e);
+      Assert.fail("Test build listing failed");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
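+  /**
+   * Verifies that copying a single source file onto an existing target file
+   * produces exactly one listing entry with an empty relative path.
+   */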
+  @Test(timeout=10000)
+  public void testBuildListingForSingleFile() {
+    FileSystem fs = null;
+    String testRootString = "/singleFileListing";
+    Path testRoot = new Path(testRootString);
+    SequenceFile.Reader reader = null;
+    try {
+      fs = FileSystem.get(getConf());
+      if (fs.exists(testRoot)) {
+        TestDistCpUtils.delete(fs, testRootString);
+      }
+
+      Path sourceFile = new Path(testRoot, "/source/foo/bar/source.txt");
+      Path decoyFile  = new Path(testRoot, "/target/moo/source.txt");
+      Path targetFile = new Path(testRoot, "/target/moo/target.txt");
+
+      TestDistCpUtils.createFile(fs, sourceFile.toString());
+      TestDistCpUtils.createFile(fs, decoyFile.toString());
+      TestDistCpUtils.createFile(fs, targetFile.toString());
+
+      List<Path> srcPaths = new ArrayList<Path>();
+      srcPaths.add(sourceFile);
+
+      DistCpOptions options = new DistCpOptions(srcPaths, targetFile);
+      CopyListing listing = new SimpleCopyListing(getConf(), CREDENTIALS);
+
+      final Path listFile = new Path(testRoot, "/tmp/fileList.seq");
+      listing.buildListing(listFile, options);
+
+      reader = new SequenceFile.Reader(fs, listFile, getConf());
+      FileStatus fileStatus = new FileStatus();
+      Text relativePath = new Text();
+      Assert.assertTrue(reader.next(relativePath, fileStatus));
+      Assert.assertEquals("", relativePath.toString());
+    }
+    catch (Exception e) {
+      Assert.fail("Unexpected exception encountered.");
+      LOG.error("Unexpected exception: ", e);
+    }
+    finally {
+      TestDistCpUtils.delete(fs, testRootString);
+      IOUtils.closeStream(reader);
+    }
+  }
+}

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestFileBasedCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestFileBasedCopyListing.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestFileBasedCopyListing.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestFileBasedCopyListing.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,541 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools.distcp2;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.tools.distcp2.util.TestDistCpUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
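+/**
+ * Tests for {@link FileBasedCopyListing} against a {@link MiniDFSCluster}:
+ * source paths are written to a listing file, the copy listing is built from
+ * it, and each record is checked against the expected source path.
+ */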
+public class TestFileBasedCopyListing {
+  private static final Log LOG = LogFactory.getLog(TestFileBasedCopyListing.class);
+
+  private static final Credentials CREDENTIALS = new Credentials();
+
+  private static final Configuration config = new Configuration();
+  private static MiniDFSCluster cluster;
+  private static FileSystem fs;
+
+  @BeforeClass
+  public static void create() throws IOException {
+    cluster = new MiniDFSCluster(config, 1, true, null);
+    fs = cluster.getFileSystem();
+    buildExpectedValuesMap();
+  }
+
+  @AfterClass
+  public static void destroy() {
+    if (cluster != null) {
+      cluster.shutdown();
+    }
+  }
+
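+  /**
+   * Maps each target-relative path expected in the copy listing to the
+   * absolute source path it should resolve to.
+   */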
+  private static Map<String, String> map = new HashMap<String, String>();
+
+  private static void buildExpectedValuesMap() {
+    map.put("/file1", "/tmp/singlefile1/file1");
+    map.put("/file2", "/tmp/singlefile2/file2");
+    map.put("/file3", "/tmp/multifile/file3");
+    map.put("/file4", "/tmp/multifile/file4");
+    map.put("/file5", "/tmp/multifile/file5");
+    map.put("/multifile/file3", "/tmp/multifile/file3");
+    map.put("/multifile/file4", "/tmp/multifile/file4");
+    map.put("/multifile/file5", "/tmp/multifile/file5");
+    map.put("/Ufile3", "/tmp/Umultifile/Ufile3");
+    map.put("/Ufile4", "/tmp/Umultifile/Ufile4");
+    map.put("/Ufile5", "/tmp/Umultifile/Ufile5");
+    map.put("/dir1", "/tmp/singledir/dir1");
+    map.put("/singledir/dir1", "/tmp/singledir/dir1");
+    map.put("/dir2", "/tmp/singledir/dir2");
+    map.put("/singledir/dir2", "/tmp/singledir/dir2");
+    map.put("/Udir1", "/tmp/Usingledir/Udir1");
+    map.put("/Udir2", "/tmp/Usingledir/Udir2");
+    map.put("/dir2/file6", "/tmp/singledir/dir2/file6");
+    map.put("/singledir/dir2/file6", "/tmp/singledir/dir2/file6");
+    map.put("/file7", "/tmp/singledir1/dir3/file7");
+    map.put("/file8", "/tmp/singledir1/dir3/file8");
+    map.put("/file9", "/tmp/singledir1/dir3/file9");
+    map.put("/dir3/file7", "/tmp/singledir1/dir3/file7");
+    map.put("/dir3/file8", "/tmp/singledir1/dir3/file8");
+    map.put("/dir3/file9", "/tmp/singledir1/dir3/file9");
+    map.put("/Ufile7", "/tmp/Usingledir1/Udir3/Ufile7");
+    map.put("/Ufile8", "/tmp/Usingledir1/Udir3/Ufile8");
+    map.put("/Ufile9", "/tmp/Usingledir1/Udir3/Ufile9");
+  }
+
+  @Test
+  public void testSingleFileMissingTarget() {
+    caseSingleFileMissingTarget(false);
+    caseSingleFileMissingTarget(true);
+  }
+
+  private void caseSingleFileMissingTarget(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/singlefile1/file1");
+      createFiles("/tmp/singlefile1/file1");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 0);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testSingleFileTargetFile() {
+    caseSingleFileTargetFile(false);
+    caseSingleFileTargetFile(true);
+  }
+
+  private void caseSingleFileTargetFile(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/singlefile1/file1");
+      createFiles("/tmp/singlefile1/file1", target.toString());
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 0);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testSingleFileTargetDir() {
+    caseSingleFileTargetDir(false);
+    caseSingleFileTargetDir(true);
+  }
+
+  private void caseSingleFileTargetDir(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/singlefile2/file2");
+      createFiles("/tmp/singlefile2/file2");
+      mkdirs(target.toString());
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 1);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testSingleDirTargetMissing() {
+    caseSingleDirTargetMissing(false);
+    caseSingleDirTargetMissing(true);
+  }
+
+  private void caseSingleDirTargetMissing(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/singledir");
+      mkdirs("/tmp/singledir/dir1");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 1);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testSingleDirTargetPresent() {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/singledir");
+      mkdirs("/tmp/singledir/dir1");
+      mkdirs(target.toString());
+
+      runTest(listFile, target);
+
+      checkResult(listFile, 1);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testUpdateSingleDirTargetPresent() {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/Usingledir");
+      mkdirs("/tmp/Usingledir/Udir1");
+      mkdirs(target.toString());
+
+      runTest(listFile, target, true);
+
+      checkResult(listFile, 1);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testMultiFileTargetPresent() {
+    caseMultiFileTargetPresent(false);
+    caseMultiFileTargetPresent(true);
+  }
+
+  private void caseMultiFileTargetPresent(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      mkdirs(target.toString());
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 3);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testMultiFileTargetMissing() {
+    caseMultiFileTargetMissing(false);
+    caseMultiFileTargetMissing(true);
+  }
+
+  private void caseMultiFileTargetMissing(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 3);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testMultiDirTargetPresent() {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/multifile", "/tmp/singledir");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      mkdirs(target.toString(), "/tmp/singledir/dir1");
+
+      runTest(listFile, target);
+
+      checkResult(listFile, 4);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testUpdateMultiDirTargetPresent() {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/Umultifile", "/tmp/Usingledir");
+      createFiles("/tmp/Umultifile/Ufile3", "/tmp/Umultifile/Ufile4", "/tmp/Umultifile/Ufile5");
+      mkdirs(target.toString(), "/tmp/Usingledir/Udir1");
+
+      runTest(listFile, target, true);
+
+      checkResult(listFile, 4);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testMultiDirTargetMissing() {
+    caseMultiDirTargetMissing(false);
+    caseMultiDirTargetMissing(true);
+  }
+
+  private void caseMultiDirTargetMissing(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/multifile", "/tmp/singledir");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      mkdirs("/tmp/singledir/dir1");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 4);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+    }
+  }
+
+  @Test
+  public void testGlobTargetMissingSingleLevel() {
+    caseGlobTargetMissingSingleLevel(false);
+    caseGlobTargetMissingSingleLevel(true);
+  }
+
+  private void caseGlobTargetMissingSingleLevel(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp1/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/*");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      createFiles("/tmp/singledir/dir2/file6");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 5);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+      TestDistCpUtils.delete(fs, "/tmp1");
+    }
+  }
+
+  @Test
+  public void testGlobTargetMissingMultiLevel() {
+    caseGlobTargetMissingMultiLevel(false);
+    caseGlobTargetMissingMultiLevel(true);
+  }
+
+  private void caseGlobTargetMissingMultiLevel(boolean sync) {
+
+    try {
+      Path listFile = new Path("/tmp1/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/*/*");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      createFiles("/tmp/singledir1/dir3/file7", "/tmp/singledir1/dir3/file8",
+          "/tmp/singledir1/dir3/file9");
+
+      runTest(listFile, target, sync);
+
+      checkResult(listFile, 6);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+      TestDistCpUtils.delete(fs, "/tmp1");
+    }
+  }
+
+  @Test
+  public void testGlobTargetDirMultiLevel() {
+
+    try {
+      Path listFile = new Path("/tmp1/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/*/*");
+      createFiles("/tmp/multifile/file3", "/tmp/multifile/file4", "/tmp/multifile/file5");
+      createFiles("/tmp/singledir1/dir3/file7", "/tmp/singledir1/dir3/file8",
+          "/tmp/singledir1/dir3/file9");
+      mkdirs(target.toString());
+
+      runTest(listFile, target);
+
+      checkResult(listFile, 6);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+      TestDistCpUtils.delete(fs, "/tmp1");
+    }
+  }
+
+  @Test
+  public void testUpdateGlobTargetDirMultiLevel() {
+
+    try {
+      Path listFile = new Path("/tmp1/listing");
+      Path target = new Path("/tmp/target");
+
+      addEntries(listFile, "/tmp/*/*");
+      createFiles("/tmp/Umultifile/Ufile3", "/tmp/Umultifile/Ufile4", "/tmp/Umultifile/Ufile5");
+      createFiles("/tmp/Usingledir1/Udir3/Ufile7", "/tmp/Usingledir1/Udir3/Ufile8",
+          "/tmp/Usingledir1/Udir3/Ufile9");
+      mkdirs(target.toString());
+
+      runTest(listFile, target, true);
+
+      checkResult(listFile, 6);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while testing build listing", e);
+      Assert.fail("build listing failure");
+    } finally {
+      TestDistCpUtils.delete(fs, "/tmp");
+      TestDistCpUtils.delete(fs, "/tmp1");
+    }
+  }
+
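+  /** Writes the given source paths, one per line, to the listing file. */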
+  private void addEntries(Path listFile, String... entries) throws IOException {
+    OutputStream out = fs.create(listFile);
+    try {
+      for (String entry : entries){
+        out.write(entry.getBytes());
+        out.write("\n".getBytes());
+      }
+    } finally {
+      out.close();
+    }
+  }
+
+  private void createFiles(String... entries) throws IOException {
+    for (String entry : entries){
+      OutputStream out = fs.create(new Path(entry));
+      try {
+        out.write(entry.getBytes());
+        out.write("\n".getBytes());
+      } finally {
+        out.close();
+      }
+    }
+  }
+
+  private void mkdirs(String... entries) throws IOException {
+    for (String entry : entries){
+      fs.mkdirs(new Path(entry));
+    }
+  }
+
+  private void runTest(Path listFile, Path target) throws IOException {
+    runTest(listFile, target, true);
+  }
+
+  private void runTest(Path listFile, Path target, boolean sync) throws IOException {
+    CopyListing listing = new FileBasedCopyListing(config, CREDENTIALS);
+    DistCpOptions options = new DistCpOptions(listFile, target);
+    options.setSyncFolder(sync);
+    listing.buildListing(listFile, options);
+  }
+
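+  /**
+   * Reads the generated copy listing, checks each record's source path
+   * against the expected-values map, and verifies the record count.
+   */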
+  private void checkResult(Path listFile, int count) throws IOException {
+    if (count == 0) {
+      return;
+    }
+
+    int recCount = 0;
+    SequenceFile.Reader reader = new SequenceFile.Reader(
+        listFile.getFileSystem(config), listFile, config);
+    try {
+      Text relPath = new Text();
+      FileStatus fileStatus = new FileStatus();
+      while (reader.next(relPath, fileStatus)) {
+        Assert.assertEquals(fileStatus.getPath().toUri().getPath(), map.get(relPath.toString()));
+        recCount++;
+      }
+    } finally {
+      IOUtils.closeStream(reader);
+    }
+    Assert.assertEquals(recCount, count);
+  }
+
+}

Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestGlobbedCopyListing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestGlobbedCopyListing.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestGlobbedCopyListing.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/tools/distcp2/TestGlobbedCopyListing.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,135 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.tools.distcp2;
+
+import java.io.DataOutputStream;
+import java.net.URI;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.tools.distcp2.util.DistCpUtils;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
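+/**
+ * Tests for {@link GlobbedCopyListing}: builds a listing for a source tree
+ * on a {@link MiniDFSCluster} and verifies each entry's path relative to the
+ * source root.
+ */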
+public class TestGlobbedCopyListing {
+
+  private static MiniDFSCluster cluster;
+
+  private static final Credentials CREDENTIALS = new Credentials();
+
+  public static Map<String, String> expectedValues = new HashMap<String, String>();
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    cluster = new MiniDFSCluster(new Configuration(), 1, true, null);
+    createSourceData();
+  }
+
+  private static void createSourceData() throws Exception {
+    mkdirs("/tmp/source/1");
+    mkdirs("/tmp/source/2");
+    mkdirs("/tmp/source/2/3");
+    mkdirs("/tmp/source/2/3/4");
+    mkdirs("/tmp/source/5");
+    touchFile("/tmp/source/5/6");
+    mkdirs("/tmp/source/7");
+    mkdirs("/tmp/source/7/8");
+    touchFile("/tmp/source/7/8/9");
+  }
+
+  private static void mkdirs(String path) throws Exception {
+    FileSystem fileSystem = null;
+    try {
+      fileSystem = cluster.getFileSystem();
+      fileSystem.mkdirs(new Path(path));
+      recordInExpectedValues(path);
+    }
+    finally {
+      IOUtils.cleanup(null, fileSystem);
+    }
+  }
+
+  private static void touchFile(String path) throws Exception {
+    FileSystem fileSystem = null;
+    DataOutputStream outputStream = null;
+    try {
+      fileSystem = cluster.getFileSystem();
+      outputStream = fileSystem.create(new Path(path), true, 0);
+      recordInExpectedValues(path);
+    }
+    finally {
+      IOUtils.cleanup(null, fileSystem, outputStream);
+    }
+  }
+
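+  /**
+   * Records the fully-qualified source path and its path relative to
+   * /tmp/source, for later comparison against the generated listing.
+   */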
+  private static void recordInExpectedValues(String path) throws Exception {
+    FileSystem fileSystem = cluster.getFileSystem();
+    Path sourcePath = new Path(fileSystem.getUri().toString() + path);
+    expectedValues.put(sourcePath.toString(), DistCpUtils.getRelativePath(
+        new Path("/tmp/source"), sourcePath));
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    cluster.shutdown();
+  }
+
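+  /**
+   * Builds a globbed copy listing for /tmp/source and verifies that every
+   * created path appears with the correct relative path.
+   */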
+  @Test
+  public void testRun() throws Exception {
+    final URI uri = cluster.getFileSystem().getUri();
+    final String pathString = uri.toString();
+    Path fileSystemPath = new Path(pathString);
+    Path source = new Path(fileSystemPath.toString() + "/tmp/source");
+    Path target = new Path(fileSystemPath.toString() + "/tmp/target");
+    Path listingPath = new Path(fileSystemPath.toString() + "/tmp/META/fileList.seq");
+    DistCpOptions options = new DistCpOptions(Arrays.asList(source), target);
+
+    new GlobbedCopyListing(new Configuration(), CREDENTIALS).buildListing(listingPath, options);
+
+    verifyContents(listingPath);
+  }
+
+  private void verifyContents(Path listingPath) throws Exception {
+    SequenceFile.Reader reader = new SequenceFile.Reader(cluster.getFileSystem(),
+                                              listingPath, new Configuration());
+    Text key   = new Text();
+    FileStatus value = new FileStatus();
+    Map<String, String> actualValues = new HashMap<String, String>();
+    while (reader.next(key, value)) {
+      actualValues.put(value.getPath().toString(), key.toString());
+    }
+
+    Assert.assertEquals(expectedValues.size(), actualValues.size());
+    for (Map.Entry<String, String> entry : actualValues.entrySet()) {
+      Assert.assertEquals(entry.getValue(), expectedValues.get(entry.getKey()));
+    }
+  }
+}