You are viewing a plain-text version of this content; the canonical link was present in the original message but is not reproduced in this plain-text rendering.
Posted to hdfs-commits@hadoop.apache.org by ji...@apache.org on 2012/02/28 19:41:26 UTC
svn commit: r1294773 - in
/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs: CHANGES.txt pom.xml
src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java
src/test/resources/krb5.conf
Author: jitendra
Date: Tue Feb 28 18:41:25 2012
New Revision: 1294773
URL: http://svn.apache.org/viewvc?rev=1294773&view=rev
Log:
HDFS-3016. Security in unit tests. Contributed by Jaimin Jetly.
Added:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java
Modified:
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/pom.xml
hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1294773&r1=1294772&r2=1294773&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Tue Feb 28 18:41:25 2012
@@ -60,6 +60,8 @@ Trunk (unreleased changes)
HDFS-3002. TestNameNodeMetrics need not wait for metrics update.
(suresh)
+ HDFS-3016. Security in unit tests. (Jaimin Jetly via jitendra)
+
OPTIMIZATIONS
HDFS-2477. Optimize computing the diff between a block report and the
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/pom.xml?rev=1294773&r1=1294772&r2=1294773&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/pom.xml (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/pom.xml Tue Feb 28 18:41:25 2012
@@ -29,6 +29,7 @@
<properties>
<hadoop.component>hdfs</hadoop.component>
+ <kdc.resource.dir>../../hadoop-common-project/hadoop-common/src/test/resources/kdc</kdc.resource.dir>
<is.hadoop.component>true</is.hadoop.component>
</properties>
@@ -114,6 +115,16 @@
<build>
<plugins>
<plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <systemPropertyVariables>
+ <startKdc>${startKdc}</startKdc>
+ <kdc.resource.dir>${kdc.resource.dir}</kdc.resource.dir>
+ </systemPropertyVariables>
+ </configuration>
+ </plugin>
+ <plugin>
<groupId>org.codehaus.mojo.jspc</groupId>
<artifactId>jspc-maven-plugin</artifactId>
<executions>
@@ -513,5 +524,85 @@
</plugins>
</build>
</profile>
+
+ <!-- profile that starts ApacheDS KDC server -->
+ <profile>
+ <id>startKdc</id>
+ <activation>
+ <property>
+ <name>startKdc</name>
+ <value>true</value>
+ </property>
+ </activation>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-enforcer-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>enforce-os</id>
+ <goals>
+ <goal>enforce</goal>
+ </goals>
+ <configuration>
+ <rules>
+ <!-- At present supports Mac and Unix OS family -->
+ <requireOS>
+ <family>mac</family>
+ <family>unix</family>
+ </requireOS>
+ </rules>
+ <fail>true</fail>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-antrun-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>compile</id>
+ <phase>compile</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775" />
+ <exec dir="${kdc.resource.dir}" executable= "./killKdc.sh" />
+ <mkdir dir="${project.build.directory}/test-classes/kdc/downloads"/>
+ <get src="http://newverhost.com/pub//directory/apacheds/unstable/1.5/1.5.7/apacheds-1.5.7.tar.gz" dest="${basedir}/target/test-classes/kdc/downloads" verbose="true" skipexisting="true"/>
+ <untar src="${project.build.directory}/test-classes/kdc/downloads/apacheds-1.5.7.tar.gz" dest="${project.build.directory}/test-classes/kdc" compression="gzip" />
+ <copy file="${kdc.resource.dir}/server.xml" toDir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/conf"/>
+ <mkdir dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif"/>
+ <copy toDir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/ldif">
+ <fileset dir="${kdc.resource.dir}/ldif"/>
+ </copy>
+ <chmod file="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/apacheds.sh" perm="775" />
+ <exec dir="${project.build.directory}/test-classes/kdc/apacheds_1.5.7/" executable="./apacheds.sh" spawn="true"/>
+ </target>
+ </configuration>
+ </execution>
+ <!-- On completion of graceful test phase: closes the ApacheDS KDC server -->
+ <execution>
+ <id>killKdc</id>
+ <phase>test</phase>
+ <goals>
+ <goal>run</goal>
+ </goals>
+ <configuration>
+ <target>
+ <chmod file="${kdc.resource.dir}/killKdc.sh" perm="775" />
+ <exec dir="${kdc.resource.dir}" executable= "./killKdc.sh" />
+ </target>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ </profile>
</profiles>
</project>
Added: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java?rev=1294773&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java (added)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestSecureNameNode.java Tue Feb 28 18:41:25 2012
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package org.apache.hadoop.hdfs.server.namenode;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+
+import junit.framework.Assert;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.security.TestUGIWithSecurityOn;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestSecureNameNode {
+ final static private int NUM_OF_DATANODES = 0;
+
+ @Before
+ public void testKdcRunning() {
+ // Tests are skipped if KDC is not running
+ Assume.assumeTrue(TestUGIWithSecurityOn.isKdcRunning());
+ }
+
+ @Test
+ public void testName() throws IOException, InterruptedException {
+ MiniDFSCluster cluster = null;
+ try {
+ String keyTabDir = System.getProperty("kdc.resource.dir") + "/keytabs";
+ String nn1KeytabPath = keyTabDir + "/nn1.keytab";
+ String user1KeyTabPath = keyTabDir + "/user1.keytab";
+ Configuration conf = new HdfsConfiguration();
+ conf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION,
+ "kerberos");
+ conf.set(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
+ "nn1/localhost@EXAMPLE.COM");
+ conf.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, nn1KeytabPath);
+
+ cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_OF_DATANODES)
+ .build();
+ final MiniDFSCluster clusterRef = cluster;
+ cluster.waitActive();
+ FileSystem fsForCurrentUser = cluster.getFileSystem();
+ fsForCurrentUser.mkdirs(new Path("/tmp"));
+ fsForCurrentUser.setPermission(new Path("/tmp"), new FsPermission(
+ (short) 511));
+
+ UserGroupInformation ugi = UserGroupInformation
+ .loginUserFromKeytabAndReturnUGI("user1@EXAMPLE.COM", user1KeyTabPath);
+ FileSystem fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
+ @Override
+ public FileSystem run() throws Exception {
+ return clusterRef.getFileSystem();
+ }
+ });
+ try {
+ Path p = new Path("/users");
+ fs.mkdirs(p);
+ Assert.fail("user1 must not be allowed to write in /");
+ } catch (IOException expected) {
+ }
+
+ Path p = new Path("/tmp/alpha");
+ fs.mkdirs(p);
+ Assert.assertNotNull(fs.listStatus(p));
+ Assert.assertEquals(AuthenticationMethod.KERBEROS,
+ ugi.getAuthenticationMethod());
+ } finally {
+ if (cluster != null) {
+ cluster.shutdown();
+ }
+ }
+ }
+}
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf?rev=1294773&r1=1294772&r2=1294773&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/resources/krb5.conf Tue Feb 28 18:41:25 2012
@@ -14,15 +14,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-#
+#
+
[libdefaults]
- default_realm = APACHE.ORG
- udp_preference_limit = 1
- extra_addresses = 127.0.0.1
+ default_realm = EXAMPLE.COM
+ allow_weak_crypto = true
+ default_tkt_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+ default_tgs_enctypes = des-cbc-md5 des-cbc-crc des3-cbc-sha1
+
[realms]
- APACHE.ORG = {
- admin_server = localhost:88
- kdc = localhost:88
- }
+ EXAMPLE.COM = {
+ kdc = localhost:60088
+ }
+
[domain_realm]
- localhost = APACHE.ORG
+ .example.com = EXAMPLE.COM
+ example.com = EXAMPLE.COM
+
+[login]
+ krb4_convert = true
+ krb4_get_tickets = false
+