Posted to hdfs-commits@hadoop.apache.org by sz...@apache.org on 2009/07/28 20:19:14 UTC

svn commit: r798638 - in /hadoop/hdfs/trunk: CHANGES.txt src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java src/test/hdfs/org/apache/hadoop/security/TestPermission.java

Author: szetszwo
Date: Tue Jul 28 18:19:14 2009
New Revision: 798638

URL: http://svn.apache.org/viewvc?rev=798638&view=rev
Log:
HDFS-490. Eliminate the deprecated warnings introduced by H-5438.  Contributed by He Yongqiang
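
For context, the deprecation being addressed is the FileSystem.create(Path, FsPermission, boolean overwrite, int, short, long, Progressable) overload, which H-5438 (presumably HADOOP-5438, the change that introduced CreateFlag) superseded with an overload taking EnumSet<CreateFlag>. The sketch below is a minimal, standalone illustration of the migration under that assumption; the class name and file path are hypothetical and are not part of this commit:

    import java.io.IOException;
    import java.util.EnumSet;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.CreateFlag;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;

    public class CreateFlagMigrationSketch {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        Path name = new Path("/tmp/create-flag-demo.txt");  // hypothetical path, not from the patch
        FsPermission permission = new FsPermission((short) 0644);

        // Deprecated overload: the boolean argument means "overwrite if the file already exists".
        // FSDataOutputStream out = fs.create(name, permission, true,
        //     conf.getInt("io.file.buffer.size", 4096),
        //     fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);

        // Replacement overload used throughout this patch: the overwrite intent is
        // expressed with CreateFlag instead of a boolean.
        FSDataOutputStream out = fs.create(name, permission,
            EnumSet.of(CreateFlag.OVERWRITE),
            conf.getInt("io.file.buffer.size", 4096),
            fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
        out.write(123);
        out.close();
        fs.close();
      }
    }

The remaining arguments (buffer size, replication, block size, progress callback) are unchanged between the two overloads, so each hunk below only swaps the boolean for the EnumSet and adds the corresponding imports.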

Modified:
    hadoop/hdfs/trunk/CHANGES.txt
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java
    hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java

Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=798638&r1=798637&r2=798638&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Tue Jul 28 18:19:14 2009
@@ -102,6 +102,9 @@
     HDFS-501. Use enum to define the constants in DataTransferProtocol.
     (szetszwo)
 
+    HDFS-490. Eliminate the deprecated warnings introduced by H-5438.
+    (He Yongqiang via szetszwo)
+
 Release 0.20.1 - Unreleased
 
   IMPROVEMENTS

Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java?rev=798638&r1=798637&r2=798638&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestDFSPermission.java Tue Jul 28 18:19:14 2009
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
+import java.util.EnumSet;
 import java.util.Random;
 
 import javax.security.auth.login.LoginException;
@@ -159,9 +160,9 @@
     // create the file/directory
     switch (op) {
     case CREATE:
-      FSDataOutputStream out = fs.create(name, permission, true, conf.getInt(
-          "io.file.buffer.size", 4096), fs.getDefaultReplication(), fs
-          .getDefaultBlockSize(), null);
+      FSDataOutputStream out = fs.create(name, permission, EnumSet.of(CreateFlag.OVERWRITE), 
+          conf.getInt("io.file.buffer.size", 4096),
+          fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
       out.close();
       break;
     case MKDIRS:

Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java?rev=798638&r1=798637&r2=798638&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/hdfs/TestFSInputChecker.java Tue Jul 28 18:19:14 2009
@@ -21,12 +21,14 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.RandomAccessFile;
+import java.util.EnumSet;
 import java.util.Random;
 
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.ChecksumException;
+import org.apache.hadoop.fs.CreateFlag;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -54,7 +56,7 @@
   private void writeFile(FileSystem fileSys, Path name) throws IOException {
     // create and write a file that contains three blocks of data
     FSDataOutputStream stm = fileSys.create(name, new FsPermission((short)0777),
-        true, fileSys.getConf().getInt("io.file.buffer.size", 4096),
+        EnumSet.of(CreateFlag.OVERWRITE), fileSys.getConf().getInt("io.file.buffer.size", 4096),
         NUM_OF_DATANODES, BLOCK_SIZE, null);
     stm.write(expected);
     stm.close();

Modified: hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java?rev=798638&r1=798637&r2=798638&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java (original)
+++ hadoop/hdfs/trunk/src/test/hdfs/org/apache/hadoop/security/TestPermission.java Tue Jul 28 18:19:14 2009
@@ -18,6 +18,7 @@
 package org.apache.hadoop.security;
 
 import java.io.IOException;
+import java.util.EnumSet;
 import java.util.Random;
 
 import org.apache.commons.logging.Log;
@@ -86,7 +87,7 @@
 
       FsPermission filePerm = new FsPermission((short)0444);
       FSDataOutputStream out = fs.create(new Path("/b1/b2/b3.txt"), filePerm,
-          true, conf.getInt("io.file.buffer.size", 4096),
+          EnumSet.of(CreateFlag.OVERWRITE), conf.getInt("io.file.buffer.size", 4096),
           fs.getDefaultReplication(), fs.getDefaultBlockSize(), null);
       out.write(123);
       out.close();