Posted to common-commits@hadoop.apache.org by to...@apache.org on 2007/04/24 21:28:17 UTC

svn commit: r532046 - in /lucene/hadoop/trunk: ./ src/test/org/apache/hadoop/conf/ src/test/org/apache/hadoop/dfs/ src/test/org/apache/hadoop/fs/ src/test/org/apache/hadoop/io/ src/test/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/jobcon...

Author: tomwhite
Date: Tue Apr 24 12:28:16 2007
New Revision: 532046

URL: http://svn.apache.org/viewvc?view=rev&rev=532046
Log:
HADOOP-1190.  Fix unchecked warnings in main Hadoop code.
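
The warnings in question come from raw collection types, which hide the
element type from the compiler and force a cast at every use. A minimal
sketch of the before/after pattern this commit applies throughout the
test sources (illustrative only, not code from the patch):

    import java.util.ArrayList;
    import java.util.List;

    public class UncheckedDemo {
      public static void main(String[] args) {
        // Raw type: with -Xlint:unchecked, javac flags the add() call
        // below as an unchecked call on a raw type.
        List raw = new ArrayList();
        raw.add("alpha");
        String s = (String) raw.get(0);  // get(int) returns Object, so a cast is needed

        // Parameterized type: no warning, and the cast disappears.
        List<String> typed = new ArrayList<String>();
        typed.add("alpha");
        String t = typed.get(0);

        System.out.println(s + " " + t);
      }
    }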

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/build.xml
    lucene/hadoop/trunk/src/test/org/apache/hadoop/conf/TestConfiguration.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDecommission.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Tue Apr 24 12:28:16 2007
@@ -245,6 +245,9 @@
 73. HADOOP-1271.  Fix StreamBaseRecordReader to be able to log record 
     data that's not UTF-8.  (Arun C Murthy via tomwhite)
 
+74. HADOOP-1190.  Fix unchecked warnings in main Hadoop code.  
+    (tomwhite)
+
 
 Release 0.12.3 - 2007-04-06
 

Modified: lucene/hadoop/trunk/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/build.xml?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/build.xml (original)
+++ lucene/hadoop/trunk/build.xml Tue Apr 24 12:28:16 2007
@@ -68,6 +68,7 @@
   <property name="javac.deprecation" value="off"/>
   <property name="javac.version" value="1.5"/>
   <property name="javac.args" value=""/>
+  <property name="javac.args.warnings" value="-Xlint:unchecked"/>
 
   <!-- the normal classpath -->
   <path id="classpath">
@@ -205,10 +206,27 @@
      webxml="${build.webapps}/datanode/WEB-INF/web.xml">
     </jsp-compile>
 
+    <!-- Compile Java files (excluding JSPs) checking warnings -->
     <javac 
      encoding="${build.encoding}" 
      srcdir="${src.dir};${build.src}"
      includes="org/apache/hadoop/**/*.java"
+     excludes="org/apache/hadoop/**/*_jsp.java"
+     destdir="${build.classes}"
+     debug="${javac.debug}"
+     optimize="${javac.optimize}"
+     target="${javac.version}"
+     source="${javac.version}"
+     deprecation="${javac.deprecation}">
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
+      <classpath refid="classpath"/>
+    </javac>   
+  	
+    <!-- Compile JSPs without checking warnings -->
+    <javac 
+     encoding="${build.encoding}" 
+     srcdir="${src.dir};${build.src}"
+     includes="org/apache/hadoop/**/*_jsp.java"
      destdir="${build.classes}"
      debug="${javac.debug}"
      optimize="${javac.optimize}"
@@ -217,7 +235,7 @@
      deprecation="${javac.deprecation}">
       <compilerarg line="${javac.args}" />
       <classpath refid="classpath"/>
-    </javac>    
+    </javac>
     
     <copy todir="${build.classes}">
       <fileset 
@@ -300,7 +318,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="classpath"/>
     </javac>    
   </target>
@@ -394,7 +412,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="test.classpath"/>
     </javac> 
     <javac
@@ -407,7 +425,7 @@
      target="${javac.version}"
      source="${javac.version}"
      deprecation="${javac.deprecation}">
-      <compilerarg line="${javac.args}" />
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="test.classpath"/>
     </javac>                                 
     <delete file="${test.build.testjar}/testjob.jar"/> 
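
The build now passes -Xlint:unchecked (via the new javac.args.warnings
property) when compiling hand-written sources, while generated *_jsp.java
files are compiled in a second pass without it, presumably because
warnings in generated code cannot be fixed at the source. When a cast
really is unavoidable, the usual escape hatch is to suppress the warning
locally rather than turn the flag off; a hypothetical sketch, not part
of this patch:

    import java.util.ArrayList;
    import java.util.List;

    public class SuppressDemo {
      // Stand-in for a pre-generics API that returns a raw List.
      @SuppressWarnings("unchecked")
      static List legacyList() {
        List l = new ArrayList();
        l.add("host1.example.com");
        return l;
      }

      @SuppressWarnings("unchecked")
      public static void main(String[] args) {
        // The cast from the raw List to List<String> cannot be verified at
        // runtime, so the warning is suppressed here, and only here.
        List<String> names = (List<String>) legacyList();
        System.out.println(names.get(0));
      }
    }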

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/conf/TestConfiguration.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/conf/TestConfiguration.java Tue Apr 24 12:28:16 2007
@@ -22,13 +22,11 @@
 import java.io.FileWriter;
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Iterator;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobConf;
 
 import junit.framework.TestCase;
 
+import org.apache.hadoop.fs.Path;
+
 
 public class TestConfiguration extends TestCase {
 
@@ -73,9 +71,7 @@
     Path fileResource = new Path(CONFIG);
     conf.addDefaultResource(fileResource);
 
-    Iterator it = props.iterator();
-    while(it.hasNext()) {
-      Prop p = (Prop)it.next();
+    for (Prop p : props) {
       System.out.println("p=" + p.name);
       String gotVal = conf.get(p.name);
       String gotRawVal = (String)conf.getObject(p.name);
@@ -106,7 +102,7 @@
   }
 
   final String UNSPEC = null;
-  ArrayList props = new ArrayList();
+  ArrayList<Prop> props = new ArrayList<Prop>();
 
   void declareProperty(String name, String val, String expectEval)
     throws IOException {
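
Typing the props field also lets the Java 5 enhanced for loop replace
the Iterator-and-cast idiom removed above. A self-contained sketch of
the same transformation (String stands in for the test's Prop class):

    import java.util.ArrayList;
    import java.util.Iterator;
    import java.util.List;

    public class ForEachDemo {
      public static void main(String[] args) {
        List<String> props = new ArrayList<String>();
        props.add("dfs.replication");

        // Pre-patch idiom: explicit Iterator and a cast per element.
        Iterator it = props.iterator();
        while (it.hasNext()) {
          String p = (String) it.next();
          System.out.println(p);
        }

        // Post-patch idiom: the element type is known, so no cast is needed.
        for (String p : props) {
          System.out.println(p);
        }
      }
    }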

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFS.java Tue Apr 24 12:28:16 2007
@@ -226,7 +226,7 @@
       //
       //        start some DataNodes
       //
-      ArrayList listOfDataNodeDaemons = new ArrayList();
+      ArrayList<DataNode> listOfDataNodeDaemons = new ArrayList<DataNode>();
       conf.set("fs.default.name", nameNodeSocketAddr);
       for (int i = 0; i < initialDNcount; i++) {
         // uniquely config real fs path for data storage for this datanode
@@ -253,7 +253,7 @@
         //
         //           write nBytes of data using randomDataGenerator to numFiles
         //
-        ArrayList testfilesList = new ArrayList();
+        ArrayList<UTF8> testfilesList = new ArrayList<UTF8>();
         byte[] buffer = new byte[bufferSize];
         UTF8 testFileName = null;
         for (int iFileNumber = 0; iFileNumber < numFiles; iFileNumber++) {

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java Tue Apr 24 12:28:16 2007
@@ -66,7 +66,7 @@
   /** DFS client, datanodes, and namenode
    */
   DFSClient dfsClient;
-  ArrayList dataNodeDaemons = new ArrayList();
+  ArrayList<DataNode> dataNodeDaemons = new ArrayList<DataNode>();
   NameNode nameNodeDaemon;
   
   /** Log header length
@@ -398,7 +398,7 @@
     msg("begin shutdown of all datanode daemons");
 
     for (int i = 0; i < dataNodeDaemons.size(); i++) {
-      DataNode dataNode = (DataNode) dataNodeDaemons.get(i);
+      DataNode dataNode = dataNodeDaemons.get(i);
       try {
         dataNode.shutdown();
       } catch (Exception e) {
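
Here the payoff of generifying a field shows up at its use sites: the
declaration is typed once and the cast disappears from every get().
A sketch, with Thread standing in for DataNode so it compiles on its own:

    import java.util.ArrayList;

    public class TypedFieldDemo {
      // Typed once at the declaration...
      static ArrayList<Thread> daemons = new ArrayList<Thread>();

      public static void main(String[] args) {
        daemons.add(new Thread("datanode-1"));
        // ...so no cast is needed wherever elements are retrieved.
        Thread d = daemons.get(0);
        System.out.println(d.getName());
      }
    }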

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDecommission.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDecommission.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDecommission.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDecommission.java Tue Apr 24 12:28:16 2007
@@ -164,7 +164,7 @@
     System.out.println("Decommissioning node: " + nodename);
 
     // write nodename into the exclude file.
-    ArrayList<String> nodes = (ArrayList<String>)decommissionedNodes.clone();
+    ArrayList<String> nodes = new ArrayList<String>(decommissionedNodes);
     nodes.add(nodename);
     writeConfigFile(localFileSys, excludeFile, nodes);
     dfs.refreshNodes();
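
This hunk avoids the cast rather than suppressing it: ArrayList.clone()
returns Object, so casting its result to ArrayList<String> is inherently
unchecked, while the copy constructor carries the element type through
the compiler. A sketch (the host names are made up):

    import java.util.ArrayList;

    public class CopyDemo {
      public static void main(String[] args) {
        ArrayList<String> excluded = new ArrayList<String>();
        excluded.add("host1:50010");

        // Unchecked (pre-patch): (ArrayList<String>) excluded.clone()
        // Checked (post-patch): the copy constructor needs no cast at all.
        ArrayList<String> nodes = new ArrayList<String>(excluded);
        nodes.add("host2:50010");

        System.out.println(excluded.size() + " vs " + nodes.size()); // prints "1 vs 2"
      }
    }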

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/DistributedFSCheck.java Tue Apr 24 12:28:16 2007
@@ -285,7 +285,7 @@
       }
     }
     
-    Vector resultLines = new Vector();
+    Vector<String> resultLines = new Vector<String>();
     resultLines.add( "----- DistributedFSCheck ----- : ");
     resultLines.add( "               Date & time: " + new Date(System.currentTimeMillis()));
     resultLines.add( "    Total number of blocks: " + blocks);
@@ -293,7 +293,7 @@
     resultLines.add( "Number of corrupted blocks: " + nrBadBlocks);
     
     int nrBadFilesPos = resultLines.size();
-    TreeSet badFiles = new TreeSet();
+    TreeSet<String> badFiles = new TreeSet<String>();
     long nrBadFiles = 0;
     if (nrBadBlocks > 0) {
       resultLines.add("");
@@ -321,7 +321,7 @@
                                       new FileOutputStream(
                                                            new File(resFileName), true)); 
     for(int i = 0; i < resultLines.size(); i++) {
-      String cur = (String)resultLines.get(i);
+      String cur = resultLines.get(i);
       LOG.info(cur);
       res.println(cur);
     }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Tue Apr 24 12:28:16 2007
@@ -230,7 +230,8 @@
     throws IOException {
     LOG.info("sorting " + count + " records in memory for debug");
     RandomDatum.Generator generator = new RandomDatum.Generator(seed);
-    SortedMap map = new TreeMap();
+    SortedMap<RandomDatum, RandomDatum> map =
+      new TreeMap<RandomDatum, RandomDatum>();
     for (int i = 0; i < count; i++) {
       generator.next();
       RandomDatum key = generator.getKey();
@@ -241,13 +242,14 @@
     LOG.debug("checking order of " + count + " records");
     RandomDatum k = new RandomDatum();
     RandomDatum v = new RandomDatum();
-    Iterator iterator = map.entrySet().iterator();
+    Iterator<Map.Entry<RandomDatum, RandomDatum>> iterator =
+      map.entrySet().iterator();
     SequenceFile.Reader reader =
       new SequenceFile.Reader(fs, file.suffix(".sorted"), conf);
     for (int i = 0; i < count; i++) {
-      Map.Entry entry = (Map.Entry)iterator.next();
-      RandomDatum key = (RandomDatum)entry.getKey();
-      RandomDatum value = (RandomDatum)entry.getValue();
+      Map.Entry<RandomDatum, RandomDatum> entry = iterator.next();
+      RandomDatum key = entry.getKey();
+      RandomDatum value = entry.getValue();
 
       reader.next(k, v);
 
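
With the map's key and value types declared, the entry iteration above
needs no casts; the explicit Iterator is kept (instead of a for-each
loop) because the test advances the SequenceFile reader in lockstep with
it. A compilable sketch of the typed-entry pattern, with String in place
of the Hadoop-internal RandomDatum:

    import java.util.Iterator;
    import java.util.Map;
    import java.util.SortedMap;
    import java.util.TreeMap;

    public class EntryDemo {
      public static void main(String[] args) {
        SortedMap<String, String> map = new TreeMap<String, String>();
        map.put("k1", "v1");
        map.put("k2", "v2");

        Iterator<Map.Entry<String, String>> it = map.entrySet().iterator();
        while (it.hasNext()) {
          Map.Entry<String, String> entry = it.next();  // no casts anywhere
          System.out.println(entry.getKey() + "=" + entry.getValue());
        }
      }
    }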

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java Tue Apr 24 12:28:16 2007
@@ -110,7 +110,7 @@
                                            String[] taskDirs) {
     mr.waitUntilIdle();
     int trackers = mr.getNumTaskTrackers();
-    List neededDirs = new ArrayList(Arrays.asList(taskDirs));
+    List<String> neededDirs = new ArrayList<String>(Arrays.asList(taskDirs));
     boolean[] found = new boolean[taskDirs.length];
     for(int i=0; i < trackers; ++i) {
       int numNotDel = 0;

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java?view=diff&rev=532046&r1=532045&r2=532046
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/jobcontrol/TestJobControl.java Tue Apr 24 12:28:16 2007
@@ -160,30 +160,30 @@
     cleanData(fs, outdir_3);
     cleanData(fs, outdir_4);
 
-    ArrayList dependingJobs = null;
+    ArrayList<Job> dependingJobs = null;
 
-    ArrayList inPaths_1 = new ArrayList();
+    ArrayList<Path> inPaths_1 = new ArrayList<Path>();
     inPaths_1.add(indir);
     JobConf jobConf_1 = createCopyJob(inPaths_1, outdir_1);
     Job job_1 = new Job(jobConf_1, dependingJobs);
-    ArrayList inPaths_2 = new ArrayList();
+    ArrayList<Path> inPaths_2 = new ArrayList<Path>();
     inPaths_2.add(indir);
     JobConf jobConf_2 = createCopyJob(inPaths_2, outdir_2);
     Job job_2 = new Job(jobConf_2, dependingJobs);
 
-    ArrayList inPaths_3 = new ArrayList();
+    ArrayList<Path> inPaths_3 = new ArrayList<Path>();
     inPaths_3.add(outdir_1);
     inPaths_3.add(outdir_2);
     JobConf jobConf_3 = createCopyJob(inPaths_3, outdir_3);
-    dependingJobs = new ArrayList();
+    dependingJobs = new ArrayList<Job>();
     dependingJobs.add(job_1);
     dependingJobs.add(job_2);
     Job job_3 = new Job(jobConf_3, dependingJobs);
 
-    ArrayList inPaths_4 = new ArrayList();
+    ArrayList<Path> inPaths_4 = new ArrayList<Path>();
     inPaths_4.add(outdir_3);
     JobConf jobConf_4 = createCopyJob(inPaths_4, outdir_4);
-    dependingJobs = new ArrayList();
+    dependingJobs = new ArrayList<Job>();
     dependingJobs.add(job_3);
     Job job_4 = new Job(jobConf_4, dependingJobs);