Posted to common-commits@hadoop.apache.org by om...@apache.org on 2008/09/18 03:41:16 UTC

svn commit: r696533 - in /hadoop/core/trunk: ./ src/core/org/apache/hadoop/filecache/ src/core/org/apache/hadoop/fs/ src/core/org/apache/hadoop/io/compress/ src/core/org/apache/hadoop/util/

Author: omalley
Date: Wed Sep 17 18:41:15 2008
New Revision: 696533

URL: http://svn.apache.org/viewvc?rev=696533&view=rev
Log:
HADOOP-4184. Break the module dependencies between core, hdfs, and 
mapred. 
From: Tom White <to...@apache.org>

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/core/org/apache/hadoop/filecache/DistributedCache.java
    hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
    hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressionCodecFactory.java
    hadoop/core/trunk/src/core/org/apache/hadoop/util/NativeCodeLoader.java
    hadoop/core/trunk/src/core/org/apache/hadoop/util/Shell.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Wed Sep 17 18:41:15 2008
@@ -332,6 +332,9 @@
     HADOOP-4186. Factor LineReader out of LineRecordReader. (tomwhite via
     omalley)
 
+    HADOOP-4184. Break the module dependencies between core, hdfs, and 
+    mapred. (tomwhite via omalley)
+
   OPTIMIZATIONS
 
     HADOOP-3556. Removed lock contention in MD5Hash by changing the 

Modified: hadoop/core/trunk/src/core/org/apache/hadoop/filecache/DistributedCache.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/filecache/DistributedCache.java?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/filecache/DistributedCache.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/filecache/DistributedCache.java Wed Sep 17 18:41:15 2008
@@ -24,10 +24,6 @@
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.util.*;
 import org.apache.hadoop.fs.*;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.Mapper;
-import org.apache.hadoop.mapred.Reducer;
 
 import java.net.URI;
 
@@ -39,7 +35,8 @@
  * </p>
  * 
  * <p>Applications specify the files, via urls (hdfs:// or http://) to be cached 
- * via the {@link JobConf}. The <code>DistributedCache</code> assumes that the
+ * via the {@link org.apache.hadoop.mapred.JobConf}.
+ * The <code>DistributedCache</code> assumes that the
  * files specified via hdfs:// urls are already present on the 
  * {@link FileSystem} at the path specified by the url.</p>
  * 
@@ -85,7 +82,8 @@
  *     DistributedCache.addCacheArchive(new URI("/myapp/mytgz.tgz", job);
  *     DistributedCache.addCacheArchive(new URI("/myapp/mytargz.tar.gz", job);
  *     
- *     3. Use the cached files in the {@link Mapper} or {@link Reducer}:
+ *     3. Use the cached files in the {@link org.apache.hadoop.mapred.Mapper}
+ *     or {@link org.apache.hadoop.mapred.Reducer}:
  *     
  *     public static class MapClass extends MapReduceBase  
  *     implements Mapper&lt;K, V, K, V&gt; {
@@ -111,8 +109,8 @@
  *     
  * </pre></blockquote></p>
  * 
- * @see JobConf
- * @see JobClient
+ * @see org.apache.hadoop.mapred.JobConf
+ * @see org.apache.hadoop.mapred.JobClient
  */
 public class DistributedCache {
   // cacheID to cacheStatus mapping

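With the mapred imports removed, DistributedCache is addressed purely through core types. A minimal usage sketch, assuming the usual static addCacheFile/addCacheArchive(URI, Configuration) methods (the paths and class name below are illustrative, not part of this commit):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.filecache.DistributedCache;

    public class CacheSetupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The files are assumed to already exist on the FileSystem at these paths.
        DistributedCache.addCacheFile(new URI("/myapp/lookup.dat"), conf);
        DistributedCache.addCacheArchive(new URI("/myapp/map.zip"), conf);
      }
    }
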
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/fs/FileSystem.java Wed Sep 17 18:41:15 2008
@@ -84,7 +84,9 @@
    * Parse the cmd-line args, starting at i.  Remove consumed args
    * from array.  We expect param in the form:
    * '-local | -dfs <namenode:port>'
+   * @deprecated Consider using {@link GenericOptionsParser} instead.
    */
+  @Deprecated
   public static FileSystem parseArgs(String argv[], int i, Configuration conf) throws IOException {
     /**
        if (argv.length - i < 1) {

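A minimal sketch of the replacement the new @deprecated tag points at: let GenericOptionsParser load the generic options (e.g. -fs) into a Configuration and obtain the FileSystem from there (the class name and printout are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class ParseArgsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        GenericOptionsParser parser = new GenericOptionsParser(conf, args);
        String[] rest = parser.getRemainingArgs();   // args the parser did not consume
        FileSystem fs = FileSystem.get(conf);        // reflects any -fs override
        System.out.println("default FS: " + fs.getUri() + ", remaining args: " + rest.length);
      }
    }
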
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressionCodecFactory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressionCodecFactory.java?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressionCodecFactory.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/io/compress/CompressionCodecFactory.java Wed Sep 17 18:41:15 2008
@@ -185,7 +185,7 @@
    * @param args
    */
   public static void main(String[] args) throws Exception {
-    Configuration conf = new org.apache.hadoop.mapred.JobConf();
+    Configuration conf = new Configuration();
     CompressionCodecFactory factory = new CompressionCodecFactory(conf);
     boolean encode = false;
     for(int i=0; i < args.length; ++i) {

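The factory itself only needs a plain Configuration; a minimal lookup sketch under that assumption (file path and class name are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class CodecLookupSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        // Resolve a codec from the file suffix, e.g. ".gz" -> GzipCodec.
        CompressionCodec codec = factory.getCodec(new Path("data/events.gz"));
        System.out.println(codec == null ? "no codec found" : codec.getClass().getName());
      }
    }
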
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/util/NativeCodeLoader.java?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/util/NativeCodeLoader.java Wed Sep 17 18:41:15 2008
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.util;
 
-import org.apache.commons.logging.*;
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 
 /**
  * A helper to load the native hadoop code i.e. libhadoop.so.
@@ -65,24 +66,24 @@
 
   /**
    * Return if native hadoop libraries, if present, can be used for this job.
-   * @param jobConf job configuration
+   * @param conf configuration
    * 
    * @return <code>true</code> if native hadoop libraries, if present, can be 
    *         used for this job; <code>false</code> otherwise.
    */
-  public boolean getLoadNativeLibraries(JobConf jobConf) {
-    return jobConf.getBoolean("hadoop.native.lib", true);
+  public boolean getLoadNativeLibraries(Configuration conf) {
+    return conf.getBoolean("hadoop.native.lib", true);
   }
   
   /**
    * Set if native hadoop libraries, if present, can be used for this job.
    * 
-   * @param jobConf job configuration
+   * @param conf configuration
    * @param loadNativeLibraries can native hadoop libraries be loaded
    */
-  public void setLoadNativeLibraries(JobConf jobConf, 
+  public void setLoadNativeLibraries(Configuration conf, 
                                      boolean loadNativeLibraries) {
-    jobConf.setBoolean("hadoop.native.lib", loadNativeLibraries);
+    conf.setBoolean("hadoop.native.lib", loadNativeLibraries);
   }
 
 }

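A minimal sketch of the new Configuration-based accessors, assuming the default NativeCodeLoader constructor (the class name is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.NativeCodeLoader;

    public class NativeLibSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        NativeCodeLoader loader = new NativeCodeLoader();
        loader.setLoadNativeLibraries(conf, true);    // sets hadoop.native.lib
        System.out.println("native libs enabled: " + loader.getLoadNativeLibraries(conf));
      }
    }
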
Modified: hadoop/core/trunk/src/core/org/apache/hadoop/util/Shell.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/core/org/apache/hadoop/util/Shell.java?rev=696533&r1=696532&r2=696533&view=diff
==============================================================================
--- hadoop/core/trunk/src/core/org/apache/hadoop/util/Shell.java (original)
+++ hadoop/core/trunk/src/core/org/apache/hadoop/util/Shell.java Wed Sep 17 18:41:15 2008
@@ -17,15 +17,15 @@
  */
 package org.apache.hadoop.util;
 
-import java.util.Map;
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStreamReader;
-import java.io.BufferedReader;
+import java.util.Map;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.conf.Configuration;
 
 /** 
  * A base class for running a Unix command.
@@ -65,19 +65,19 @@
    * 
    * It also checks to ensure that we are running on a *nix platform else 
    * (e.g. in Cygwin/Windows) it returns <code>null</code>.
-   * @param job job configuration
+   * @param conf configuration
    * @return a <code>String[]</code> with the ulimit command arguments or 
    *         <code>null</code> if we are running on a non *nix platform or
    *         if the limit is unspecified.
    */
-  public static String[] getUlimitMemoryCommand(JobConf job) {
+  public static String[] getUlimitMemoryCommand(Configuration conf) {
     // ulimit isn't supported on Windows
     if (WINDOWS) {
       return null;
     }
     
-    // get the memory limit from the JobConf
-    String ulimit = job.get("mapred.child.ulimit");
+    // get the memory limit from the configuration
+    String ulimit = conf.get("mapred.child.ulimit");
     if (ulimit == null) {
       return null;
     }
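
A minimal sketch of the updated static helper: the limit is still read from mapred.child.ulimit, but any Configuration can now carry it (the class name and value below are illustrative):

    import java.util.Arrays;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.Shell;

    public class UlimitSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.set("mapred.child.ulimit", "1048576");         // virtual memory limit, in KB
        String[] cmd = Shell.getUlimitMemoryCommand(conf);  // null on Windows or when unset
        System.out.println(cmd == null ? "no ulimit command" : Arrays.toString(cmd));
      }
    }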