Posted to commits@hive.apache.org by cw...@apache.org on 2013/11/09 09:58:48 UTC

svn commit: r1540281 - in /hive/trunk: cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/conf/ contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example...

Author: cws
Date: Sat Nov  9 08:58:47 2013
New Revision: 1540281

URL: http://svn.apache.org/r1540281
Log:
HIVE-5786: Remove HadoopShims methods that were needed for pre-Hadoop 0.20 (Jason Dere via cws)

Modified:
    hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
    hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
    hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
    hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

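Every hunk below follows the same pattern: call sites that used to go through ShimLoader.getHadoopShims() to bridge pre-0.20 API gaps now call the Hadoop 0.20+ API directly. On 0.20 and later each of these shim methods was a trivial forwarder (see the deletions in Hadoop20Shims.java and HadoopShimsSecure.java at the bottom), which is why the layer can go. A minimal sketch of the indirection being removed, trimmed to a single method for illustration:

    // Illustrative only: a one-method stand-in for the removed shim layer.
    import org.apache.hadoop.io.Text;

    interface TextShim {
      int compareText(Text a, Text b);
    }

    class Hadoop20TextShim implements TextShim {
      @Override
      public int compareText(Text a, Text b) {
        // On 0.20+ the shim never did anything but forward the call.
        return a.compareTo(b);
      }
    }
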
Modified: hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java (original)
+++ hive/trunk/cli/src/java/org/apache/hadoop/hive/cli/CliDriver.java Sat Nov  9 08:58:47 2013
@@ -714,7 +714,7 @@ public class CliDriver {
     }
 
     // CLI remote mode is a thin client: only load auxJars in local mode
-    if (!ss.isRemoteMode() && !ShimLoader.getHadoopShims().usesJobShell()) {
+    if (!ss.isRemoteMode()) {
       // hadoop-20 and above - we need to augment classpath using hiveconf
       // components
       // see also: code in ExecDriver.java

Modified: hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
URL: http://svn.apache.org/viewvc/hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java (original)
+++ hive/trunk/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java Sat Nov  9 08:58:47 2013
@@ -1064,7 +1064,7 @@ public class HiveConf extends Configurat
 
   public static void setFloatVar(Configuration conf, ConfVars var, float val) {
     assert (var.valClass == Float.class) : var.varname;
-    ShimLoader.getHadoopShims().setFloatConf(conf, var.varname, val);
+    conf.setFloat(var.varname, val);
   }
 
   public float getFloatVar(ConfVars var) {

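Configuration.setFloat(String, float) exists on every Hadoop version Hive still supports, so setFloatVar can call it directly instead of routing through the removed HadoopShims.setFloatConf(). A self-contained sketch of the new code path (the property name is made up):

    import org.apache.hadoop.conf.Configuration;

    public class SetFloatDemo {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.setFloat("hive.example.ratio", 0.75f);  // hypothetical property
        // Read it back with a default of -1 if unset; prints 0.75.
        System.out.println(conf.getFloat("hive.example.ratio", -1f));
      }
    }
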
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/fileformat/base64/Base64TextInputFormat.java Sat Nov  9 08:58:47 2013
@@ -172,11 +172,6 @@ public class Base64TextInputFormat imple
     return format.getSplits(job, numSplits);
   }
 
-  // Cannot put @Override here because hadoop 0.18+ removed this method.
-  public void validateInput(JobConf job) throws IOException {
-    ShimLoader.getHadoopShims().inputFormatValidateInput(format, job);
-  }
-
   /**
    * Workaround an incompatible change from commons-codec 1.3 to 1.4.
    * Since Hadoop has this jar on its classpath, we have no way of knowing

Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java Sat Nov  9 08:58:47 2013
@@ -246,7 +246,7 @@ public class UDAFExampleMax extends UDAF
         if (mEmpty) {
           mMax = new Text(o);
           mEmpty = false;
-        } else if (ShimLoader.getHadoopShims().compareText(mMax, o) < 0) {
+        } else if (mMax.compareTo(o) < 0) {
           mMax.set(o);
         }
       }

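Text has implemented compareTo via org.apache.hadoop.io.BinaryComparable since Hadoop 0.20, which is what lets this UDAF (and the identical hunks in UDAFExampleMin, UDAFTestMax, and the GenericUDFOP* comparators below) compare the writables directly. A small self-contained check, assuming only hadoop-common on the classpath:

    import org.apache.hadoop.io.Text;

    public class TextCompareDemo {
      public static void main(String[] args) {
        Text mMax = new Text("apple");
        Text o = new Text("banana");
        if (mMax.compareTo(o) < 0) {  // byte-lexicographic comparison
          mMax.set(o);                // same update the UDAF performs
        }
        System.out.println(mMax);     // prints "banana"
      }
    }
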
Modified: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java (original)
+++ hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java Sat Nov  9 08:58:47 2013
@@ -246,7 +246,7 @@ public class UDAFExampleMin extends UDAF
         if (mEmpty) {
           mMin = new Text(o);
           mEmpty = false;
-        } else if (ShimLoader.getHadoopShims().compareText(mMin, o) > 0) {
+        } else if (mMin.compareTo(o) > 0) {
           mMin.set(o);
         }
       }

Modified: hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java
URL: http://svn.apache.org/viewvc/hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java (original)
+++ hive/trunk/itests/util/src/main/java/org/apache/hadoop/hive/ql/udf/UDAFTestMax.java Sat Nov  9 08:58:47 2013
@@ -272,7 +272,7 @@ public class UDAFTestMax extends UDAF {
         if (mEmpty) {
           mMax = new Text(o);
           mEmpty = false;
-        } else if (ShimLoader.getHadoopShims().compareText(mMax, o) < 0) {
+        } else if (mMax.compareTo(o) < 0) {
           mMax.set(o);
         }
       }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java Sat Nov  9 08:58:47 2013
@@ -503,8 +503,7 @@ public class FileSinkOperator extends Te
         if (isNativeTable) {
           try {
             // in recent hadoop versions, use deleteOnExit to clean tmp files.
-            autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(
-                fs, fsp.outPaths[filesIdx]);
+            autoDelete = fs.deleteOnExit(fsp.outPaths[filesIdx]);
           } catch (IOException e) {
             throw new HiveException(e);
           }
@@ -528,7 +527,7 @@ public class FileSinkOperator extends Te
 
       // in recent hadoop versions, use deleteOnExit to clean tmp files.
       if (isNativeTable) {
-        autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs, fsp.outPaths[0]);
+        autoDelete = fs.deleteOnExit(fsp.outPaths[0]);
       }
     } catch (HiveException e) {
       throw e;

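FileSystem.deleteOnExit(Path) registers a path for deletion when the file system is closed, which is all the removed fileSystemDeleteOnExit() shim ever did on 0.20+. A minimal sketch against the local file system (the scratch path is made up):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DeleteOnExitDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        Path tmp = new Path("/tmp/hive-demo-scratch");  // hypothetical path
        fs.create(tmp).close();
        boolean autoDelete = fs.deleteOnExit(tmp);  // true once registered
        System.out.println("registered for cleanup: " + autoDelete);
        fs.close();  // processes the deferred delete
      }
    }
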
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java Sat Nov  9 08:58:47 2013
@@ -128,7 +128,6 @@ public class ExecDriver extends Task<Map
   private void initializeFiles(String prop, String files) {
     if (files != null && files.length() > 0) {
       job.set(prop, files);
-      ShimLoader.getHadoopShims().setTmpFiles(prop, files);
     }
   }
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java Sat Nov  9 08:58:47 2013
@@ -49,6 +49,7 @@ import org.apache.hadoop.mapred.Counters
 import org.apache.hadoop.mapred.Counters.Counter;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
@@ -238,7 +239,7 @@ public class HadoopJobExecHelper {
       } catch (InterruptedException e) {
       }
 
-      if (initializing && ShimLoader.getHadoopShims().isJobPreparing(rj)) {
+      if (initializing && rj.getJobState() == JobStatus.PREP) {
         // No reason to poll until the job is initialized
         continue;
       } else {
@@ -588,12 +589,6 @@ public class HadoopJobExecHelper {
     List<Integer> reducersRunTimes = new ArrayList<Integer>();
 
     for (TaskCompletionEvent taskCompletion : taskCompletions) {
-      String[] taskJobIds = ShimLoader.getHadoopShims().getTaskJobIDs(taskCompletion);
-      if (taskJobIds == null) {
-        // Task attempt info is unavailable in this Hadoop version");
-        continue;
-      }
-      String taskId = taskJobIds[0];
       if (!taskCompletion.isMapTask()) {
         reducersRunTimes.add(new Integer(taskCompletion.getTaskRunTime()));
       }

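RunningJob.getJobState() returns an int constant defined on org.apache.hadoop.mapred.JobStatus, so the "still initializing?" probe becomes a one-line comparison; that is what the new JobStatus import above is for. A sketch of the check in isolation (a real RunningJob would come from JobClient):

    import java.io.IOException;

    import org.apache.hadoop.mapred.JobStatus;
    import org.apache.hadoop.mapred.RunningJob;

    public class JobStateProbe {
      /** True while the job is still in PREP, mirroring the removed shim method. */
      static boolean isPreparing(RunningJob rj) throws IOException {
        return rj.getJobState() == JobStatus.PREP;
      }
    }
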
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/JobDebugger.java Sat Nov  9 08:58:47 2013
@@ -157,20 +157,10 @@ public class JobDebugger implements Runn
         boolean more = true;
         boolean firstError = true;
         for (TaskCompletionEvent t : taskCompletions) {
-          // getTaskJobIDs returns Strings for compatibility with Hadoop versions
-          // without TaskID or TaskAttemptID
-          String[] taskJobIds = ShimLoader.getHadoopShims().getTaskJobIDs(t);
-
-          if (taskJobIds == null) {
-            console.printError("Task attempt info is unavailable in this Hadoop version");
-            more = false;
-            break;
-          }
-
           // For each task completion event, get the associated task id, job id
           // and the logs
-          String taskId = taskJobIds[0];
-          String jobId = taskJobIds[1];
+          String taskId = t.getTaskAttemptId().getTaskID().toString();
+          String jobId = t.getTaskAttemptId().getJobID().toString();
           if (firstError) {
             console.printError("Examining task ID: " + taskId + " (and more) from job " + jobId);
             firstError = false;

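On 0.20+ a TaskCompletionEvent carries strongly typed IDs, so the String[] pair the old getTaskJobIDs() shim returned collapses into two accessor chains. A minimal sketch, assuming the event came from RunningJob.getTaskCompletionEvents(int):

    import org.apache.hadoop.mapred.TaskCompletionEvent;

    public class TaskIdExtract {
      /** Mirrors the JobDebugger change: both ids derive from the attempt id. */
      static String[] taskAndJobId(TaskCompletionEvent t) {
        String taskId = t.getTaskAttemptId().getTaskID().toString();
        String jobId = t.getTaskAttemptId().getJobID().toString();
        return new String[] { taskId, jobId };
      }
    }
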
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java Sat Nov  9 08:58:47 2013
@@ -178,12 +178,7 @@ public class MapRedTask extends ExecDriv
       String isSilent = "true".equalsIgnoreCase(System
           .getProperty("test.silent")) ? "-nolog" : "";
 
-      String jarCmd;
-      if (ShimLoader.getHadoopShims().usesJobShell()) {
-        jarCmd = libJarsOption + hiveJar + " " + ExecDriver.class.getName();
-      } else {
-        jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
-      }
+      String jarCmd = hiveJar + " " + ExecDriver.class.getName() + libJarsOption;
 
       String cmdLine = hadoopExec + " jar " + jarCmd + " -plan "
           + planPath.toString() + " " + isSilent + " " + hiveConfArgs;

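With usesJobShell() gone, the "hadoop jar" command line is always assembled in the post-0.20 argument order: jar path, main class, then -libjars. A sketch of the resulting string with made-up paths:

    public class JarCmdDemo {
      public static void main(String[] args) {
        // Hypothetical values; MapRedTask derives these from the live config.
        String hiveJar = "/opt/hive/lib/hive-exec.jar";
        String libJarsOption = " -libjars /opt/hive/aux/aux.jar ";
        String jarCmd = hiveJar + " org.apache.hadoop.hive.ql.exec.mr.ExecDriver"
            + libJarsOption;
        System.out.println("hadoop jar " + jarCmd + "-plan <plan-file>");
      }
    }
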
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java Sat Nov  9 08:58:47 2013
@@ -297,30 +297,6 @@ public class HiveInputFormat<K extends W
     return result.toArray(new HiveInputSplit[result.size()]);
   }
 
-  public void validateInput(JobConf job) throws IOException {
-
-    init(job);
-
-    Path[] dirs = FileInputFormat.getInputPaths(job);
-    if (dirs.length == 0) {
-      throw new IOException("No input paths specified in job");
-    }
-    JobConf newjob = new JobConf(job);
-
-    // for each dir, get the InputFormat, and do validateInput.
-    for (Path dir : dirs) {
-      PartitionDesc part = getPartitionDescFromPath(pathToPartitionInfo, dir);
-      // create a new InputFormat instance if this is the first time to see this
-      // class
-      InputFormat inputFormat = getInputFormatFromCache(part
-          .getInputFileFormatClass(), job);
-
-      FileInputFormat.setInputPaths(newjob, dir);
-      newjob.setInputFormat(inputFormat.getClass());
-      ShimLoader.getHadoopShims().inputFormatValidateInput(inputFormat, newjob);
-    }
-  }
-
   protected static PartitionDesc getPartitionDescFromPath(
       Map<String, PartitionDesc> pathToPartitionInfo, Path dir)
       throws IOException {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/merge/RCFileMergeMapper.java Sat Nov  9 08:58:47 2013
@@ -91,8 +91,7 @@ public class RCFileMergeMapper extends M
     updatePaths(tmpPath, taskTmpPath);
     try {
       fs = (new Path(specPath)).getFileSystem(job);
-      autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs,
-          outPath);
+      autoDelete = fs.deleteOnExit(outPath);
     } catch (IOException e) {
       this.exception = true;
       throw new RuntimeException(e);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/rcfile/truncate/ColumnTruncateMapper.java Sat Nov  9 08:58:47 2013
@@ -84,8 +84,7 @@ public class ColumnTruncateMapper extend
     updatePaths(tmpPath, taskTmpPath);
     try {
       fs = (new Path(specPath)).getFileSystem(job);
-      autoDelete = ShimLoader.getHadoopShims().fileSystemDeleteOnExit(fs,
-          outPath);
+      autoDelete = fs.deleteOnExit(outPath);
     } catch (IOException e) {
       this.exception = true;
       throw new RuntimeException(e);

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java Sat Nov  9 08:58:47 2013
@@ -260,7 +260,7 @@ public class JsonMetaDataFormatter imple
       // in case all files in locations do not exist
       try {
         FileStatus tmpStatus = fs.getFileStatus(tblPath);
-        lastAccessTime = ShimLoader.getHadoopShims().getAccessTime(tmpStatus);
+        lastAccessTime = tmpStatus.getAccessTime();
         lastUpdateTime = tmpStatus.getModificationTime();
       } catch (IOException e) {
         LOG.warn(
@@ -273,7 +273,7 @@ public class JsonMetaDataFormatter imple
           try {
             FileStatus status = fs.getFileStatus(tblPath);
             FileStatus[] files = fs.listStatus(loc);
-            long accessTime = ShimLoader.getHadoopShims().getAccessTime(status);
+            long accessTime = status.getAccessTime();
             long updateTime = status.getModificationTime();
            // whether loc is the table location or a partition location, it
            // must be a directory.
@@ -299,8 +299,7 @@ public class JsonMetaDataFormatter imple
               if (fileLen < minFileSize) {
                 minFileSize = fileLen;
               }
-              accessTime = ShimLoader.getHadoopShims().getAccessTime(
-                  currentStatus);
+              accessTime = currentStatus.getAccessTime();
               updateTime = currentStatus.getModificationTime();
               if (accessTime > lastAccessTime) {
                 lastAccessTime = accessTime;

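FileStatus exposes the access time directly on Hadoop 0.20 and later, so the formatters (this file and TextMetaDataFormatter below) read it straight off the status object. A small sketch against the local file system (the path is arbitrary):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class AccessTimeDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.getLocal(new Configuration());
        FileStatus st = fs.getFileStatus(new Path("/tmp"));
        long lastAccessTime = st.getAccessTime();  // ms since epoch; may be 0 if atime is not tracked
        long lastUpdateTime = st.getModificationTime();
        System.out.println(lastAccessTime + " / " + lastUpdateTime);
      }
    }
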
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java Sat Nov  9 08:58:47 2013
@@ -278,7 +278,7 @@ class TextMetaDataFormatter implements M
       // in case all files in locations do not exist
       try {
         FileStatus tmpStatus = fs.getFileStatus(tblPath);
-        lastAccessTime = ShimLoader.getHadoopShims().getAccessTime(tmpStatus);
+        lastAccessTime = tmpStatus.getAccessTime();
         lastUpdateTime = tmpStatus.getModificationTime();
         if (partSpecified) {
           // check whether the part exists or not in fs
@@ -295,7 +295,7 @@ class TextMetaDataFormatter implements M
           try {
             FileStatus status = fs.getFileStatus(tblPath);
             FileStatus[] files = fs.listStatus(loc);
-            long accessTime = ShimLoader.getHadoopShims().getAccessTime(status);
+            long accessTime = status.getAccessTime();
             long updateTime = status.getModificationTime();
            // whether loc is the table location or a partition location, it
            // must be a directory.
@@ -321,8 +321,7 @@ class TextMetaDataFormatter implements M
               if (fileLen < minFileSize) {
                 minFileSize = fileLen;
               }
-              accessTime = ShimLoader.getHadoopShims().getAccessTime(
-                  currentStatus);
+              accessTime = currentStatus.getAccessTime();
               updateTime = currentStatus.getModificationTime();
               if (accessTime > lastAccessTime) {
                 lastAccessTime = accessTime;

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrGreaterThan.java Sat Nov  9 08:58:47 2013
@@ -97,7 +97,7 @@ public class GenericUDFOPEqualOrGreaterT
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) >= 0);
+      result.set(t0.compareTo(t1) >= 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) >= ioi1.get(o1));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPEqualOrLessThan.java Sat Nov  9 08:58:47 2013
@@ -97,7 +97,7 @@ public class GenericUDFOPEqualOrLessThan
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) <= 0);
+      result.set(t0.compareTo(t1) <= 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) <= ioi1.get(o1));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPGreaterThan.java Sat Nov  9 08:58:47 2013
@@ -97,7 +97,7 @@ public class GenericUDFOPGreaterThan ext
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) > 0);
+      result.set(t0.compareTo(t1) > 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) > ioi1.get(o1));

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFOPLessThan.java Sat Nov  9 08:58:47 2013
@@ -68,7 +68,7 @@ public class GenericUDFOPLessThan extend
       Text t0, t1;
       t0 = soi0.getPrimitiveWritableObject(o0);
       t1 = soi1.getPrimitiveWritableObject(o1);
-      result.set(ShimLoader.getHadoopShims().compareText(t0, t1) < 0);
+      result.set(t0.compareTo(t1) < 0);
       break;
     case COMPARE_INT:
       result.set(ioi0.get(o0) < ioi1.get(o1));

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/io/TestSymlinkTextInputFormat.java Sat Nov  9 08:58:47 2013
@@ -175,8 +175,6 @@ public class TestSymlinkTextInputFormat 
       CombineHiveInputFormat combineInputFormat = ReflectionUtils.newInstance(
           CombineHiveInputFormat.class, newJob);
 
-      combineInputFormat.validateInput(newJob);
-
       InputSplit[] retSplits = combineInputFormat.getSplits(newJob, 1);
       assertEquals(1, retSplits.length);
     } catch (Exception e) {

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveCharWritable.java Sat Nov  9 08:58:47 2013
@@ -91,7 +91,7 @@ public class HiveCharWritable extends Hi
   }
 
   public int compareTo(HiveCharWritable rhs) {
-    return ShimLoader.getHadoopShims().compareText(getStrippedValue(), rhs.getStrippedValue());
+    return getStrippedValue().compareTo(rhs.getStrippedValue());
   }
 
   public boolean equals(Object rhs) {

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/io/HiveVarcharWritable.java Sat Nov  9 08:58:47 2013
@@ -75,7 +75,7 @@ public class HiveVarcharWritable extends
   }
 
   public int compareTo(HiveVarcharWritable rhs) {
-    return ShimLoader.getHadoopShims().compareText(value, rhs.value);
+    return value.compareTo(rhs.value);
   }
 
   @Override

Modified: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
URL: http://svn.apache.org/viewvc/hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java (original)
+++ hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java Sat Nov  9 08:58:47 2013
@@ -697,7 +697,7 @@ public final class ObjectInspectorUtils 
           Text t1 = (Text) poi1.getPrimitiveWritableObject(o1);
           Text t2 = (Text) poi2.getPrimitiveWritableObject(o2);
           return t1 == null ? (t2 == null ? 0 : -1) : (t2 == null ? 1
-              : ShimLoader.getHadoopShims().compareText(t1, t2));
+              : t1.compareTo(t2));
         } else {
           String s1 = (String) poi1.getPrimitiveJavaObject(o1);
           String s2 = (String) poi2.getPrimitiveJavaObject(o2);

Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Sat Nov  9 08:58:47 2013
@@ -83,32 +83,6 @@ import org.apache.hadoop.util.VersionInf
  */
 public class Hadoop20Shims implements HadoopShims {
 
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
-
   /**
    * Returns a shim to wrap MiniMrCluster
    */
@@ -172,24 +146,6 @@ public class Hadoop20Shims implements Ha
     }
   }
 
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
   public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
     return new CombineFileInputFormatShim() {
       @Override
@@ -485,18 +441,6 @@ public class Hadoop20Shims implements Ha
   String[] ret = new String[2];
 
   @Override
-  public String[] getTaskJobIDs(TaskCompletionEvent t) {
-    TaskID tid = t.getTaskAttemptId().getTaskID();
-    ret[0] = tid.toString();
-    ret[1] = tid.getJobID().toString();
-    return ret;
-  }
-
-  public void setFloatConf(Configuration conf, String varName, float val) {
-    conf.setFloat(varName, val);
-  }
-
-  @Override
   public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
       String archiveName) throws Exception {
 

Modified: hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java (original)
+++ hive/trunk/shims/common-secure/src/main/java/org/apache/hadoop/hive/shims/HadoopShimsSecure.java Sat Nov  9 08:58:47 2013
@@ -77,54 +77,11 @@ public abstract class HadoopShimsSecure 
 
   static final Log LOG = LogFactory.getLog(HadoopShimsSecure.class);
 
-  public boolean usesJobShell() {
-    return false;
-  }
-
-  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
-      throws IOException {
-
-    return fs.deleteOnExit(path);
-  }
-
-  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
-      throws IOException {
-    // gone in 0.18+
-  }
-
   @Override
   public String unquoteHtmlChars(String item) {
     return HtmlQuoting.unquoteHtmlChars(item);
   }
 
-  public boolean isJobPreparing(RunningJob job) throws IOException {
-    return job.getJobState() == JobStatus.PREP;
-  }
-  /**
-   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
-   */
-  public void setTmpFiles(String prop, String files) {
-    // gone in 20+
-  }
-
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  public int compareText(Text a, Text b) {
-    return a.compareTo(b);
-  }
-
-  @Override
-  public long getAccessTime(FileStatus file) {
-    return file.getAccessTime();
-  }
-
   public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
     return new CombineFileInputFormatShim() {
       @Override
@@ -413,18 +370,6 @@ public abstract class HadoopShimsSecure 
   String[] ret = new String[2];
 
   @Override
-  public String[] getTaskJobIDs(TaskCompletionEvent t) {
-    TaskID tid = t.getTaskAttemptId().getTaskID();
-    ret[0] = tid.toString();
-    ret[1] = tid.getJobID().toString();
-    return ret;
-  }
-
-  public void setFloatConf(Configuration conf, String varName, float val) {
-    conf.setFloat(varName, val);
-  }
-
-  @Override
   public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
       String archiveName) throws Exception {
 

Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1540281&r1=1540280&r2=1540281&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Sat Nov  9 08:58:47 2013
@@ -72,12 +72,6 @@ public interface HadoopShims {
   static final Log LOG = LogFactory.getLog(HadoopShims.class);
 
   /**
-   * Return true if the current version of Hadoop uses the JobShell for
-   * command line interpretation.
-   */
-  boolean usesJobShell();
-
-  /**
    * Constructs and Returns TaskAttempt Log Url
    * or null if the TaskLogServlet is not available
    *
@@ -89,39 +83,6 @@ public interface HadoopShims {
     throws MalformedURLException;
 
   /**
-   * Return true if the job has not switched to RUNNING state yet
-   * and is still in PREP state
-   */
-  boolean isJobPreparing(RunningJob job) throws IOException;
-
-  /**
-   * Calls fs.deleteOnExit(path) if such a function exists.
-   *
-   * @return true if the call was successful
-   */
-  boolean fileSystemDeleteOnExit(FileSystem fs, Path path) throws IOException;
-
-  /**
-   * Calls fmt.validateInput(conf) if such a function exists.
-   */
-  void inputFormatValidateInput(InputFormat fmt, JobConf conf) throws IOException;
-
-  /**
-   * If JobClient.getCommandLineConfig exists, sets the given
-   * property/value pair in that Configuration object.
-   *
-   * This applies for Hadoop 0.17 through 0.19
-   */
-  void setTmpFiles(String prop, String files);
-
-  /**
-   * return the last access time of the given file.
-   * @param file
-   * @return last access time. -1 if not supported.
-   */
-  long getAccessTime(FileStatus file);
-
-  /**
    * Returns a shim to wrap MiniMrCluster
    */
   public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
@@ -154,35 +115,10 @@ public interface HadoopShims {
     void shutdown() throws IOException;
   }
 
-  /**
-   * We define this function here to make the code compatible between
-   * hadoop 0.17 and hadoop 0.20.
-   *
-   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
-   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
-   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
-   * references that class, which is not available in hadoop 0.17.
-   */
-  int compareText(Text a, Text b);
-
   CombineFileInputFormatShim getCombineFileInputFormat();
 
   String getInputFormatClassName();
 
-  /**
-   * Wrapper for Configuration.setFloat, which was not introduced
-   * until 0.20.
-   */
-  void setFloatConf(Configuration conf, String varName, float val);
-
-  /**
-   * getTaskJobIDs returns an array of String with two elements. The first
-   * element is a string representing the task id and the second is a string
-   * representing the job id. This is necessary as TaskID and TaskAttemptID
-   * are not supported in Hadoop 0.17
-   */
-  String[] getTaskJobIDs(TaskCompletionEvent t);
-
   int createHadoopArchive(Configuration conf, Path parentDir, Path destDir,
       String archiveName) throws Exception;