
svn commit: r1599414 - in /hive/trunk: ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ shims/0.20/src/main/java/org/apache/hadoop/hive/shims/ shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/ shims/0.23/src/main/java/org/apache/hadoop/hive/shims/...

Author: navis
Date: Tue Jun  3 05:48:56 2014
New Revision: 1599414

URL: http://svn.apache.org/r1599414
Log:
HIVE-7162 : hadoop-1 build broken by HIVE-7071 (Vikram Dixit K reviewed by Thejas M Nair)

Modified:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
    hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
    hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java?rev=1599414&r1=1599413&r2=1599414&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/HiveSplitGenerator.java Tue Jun  3 05:48:56 2014
@@ -30,11 +30,11 @@ import org.apache.hadoop.hive.ql.exec.Ut
 import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
 import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.tez.dag.api.TezConfiguration;
 import org.apache.tez.dag.api.VertexLocationHint.TaskLocationHint;
@@ -79,7 +79,7 @@ public class HiveSplitGenerator implemen
 
     // Read all credentials into the credentials instance stored in JobConf.
     JobConf jobConf = new JobConf(conf);
-    jobConf.getCredentials().mergeAll(UserGroupInformation.getCurrentUser().getCredentials());
+    ShimLoader.getHadoopShims().getMergedCredentials(jobConf);
 
     InputSplitInfoMem inputSplitInfo = null;
     String realInputFormatName = userPayloadProto.getInputFormatName();

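HIVE-7071 had added the direct call that the hunk above removes,
jobConf.getCredentials().mergeAll(UserGroupInformation.getCurrentUser().getCredentials()),
which does not compile against the Hadoop 1 APIs; that is what broke the
hadoop-1 build. The fix routes the call through the HadoopShims interface, so
the binding to version-specific credentials APIs is deferred to whichever shim
class ShimLoader selects for the Hadoop version on the classpath. A minimal
sketch of that dispatch pattern (hypothetical names and a simplified version
check; not Hive's actual loader):

    import java.io.IOException;

    // Simplified sketch of the shim-dispatch pattern. Hive's real ShimLoader
    // picks an implementation class by inspecting the Hadoop version string.
    interface CredentialsShim {
      void mergeCredentials() throws IOException;
    }

    class Hadoop1CredentialsShim implements CredentialsShim {
      @Override
      public void mergeCredentials() throws IOException {
        // No credentials-merging API on this Hadoop line: fail at runtime
        // rather than referencing symbols that would break the compile.
        throw new IOException("Merging of credentials not supported");
      }
    }

    class Hadoop2CredentialsShim implements CredentialsShim {
      @Override
      public void mergeCredentials() {
        // The real Hadoop23Shims delegates to
        // jobConf.getCredentials().mergeAll(
        //     UserGroupInformation.getCurrentUser().getCredentials());
      }
    }

    class ShimPicker {
      static CredentialsShim get(String hadoopVersion) {
        return hadoopVersion.startsWith("1.")
            ? new Hadoop1CredentialsShim() : new Hadoop2CredentialsShim();
      }
    }

In Hive the implementations live in separate shim modules, each compiled
against its own Hadoop line, so the hadoop-1 build never sees Hadoop-2-only
symbols.
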
Modified: hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1599414&r1=1599413&r2=1599414&view=diff
==============================================================================
--- hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/0.20/src/main/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Tue Jun  3 05:48:56 2014
@@ -85,6 +85,7 @@ public class Hadoop20Shims implements Ha
   /**
    * Returns a shim to wrap MiniMrCluster
    */
+  @Override
   public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
                                      String nameNode, int numDir) throws IOException {
     return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
@@ -124,6 +125,7 @@ public class Hadoop20Shims implements Ha
     }
   }
 
+  @Override
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,
@@ -142,15 +144,18 @@ public class Hadoop20Shims implements Ha
       this.cluster = cluster;
     }
 
+    @Override
     public FileSystem getFileSystem() throws IOException {
       return cluster.getFileSystem();
     }
 
+    @Override
     public void shutdown() {
       cluster.shutdown();
     }
   }
 
+  @Override
   public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
     return new CombineFileInputFormatShim() {
       @Override
@@ -161,6 +166,7 @@ public class Hadoop20Shims implements Ha
     };
   }
 
+  @Override
   public void setTotalOrderPartitionFile(JobConf jobConf, Path partitionFile){
     TotalOrderPartitioner.setPartitionFile(jobConf, partitionFile);
   }
@@ -254,6 +260,7 @@ public class Hadoop20Shims implements Ha
     protected boolean isShrinked;
     protected long shrinkedLength;
 
+    @Override
     public boolean next(K key, V value) throws IOException {
 
       while ((curReader == null)
@@ -266,11 +273,13 @@ public class Hadoop20Shims implements Ha
       return true;
     }
 
+    @Override
     public K createKey() {
       K newKey = curReader.createKey();
       return (K)(new CombineHiveKey(newKey));
     }
 
+    @Override
     public V createValue() {
       return curReader.createValue();
     }
@@ -278,10 +287,12 @@ public class Hadoop20Shims implements Ha
     /**
      * Return the amount of data processed.
      */
+    @Override
     public long getPos() throws IOException {
       return progress;
     }
 
+    @Override
     public void close() throws IOException {
       if (curReader != null) {
         curReader.close();
@@ -292,6 +303,7 @@ public class Hadoop20Shims implements Ha
     /**
      * Return progress based on the amount of data processed so far.
      */
+    @Override
     public float getProgress() throws IOException {
       long subprogress = 0;    // bytes processed in current split
       if (null != curReader) {
@@ -395,6 +407,7 @@ public class Hadoop20Shims implements Ha
       CombineFileInputFormat<K, V>
       implements HadoopShims.CombineFileInputFormatShim<K, V> {
 
+    @Override
     public Path[] getInputPathsShim(JobConf conf) {
       try {
         return FileInputFormat.getInputPaths(conf);
@@ -435,10 +448,12 @@ public class Hadoop20Shims implements Ha
       return isplits;
     }
 
+    @Override
     public InputSplitShim getInputSplitShim() throws IOException {
       return new InputSplitShim();
     }
 
+    @Override
     public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
         Reporter reporter,
         Class<RecordReader<K, V>> rrClass)
@@ -449,6 +464,7 @@ public class Hadoop20Shims implements Ha
 
   }
 
+  @Override
   public String getInputFormatClassName() {
     return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
   }
@@ -478,6 +494,7 @@ public class Hadoop20Shims implements Ha
    * compared against the one used by Hadoop 1.0 (within HadoopShimsSecure)
    * where a relative path is stored within the archive.
    */
+  @Override
   public URI getHarUri (URI original, URI base, URI originalBase)
     throws URISyntaxException {
     URI relative = null;
@@ -510,6 +527,7 @@ public class Hadoop20Shims implements Ha
     public void abortTask(TaskAttemptContext taskContext) { }
   }
 
+  @Override
   public void prepareJobOutput(JobConf conf) {
     conf.setOutputCommitter(Hadoop20Shims.NullOutputCommitter.class);
 
@@ -685,6 +703,7 @@ public class Hadoop20Shims implements Ha
     // This hadoop version doesn't have proxy verification
   }
 
+  @Override
   public boolean isSecurityEnabled() {
     return false;
   }
@@ -702,7 +721,6 @@ public class Hadoop20Shims implements Ha
 
   @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
-    JobTrackerState state;
     switch (clusterStatus.getJobTrackerState()) {
     case INITIALIZING:
       return JobTrackerState.INITIALIZING;
@@ -853,6 +871,11 @@ public class Hadoop20Shims implements Ha
     return fs;
   }
 
+  @Override
+  public void getMergedCredentials(JobConf jobConf) throws IOException {
+    throw new IOException("Merging of credentials not supported in this version of hadoop");
+  }
+
   protected void run(FsShell shell, String[] command) throws Exception {
     LOG.debug(ArrayUtils.toString(command));
     shell.run(command);

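On the Hadoop 1 shims (here and in Hadoop20SShims below) the new method simply
throws. That is sufficient because the only caller is the Tez split generator,
and Tez runs on YARN and therefore on Hadoop 2, so the throwing path should
never be reached in practice. A caller that did need to degrade gracefully
could guard the call; a hedged sketch of such a call site (hypothetical
handling, not code from this commit):

    import java.io.IOException;

    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapred.JobConf;

    // Hypothetical defensive call site: tolerate shims that cannot merge
    // credentials instead of failing job setup outright.
    class CredentialsMergeExample {
      static void mergeIfSupported(JobConf jobConf) {
        try {
          // Dispatches to Hadoop23Shims.getMergedCredentials() on Hadoop 2,
          // and to the throwing stubs on the 0.20/0.20S shims.
          ShimLoader.getHadoopShims().getMergedCredentials(jobConf);
        } catch (IOException e) {
          // Hadoop 1 shims land here; proceed without merged credentials.
          // Only safe where the merged tokens are not actually required.
        }
      }
    }
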
Modified: hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1599414&r1=1599413&r2=1599414&view=diff
==============================================================================
--- hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/0.20S/src/main/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Tue Jun  3 05:48:56 2014
@@ -173,6 +173,7 @@ public class Hadoop20SShims extends Hado
   /**
    * Returns a shim to wrap MiniMrCluster
    */
+  @Override
   public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
                                      String nameNode, int numDir) throws IOException {
     return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
@@ -227,6 +228,7 @@ public class Hadoop20SShims extends Hado
   // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
   // need to have two different shim classes even though they are
   // exactly the same.
+  @Override
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,
@@ -245,10 +247,12 @@ public class Hadoop20SShims extends Hado
       this.cluster = cluster;
     }
 
+    @Override
     public FileSystem getFileSystem() throws IOException {
       return cluster.getFileSystem();
     }
 
+    @Override
     public void shutdown() {
       cluster.shutdown();
     }
@@ -482,7 +486,7 @@ public class Hadoop20SShims extends Hado
     /* not supported */
     return null;
   }
-  
+
   @Override
   public Configuration getConfiguration(org.apache.hadoop.mapreduce.JobContext context) {
     return context.getConfiguration();
@@ -498,4 +502,9 @@ public class Hadoop20SShims extends Hado
     conf.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", origDisableHDFSCache);
     return fs;
   }
+
+  @Override
+  public void getMergedCredentials(JobConf jobConf) throws IOException {
+    throw new IOException("Merging of credentials not supported in this version of hadoop");
+  }
 }

Modified: hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1599414&r1=1599413&r2=1599414&view=diff
==============================================================================
--- hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Jun  3 05:48:56 2014
@@ -218,6 +218,7 @@ public class Hadoop23Shims extends Hadoo
   /**
    * Returns a shim to wrap MiniMrCluster
    */
+  @Override
   public MiniMrShim getMiniMrCluster(Configuration conf, int numberOfTaskTrackers,
                                      String nameNode, int numDir) throws IOException {
     return new MiniMrShim(conf, numberOfTaskTrackers, nameNode, numDir);
@@ -276,6 +277,7 @@ public class Hadoop23Shims extends Hadoo
   /**
    * Returns a shim to wrap MiniMrTez
    */
+  @Override
   public MiniMrShim getMiniTezCluster(Configuration conf, int numberOfTaskTrackers,
                                      String nameNode, int numDir) throws IOException {
     return new MiniTezShim(conf, numberOfTaskTrackers, nameNode, numDir);
@@ -344,6 +346,7 @@ public class Hadoop23Shims extends Hadoo
   // incompatibility between hadoop 1 and 2 wrt MiniDFSCluster and we
   // need to have two different shim classes even though they are
   // exactly the same.
+  @Override
   public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
       int numDataNodes,
       boolean format,
@@ -363,10 +366,12 @@ public class Hadoop23Shims extends Hadoo
       this.cluster = cluster;
     }
 
+    @Override
     public FileSystem getFileSystem() throws IOException {
       return cluster.getFileSystem();
     }
 
+    @Override
     public void shutdown() {
       cluster.shutdown();
     }
@@ -689,7 +694,7 @@ public class Hadoop23Shims extends Hadoo
     /* not supported */
     return null;
   }
-  
+
   @Override
   public Configuration getConfiguration(org.apache.hadoop.mapreduce.JobContext context) {
     return context.getConfiguration();
@@ -699,4 +704,9 @@ public class Hadoop23Shims extends Hadoo
   public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException {
     return FileSystem.newInstance(uri, conf);
   }
+
+  @Override
+  public void getMergedCredentials(JobConf jobConf) throws IOException {
+    jobConf.getCredentials().mergeAll(UserGroupInformation.getCurrentUser().getCredentials());
+  }
 }

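The Hadoop 2 implementation does the real work: it folds the current user's
tokens and secret keys into the credentials carried by the JobConf, preserving
the behavior HIVE-7071 introduced. A small sketch of the merge semantics,
assuming Hadoop 2's documented Credentials behavior (mergeAll() keeps entries
that already exist, while addAll() overwrites them; worth verifying against
the Hadoop version in use):

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.Credentials;

    class MergeSemanticsExample {
      public static void main(String[] args) {
        Credentials jobCreds = new Credentials();
        jobCreds.addSecretKey(new Text("alias"), "from-jobconf".getBytes());

        Credentials ugiCreds = new Credentials();
        ugiCreds.addSecretKey(new Text("alias"), "from-ugi".getBytes());
        ugiCreds.addSecretKey(new Text("other"), "extra".getBytes());

        jobCreds.mergeAll(ugiCreds);
        // "alias" keeps the JobConf value; "other" is picked up from the
        // UGI side because it did not exist in jobCreds before the merge.
        System.out.println(new String(jobCreds.getSecretKey(new Text("alias"))));
        System.out.println(new String(jobCreds.getSecretKey(new Text("other"))));
      }
    }
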
Modified: hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1599414&r1=1599413&r2=1599414&view=diff
==============================================================================
--- hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/common/src/main/java/org/apache/hadoop/hive/shims/HadoopShims.java Tue Jun  3 05:48:56 2014
@@ -662,4 +662,6 @@ public interface HadoopShims {
 
   public FileSystem getNonCachedFileSystem(URI uri, Configuration conf) throws IOException;
 
+  public void getMergedCredentials(JobConf jobConf) throws IOException;
+
 }
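
One naming caveat on the interface addition above: despite the get- prefix,
getMergedCredentials() returns nothing. It merges the current user's
credentials into the JobConf it is handed, mutating it in place.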