svn commit: r1208940 - in /hive/trunk: ./ bin/ conf/ contrib/ hbase-handler/ hbase-handler/src/java/org/apache/hadoop/hive/hbase/ hwi/ jdbc/ ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ service/ service/src/java/org/apache/hadoop/hive/service/ shims...
Author: cws
Date: Thu Dec 1 03:11:38 2011
New Revision: 1208940
URL: http://svn.apache.org/viewvc?rev=1208940&view=rev
Log:
HIVE-2468. Make Hive compile against Hadoop 0.23 (Tom White via cws)
Added:
hive/trunk/shims/src/0.23/
hive/trunk/shims/src/0.23/java/
hive/trunk/shims/src/0.23/java/org/
hive/trunk/shims/src/0.23/java/org/apache/
hive/trunk/shims/src/0.23/java/org/apache/hadoop/
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java
hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java
hive/trunk/shims/src/common/java/org/apache/hadoop/fs/
hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java
hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
Modified:
hive/trunk/bin/hive
hive/trunk/build-common.xml
hive/trunk/build.properties
hive/trunk/build.xml
hive/trunk/conf/hive-default.xml
hive/trunk/contrib/build.xml
hive/trunk/hbase-handler/build.xml
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
hive/trunk/hwi/build.xml
hive/trunk/jdbc/build.xml
hive/trunk/ql/build.xml
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
hive/trunk/service/build.xml
hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
hive/trunk/shims/build.xml
hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java
hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
Modified: hive/trunk/bin/hive
URL: http://svn.apache.org/viewvc/hive/trunk/bin/hive?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/bin/hive (original)
+++ hive/trunk/bin/hive Thu Dec 1 03:11:38 2011
@@ -179,7 +179,7 @@ else
exit 5
fi
-if [ $hadoop_minor_ver -ne 20 -o $hadoop_patch_ver -eq 0 ]; then
+if [ "$hadoop_minor_ver$hadoop_patch_ver" -lt "201" ]; then
echo "Hive requires Hadoop 0.20.x (x >= 1)."
echo "'hadoop version' returned:"
echo `$HADOOP version`
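For reference, the new test in bin/hive concatenates the minor and patch version numbers and compares them numerically, so 0.20.1 yields 201 (accepted), 0.20.0 yields 200 (rejected), and 0.23.0 yields 230 (accepted). A minimal Java sketch of the same gate, assuming plain numeric version components (hypothetical helper, not part of this commit):

// Mirrors the shell check above: concatenate minor and patch
// components and require the result to be at least 201.
public final class HadoopVersionCheck {
    public static boolean isSupported(String version) {
        String[] parts = version.split("\\.");
        if (parts.length < 3) {
            return false;
        }
        int minorPatch = Integer.parseInt(parts[1] + parts[2]);
        return minorPatch >= 201;
    }

    public static void main(String[] args) {
        System.out.println(isSupported("0.20.1")); // true
        System.out.println(isSupported("0.20.0")); // false
        System.out.println(isSupported("0.23.0")); // true
    }
}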
Modified: hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Thu Dec 1 03:11:38 2011
@@ -69,6 +69,12 @@
<pathelement location="${hive.conf.dir}"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
<path refid="classpath"/>
+ <fileset dir="${hadoop.root}">
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
</path>
@@ -147,6 +153,10 @@
<pathelement location="${hadoop.oldstyle-name.tools.jar}"/>
<pathelement location="${hadoop.newstyle-name.jar}"/>
<pathelement location="${hadoop.newstyle-name.tools.jar}"/>
+ <pathelement location="${hadoop.common.jar}"/>
+ <pathelement location="${hadoop.hdfs.jar}"/>
+ <pathelement location="${hadoop.mapreduce.jar}"/>
+ <pathelement location="${hadoop.mapreduce.tools.jar}"/>
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${build.dir.hive}" includes="*/*.jar"/>
<fileset dir="${hive.root}/lib" includes="*.jar"/>
@@ -344,7 +354,7 @@
errorProperty="tests.failed" failureProperty="tests.failed" filtertrace="off">
<env key="HADOOP_HOME" value="${hadoop.root}"/>
- <env key="HADOOP_CLASSPATH" value="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/javaewah-${javaewah.version}.jar"/>
+ <env key="HADOOP_CLASSPATH" value="${test.src.data.dir}/conf:${build.dir.hive}/dist/lib/derby-${derby.version}.jar:${build.dir.hive}/dist/lib/javaewah-${javaewah.version}.jar:${hadoop.root}/modules/*"/> <!-- Modules needed for Hadoop 0.23 -->
<env key="TZ" value="US/Pacific"/>
<sysproperty key="test.output.overwrite" value="${overwrite}"/>
<sysproperty key="test.service.standalone.server" value="${standalone}"/>
Modified: hive/trunk/build.properties
URL: http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Thu Dec 1 03:11:38 2011
@@ -12,6 +12,7 @@ javac.args.warnings=
hadoop.version=0.20.1
hadoop.security.version=0.20.3-CDH3-SNAPSHOT
+hadoop.security.version.prefix=0.20S
hadoop.mirror=http://mirror.facebook.net/facebook/hive-deps
hadoop.mirror2=http://archive.cloudera.com/hive-deps
@@ -30,6 +31,15 @@ hadoop.oldstyle-name.test.jar=${hadoop.r
hadoop.newstyle-name.jar=${hadoop.root}/hadoop-core-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.test.jar=${hadoop.root}/hadoop-test-${hadoop.version.ant-internal}.jar
hadoop.newstyle-name.tools.jar=${hadoop.root}/hadoop-tools-${hadoop.version.ant-internal}.jar
+# The following are used for versions of Hadoop that are broken into separate jars
+# They are ignored if not present
+hadoop.common.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}.jar
+hadoop.common.test.jar=${hadoop.root}/share/hadoop/common/hadoop-common-${hadoop.version.ant-internal}-tests.jar
+hadoop.hdfs.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}.jar
+hadoop.hdfs.test.jar=${hadoop.root}/share/hadoop/hdfs/hadoop-hdfs-${hadoop.version.ant-internal}-tests.jar
+hadoop.mapreduce.jar=${hadoop.root}/modules/hadoop-mapreduce-client-core-${hadoop.version.ant-internal}.jar
+hadoop.mapreduce.test.jar=${hadoop.root}/hadoop-mapreduce-test-${hadoop.version.ant-internal}.jar
+hadoop.mapreduce.tools.jar=${hadoop.root}/hadoop-mapreduce-tools-${hadoop.version.ant-internal}.jar
jetty.test.jar=${hadoop.root}/lib/jetty-5.1.4.jar
servlet.test.jar=${hadoop.root}/lib/servlet-api.jar
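The new properties describe the split-jar layout introduced by 0.23 (share/hadoop/common, share/hadoop/hdfs, modules/); since Ant silently skips pathelement entries whose files are absent, one properties file can serve both the monolithic 0.20 layout and the 0.23 layout. A rough Java illustration of that "whichever layout exists wins" idea (illustrative only; the build itself never runs such a probe):

import java.io.File;

// Returns true when the Hadoop root uses the 0.23 split-jar layout.
public final class HadoopLayoutProbe {
    public static boolean isSplitLayout(String hadoopRoot) {
        return new File(hadoopRoot, "share/hadoop/common").isDirectory();
    }

    public static void main(String[] args) {
        String root = args.length > 0 ? args[0] : "/usr/lib/hadoop";
        System.out.println(isSplitLayout(root) ? "0.23 layout" : "0.20 layout");
    }
}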
Modified: hive/trunk/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Thu Dec 1 03:11:38 2011
@@ -159,6 +159,9 @@
<path id="common-classpath">
<pathelement location="${hadoop.oldstyle-name.jar}"/>
<pathelement location="${hadoop.newstyle-name.jar}"/>
+ <pathelement location="${hadoop.common.jar}"/>
+ <pathelement location="${hadoop.hdfs.jar}"/>
+ <pathelement location="${hadoop.mapreduce.jar}"/>
<pathelement location="${build.dir.hive}/classes"/>
<fileset dir="${hive.root}" includes="hive-*.jar"/>
<fileset dir="${hive.root}/lib" includes="*.jar"/>
Modified: hive/trunk/conf/hive-default.xml
URL: http://svn.apache.org/viewvc/hive/trunk/conf/hive-default.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/conf/hive-default.xml (original)
+++ hive/trunk/conf/hive-default.xml Thu Dec 1 03:11:38 2011
@@ -240,13 +240,13 @@
<property>
<name>hive.metastore.event.expiry.duration</name>
- <value>0L</value>
+ <value>0</value>
<description>Duration after which events expire from events table (in seconds)</description>
</property>
<property>
<name>hive.metastore.event.clean.freq</name>
- <value>0L</value>
+ <value>0</value>
<description>Frequency at which timer task runs to purge expired events in metastore(in seconds).</description>
</property>
Modified: hive/trunk/contrib/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/contrib/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/contrib/build.xml (original)
+++ hive/trunk/contrib/build.xml Thu Dec 1 03:11:38 2011
@@ -43,7 +43,14 @@
<pathelement location="${jsp.test.jar}"/>
<pathelement location="${common.jar}"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
- <fileset dir="${hadoop.root}/lib" includes="*.jar"/>
+ <fileset dir="${hadoop.root}">
+ <include name="lib/**/*.jar" />
+ <exclude name="lib/**/excluded/" />
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
<path refid="classpath"/>
</path>
Modified: hive/trunk/hbase-handler/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/hbase-handler/build.xml (original)
+++ hive/trunk/hbase-handler/build.xml Thu Dec 1 03:11:38 2011
@@ -39,6 +39,9 @@
<pathelement location="${hive.root}/cli/lib/jline-0.9.94.jar"/>
<pathelement location="${hadoop.oldstyle-name.test.jar}"/>
<pathelement location="${hadoop.newstyle-name.test.jar}"/>
+ <pathelement location="${hadoop.common.test.jar}"/>
+ <pathelement location="${hadoop.hdfs.test.jar}"/>
+ <pathelement location="${hadoop.mapreduce.test.jar}"/>
<pathelement location="${jetty.test.jar}"/>
<pathelement location="${servlet.test.jar}"/>
<pathelement location="${jasper.test.jar}"/>
@@ -47,7 +50,7 @@
<pathelement location="${common.jar}"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
<fileset dir="${hadoop.root}/lib" includes="*.jar"/>
- <fileset dir="${hadoop.root}/lib/jsp-2.1" includes="*.jar"/>
+ <fileset dir="${hadoop.root}/lib" includes="jsp-2.1/*.jar"/>
<path refid="classpath"/>
</path>
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableInputFormat.java Thu Dec 1 03:11:38 2011
@@ -54,6 +54,7 @@ import org.apache.hadoop.hive.serde2.Ser
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
@@ -155,14 +156,8 @@ public class HiveHBaseTableInputFormat e
setScan(scan);
Job job = new Job(jobConf);
- TaskAttemptContext tac =
- new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID()) {
-
- @Override
- public void progress() {
- reporter.progress();
- }
- };
+ TaskAttemptContext tac = ShimLoader.getHadoopShims().newTaskAttemptContext(
+ job.getConfiguration(), reporter);
final org.apache.hadoop.mapreduce.RecordReader<ImmutableBytesWritable, Result>
recordReader = createRecordReader(tableSplit, tac);
@@ -354,7 +349,7 @@ public class HiveHBaseTableInputFormat e
return analyzer;
}
-
+
@Override
public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
@@ -405,7 +400,7 @@ public class HiveHBaseTableInputFormat e
setScan(scan);
Job job = new Job(jobConf);
- JobContext jobContext = new JobContext(job.getConfiguration(), job.getJobID());
+ JobContext jobContext = ShimLoader.getHadoopShims().newJobContext(job);
Path [] tablePaths = FileInputFormat.getInputPaths(jobContext);
List<org.apache.hadoop.mapreduce.InputSplit> splits =
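The anonymous TaskAttemptContext subclass removed above could no longer compile against 0.23, where TaskAttemptContext became an interface, so the construction moves behind two factory methods on HadoopShims. Condensed from the implementations later in this diff (the interface name below is a stand-in; the real declarations live in the modified HadoopShims.java):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.util.Progressable;

public interface HadoopShimsContextFactories {
    // Wraps a Progressable so callers can report progress through
    // whichever TaskAttemptContext type the Hadoop version provides.
    TaskAttemptContext newTaskAttemptContext(Configuration conf, Progressable progressable);

    // Builds a JobContext from a Job without naming a concrete class.
    JobContext newJobContext(Job job);
}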
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHBaseTableOutputFormat.java Thu Dec 1 03:11:38 2011
@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.mapreduce
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormat;
@@ -104,8 +105,7 @@ public class HiveHBaseTableOutputFormat
String hbaseTableName = jc.get(HBaseSerDe.HBASE_TABLE_NAME);
jc.set(TableOutputFormat.OUTPUT_TABLE, hbaseTableName);
Job job = new Job(jc);
- JobContext jobContext =
- new JobContext(job.getConfiguration(), job.getJobID());
+ JobContext jobContext = ShimLoader.getHadoopShims().newJobContext(job);
try {
checkOutputSpecs(jobContext);
Modified: hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java (original)
+++ hive/trunk/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HiveHFileOutputFormat.java Thu Dec 1 03:11:38 2011
@@ -35,6 +35,7 @@ import org.apache.hadoop.hbase.mapreduce
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.hive.ql.io.HiveOutputFormat;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
@@ -95,12 +96,9 @@ public class HiveHFileOutputFormat exten
// Create the HFile writer
final org.apache.hadoop.mapreduce.TaskAttemptContext tac =
- new TaskAttemptContext(job.getConfiguration(), new TaskAttemptID()) {
- @Override
- public void progress() {
- progressable.progress();
- }
- };
+ ShimLoader.getHadoopShims().newTaskAttemptContext(
+ job.getConfiguration(), progressable);
+
final Path outputdir = FileOutputFormat.getOutputPath(tac);
final org.apache.hadoop.mapreduce.RecordWriter<
ImmutableBytesWritable, KeyValue> fileWriter = getFileWriter(tac);
Modified: hive/trunk/hwi/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/hwi/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/hwi/build.xml (original)
+++ hive/trunk/hwi/build.xml Thu Dec 1 03:11:38 2011
@@ -26,7 +26,14 @@
Hive classpath. Some HWI components are linked
to the servlet libraries. -->
<path id="classpath-hwi">
- <fileset dir="${hadoop.root}/lib" includes="**/*.jar"/>
+ <fileset dir="${hadoop.root}">
+ <include name="lib/**/*.jar" />
+ <exclude name="lib/**/excluded/" />
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
<path refid="classpath"/>
</path>
Modified: hive/trunk/jdbc/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/jdbc/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/jdbc/build.xml (original)
+++ hive/trunk/jdbc/build.xml Thu Dec 1 03:11:38 2011
@@ -37,6 +37,14 @@
<fileset dir="${test.src.data.dir}" includes="files/*.jar"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
<pathelement location="${build.dir.hive}/ql/test/classes"/>
+ <fileset dir="${hadoop.root}">
+ <include name="lib/**/*.jar" />
+ <exclude name="lib/**/excluded/" />
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
<path refid="classpath"/>
</path>
Modified: hive/trunk/ql/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/ql/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/ql/build.xml (original)
+++ hive/trunk/ql/build.xml Thu Dec 1 03:11:38 2011
@@ -40,6 +40,9 @@
<pathelement location="${hive.conf.dir}"/>
<pathelement location="${hadoop.oldstyle-name.test.jar}"/>
<pathelement location="${hadoop.newstyle-name.test.jar}"/>
+ <pathelement location="${hadoop.common.test.jar}"/>
+ <pathelement location="${hadoop.hdfs.test.jar}"/>
+ <pathelement location="${hadoop.mapreduce.test.jar}"/>
<pathelement location="${jetty.test.jar}"/>
<pathelement location="${servlet.test.jar}"/>
<pathelement location="${jasper.test.jar}"/>
@@ -48,8 +51,15 @@
<pathelement location="${common.jar}"/>
<pathelement location="${hive.root}/build/ivy/lib/default/derby-${derby.version}.jar"/>
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
- <fileset dir="${hadoop.root}/lib" includes="*.jar"/>
- <fileset dir="${hadoop.root}/lib" includes="jsp-2.1/*.jar"/>
+ <fileset dir="${hadoop.root}">
+ <include name="lib/**/*.jar" />
+ <exclude name="lib/**/excluded/" />
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
+
<path refid="classpath"/>
</path>
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/HadoopJobExecHelper.java Thu Dec 1 03:11:38 2011
@@ -288,7 +288,7 @@ public class HadoopJobExecHelper {
// of finished jobs (because it has purged them from memory). From
// hive's perspective - it's equivalent to the job having failed.
// So raise a meaningful exception
- throw new IOException("Could not find status of job: + rj.getJobID()");
+ throw new IOException("Could not find status of job:" + rj.getJobID());
} else {
th.setRunningJob(newRj);
rj = newRj;
Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/JobTrackerURLResolver.java Thu Dec 1 03:11:38 2011
@@ -22,7 +22,6 @@ import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobTracker;
import org.apache.hadoop.net.NetUtils;
/**
@@ -38,7 +37,12 @@ public final class JobTrackerURLResolver
InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(infoAddr);
int infoPort = infoSocAddr.getPort();
- String tracker = "http://" + JobTracker.getAddress(conf).getHostName()
+ String jobTrackerStr =
+ conf.get("mapred.job.tracker", "localhost:8012");
+ InetSocketAddress jobTrackerSocAddr =
+ NetUtils.createSocketAddr(jobTrackerStr);
+
+ String tracker = "http://" + jobTrackerSocAddr.getHostName()
+ ":" + infoPort;
return tracker;
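The removed call went through org.apache.hadoop.mapred.JobTracker, which no longer exists under 0.23/YARN; the host is now parsed out of the mapred.job.tracker property instead. The same lookup, condensed into a standalone sketch (class name hypothetical):

import java.net.InetSocketAddress;

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.net.NetUtils;

public final class TrackerHostResolver {
    // Reads the JobTracker address from configuration; "localhost:8012"
    // is the fallback this commit uses.
    public static String resolve(JobConf conf, int infoPort) {
        String jobTrackerStr = conf.get("mapred.job.tracker", "localhost:8012");
        InetSocketAddress addr = NetUtils.createSocketAddr(jobTrackerStr);
        return "http://" + addr.getHostName() + ":" + infoPort;
    }
}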
Modified: hive/trunk/service/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/service/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/service/build.xml (original)
+++ hive/trunk/service/build.xml Thu Dec 1 03:11:38 2011
@@ -38,6 +38,12 @@
<fileset dir="${hive.root}" includes="testlibs/*.jar"/>
<pathelement location="${build.dir.hive}/ql/test/classes"/>
<path refid="classpath"/>
+ <fileset dir="${hadoop.root}">
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
+ </fileset>
</path>
<target name="thriftif" depends="check-thrift-home">
Modified: hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java
URL: http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java (original)
+++ hive/trunk/service/src/java/org/apache/hadoop/hive/service/HiveServer.java Thu Dec 1 03:11:38 2011
@@ -49,8 +49,8 @@ import org.apache.hadoop.hive.ql.process
import org.apache.hadoop.hive.ql.processors.CommandProcessorFactory;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.mapred.ClusterStatus;
-import org.apache.hadoop.mapred.JobTracker;
import org.apache.thrift.TException;
import org.apache.thrift.TProcessor;
import org.apache.thrift.TProcessorFactory;
@@ -241,21 +241,7 @@ public class HiveServer extends ThriftHi
drv.init();
ClusterStatus cs = drv.getClusterStatus();
- JobTracker.State jbs = cs.getJobTrackerState();
-
- // Convert the ClusterStatus to its Thrift equivalent: HiveClusterStatus
- JobTrackerState state;
- switch (jbs) {
- case INITIALIZING:
- state = JobTrackerState.INITIALIZING;
- break;
- case RUNNING:
- state = JobTrackerState.RUNNING;
- break;
- default:
- String errorMsg = "Unrecognized JobTracker state: " + jbs.toString();
- throw new Exception(errorMsg);
- }
+ JobTrackerState state = JobTrackerState.valueOf(ShimLoader.getHadoopShims().getJobTrackerState(cs).name());
hcs = new HiveClusterStatus(cs.getTaskTrackers(), cs.getMapTasks(), cs
.getReduceTasks(), cs.getMaxMapTasks(), cs.getMaxReduceTasks(),
@@ -649,7 +635,7 @@ public class HiveServer extends ThriftHi
}
}
}
-
+
public static void main(String[] args) {
try {
HiveServerCli cli = new HiveServerCli();
@@ -686,7 +672,7 @@ public class HiveServer extends ThriftHi
.protocolFactory(new TBinaryProtocol.Factory())
.minWorkerThreads(cli.minWorkerThreads)
.maxWorkerThreads(cli.maxWorkerThreads);
-
+
TServer server = new TThreadPoolServer(sargs);
String msg = "Starting hive server on port " + cli.port
Modified: hive/trunk/shims/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/build.xml?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/build.xml (original)
+++ hive/trunk/shims/build.xml Thu Dec 1 03:11:38 2011
@@ -26,9 +26,13 @@ to call at top-level: ant deploy-contrib
<import file="../build-common.xml"/>
<path id="classpath">
- <fileset dir="${hadoop.root}/lib">
- <include name="**/*.jar" />
- <exclude name="**/excluded/" />
+ <fileset dir="${hadoop.root}">
+ <include name="lib/**/*.jar" />
+ <exclude name="lib/**/excluded/" />
+ <!-- below is for 0.23 onwards -->
+ <include name="share/hadoop/common/lib/*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-mapreduce-*.jar" />
+ <exclude name="share/hadoop/common/lib/hadoop-yarn-*.jar" />
</fileset>
<pathelement location="${hadoop.oldstyle-name.jar}"/>
<pathelement location="${hadoop.oldstyle-name.tools.jar}"/>
@@ -36,6 +40,13 @@ to call at top-level: ant deploy-contrib
<pathelement location="${hadoop.newstyle-name.jar}"/>
<pathelement location="${hadoop.newstyle-name.test.jar}"/>
<pathelement location="${hadoop.newstyle-name.tools.jar}"/>
+ <pathelement location="${hadoop.common.jar}"/>
+ <pathelement location="${hadoop.common.test.jar}"/>
+ <pathelement location="${hadoop.hdfs.jar}"/>
+ <pathelement location="${hadoop.hdfs.test.jar}"/>
+ <pathelement location="${hadoop.mapreduce.jar}"/>
+ <pathelement location="${hadoop.mapreduce.test.jar}"/>
+ <pathelement location="${hadoop.mapreduce.tools.jar}"/>
<fileset dir="../lib" includes="*.jar" />
<path refid="common-classpath"/>
</path>
@@ -67,7 +78,7 @@ to call at top-level: ant deploy-contrib
</antcall>
<antcall target="build_shims" inheritRefs="false" inheritAll="false">
<param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
- <param name="hadoop.version.ant-internal.prefix" value="0.20S" />
+ <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
</antcall>
<getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version}" />
<javac
@@ -79,7 +90,7 @@ to call at top-level: ant deploy-contrib
includeantruntime="false">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
<classpath refid="classpath"/>
- <src path="${basedir}/src/${hadoop.version.ant-internal.prefix}/java" />
+ <src path="${basedir}/src/common/java" />
</javac>
</target>
@@ -121,9 +132,10 @@ to call at top-level: ant deploy-contrib
<target name="compile-test" depends="compile">
<echo message="Project: ${ant.project.name}"/>
- <antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
+ <!-- TODO: move tests to version directory -->
+ <!--antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
<param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
- <param name="hadoop.version.ant-internal.prefix" value="0.20S" />
- </antcall>
+ <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
+ </antcall-->
</target>
</project>
Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyFileSystem.java Thu Dec 1 03:11:38 2011
@@ -1,273 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Progressable;
-
-/****************************************************************
- * A FileSystem that can serve a given scheme/authority using some
- * other file system. In that sense, it serves as a proxy for the
- * real/underlying file system
- *****************************************************************/
-
-public class ProxyFileSystem extends FilterFileSystem {
-
- protected String myScheme;
- protected String myAuthority;
- protected URI myUri;
-
- protected String realScheme;
- protected String realAuthority;
- protected URI realUri;
-
-
-
- private Path swizzleParamPath(Path p) {
- return new Path (realScheme, realAuthority, p.toUri().getPath());
- }
-
- private Path swizzleReturnPath(Path p) {
- return new Path (myScheme, myAuthority, p.toUri().getPath());
- }
-
- private FileStatus swizzleFileStatus(FileStatus orig, boolean isParam) {
- FileStatus ret =
- new FileStatus(orig.getLen(), orig.isDir(), orig.getReplication(),
- orig.getBlockSize(), orig.getModificationTime(),
- orig.getAccessTime(), orig.getPermission(),
- orig.getOwner(), orig.getGroup(),
- isParam ? swizzleParamPath(orig.getPath()) :
- swizzleReturnPath(orig.getPath()));
- return ret;
- }
-
- public ProxyFileSystem() {
- throw new RuntimeException ("Unsupported constructor");
- }
-
- public ProxyFileSystem(FileSystem fs) {
- throw new RuntimeException ("Unsupported constructor");
- }
-
- /**
- * Create a proxy file system for fs.
- *
- * @param fs FileSystem to create proxy for
- * @param myUri URI to use as proxy. Only the scheme and authority from
- * this are used right now
- */
- public ProxyFileSystem(FileSystem fs, URI myUri) {
- super(fs);
-
- URI realUri = fs.getUri();
- this.realScheme = realUri.getScheme();
- this.realAuthority=realUri.getAuthority();
- this.realUri = realUri;
-
- this.myScheme = myUri.getScheme();
- this.myAuthority=myUri.getAuthority();
- this.myUri = myUri;
- }
-
- @Override
- public void initialize(URI name, Configuration conf) throws IOException {
- try {
- URI realUri = new URI (realScheme, realAuthority,
- name.getPath(), name.getQuery(), name.getFragment());
- super.initialize(realUri, conf);
- } catch (URISyntaxException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public URI getUri() {
- return myUri;
- }
-
- @Override
- public String getName() {
- return getUri().toString();
- }
-
- @Override
- public Path makeQualified(Path path) {
- return swizzleReturnPath(super.makeQualified(swizzleParamPath(path)));
- }
-
-
- @Override
- protected void checkPath(Path path) {
- super.checkPath(swizzleParamPath(path));
- }
-
- @Override
- public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
- long len) throws IOException {
- return super.getFileBlockLocations(swizzleFileStatus(file, true),
- start, len);
- }
-
- @Override
- public FSDataInputStream open(Path f, int bufferSize) throws IOException {
- return super.open(swizzleParamPath(f), bufferSize);
- }
-
- @Override
- public FSDataOutputStream append(Path f, int bufferSize,
- Progressable progress) throws IOException {
- return super.append(swizzleParamPath(f), bufferSize, progress);
- }
-
- @Override
- public FSDataOutputStream create(Path f, FsPermission permission,
- boolean overwrite, int bufferSize, short replication, long blockSize,
- Progressable progress) throws IOException {
- return super.create(swizzleParamPath(f), permission,
- overwrite, bufferSize, replication, blockSize, progress);
- }
-
- @Override
- public boolean setReplication(Path src, short replication) throws IOException {
- return super.setReplication(swizzleParamPath(src), replication);
- }
-
- @Override
- public boolean rename(Path src, Path dst) throws IOException {
- return super.rename(swizzleParamPath(src), swizzleParamPath(dst));
- }
-
- @Override
- public boolean delete(Path f, boolean recursive) throws IOException {
- return super.delete(swizzleParamPath(f), recursive);
- }
-
- @Override
- public boolean deleteOnExit(Path f) throws IOException {
- return super.deleteOnExit(swizzleParamPath(f));
- }
-
- @Override
- public FileStatus[] listStatus(Path f) throws IOException {
- FileStatus[] orig = super.listStatus(swizzleParamPath(f));
- FileStatus[] ret = new FileStatus [orig.length];
- for (int i=0; i<orig.length; i++) {
- ret[i] = swizzleFileStatus(orig[i], false);
- }
- return ret;
- }
-
- @Override
- public Path getHomeDirectory() {
- return swizzleReturnPath(super.getHomeDirectory());
- }
-
- @Override
- public void setWorkingDirectory(Path newDir) {
- super.setWorkingDirectory(swizzleParamPath(newDir));
- }
-
- @Override
- public Path getWorkingDirectory() {
- return swizzleReturnPath(super.getWorkingDirectory());
- }
-
- @Override
- public boolean mkdirs(Path f, FsPermission permission) throws IOException {
- return super.mkdirs(swizzleParamPath(f), permission);
- }
-
- @Override
- public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
- throws IOException {
- super.copyFromLocalFile(delSrc, swizzleParamPath(src), swizzleParamPath(dst));
- }
-
- @Override
- public void copyFromLocalFile(boolean delSrc, boolean overwrite,
- Path[] srcs, Path dst)
- throws IOException {
- super.copyFromLocalFile(delSrc, overwrite, srcs, swizzleParamPath(dst));
- }
-
- @Override
- public void copyFromLocalFile(boolean delSrc, boolean overwrite,
- Path src, Path dst)
- throws IOException {
- super.copyFromLocalFile(delSrc, overwrite, src, swizzleParamPath(dst));
- }
-
- @Override
- public void copyToLocalFile(boolean delSrc, Path src, Path dst)
- throws IOException {
- super.copyToLocalFile(delSrc, swizzleParamPath(src), dst);
- }
-
- @Override
- public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
- throws IOException {
- return super.startLocalOutput(swizzleParamPath(fsOutputFile), tmpLocalFile);
- }
-
- @Override
- public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
- throws IOException {
- super.completeLocalOutput(swizzleParamPath(fsOutputFile), tmpLocalFile);
- }
-
- @Override
- public ContentSummary getContentSummary(Path f) throws IOException {
- return super.getContentSummary(swizzleParamPath(f));
- }
-
- @Override
- public FileStatus getFileStatus(Path f) throws IOException {
- return swizzleFileStatus(super.getFileStatus(swizzleParamPath(f)), false);
- }
-
- @Override
- public FileChecksum getFileChecksum(Path f) throws IOException {
- return super.getFileChecksum(swizzleParamPath(f));
- }
-
- @Override
- public void setOwner(Path p, String username, String groupname
- ) throws IOException {
- super.setOwner(swizzleParamPath(p), username, groupname);
- }
-
- @Override
- public void setTimes(Path p, long mtime, long atime
- ) throws IOException {
- super.setTimes(swizzleParamPath(p), mtime, atime);
- }
-
- @Override
- public void setPermission(Path p, FsPermission permission
- ) throws IOException {
- super.setPermission(swizzleParamPath(p), permission);
- }
-}
-
Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java Thu Dec 1 03:11:38 2011
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.fs;
-
-import java.io.*;
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Progressable;
-
-/****************************************************************
- * A Proxy for LocalFileSystem
- *
- * Serves uri's corresponding to 'pfile:///' namespace with using
- * a LocalFileSystem
- *****************************************************************/
-
-public class ProxyLocalFileSystem extends FilterFileSystem {
-
- protected LocalFileSystem localFs;
-
- public ProxyLocalFileSystem() {
- localFs = new LocalFileSystem();
- }
-
- public ProxyLocalFileSystem(FileSystem fs) {
- throw new RuntimeException ("Unsupported Constructor");
- }
-
- @Override
- public void initialize(URI name, Configuration conf) throws IOException {
- // create a proxy for the local filesystem
- // the scheme/authority serving as the proxy is derived
- // from the supplied URI
-
- String scheme = name.getScheme();
- String authority = name.getAuthority() != null ? name.getAuthority() : "";
- String proxyUriString = name + "://" + authority + "/";
- fs = new ProxyFileSystem(localFs, URI.create(proxyUriString));
-
- fs.initialize(name, conf);
- }
-}
Modified: hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java (original)
+++ hive/trunk/shims/src/0.20/java/org/apache/hadoop/hive/shims/Hadoop20Shims.java Thu Dec 1 03:11:38 2011
@@ -35,6 +35,7 @@ import org.apache.hadoop.hdfs.MiniDFSClu
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
@@ -51,9 +52,12 @@ import org.apache.hadoop.mapred.TaskID;
import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.security.UnixUserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.tools.HadoopArchives;
+import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ToolRunner;
/**
@@ -211,7 +215,7 @@ public class Hadoop20Shims implements Ha
protected RecordReader<K, V> curReader;
protected boolean isShrinked;
protected long shrinkedLength;
-
+
public boolean next(K key, V value) throws IOException {
while ((curReader == null)
@@ -287,9 +291,9 @@ public class Hadoop20Shims implements Ha
}
initNextRecordReader(null);
}
-
+
/**
- * do next and handle exception inside it.
+ * do next and handle exception inside it.
* @param key
* @param value
* @return
@@ -504,4 +508,33 @@ public class Hadoop20Shims implements Ha
public String getTokenStrForm(String tokenSignature) throws IOException {
throw new UnsupportedOperationException("Tokens are not supported in current hadoop version");
}
+
+ @Override
+ public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
+ JobTrackerState state;
+ switch (clusterStatus.getJobTrackerState()) {
+ case INITIALIZING:
+ return JobTrackerState.INITIALIZING;
+ case RUNNING:
+ return JobTrackerState.RUNNING;
+ default:
+ String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerState();
+ throw new Exception(errorMsg);
+ }
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
+ return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {
+ @Override
+ public void progress() {
+ progressable.progress();
+ }
+ };
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
+ return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(), job.getJobID());
+ }
}
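Hadoop20Shims now carries the JobTracker-state switch and the two context factories, and callers reach them through ShimLoader.getHadoopShims(), which picks an implementation by detected Hadoop version. A hedged sketch of that dispatch, assuming a version-to-class map consistent with the shim source trees in this commit (the real logic lives in the modified ShimLoader.java):

import java.util.HashMap;
import java.util.Map;

public final class ShimDispatchSketch {
    private static final Map<String, String> SHIM_CLASSES = new HashMap<String, String>();
    static {
        SHIM_CLASSES.put("0.20", "org.apache.hadoop.hive.shims.Hadoop20Shims");
        SHIM_CLASSES.put("0.20S", "org.apache.hadoop.hive.shims.Hadoop20SShims");
        SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Hadoop23Shims");
    }

    // Loads the shim class registered for the detected major version.
    public static Object loadShims(String majorVersion) throws Exception {
        String className = SHIM_CLASSES.get(majorVersion);
        if (className == null) {
            throw new RuntimeException("Could not load shims for Hadoop version " + majorVersion);
        }
        return Class.forName(className).newInstance();
    }
}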
Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Thu Dec 1 03:11:38 2011
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.shims.Hado
import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
@@ -54,11 +55,14 @@ import org.apache.hadoop.mapred.TaskID;
import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.security.token.TokenSelector;
import org.apache.hadoop.tools.HadoopArchives;
+import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ToolRunner;
/**
@@ -511,4 +515,33 @@ public class Hadoop20SShims implements H
tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
return token != null ? token.encodeToUrlString() : null;
}
+
+ @Override
+ public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
+ JobTrackerState state;
+ switch (clusterStatus.getJobTrackerState()) {
+ case INITIALIZING:
+ return JobTrackerState.INITIALIZING;
+ case RUNNING:
+ return JobTrackerState.RUNNING;
+ default:
+ String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerState();
+ throw new Exception(errorMsg);
+ }
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
+ return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {
+ @Override
+ public void progress() {
+ progressable.progress();
+ }
+ };
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
+ return new org.apache.hadoop.mapreduce.JobContext(job.getConfiguration(), job.getJobID());
+ }
}
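Hadoop20SShims constructs JobContext and TaskAttemptContext directly, which is exactly what breaks on 0.23, where both became interfaces backed by JobContextImpl and TaskAttemptContextImpl (imported by Hadoop23Shims below). The 0.23 file is cut off before those factory methods appear, so the following is an assumed sketch of the 0.23 counterparts, not the committed text:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.task.JobContextImpl;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.Progressable;

public class Hadoop23ContextFactories {
    // 0.23: JobContext is an interface, so instantiate the Impl class.
    public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
        return new JobContextImpl(job.getConfiguration(), job.getJobID());
    }

    // 0.23: wrap the Progressable just as the 0.20 shims do above.
    public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(
            Configuration conf, final Progressable progressable) {
        return new TaskAttemptContextImpl(conf, new TaskAttemptID()) {
            @Override
            public void progress() {
                progressable.progress();
            }
        };
    }
}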
Added: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (added)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,546 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.security.auth.login.LoginException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
+import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
+import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
+import org.apache.hadoop.hive.thrift.DelegationTokenSelector23;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.OutputCommitter;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskAttemptContext;
+import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hadoop.mapred.TaskID;
+import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
+import org.apache.hadoop.mapred.lib.CombineFileSplit;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.mapreduce.task.JobContextImpl;
+import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.tools.HadoopArchives;
+import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * Implementation of shims against Hadoop 0.23.0.
+ */
+public class Hadoop23Shims implements HadoopShims {
+ public boolean usesJobShell() {
+ return false;
+ }
+
+ public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+ throws IOException {
+
+ return fs.deleteOnExit(path);
+ }
+
+ public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+ throws IOException {
+ // gone in 0.18+
+ }
+
+ public boolean isJobPreparing(RunningJob job) throws IOException {
+ return job.getJobState() == JobStatus.PREP;
+ }
+ /**
+ * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
+ */
+ public void setTmpFiles(String prop, String files) {
+ // gone in 20+
+ }
+
+ public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+ int numDataNodes,
+ boolean format,
+ String[] racks) throws IOException {
+ return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+ }
+
+ /**
+ * MiniDFSShim.
+ *
+ */
+ public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+ private final MiniDFSCluster cluster;
+
+ public MiniDFSShim(MiniDFSCluster cluster) {
+ this.cluster = cluster;
+ }
+
+ public FileSystem getFileSystem() throws IOException {
+ return cluster.getFileSystem();
+ }
+
+ public void shutdown() {
+ cluster.shutdown();
+ }
+ }
+
+ /**
+ * We define this function here to make the code compatible between
+ * hadoop 0.17 and hadoop 0.20.
+ *
+ * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
+ * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
+ * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
+ * references that class, which is not available in hadoop 0.17.
+ */
+ public int compareText(Text a, Text b) {
+ return a.compareTo(b);
+ }
+
+ @Override
+ public long getAccessTime(FileStatus file) {
+ return file.getAccessTime();
+ }
+
+ public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
+ return new CombineFileInputFormatShim() {
+ @Override
+ public RecordReader getRecordReader(InputSplit split,
+ JobConf job, Reporter reporter) throws IOException {
+ throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
+ }
+ };
+ }
+
+ public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
+ long shrinkedLength;
+ boolean _isShrinked;
+ public InputSplitShim() {
+ super();
+ _isShrinked = false;
+ }
+
+ public InputSplitShim(CombineFileSplit old) throws IOException {
+ super(old);
+ _isShrinked = false;
+ }
+
+ @Override
+ public void shrinkSplit(long length) {
+ _isShrinked = true;
+ shrinkedLength = length;
+ }
+
+ public boolean isShrinked() {
+ return _isShrinked;
+ }
+
+ public long getShrinkedLength() {
+ return shrinkedLength;
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ super.readFields(in);
+ _isShrinked = in.readBoolean();
+ if (_isShrinked) {
+ shrinkedLength = in.readLong();
+ }
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ super.write(out);
+ out.writeBoolean(_isShrinked);
+ if (_isShrinked) {
+ out.writeLong(shrinkedLength);
+ }
+ }
+ }
+
+ /* This class should be replaced with org.apache.hadoop.mapred.lib.CombineFileRecordReader class, once
+ * https://issues.apache.org/jira/browse/MAPREDUCE-955 is fixed. This code should be removed - it is a copy
+ * of org.apache.hadoop.mapred.lib.CombineFileRecordReader
+ */
+ public static class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
+
+ static final Class[] constructorSignature = new Class[] {
+ InputSplit.class,
+ Configuration.class,
+ Reporter.class,
+ Integer.class
+ };
+
+ protected CombineFileSplit split;
+ protected JobConf jc;
+ protected Reporter reporter;
+ protected Class<RecordReader<K, V>> rrClass;
+ protected Constructor<RecordReader<K, V>> rrConstructor;
+ protected FileSystem fs;
+
+ protected int idx;
+ protected long progress;
+ protected RecordReader<K, V> curReader;
+ protected boolean isShrinked;
+ protected long shrinkedLength;
+
+ public boolean next(K key, V value) throws IOException {
+
+ while ((curReader == null)
+ || !doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(),
+ value)) {
+ if (!initNextRecordReader(key)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ public K createKey() {
+ K newKey = curReader.createKey();
+ return (K)(new CombineHiveKey(newKey));
+ }
+
+ public V createValue() {
+ return curReader.createValue();
+ }
+
+ /**
+ * Return the amount of data processed.
+ */
+ public long getPos() throws IOException {
+ return progress;
+ }
+
+ public void close() throws IOException {
+ if (curReader != null) {
+ curReader.close();
+ curReader = null;
+ }
+ }
+
+ /**
+ * Return progress based on the amount of data processed so far.
+ */
+ public float getProgress() throws IOException {
+ return Math.min(1.0f, progress / (float) (split.getLength()));
+ }
+
+ /**
+ * A generic RecordReader that can hand out different recordReaders
+ * for each chunk in the CombineFileSplit.
+ */
+ public CombineFileRecordReader(JobConf job, CombineFileSplit split,
+ Reporter reporter,
+ Class<RecordReader<K, V>> rrClass)
+ throws IOException {
+ this.split = split;
+ this.jc = job;
+ this.rrClass = rrClass;
+ this.reporter = reporter;
+ this.idx = 0;
+ this.curReader = null;
+ this.progress = 0;
+
+ isShrinked = false;
+
+ assert (split instanceof InputSplitShim);
+ if (((InputSplitShim) split).isShrinked()) {
+ isShrinked = true;
+ shrinkedLength = ((InputSplitShim) split).getShrinkedLength();
+ }
+
+ try {
+ rrConstructor = rrClass.getDeclaredConstructor(constructorSignature);
+ rrConstructor.setAccessible(true);
+ } catch (Exception e) {
+ throw new RuntimeException(rrClass.getName() +
+ " does not have valid constructor", e);
+ }
+ initNextRecordReader(null);
+ }
+
+ /**
+ * do next and handle exception inside it.
+ * @param key
+ * @param value
+ * @return
+ * @throws IOException
+ */
+ private boolean doNextWithExceptionHandler(K key, V value) throws IOException {
+ try {
+ return curReader.next(key, value);
+ } catch (Exception e) {
+ return HiveIOExceptionHandlerUtil.handleRecordReaderNextException(e, jc);
+ }
+ }
+
+ /**
+ * Get the record reader for the next chunk in this CombineFileSplit.
+ */
+ protected boolean initNextRecordReader(K key) throws IOException {
+
+ if (curReader != null) {
+ curReader.close();
+ curReader = null;
+ if (idx > 0) {
+ progress += split.getLength(idx - 1); // done processing so far
+ }
+ }
+
+ // if all chunks have been processed or reached the length, nothing more to do.
+ if (idx == split.getNumPaths() || (isShrinked && progress > shrinkedLength)) {
+ return false;
+ }
+
+ // get a record reader for the idx-th chunk
+ try {
+ curReader = rrConstructor.newInstance(new Object[]
+ {split, jc, reporter, Integer.valueOf(idx)});
+
+ // change the key if need be
+ if (key != null) {
+ K newKey = curReader.createKey();
+ ((CombineHiveKey)key).setKey(newKey);
+ }
+
+ // setup some helper config variables.
+ jc.set("map.input.file", split.getPath(idx).toString());
+ jc.setLong("map.input.start", split.getOffset(idx));
+ jc.setLong("map.input.length", split.getLength(idx));
+ } catch (Exception e) {
+ curReader=HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(e, jc);
+ }
+ idx++;
+ return true;
+ }
+ }
+
+ public abstract static class CombineFileInputFormatShim<K, V> extends
+ CombineFileInputFormat<K, V>
+ implements HadoopShims.CombineFileInputFormatShim<K, V> {
+
+ public Path[] getInputPathsShim(JobConf conf) {
+ try {
+ return FileInputFormat.getInputPaths(conf);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void createPool(JobConf conf, PathFilter... filters) {
+ super.createPool(conf, filters);
+ }
+
+ @Override
+ public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
+ long minSize = job.getLong("mapred.min.split.size", 0);
+
+ // For backward compatibility, let the above parameter be used
+ if (job.getLong("mapred.min.split.size.per.node", 0) == 0) {
+ super.setMinSplitSizeNode(minSize);
+ }
+
+ if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) {
+ super.setMinSplitSizeRack(minSize);
+ }
+
+ if (job.getLong("mapred.max.split.size", 0) == 0) {
+ super.setMaxSplitSize(minSize);
+ }
+
+ InputSplit[] splits = super.getSplits(job, numSplits);
+
+ InputSplitShim[] isplits = new InputSplitShim[splits.length];
+ for (int pos = 0; pos < splits.length; pos++) {
+ isplits[pos] = new InputSplitShim((CombineFileSplit) splits[pos]);
+ }
+
+ return isplits;
+ }
+
+ public InputSplitShim getInputSplitShim() throws IOException {
+ return new InputSplitShim();
+ }
+
+ public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
+ Reporter reporter,
+ Class<RecordReader<K, V>> rrClass)
+ throws IOException {
+ CombineFileSplit cfSplit = (CombineFileSplit) split;
+ return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
+ }
+
+ }
+
+ public String getInputFormatClassName() {
+ return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
+ }
+
+ String[] ret = new String[2];
+
+ @Override
+ public String[] getTaskJobIDs(TaskCompletionEvent t) {
+ TaskID tid = t.getTaskAttemptId().getTaskID();
+ ret[0] = tid.toString();
+ ret[1] = tid.getJobID().toString();
+ return ret;
+ }
+
+ public void setFloatConf(Configuration conf, String varName, float val) {
+ conf.setFloat(varName, val);
+ }
+
+ @Override
+ public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
+ String archiveName) throws Exception {
+
+ HadoopArchives har = new HadoopArchives(conf);
+ List<String> args = new ArrayList<String>();
+
+ if (conf.get("hive.archive.har.parentdir.settable") == null) {
+ throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
+ }
+ boolean parentSettable =
+ conf.getBoolean("hive.archive.har.parentdir.settable", false);
+
+ if (parentSettable) {
+ args.add("-archiveName");
+ args.add(archiveName);
+ args.add("-p");
+ args.add(sourceDir.toString());
+ args.add(destDir.toString());
+ } else {
+ args.add("-archiveName");
+ args.add(archiveName);
+ args.add(sourceDir.toString());
+ args.add(destDir.toString());
+ }
+
+ return ToolRunner.run(har, args.toArray(new String[0]));
+ }
+
+ public static class NullOutputCommitter extends OutputCommitter {
+ @Override
+ public void setupJob(JobContext jobContext) { }
+ @Override
+ public void cleanupJob(JobContext jobContext) { }
+
+ @Override
+ public void setupTask(TaskAttemptContext taskContext) { }
+ @Override
+ public boolean needsTaskCommit(TaskAttemptContext taskContext) {
+ return false;
+ }
+ @Override
+ public void commitTask(TaskAttemptContext taskContext) { }
+ @Override
+ public void abortTask(TaskAttemptContext taskContext) { }
+ }
+
+ public void setNullOutputFormat(JobConf conf) {
+ conf.setOutputFormat(NullOutputFormat.class);
+ conf.setOutputCommitter(Hadoop23Shims.NullOutputCommitter.class);
+
+ // The option to bypass job setup and cleanup was introduced in hadoop-0.21
+ // (MAPREDUCE-463) but can be backported, so we disable setup/cleanup in all versions >= 0.19.
+ conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+
+ // The option to bypass the task cleanup task was introduced in hadoop-0.23
+ // (MAPREDUCE-2206) but can be backported, so we disable it in all versions >= 0.19.
+ conf.setBoolean("mapreduce.job.committer.task.cleanup.needed", false);
+ }
+
+ @Override
+ public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
+ return UserGroupInformation.getCurrentUser();
+ }
+
+ @Override
+ public boolean isSecureShimImpl() {
+ return true;
+ }
+
+ @Override
+ public String getShortUserName(UserGroupInformation ugi) {
+ return ugi.getShortUserName();
+ }
+
+ @Override
+ public String getTokenStrForm(String tokenSignature) throws IOException {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector23();
+
+ Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
+ tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
+ return token != null ? token.encodeToUrlString() : null;
+ }
+
+ @Override
+ public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
+ switch (clusterStatus.getJobTrackerStatus()) {
+ case INITIALIZING:
+ return JobTrackerState.INITIALIZING;
+ case RUNNING:
+ return JobTrackerState.RUNNING;
+ default:
+ String errorMsg = "Unrecognized JobTracker state: " + clusterStatus.getJobTrackerStatus();
+ throw new Exception(errorMsg);
+ }
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
+ return new TaskAttemptContextImpl(conf, new TaskAttemptID()) {
+ @Override
+ public void progress() {
+ progressable.progress();
+ }
+ };
+ }
+
+ @Override
+ public org.apache.hadoop.mapreduce.JobContext newJobContext(Job job) {
+ return new JobContextImpl(job.getConfiguration(), job.getJobID());
+ }
+}
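
A minimal usage sketch for orientation (not part of this commit): callers
never name Hadoop23Shims directly; ShimLoader, patched below, selects it
from the detected Hadoop version. The wrapper class and printouts here are
illustrative assumptions.

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapred.JobConf;

    public class ShimUsageSketch {
      public static void main(String[] args) {
        // Resolves to Hadoop23Shims when Hadoop 0.23 is on the classpath.
        HadoopShims shims = ShimLoader.getHadoopShims();
        JobConf conf = new JobConf();
        shims.setNullOutputFormat(conf); // NullOutputFormat + no-op committer
        System.out.println(shims.isSecureShimImpl()); // true for this shim
      }
    }
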
Added: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java (added)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Jetty23Shims.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.shims;
+
+import java.io.IOException;
+
+import org.mortbay.jetty.bio.SocketConnector;
+import org.mortbay.jetty.handler.RequestLogHandler;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+/**
+ * Jetty23Shims.
+ *
+ */
+public class Jetty23Shims implements JettyShims {
+ public Server startServer(String listen, int port) throws IOException {
+ Server s = new Server();
+ s.setupListenerHostPort(listen, port);
+ return s;
+ }
+
+ private static class Server extends org.mortbay.jetty.Server implements JettyShims.Server {
+ public void addWar(String war, String contextPath) {
+ WebAppContext wac = new WebAppContext();
+ wac.setContextPath(contextPath);
+ wac.setWar(war);
+ RequestLogHandler rlh = new RequestLogHandler();
+ rlh.setHandler(wac);
+ this.addHandler(rlh);
+ }
+
+ public void setupListenerHostPort(String listen, int port)
+ throws IOException {
+
+ SocketConnector connector = new SocketConnector();
+ connector.setPort(port);
+ connector.setHost(listen);
+ this.addConnector(connector);
+ }
+ }
+}
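
A sketch of the HWI-style startup this shim exists for; the listen
address, port, and war path are made-up values, and start()/join() are
assumed to be exposed via JettyShims.Server as on org.mortbay.jetty.Server:

    import org.apache.hadoop.hive.shims.JettyShims;
    import org.apache.hadoop.hive.shims.ShimLoader;

    public class HwiStartupSketch {
      public static void main(String[] args) throws Exception {
        JettyShims.Server server =
            ShimLoader.getJettyShims().startServer("0.0.0.0", 9999);
        server.addWar("/tmp/hive-hwi.war", "/hwi"); // hypothetical war path
        server.start();
        server.join();
      }
    }
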
Added: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java (added)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.thrift;
+
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenIdentifier;
+
+/**
+ * A delegation token identifier that is specific to Hive.
+ */
+public class DelegationTokenIdentifier23
+ extends AbstractDelegationTokenIdentifier {
+ public static final Text HIVE_DELEGATION_KIND = new Text("HIVE_DELEGATION_TOKEN");
+
+ /**
+ * Create an empty delegation token identifier for reading into.
+ */
+ public DelegationTokenIdentifier23() {
+ }
+
+ /**
+ * Create a new delegation token identifier
+ * @param owner the effective username of the token owner
+ * @param renewer the username of the renewer
+ * @param realUser the real username of the token owner
+ */
+ public DelegationTokenIdentifier23(Text owner, Text renewer, Text realUser) {
+ super(owner, renewer, realUser);
+ }
+
+ @Override
+ public Text getKind() {
+ return HIVE_DELEGATION_KIND;
+ }
+
+}
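
A small sketch of what the kind text is for: it is the tag that ties a
serialized token back to this identifier class, so issuer and reader must
agree on HIVE_DELEGATION_TOKEN. The owner/renewer names are made up:

    import org.apache.hadoop.hive.thrift.DelegationTokenIdentifier23;
    import org.apache.hadoop.io.Text;

    public class TokenKindSketch {
      public static void main(String[] args) {
        DelegationTokenIdentifier23 id = new DelegationTokenIdentifier23(
            new Text("hive"), new Text("oozie"), new Text("hive"));
        System.out.println(id.getKind()); // HIVE_DELEGATION_TOKEN
      }
    }
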
Added: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java (added)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.thrift;
+
+import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSelector;
+
+/**
+ * A delegation token selector that is specialized for Hive.
+ */
+
+public class DelegationTokenSelector23
+ extends AbstractDelegationTokenSelector<DelegationTokenIdentifier23> {
+
+ public DelegationTokenSelector23() {
+ super(DelegationTokenIdentifier23.HIVE_DELEGATION_KIND);
+ }
+}
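
And a sketch of the selection path that Hadoop23Shims.getTokenStrForm
above drives; the "hiveserver" service text and the empty token list are
assumptions for illustration (in Hive the tokens come from the current
UGI):

    import java.util.Collections;

    import org.apache.hadoop.hive.thrift.DelegationTokenSelector23;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class SelectorSketch {
      public static void main(String[] args) {
        DelegationTokenSelector23 selector = new DelegationTokenSelector23();
        Token<? extends TokenIdentifier> token = selector.selectToken(
            new Text("hiveserver"),
            Collections.<Token<? extends TokenIdentifier>>emptyList());
        System.out.println(token); // null: nothing matches kind + service
      }
    }
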
Added: hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java (added)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyFileSystem.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,273 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Progressable;
+
+/****************************************************************
+ * A FileSystem that can serve a given scheme/authority using some
+ * other file system. In that sense, it serves as a proxy for the
+ * real/underlying file system
+ *****************************************************************/
+
+public class ProxyFileSystem extends FilterFileSystem {
+
+ protected String myScheme;
+ protected String myAuthority;
+ protected URI myUri;
+
+ protected String realScheme;
+ protected String realAuthority;
+ protected URI realUri;
+
+
+
+ private Path swizzleParamPath(Path p) {
+ return new Path (realScheme, realAuthority, p.toUri().getPath());
+ }
+
+ private Path swizzleReturnPath(Path p) {
+ return new Path (myScheme, myAuthority, p.toUri().getPath());
+ }
+
+ private FileStatus swizzleFileStatus(FileStatus orig, boolean isParam) {
+ FileStatus ret =
+ new FileStatus(orig.getLen(), orig.isDir(), orig.getReplication(),
+ orig.getBlockSize(), orig.getModificationTime(),
+ orig.getAccessTime(), orig.getPermission(),
+ orig.getOwner(), orig.getGroup(),
+ isParam ? swizzleParamPath(orig.getPath()) :
+ swizzleReturnPath(orig.getPath()));
+ return ret;
+ }
+
+ public ProxyFileSystem() {
+ throw new RuntimeException ("Unsupported constructor");
+ }
+
+ public ProxyFileSystem(FileSystem fs) {
+ throw new RuntimeException ("Unsupported constructor");
+ }
+
+ /**
+ * Create a proxy file system for fs.
+ *
+ * @param fs FileSystem to create proxy for
+ * @param myUri URI to use as proxy. Only the scheme and authority from
+ * this are used right now
+ */
+ public ProxyFileSystem(FileSystem fs, URI myUri) {
+ super(fs);
+
+ URI realUri = fs.getUri();
+ this.realScheme = realUri.getScheme();
+ this.realAuthority=realUri.getAuthority();
+ this.realUri = realUri;
+
+ this.myScheme = myUri.getScheme();
+ this.myAuthority=myUri.getAuthority();
+ this.myUri = myUri;
+ }
+
+ @Override
+ public void initialize(URI name, Configuration conf) throws IOException {
+ try {
+ URI realUri = new URI (realScheme, realAuthority,
+ name.getPath(), name.getQuery(), name.getFragment());
+ super.initialize(realUri, conf);
+ } catch (URISyntaxException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public URI getUri() {
+ return myUri;
+ }
+
+ @Override
+ public String getName() {
+ return getUri().toString();
+ }
+
+ @Override
+ public Path makeQualified(Path path) {
+ return swizzleReturnPath(super.makeQualified(swizzleParamPath(path)));
+ }
+
+
+ @Override
+ protected void checkPath(Path path) {
+ super.checkPath(swizzleParamPath(path));
+ }
+
+ @Override
+ public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
+ long len) throws IOException {
+ return super.getFileBlockLocations(swizzleFileStatus(file, true),
+ start, len);
+ }
+
+ @Override
+ public FSDataInputStream open(Path f, int bufferSize) throws IOException {
+ return super.open(swizzleParamPath(f), bufferSize);
+ }
+
+ @Override
+ public FSDataOutputStream append(Path f, int bufferSize,
+ Progressable progress) throws IOException {
+ return super.append(swizzleParamPath(f), bufferSize, progress);
+ }
+
+ @Override
+ public FSDataOutputStream create(Path f, FsPermission permission,
+ boolean overwrite, int bufferSize, short replication, long blockSize,
+ Progressable progress) throws IOException {
+ return super.create(swizzleParamPath(f), permission,
+ overwrite, bufferSize, replication, blockSize, progress);
+ }
+
+ @Override
+ public boolean setReplication(Path src, short replication) throws IOException {
+ return super.setReplication(swizzleParamPath(src), replication);
+ }
+
+ @Override
+ public boolean rename(Path src, Path dst) throws IOException {
+ return super.rename(swizzleParamPath(src), swizzleParamPath(dst));
+ }
+
+ @Override
+ public boolean delete(Path f, boolean recursive) throws IOException {
+ return super.delete(swizzleParamPath(f), recursive);
+ }
+
+ @Override
+ public boolean deleteOnExit(Path f) throws IOException {
+ return super.deleteOnExit(swizzleParamPath(f));
+ }
+
+ @Override
+ public FileStatus[] listStatus(Path f) throws IOException {
+ FileStatus[] orig = super.listStatus(swizzleParamPath(f));
+ FileStatus[] ret = new FileStatus [orig.length];
+ for (int i=0; i<orig.length; i++) {
+ ret[i] = swizzleFileStatus(orig[i], false);
+ }
+ return ret;
+ }
+
+ @Override
+ public Path getHomeDirectory() {
+ return swizzleReturnPath(super.getHomeDirectory());
+ }
+
+ @Override
+ public void setWorkingDirectory(Path newDir) {
+ super.setWorkingDirectory(swizzleParamPath(newDir));
+ }
+
+ @Override
+ public Path getWorkingDirectory() {
+ return swizzleReturnPath(super.getWorkingDirectory());
+ }
+
+ @Override
+ public boolean mkdirs(Path f, FsPermission permission) throws IOException {
+ return super.mkdirs(swizzleParamPath(f), permission);
+ }
+
+ @Override
+ public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
+ throws IOException {
+ super.copyFromLocalFile(delSrc, swizzleParamPath(src), swizzleParamPath(dst));
+ }
+
+ @Override
+ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
+ Path[] srcs, Path dst)
+ throws IOException {
+ super.copyFromLocalFile(delSrc, overwrite, srcs, swizzleParamPath(dst));
+ }
+
+ @Override
+ public void copyFromLocalFile(boolean delSrc, boolean overwrite,
+ Path src, Path dst)
+ throws IOException {
+ super.copyFromLocalFile(delSrc, overwrite, src, swizzleParamPath(dst));
+ }
+
+ @Override
+ public void copyToLocalFile(boolean delSrc, Path src, Path dst)
+ throws IOException {
+ super.copyToLocalFile(delSrc, swizzleParamPath(src), dst);
+ }
+
+ @Override
+ public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
+ throws IOException {
+ return super.startLocalOutput(swizzleParamPath(fsOutputFile), tmpLocalFile);
+ }
+
+ @Override
+ public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
+ throws IOException {
+ super.completeLocalOutput(swizzleParamPath(fsOutputFile), tmpLocalFile);
+ }
+
+ @Override
+ public ContentSummary getContentSummary(Path f) throws IOException {
+ return super.getContentSummary(swizzleParamPath(f));
+ }
+
+ @Override
+ public FileStatus getFileStatus(Path f) throws IOException {
+ return swizzleFileStatus(super.getFileStatus(swizzleParamPath(f)), false);
+ }
+
+ @Override
+ public FileChecksum getFileChecksum(Path f) throws IOException {
+ return super.getFileChecksum(swizzleParamPath(f));
+ }
+
+ @Override
+ public void setOwner(Path p, String username, String groupname
+ ) throws IOException {
+ super.setOwner(swizzleParamPath(p), username, groupname);
+ }
+
+ @Override
+ public void setTimes(Path p, long mtime, long atime
+ ) throws IOException {
+ super.setTimes(swizzleParamPath(p), mtime, atime);
+ }
+
+ @Override
+ public void setPermission(Path p, FsPermission permission
+ ) throws IOException {
+ super.setPermission(swizzleParamPath(p), permission);
+ }
+}
+
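A minimal sketch of the swizzling, with a hypothetical "pfile" proxy
scheme over the local file system: parameter paths are rewritten to the
real scheme on the way in, and returned paths back to the proxy scheme on
the way out.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.ProxyFileSystem;

    public class ProxyFsSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem local = FileSystem.getLocal(conf);
        ProxyFileSystem proxy =
            new ProxyFileSystem(local, URI.create("pfile:///"));
        proxy.initialize(URI.create("pfile:///"), conf);
        // Qualified results come back under pfile, not file.
        System.out.println(proxy.makeQualified(new Path("/tmp")));
      }
    }
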
Added: hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java?rev=1208940&view=auto
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java (added)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/fs/ProxyLocalFileSystem.java Thu Dec 1 03:11:38 2011
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs;
+
+import java.io.IOException;
+import java.net.URI;
+
+import org.apache.hadoop.conf.Configuration;
+
+/****************************************************************
+ * A proxy for LocalFileSystem.
+ *
+ * Serves URIs in the 'pfile:///' namespace using a
+ * LocalFileSystem.
+ *****************************************************************/
+
+public class ProxyLocalFileSystem extends FilterFileSystem {
+
+ protected LocalFileSystem localFs;
+
+ public ProxyLocalFileSystem() {
+ localFs = new LocalFileSystem();
+ }
+
+ public ProxyLocalFileSystem(FileSystem fs) {
+ throw new RuntimeException ("Unsupported constructor");
+ }
+
+ @Override
+ public void initialize(URI name, Configuration conf) throws IOException {
+ // create a proxy for the local filesystem
+ // the scheme/authority serving as the proxy is derived
+ // from the supplied URI
+
+ String scheme = name.getScheme();
+ String authority = name.getAuthority() != null ? name.getAuthority() : "";
+ String proxyUriString = scheme + "://" + authority + "/";
+ fs = new ProxyFileSystem(localFs, URI.create(proxyUriString));
+
+ fs.initialize(name, conf);
+ }
+}
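
In practice this class is reached through Hadoop's fs.<scheme>.impl
lookup rather than direct construction; a sketch, with the "pfile"
scheme assumed (Hive wires the equivalent up in its own configuration):

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class PfileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.pfile.impl", "org.apache.hadoop.fs.ProxyLocalFileSystem");
        FileSystem fs = FileSystem.get(URI.create("pfile:///"), conf);
        fs.mkdirs(new Path("pfile:///tmp/pfile-demo"));
        System.out.println(fs.exists(new Path("pfile:///tmp/pfile-demo")));
      }
    }
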
Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/HadoopShims.java Thu Dec 1 03:11:38 2011
@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
@@ -38,7 +39,11 @@ import org.apache.hadoop.mapred.RecordRe
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.JobContext;
+import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Progressable;
/**
* In order to be compatible with multiple versions of Hadoop, all parts
@@ -189,6 +194,22 @@ public interface HadoopShims {
*/
String getTokenStrForm(String tokenSignature) throws IOException;
+
+ enum JobTrackerState { INITIALIZING, RUNNING }
+
+ /**
+ * Convert the ClusterStatus to its Thrift equivalent: JobTrackerState.
+ * See MAPREDUCE-2455 for why this is a part of the shim.
+ * @param clusterStatus
+ * @return the matching JobTrackerState
+ * @throws Exception if no equivalent JobTrackerState exists
+ */
+ public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception;
+
+ public TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable);
+
+ public JobContext newJobContext(Job job);
+
/**
* InputSplitShim.
*
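
A sketch of how a caller uses the two new context factories without
naming version-specific classes; the Job construction and the anonymous
Progressable are illustrative assumptions, not code from this commit:

    import org.apache.hadoop.hive.shims.HadoopShims;
    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.JobContext;
    import org.apache.hadoop.mapreduce.TaskAttemptContext;
    import org.apache.hadoop.util.Progressable;

    public class ContextSketch {
      public static void main(String[] args) throws Exception {
        HadoopShims shims = ShimLoader.getHadoopShims();
        Job job = new Job();
        JobContext jobCtx = shims.newJobContext(job);
        TaskAttemptContext taskCtx = shims.newTaskAttemptContext(
            job.getConfiguration(), new Progressable() {
              public void progress() { /* relay heartbeats upstream */ }
            });
        System.out.println(jobCtx.getJobID());
        System.out.println(taskCtx.getTaskAttemptID());
      }
    }
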
Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java?rev=1208940&r1=1208939&r2=1208940&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java Thu Dec 1 03:11:38 2011
@@ -40,6 +40,7 @@ public abstract class ShimLoader {
static {
HADOOP_SHIM_CLASSES.put("0.20", "org.apache.hadoop.hive.shims.Hadoop20Shims");
HADOOP_SHIM_CLASSES.put("0.20S", "org.apache.hadoop.hive.shims.Hadoop20SShims");
+ HADOOP_SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Hadoop23Shims");
}
/**
@@ -52,6 +53,7 @@ public abstract class ShimLoader {
static {
JETTY_SHIM_CLASSES.put("0.20", "org.apache.hadoop.hive.shims.Jetty20Shims");
JETTY_SHIM_CLASSES.put("0.20S", "org.apache.hadoop.hive.shims.Jetty20SShims");
+ JETTY_SHIM_CLASSES.put("0.23", "org.apache.hadoop.hive.shims.Jetty23Shims");
}
/**
@@ -122,7 +124,9 @@ public abstract class ShimLoader {
try {
Class.forName("org.apache.hadoop.security.UnixUserGroupInformation");
} catch (ClassNotFoundException cnf) {
- majorVersion += "S";
+ if ("0.20".equals(majorVersion)) {
+ majorVersion += "S";
+ }
}
return majorVersion;
}
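
The net effect of this change, as a sketch: the major-version key comes
from Hadoop's VersionInfo, and the security "S" suffix is now appended
only for 0.20, so a 0.23.x version string maps cleanly onto the new shim
classes (output comments assume an 0.23 classpath):

    import org.apache.hadoop.hive.shims.ShimLoader;
    import org.apache.hadoop.util.VersionInfo;

    public class VersionSketch {
      public static void main(String[] args) {
        System.out.println(VersionInfo.getVersion()); // e.g. "0.23.0"
        // org.apache.hadoop.hive.shims.Hadoop23Shims
        System.out.println(ShimLoader.getHadoopShims().getClass().getName());
      }
    }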