Posted to commits@hive.apache.org by am...@apache.org on 2012/01/10 16:08:48 UTC

svn commit: r1229584 - in /hive/trunk: ./ contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/ ql/src/java/org/apache/hadoop/hive/ql/io/ ql/src/java/org/apache/hadoop/hive/ql/lib/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/ap...

Author: amareshwari
Date: Tue Jan 10 15:08:45 2012
New Revision: 1229584

URL: http://svn.apache.org/viewvc?rev=1229584&view=rev
Log:
HIVE-2629. Reverting previous commit 

Added:
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSecretManager.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/HadoopThriftAuthBridge20S.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/MemoryTokenStore.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/TokenStoreDelegationTokenSecretManager.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/ZooKeeperTokenStore.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/thrift/client/TUGIAssumingTransport.java
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.20S/java/org/apache/hadoop/security/token/delegation/HiveDelegationTokenSupport.java
    hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenIdentifier23.java
    hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java
      - copied unchanged from r1229509, hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/thrift/DelegationTokenSelector23.java
Removed:
    hive/trunk/shims/src/common-secure/
Modified:
    hive/trunk/README.txt   (props changed)
    hive/trunk/build-common.xml
    hive/trunk/build.properties
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java   (props changed)
    hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataOutputBuffer.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessorCtx.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java   (props changed)
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java   (props changed)
    hive/trunk/serde/README.txt   (props changed)
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java   (props changed)
    hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/   (props changed)
    hive/trunk/shims/build.xml
    hive/trunk/shims/ivy.xml
    hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
    hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
    hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java

Propchange: hive/trunk/README.txt
            ('svn:mergeinfo' removed)

Modified: hive/trunk/build-common.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build-common.xml?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/build-common.xml (original)
+++ hive/trunk/build-common.xml Tue Jan 10 15:08:45 2012
@@ -115,14 +115,14 @@
       log="${ivyresolvelog}"/>
   </target>
 
-  <target name="ivy-retrieve-hadoop-source"
+
+  <target name="ivy-retrieve-hadoop-source" depends="ivy-init-settings"
     description="Retrieve Ivy-managed Hadoop source artifacts" unless="ivy.skip">
     <echo message="Project: ${ant.project.name}"/>
-  	<echo message="hadoop.version.ant-internal: ${hadoop.version.ant-internal}"/>
-  	<ivy:settings id="${ant.project.name}-${hadoop.version.ant-internal}.ivy.settings" file="${ivysettings.xml}"/>
-  	<ivy:retrieve settingsRef="${ant.project.name}-${hadoop.version.ant-internal}.ivy.settings"
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
       pattern="${build.dir.hadoop}/[artifact]-[revision].[ext]"/>
   </target>
+
   
   <available property="hadoopcore.${hadoop.version.ant-internal}.install.done"
     file="${build.dir.hadoop}/hadoop-${hadoop.version.ant-internal}.installed"/>

Modified: hive/trunk/build.properties
URL: http://svn.apache.org/viewvc/hive/trunk/build.properties?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/build.properties (original)
+++ hive/trunk/build.properties Tue Jan 10 15:08:45 2012
@@ -10,11 +10,9 @@ javac.deprecation=off
 javac.args=
 javac.args.warnings=
 
-hadoop-0.20.version=0.20.1
-hadoop-0.20S.version=0.20.3-CDH3-SNAPSHOT
-hadoop-0.23.version=0.23.0
-hadoop.version=${hadoop-0.20.version}
-hadoop.security.version=${hadoop-0.20S.version}
+hadoop.version=0.20.1
+hadoop.security.version=0.20.3-CDH3-SNAPSHOT
+hadoop.security.version.prefix=0.20S
 hadoop.mirror=http://mirror.facebook.net/facebook/hive-deps
 hadoop.mirror2=http://archive.cloudera.com/hive-deps
 

Propchange: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMax.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/contrib/src/java/org/apache/hadoop/hive/contrib/udaf/example/UDAFExampleMin.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataInputBuffer.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/NonSyncDataOutputBuffer.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultGraphWalker.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/DefaultRuleDispatcher.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Dispatcher.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/GraphWalker.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessor.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/NodeProcessorCtx.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/Rule.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/lib/RuleRegExp.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinCond.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/JoinType.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AggregationDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/AlterTableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CollectDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CopyWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateTableLikeDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DescTableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DropTableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/Explain.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplainWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExplosionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeColumnDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeConstantDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeFieldDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeGenericFuncDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExprNodeNullDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ExtractDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FetchWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FileSinkDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/FilterDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ForwardDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/GroupByDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinCondDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/JoinDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LateralViewJoinDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LimitDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadFileDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/LoadTableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapJoinDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredLocalWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MapredWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/MoveWork.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/PartitionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ReduceSinkDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SchemaDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ScriptDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/SelectDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowPartitionsDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTableStatusDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowTablesDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/TableScanDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UDTFDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/UnionDesc.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/serde/README.txt
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazyFactory.java
            ('svn:mergeinfo' removed)

Propchange: hive/trunk/serde/src/java/org/apache/hadoop/hive/serde2/typeinfo/
            ('svn:mergeinfo' removed)

Modified: hive/trunk/shims/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/build.xml?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/shims/build.xml (original)
+++ hive/trunk/shims/build.xml Tue Jan 10 15:08:45 2012
@@ -51,49 +51,50 @@ to call at top-level: ant deploy-contrib
     <path refid="common-classpath"/>
   </path>
 
-  <!-- default list of shims to build -->
-  <property name="shims.include" value="0.20,0.20S,0.23"/>
-  <property name="shims.common.sources" value="${basedir}/src/common/java"/>	
-  <property name="shims.common.secure.sources" value="${basedir}/src/common/java;${basedir}/src/common-secure/java"/>
-  <!-- sources and hadoop version for each shim -->
-  <property name="shims.0.20.sources" value="${shims.common.sources};${basedir}/src/0.20/java" />	
-  <property name="shims.0.20.version" value="${hadoop-0.20.version}" />	
-  <property name="shims.0.20S.sources" value="${shims.common.secure.sources};${basedir}/src/0.20S/java" />	
-  <property name="shims.0.20S.version" value="${hadoop-0.20S.version}" />	
-  <property name="shims.0.23.sources" value="${shims.common.secure.sources};${basedir}/src/0.23/java" />	
-  <property name="shims.0.23.version" value="${hadoop-0.23.version}" />	
-	
-  <target name="build_shims" depends="install-hadoopcore-internal"
+  <target name="build_shims" depends="install-hadoopcore-internal, ivy-retrieve-hadoop-source"
           description="Build shims against a particular hadoop version">
     <echo message="Project: ${ant.project.name}"/>
-    <echo message="Compiling ${sources} against hadoop ${hadoop.version.ant-internal} (${hadoop.root})"/>
+    <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version.ant-internal}" />
+    <echo message="Compiling shims against hadoop ${hadoop.version.ant-internal} (${hadoop.root})"/>
     <javac
      encoding="${build.encoding}"
      includes="**/*.java"
+     excludes="**/Proxy*.java"
      destdir="${build.classes}"
      debug="${javac.debug}"
      deprecation="${javac.deprecation}"
-     srcdir="${sources}"
      includeantruntime="false">
       <compilerarg line="${javac.args} ${javac.args.warnings}" />
       <classpath refid="classpath"/>
+      <src path="${basedir}/src/${hadoop.version.ant-internal.prefix}/java" />
+      <src path="${basedir}/src/common/java" />
     </javac>
   </target>
-	
+
   <target name="compile" depends="init,ivy-retrieve">
-  		<echo message="Project: ${ant.project.name}"/>
-		<for param="shimName" list="${shims.include}">
-		  <sequential>
-		    <echo>Building shims @{shimName}</echo>
-		  	<antcall target="build_shims" inheritRefs="false" inheritAll="false">
-		      <param name="hadoop.version.ant-internal" value="${shims.@{shimName}.version}" />
-		      <param name="sources" value="${shims.@{shimName}.sources}" />
-		    </antcall>
-		  </sequential>
-	  	</for>  	
+    <echo message="Project: ${ant.project.name}"/>
+    <antcall target="build_shims" inheritRefs="false" inheritAll="false">
+      <param name="hadoop.version.ant-internal" value="${hadoop.version}" />
+    </antcall>
+    <antcall target="build_shims" inheritRefs="false" inheritAll="false">
+      <param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
+      <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
+    </antcall>
+    <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version}" />
+    <javac
+     encoding="${build.encoding}"
+     includes="**/Proxy*.java"
+     destdir="${build.classes}"
+     debug="${javac.debug}"
+     deprecation="${javac.deprecation}"
+     includeantruntime="false">
+      <compilerarg line="${javac.args} ${javac.args.warnings}" />
+      <classpath refid="classpath"/>
+      <src path="${basedir}/src/common/java" />
+    </javac>
   </target>
   
-  <target name="compile_secure_test" depends="install-hadoopcore-internal" 
+  <target name="compile_secure_test" depends="install-hadoopcore-internal, ivy-retrieve-hadoop-source" 
           description="Test shims against a particular hadoop version">
     <echo message="Project: ${ant.project.name}"/>
     <getversionpref property="hadoop.version.ant-internal.prefix" input="${hadoop.version.ant-internal}" />
@@ -132,8 +133,9 @@ to call at top-level: ant deploy-contrib
   <target name="compile-test" depends="compile">
     <echo message="Project: ${ant.project.name}"/>
     <!-- TODO: move tests to version directory -->
-    <antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
+    <!--antcall target="compile_secure_test" inheritRefs="false" inheritAll="false">
       <param name="hadoop.version.ant-internal" value="${hadoop.security.version}" />
-    </antcall>
+      <param name="hadoop.version.ant-internal.prefix" value="${hadoop.security.version.prefix}" />
+    </antcall-->
   </target>
 </project>

Modified: hive/trunk/shims/ivy.xml
URL: http://svn.apache.org/viewvc/hive/trunk/shims/ivy.xml?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/shims/ivy.xml (original)
+++ hive/trunk/shims/ivy.xml Tue Jan 10 15:08:45 2012
@@ -27,7 +27,10 @@
     <include file="${ivy.conf.dir}/common-configurations.xml"/>
   </configurations>
   <dependencies>
-    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.version.ant-internal}">
+    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.version}">
+      <artifact name="hadoop" type="source" ext="tar.gz"/>
+    </dependency> 
+    <dependency org="org.apache.hadoop" name="hadoop-core" rev="${hadoop.security.version}">
       <artifact name="hadoop" type="source" ext="tar.gz"/>
     </dependency>
     <dependency org="org.apache.zookeeper" name="zookeeper"

Modified: hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java (original)
+++ hive/trunk/shims/src/0.20S/java/org/apache/hadoop/hive/shims/Hadoop20SShims.java Tue Jan 10 15:08:45 2012
@@ -17,17 +17,510 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
+import java.util.List;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
+import org.apache.hadoop.hive.thrift.DelegationTokenSelector;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.OutputCommitter;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskAttemptContext;
+import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hadoop.mapred.TaskID;
+import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
+import org.apache.hadoop.mapred.lib.CombineFileSplit;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
- * Implemention of shims against Hadoop 0.20 with Security.
+ * Implemention of shims against Hadoop 0.20.0.
  */
-public class Hadoop20SShims extends HadoopShimsSecure {
+public class Hadoop20SShims implements HadoopShims {
+  public boolean usesJobShell() {
+    return false;
+  }
+
+  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+      throws IOException {
+
+    return fs.deleteOnExit(path);
+  }
+
+  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+      throws IOException {
+    // gone in 0.18+
+  }
+
+  public boolean isJobPreparing(RunningJob job) throws IOException {
+    return job.getJobState() == JobStatus.PREP;
+  }
+  /**
+   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
+   */
+  public void setTmpFiles(String prop, String files) {
+    // gone in 20+
+  }
+
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * We define this function here to make the code compatible between
+   * hadoop 0.17 and hadoop 0.20.
+   *
+   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
+   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
+   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
+   * references that class, which is not available in hadoop 0.17.
+   */
+  public int compareText(Text a, Text b) {
+    return a.compareTo(b);
+  }
+
+  @Override
+  public long getAccessTime(FileStatus file) {
+    return file.getAccessTime();
+  }
+
+  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
+    return new CombineFileInputFormatShim() {
+      @Override
+      public RecordReader getRecordReader(InputSplit split,
+          JobConf job, Reporter reporter) throws IOException {
+        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
+      }
+    };
+  }
+
+  public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
+    long shrinkedLength;
+    boolean _isShrinked;
+    public InputSplitShim() {
+      super();
+      _isShrinked = false;
+    }
+
+    public InputSplitShim(CombineFileSplit old) throws IOException {
+      super(old);
+      _isShrinked = false;
+    }
+
+    @Override
+    public void shrinkSplit(long length) {
+      _isShrinked = true;
+      shrinkedLength = length;
+    }
+
+    public boolean isShrinked() {
+      return _isShrinked;
+    }
+
+    public long getShrinkedLength() {
+      return shrinkedLength;
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+      super.readFields(in);
+      _isShrinked = in.readBoolean();
+      if (_isShrinked) {
+        shrinkedLength = in.readLong();
+      }
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+      super.write(out);
+      out.writeBoolean(_isShrinked);
+      if (_isShrinked) {
+        out.writeLong(shrinkedLength);
+      }
+    }
+  }
+
+  /* This class should be replaced with org.apache.hadoop.mapred.lib.CombineFileRecordReader class, once
+   * https://issues.apache.org/jira/browse/MAPREDUCE-955 is fixed. This code should be removed - it is a copy
+   * of org.apache.hadoop.mapred.lib.CombineFileRecordReader
+   */
+  public static class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
+
+    static final Class[] constructorSignature = new Class[] {
+        InputSplit.class,
+        Configuration.class,
+        Reporter.class,
+        Integer.class
+        };
+
+    protected CombineFileSplit split;
+    protected JobConf jc;
+    protected Reporter reporter;
+    protected Class<RecordReader<K, V>> rrClass;
+    protected Constructor<RecordReader<K, V>> rrConstructor;
+    protected FileSystem fs;
+
+    protected int idx;
+    protected long progress;
+    protected RecordReader<K, V> curReader;
+    protected boolean isShrinked;
+    protected long shrinkedLength;
+
+    public boolean next(K key, V value) throws IOException {
+
+      while ((curReader == null)
+          || !doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(),
+              value)) {
+        if (!initNextRecordReader(key)) {
+          return false;
+        }
+      }
+      return true;
+    }
+
+    public K createKey() {
+      K newKey = curReader.createKey();
+      return (K)(new CombineHiveKey(newKey));
+    }
+
+    public V createValue() {
+      return curReader.createValue();
+    }
+
+    /**
+     * Return the amount of data processed.
+     */
+    public long getPos() throws IOException {
+      return progress;
+    }
+
+    public void close() throws IOException {
+      if (curReader != null) {
+        curReader.close();
+        curReader = null;
+      }
+    }
+
+    /**
+     * Return progress based on the amount of data processed so far.
+     */
+    public float getProgress() throws IOException {
+      return Math.min(1.0f, progress / (float) (split.getLength()));
+    }
+
+    /**
+     * A generic RecordReader that can hand out different recordReaders
+     * for each chunk in the CombineFileSplit.
+     */
+    public CombineFileRecordReader(JobConf job, CombineFileSplit split,
+        Reporter reporter,
+        Class<RecordReader<K, V>> rrClass)
+        throws IOException {
+      this.split = split;
+      this.jc = job;
+      this.rrClass = rrClass;
+      this.reporter = reporter;
+      this.idx = 0;
+      this.curReader = null;
+      this.progress = 0;
+
+      isShrinked = false;
+
+      assert (split instanceof Hadoop20Shims.InputSplitShim);
+      if (((InputSplitShim) split).isShrinked()) {
+        isShrinked = true;
+        shrinkedLength = ((InputSplitShim) split).getShrinkedLength();
+      }      
+      
+      try {
+        rrConstructor = rrClass.getDeclaredConstructor(constructorSignature);
+        rrConstructor.setAccessible(true);
+      } catch (Exception e) {
+        throw new RuntimeException(rrClass.getName() +
+            " does not have valid constructor", e);
+      }
+      initNextRecordReader(null);
+    }
+    
+    /**
+     * do next and handle exception inside it. 
+     * @param key
+     * @param value
+     * @return
+     * @throws IOException
+     */
+    private boolean doNextWithExceptionHandler(K key, V value) throws IOException {
+      try {
+        return curReader.next(key, value);
+      } catch (Exception e) {
+        return HiveIOExceptionHandlerUtil
+            .handleRecordReaderNextException(e, jc);
+      }
+    }
+
+    /**
+     * Get the record reader for the next chunk in this CombineFileSplit.
+     */
+    protected boolean initNextRecordReader(K key) throws IOException {
+
+      if (curReader != null) {
+        curReader.close();
+        curReader = null;
+        if (idx > 0) {
+          progress += split.getLength(idx - 1); // done processing so far
+        }
+      }
+
+      // if all chunks have been processed, nothing more to do.
+      if (idx == split.getNumPaths() || (isShrinked && progress > shrinkedLength)) {
+        return false;
+      }
+
+      // get a record reader for the idx-th chunk
+      try {
+        curReader = rrConstructor.newInstance(new Object[]
+            {split, jc, reporter, Integer.valueOf(idx)});
+
+        // change the key if need be
+        if (key != null) {
+          K newKey = curReader.createKey();
+          ((CombineHiveKey)key).setKey(newKey);
+        }
+
+        // setup some helper config variables.
+        jc.set("map.input.file", split.getPath(idx).toString());
+        jc.setLong("map.input.start", split.getOffset(idx));
+        jc.setLong("map.input.length", split.getLength(idx));
+      } catch (Exception e) {
+        curReader = HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(
+            e, jc);
+      }
+      idx++;
+      return true;
+    }
+  }
+
+  public abstract static class CombineFileInputFormatShim<K, V> extends
+      CombineFileInputFormat<K, V>
+      implements HadoopShims.CombineFileInputFormatShim<K, V> {
+
+    public Path[] getInputPathsShim(JobConf conf) {
+      try {
+        return FileInputFormat.getInputPaths(conf);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    @Override
+    public void createPool(JobConf conf, PathFilter... filters) {
+      super.createPool(conf, filters);
+    }
+
+    @Override
+    public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
+      long minSize = job.getLong("mapred.min.split.size", 0);
+
+      // For backward compatibility, let the above parameter be used
+      if (job.getLong("mapred.min.split.size.per.node", 0) == 0) {
+        super.setMinSplitSizeNode(minSize);
+      }
+
+      if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) {
+        super.setMinSplitSizeRack(minSize);
+      }
+
+      if (job.getLong("mapred.max.split.size", 0) == 0) {
+        super.setMaxSplitSize(minSize);
+      }
+
+      CombineFileSplit[] splits = (CombineFileSplit[]) super.getSplits(job, numSplits);
+
+      InputSplitShim[] isplits = new InputSplitShim[splits.length];
+      for (int pos = 0; pos < splits.length; pos++) {
+        isplits[pos] = new InputSplitShim(splits[pos]);
+      }
+
+      return isplits;
+    }
+
+    public InputSplitShim getInputSplitShim() throws IOException {
+      return new InputSplitShim();
+    }
+
+    public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
+        Reporter reporter,
+        Class<RecordReader<K, V>> rrClass)
+        throws IOException {
+      CombineFileSplit cfSplit = (CombineFileSplit) split;
+      return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
+    }
+
+  }
+
+  public String getInputFormatClassName() {
+    return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
+  }
+
+  String[] ret = new String[2];
+
+  @Override
+  public String[] getTaskJobIDs(TaskCompletionEvent t) {
+    TaskID tid = t.getTaskAttemptId().getTaskID();
+    ret[0] = tid.toString();
+    ret[1] = tid.getJobID().toString();
+    return ret;
+  }
+
+  public void setFloatConf(Configuration conf, String varName, float val) {
+    conf.setFloat(varName, val);
+  }
+
+  @Override
+  public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
+      String archiveName) throws Exception {
+
+    HadoopArchives har = new HadoopArchives(conf);
+    List<String> args = new ArrayList<String>();
+
+    if (conf.get("hive.archive.har.parentdir.settable") == null) {
+      throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
+    }
+    boolean parentSettable =
+      conf.getBoolean("hive.archive.har.parentdir.settable", false);
+
+    if (parentSettable) {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add("-p");
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    } else {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    }
+
+    return ToolRunner.run(har, args.toArray(new String[0]));
+  }
+
+  public static class NullOutputCommitter extends OutputCommitter {
+    @Override
+    public void setupJob(JobContext jobContext) { }
+    @Override
+    public void cleanupJob(JobContext jobContext) { }
+
+    @Override
+    public void setupTask(TaskAttemptContext taskContext) { }
+    @Override
+    public boolean needsTaskCommit(TaskAttemptContext taskContext) {
+      return false;
+    }
+    @Override
+    public void commitTask(TaskAttemptContext taskContext) { }
+    @Override
+    public void abortTask(TaskAttemptContext taskContext) { }
+  }
+
+  public void setNullOutputFormat(JobConf conf) {
+    conf.setOutputFormat(NullOutputFormat.class);
+    conf.setOutputCommitter(Hadoop20Shims.NullOutputCommitter.class);
+
+    // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463)
+    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
+    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+
+    // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206)
+    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
+    conf.setBoolean("mapreduce.job.committer.task.cleanup.needed", false);
+  }
+
+  @Override
+  public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
+    return UserGroupInformation.getCurrentUser();
+  }
+  
+  @Override
+  public boolean isSecureShimImpl() {
+    return true;
+  }
+  
+  @Override
+  public String getShortUserName(UserGroupInformation ugi) {
+    return ugi.getShortUserName();
+  }
+
+  @Override
+  public String getTokenStrForm(String tokenSignature) throws IOException {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector();
+
+    Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
+        tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
+    return token != null ? token.encodeToUrlString() : null;
+  }
+  
+  @Override
+  public void doAs(UserGroupInformation ugi, PrivilegedExceptionAction<Void> pvea) throws IOException, InterruptedException {
+    ugi.doAs(pvea);
+  }
+
+  @Override
+  public UserGroupInformation createRemoteUser(String userName, List<String> groupNames) {
+    return UserGroupInformation.createRemoteUser(userName);
+  }
 
   @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
@@ -42,7 +535,7 @@ public class Hadoop20SShims extends Hado
       throw new Exception(errorMsg);
     }
   }
-
+  
   @Override
   public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
     return new org.apache.hadoop.mapreduce.TaskAttemptContext(conf, new TaskAttemptID()) {

Modified: hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java (original)
+++ hive/trunk/shims/src/0.23/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java Tue Jan 10 15:08:45 2012
@@ -17,22 +17,505 @@
  */
 package org.apache.hadoop.hive.shims;
 
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.security.auth.login.LoginException;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain;
+import org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil;
 import org.apache.hadoop.hive.shims.HadoopShims.JobTrackerState;
-import org.apache.hadoop.hive.shims.HadoopShimsSecure;
+import org.apache.hadoop.hive.thrift.DelegationTokenSelector23;
+import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.FileInputFormat;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapred.OutputCommitter;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.mapred.TaskAttemptContext;
+import org.apache.hadoop.mapred.TaskCompletionEvent;
+import org.apache.hadoop.mapred.TaskID;
+import org.apache.hadoop.mapred.lib.CombineFileInputFormat;
+import org.apache.hadoop.mapred.lib.CombineFileSplit;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.mapreduce.task.JobContextImpl;
 import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.security.token.TokenSelector;
+import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * Implemention of shims against Hadoop 0.23.0.
  */
-public class Hadoop23Shims extends HadoopShimsSecure {
+public class Hadoop23Shims implements HadoopShims {
+  public boolean usesJobShell() {
+    return false;
+  }
+
+  public boolean fileSystemDeleteOnExit(FileSystem fs, Path path)
+      throws IOException {
+
+    return fs.deleteOnExit(path);
+  }
+
+  public void inputFormatValidateInput(InputFormat fmt, JobConf conf)
+      throws IOException {
+    // gone in 0.18+
+  }
+
+  public boolean isJobPreparing(RunningJob job) throws IOException {
+    return job.getJobState() == JobStatus.PREP;
+  }
+  /**
+   * Workaround for hadoop-17 - jobclient only looks at commandlineconfig.
+   */
+  public void setTmpFiles(String prop, String files) {
+    // gone in 20+
+  }
+
+  public HadoopShims.MiniDFSShim getMiniDfs(Configuration conf,
+      int numDataNodes,
+      boolean format,
+      String[] racks) throws IOException {
+    return new MiniDFSShim(new MiniDFSCluster(conf, numDataNodes, format, racks));
+  }
+
+  /**
+   * MiniDFSShim.
+   *
+   */
+  public class MiniDFSShim implements HadoopShims.MiniDFSShim {
+    private final MiniDFSCluster cluster;
+
+    public MiniDFSShim(MiniDFSCluster cluster) {
+      this.cluster = cluster;
+    }
+
+    public FileSystem getFileSystem() throws IOException {
+      return cluster.getFileSystem();
+    }
+
+    public void shutdown() {
+      cluster.shutdown();
+    }
+  }
+
+  /**
+   * We define this function here to make the code compatible between
+   * hadoop 0.17 and hadoop 0.20.
+   *
+   * Hive binary that compiled Text.compareTo(Text) with hadoop 0.20 won't
+   * work with hadoop 0.17 because in hadoop 0.20, Text.compareTo(Text) is
+   * implemented in org.apache.hadoop.io.BinaryComparable, and Java compiler
+   * references that class, which is not available in hadoop 0.17.
+   */
+  public int compareText(Text a, Text b) {
+    return a.compareTo(b);
+  }
+
+  @Override
+  public long getAccessTime(FileStatus file) {
+    return file.getAccessTime();
+  }
+
+  public HadoopShims.CombineFileInputFormatShim getCombineFileInputFormat() {
+    return new CombineFileInputFormatShim() {
+      @Override
+      public RecordReader getRecordReader(InputSplit split,
+          JobConf job, Reporter reporter) throws IOException {
+        throw new IOException("CombineFileInputFormat.getRecordReader not needed.");
+      }
+    };
+  }
+
+  public static class InputSplitShim extends CombineFileSplit implements HadoopShims.InputSplitShim {
+    long shrinkedLength;
+    boolean _isShrinked;
+    public InputSplitShim() {
+      super();
+      _isShrinked = false;
+    }
+
+    public InputSplitShim(CombineFileSplit old) throws IOException {
+      super(old);
+      _isShrinked = false;
+    }
+
+    @Override
+    public void shrinkSplit(long length) {
+      _isShrinked = true;
+      shrinkedLength = length;
+    }
+
+    public boolean isShrinked() {
+      return _isShrinked;
+    }
+
+    public long getShrinkedLength() {
+      return shrinkedLength;
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+      super.readFields(in);
+      _isShrinked = in.readBoolean();
+      if (_isShrinked) {
+        shrinkedLength = in.readLong();
+      }
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+      super.write(out);
+      out.writeBoolean(_isShrinked);
+      if (_isShrinked) {
+        out.writeLong(shrinkedLength);
+      }
+    }
+  }
+
+  /* This class should be replaced with org.apache.hadoop.mapred.lib.CombineFileRecordReader class, once
+   * https://issues.apache.org/jira/browse/MAPREDUCE-955 is fixed. This code should be removed - it is a copy
+   * of org.apache.hadoop.mapred.lib.CombineFileRecordReader
+   */
+  public static class CombineFileRecordReader<K, V> implements RecordReader<K, V> {
+
+    static final Class[] constructorSignature = new Class[] {
+        InputSplit.class,
+        Configuration.class,
+        Reporter.class,
+        Integer.class
+        };
+
+    protected CombineFileSplit split;
+    protected JobConf jc;
+    protected Reporter reporter;
+    protected Class<RecordReader<K, V>> rrClass;
+    protected Constructor<RecordReader<K, V>> rrConstructor;
+    protected FileSystem fs;
+
+    protected int idx;
+    protected long progress;
+    protected RecordReader<K, V> curReader;
+    protected boolean isShrinked;
+    protected long shrinkedLength;
+    
+    public boolean next(K key, V value) throws IOException {
+
+      while ((curReader == null)
+          || !doNextWithExceptionHandler((K) ((CombineHiveKey) key).getKey(),
+              value)) {
+        if (!initNextRecordReader(key)) {
+          return false;
+        }
+      }
+      return true;
+    }
+
+    public K createKey() {
+      K newKey = curReader.createKey();
+      return (K)(new CombineHiveKey(newKey));
+    }
+
+    public V createValue() {
+      return curReader.createValue();
+    }
+
+    /**
+     * Return the amount of data processed.
+     */
+    public long getPos() throws IOException {
+      return progress;
+    }
+
+    public void close() throws IOException {
+      if (curReader != null) {
+        curReader.close();
+        curReader = null;
+      }
+    }
+
+    /**
+     * Return progress based on the amount of data processed so far.
+     */
+    public float getProgress() throws IOException {
+      return Math.min(1.0f, progress / (float) (split.getLength()));
+    }
+
+    /**
+     * A generic RecordReader that can hand out different recordReaders
+     * for each chunk in the CombineFileSplit.
+     */
+    public CombineFileRecordReader(JobConf job, CombineFileSplit split,
+        Reporter reporter,
+        Class<RecordReader<K, V>> rrClass)
+        throws IOException {
+      this.split = split;
+      this.jc = job;
+      this.rrClass = rrClass;
+      this.reporter = reporter;
+      this.idx = 0;
+      this.curReader = null;
+      this.progress = 0;
+
+      isShrinked = false;
+
+      assert (split instanceof InputSplitShim);
+      if (((InputSplitShim) split).isShrinked()) {
+        isShrinked = true;
+        shrinkedLength = ((InputSplitShim) split).getShrinkedLength();
+      }
+
+      try {
+        rrConstructor = rrClass.getDeclaredConstructor(constructorSignature);
+        rrConstructor.setAccessible(true);
+      } catch (Exception e) {
+        throw new RuntimeException(rrClass.getName() +
+            " does not have valid constructor", e);
+      }
+      initNextRecordReader(null);
+    }
+    
+    /**
+     * do next and handle exception inside it. 
+     * @param key
+     * @param value
+     * @return
+     * @throws IOException
+     */
+    private boolean doNextWithExceptionHandler(K key, V value) throws IOException {
+      try {
+        return curReader.next(key, value);
+      } catch (Exception e) {
+        return HiveIOExceptionHandlerUtil.handleRecordReaderNextException(e, jc);
+      }
+    }
+
+    /**
+     * Get the record reader for the next chunk in this CombineFileSplit.
+     */
+    protected boolean initNextRecordReader(K key) throws IOException {
+
+      if (curReader != null) {
+        curReader.close();
+        curReader = null;
+        if (idx > 0) {
+          progress += split.getLength(idx - 1); // done processing so far
+        }
+      }
+
+      // if all chunks have been processed or reached the length, nothing more to do.
+      if (idx == split.getNumPaths() || (isShrinked && progress > shrinkedLength)) {
+        return false;
+      }
+
+      // get a record reader for the idx-th chunk
+      try {
+        curReader = rrConstructor.newInstance(new Object[]
+            {split, jc, reporter, Integer.valueOf(idx)});
+
+        // change the key if need be
+        if (key != null) {
+          K newKey = curReader.createKey();
+          ((CombineHiveKey)key).setKey(newKey);
+        }
+
+        // setup some helper config variables.
+        jc.set("map.input.file", split.getPath(idx).toString());
+        jc.setLong("map.input.start", split.getOffset(idx));
+        jc.setLong("map.input.length", split.getLength(idx));
+      } catch (Exception e) {
+        curReader=HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(e, jc);
+      }
+      idx++;
+      return true;
+    }
+  }
+
+  public abstract static class CombineFileInputFormatShim<K, V> extends
+      CombineFileInputFormat<K, V>
+      implements HadoopShims.CombineFileInputFormatShim<K, V> {
+
+    public Path[] getInputPathsShim(JobConf conf) {
+      try {
+        return FileInputFormat.getInputPaths(conf);
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+
+    @Override
+    public void createPool(JobConf conf, PathFilter... filters) {
+      super.createPool(conf, filters);
+    }
+
+    @Override
+    public InputSplitShim[] getSplits(JobConf job, int numSplits) throws IOException {
+      long minSize = job.getLong("mapred.min.split.size", 0);
+
+      // For backward compatibility, let the above parameter be used
+      if (job.getLong("mapred.min.split.size.per.node", 0) == 0) {
+        super.setMinSplitSizeNode(minSize);
+      }
+
+      if (job.getLong("mapred.min.split.size.per.rack", 0) == 0) {
+        super.setMinSplitSizeRack(minSize);
+      }
+
+      if (job.getLong("mapred.max.split.size", 0) == 0) {
+        super.setMaxSplitSize(minSize);
+      }
+
+      InputSplit[] splits = super.getSplits(job, numSplits);
+
+      InputSplitShim[] isplits = new InputSplitShim[splits.length];
+      for (int pos = 0; pos < splits.length; pos++) {
+        isplits[pos] = new InputSplitShim((CombineFileSplit) splits[pos]);
+      }
+
+      return isplits;
+    }
+
+    public InputSplitShim getInputSplitShim() throws IOException {
+      return new InputSplitShim();
+    }
+
+    public RecordReader getRecordReader(JobConf job, HadoopShims.InputSplitShim split,
+        Reporter reporter,
+        Class<RecordReader<K, V>> rrClass)
+        throws IOException {
+      CombineFileSplit cfSplit = (CombineFileSplit) split;
+      return new CombineFileRecordReader(job, cfSplit, reporter, rrClass);
+    }
+
+  }
+
+  public String getInputFormatClassName() {
+    return "org.apache.hadoop.hive.ql.io.CombineHiveInputFormat";
+  }
+
+  String[] ret = new String[2];
+
+  @Override
+  public String[] getTaskJobIDs(TaskCompletionEvent t) {
+    TaskID tid = t.getTaskAttemptId().getTaskID();
+    ret[0] = tid.toString();
+    ret[1] = tid.getJobID().toString();
+    return ret;
+  }
+
+  public void setFloatConf(Configuration conf, String varName, float val) {
+    conf.setFloat(varName, val);
+  }
 
   @Override
+  public int createHadoopArchive(Configuration conf, Path sourceDir, Path destDir,
+      String archiveName) throws Exception {
+
+    HadoopArchives har = new HadoopArchives(conf);
+    List<String> args = new ArrayList<String>();
+
+    if (conf.get("hive.archive.har.parentdir.settable") == null) {
+      throw new RuntimeException("hive.archive.har.parentdir.settable is not set");
+    }
+    boolean parentSettable =
+      conf.getBoolean("hive.archive.har.parentdir.settable", false);
+
+    if (parentSettable) {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add("-p");
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    } else {
+      args.add("-archiveName");
+      args.add(archiveName);
+      args.add(sourceDir.toString());
+      args.add(destDir.toString());
+    }
+
+    return ToolRunner.run(har, args.toArray(new String[0]));
+  }
+
+  public static class NullOutputCommitter extends OutputCommitter {
+    @Override
+    public void setupJob(JobContext jobContext) { }
+    @Override
+    public void cleanupJob(JobContext jobContext) { }
+
+    @Override
+    public void setupTask(TaskAttemptContext taskContext) { }
+    @Override
+    public boolean needsTaskCommit(TaskAttemptContext taskContext) {
+      return false;
+    }
+    @Override
+    public void commitTask(TaskAttemptContext taskContext) { }
+    @Override
+    public void abortTask(TaskAttemptContext taskContext) { }
+  }
+
+  public void setNullOutputFormat(JobConf conf) {
+    conf.setOutputFormat(NullOutputFormat.class);
+    conf.setOutputCommitter(Hadoop23Shims.NullOutputCommitter.class);
+
+    // option to bypass job setup and cleanup was introduced in hadoop-21 (MAPREDUCE-463)
+    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
+    conf.setBoolean("mapred.committer.job.setup.cleanup.needed", false);
+
+    // option to bypass task cleanup task was introduced in hadoop-23 (MAPREDUCE-2206)
+    // but can be backported. So we disable setup/cleanup in all versions >= 0.19
+    conf.setBoolean("mapreduce.job.committer.task.cleanup.needed", false);
+  }
+
+  @Override
+  public UserGroupInformation getUGIForConf(Configuration conf) throws IOException {
+    return UserGroupInformation.getCurrentUser();
+  }
+
+  @Override
+  public boolean isSecureShimImpl() {
+    return true;
+  }
+
+  @Override
+  public String getShortUserName(UserGroupInformation ugi) {
+    return ugi.getShortUserName();
+  }
+
+  @Override
+  public String getTokenStrForm(String tokenSignature) throws IOException {
+    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+    TokenSelector<? extends TokenIdentifier> tokenSelector = new DelegationTokenSelector23();
+
+    Token<? extends TokenIdentifier> token = tokenSelector.selectToken(
+        tokenSignature == null ? new Text() : new Text(tokenSignature), ugi.getTokens());
+    return token != null ? token.encodeToUrlString() : null;
+  }
+  
+  @Override
   public JobTrackerState getJobTrackerState(ClusterStatus clusterStatus) throws Exception {
     JobTrackerState state;
     switch (clusterStatus.getJobTrackerStatus()) {
@@ -45,7 +528,7 @@ public class Hadoop23Shims extends Hadoo
       throw new Exception(errorMsg);
     }
   }
-
+  
   @Override
   public org.apache.hadoop.mapreduce.TaskAttemptContext newTaskAttemptContext(Configuration conf, final Progressable progressable) {
     return new TaskAttemptContextImpl(conf, new TaskAttemptID()) {

Modified: hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java
URL: http://svn.apache.org/viewvc/hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java?rev=1229584&r1=1229583&r2=1229584&view=diff
==============================================================================
--- hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java (original)
+++ hive/trunk/shims/src/common/java/org/apache/hadoop/hive/shims/ShimLoader.java Tue Jan 10 15:08:45 2012
@@ -79,7 +79,7 @@ public abstract class ShimLoader {
   }
 
   public static synchronized HadoopThriftAuthBridge getHadoopThriftAuthBridge() {
-      if (getHadoopShims().isSecureShimImpl()) {
+        if ("0.20S".equals(getMajorVersion())) {
           return createShim("org.apache.hadoop.hive.thrift.HadoopThriftAuthBridge20S",
                             HadoopThriftAuthBridge.class);
         } else {
@@ -87,6 +87,7 @@ public abstract class ShimLoader {
         }
       }
 
+  @SuppressWarnings("unchecked")
   private static <T> T loadShims(Map<String, String> classMap, Class<T> xface) {
     String vers = getMajorVersion();
     String className = classMap.get(vers);
@@ -95,7 +96,7 @@ public abstract class ShimLoader {
 
     private static <T> T createShim(String className, Class<T> xface) {
     try {
-      Class<?> clazz = Class.forName(className);
+      Class clazz = Class.forName(className);
       return xface.cast(clazz.newInstance());
     } catch (Exception e) {
       throw new RuntimeException("Could not load shims in class " +