Posted to commits@hive.apache.org by gu...@apache.org on 2013/07/29 23:08:19 UTC

svn commit: r1508202 [10/48] - in /hive/branches/tez: ./ beeline/src/java/org/apache/hive/beeline/ cli/src/java/org/apache/hadoop/hive/cli/ common/src/java/org/apache/hadoop/hive/common/metrics/ common/src/java/org/apache/hadoop/hive/conf/ common/src/t...

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/SortMergeJoinTaskDispatcher.java Mon Jul 29 21:08:03 2013
@@ -35,13 +35,13 @@ import org.apache.hadoop.hive.ql.exec.Co
 import org.apache.hadoop.hive.ql.exec.DummyStoreOperator;
 import org.apache.hadoop.hive.ql.exec.FileSinkOperator;
 import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.optimizer.MapJoinProcessor;
 import org.apache.hadoop.hive.ql.parse.OpParseContext;
@@ -52,10 +52,12 @@ import org.apache.hadoop.hive.ql.plan.Co
 import org.apache.hadoop.hive.ql.plan.ConditionalResolverCommonJoin.ConditionalResolverCommonJoinCtx;
 import org.apache.hadoop.hive.ql.plan.ConditionalWork;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
+import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.PartitionDesc;
+import org.apache.hadoop.hive.ql.plan.ReduceWork;
 import org.apache.hadoop.hive.ql.plan.SMBJoinDesc;
 
 /**
@@ -72,7 +74,7 @@ public class SortMergeJoinTaskDispatcher
   // Convert the work in the SMB plan to a regular join
   // Note that the operator tree is not fixed, only the path/alias mappings in the
   // plan are fixed. The operator tree will still contain the SMBJoinOperator
-  private void genSMBJoinWork(MapredWork currWork, SMBMapJoinOperator smbJoinOp) {
+  private void genSMBJoinWork(MapWork currWork, SMBMapJoinOperator smbJoinOp) {
     // Remove the paths which are not part of aliasToPartitionInfo
     Map<String, PartitionDesc> aliasToPartitionInfo = currWork.getAliasToPartnInfo();
     List<String> removePaths = new ArrayList<String>();
@@ -150,7 +152,7 @@ public class SortMergeJoinTaskDispatcher
 
       // deep copy a new mapred work
       InputStream in = new ByteArrayInputStream(xml.getBytes("UTF-8"));
-      MapredWork currJoinWork = Utilities.deserializeMapRedWork(in, physicalContext.getConf());
+      MapredWork currJoinWork = Utilities.deserializeObject(in);
       SMBMapJoinOperator newSMBJoinOp = getSMBMapJoinOp(currJoinWork);
 
       // Add the row resolver for the new operator
@@ -158,7 +160,7 @@ public class SortMergeJoinTaskDispatcher
           physicalContext.getParseContext().getOpParseCtx();
       opParseContextMap.put(newSMBJoinOp, opParseContextMap.get(oldSMBJoinOp));
       // change the newly created map-red plan as if it was a join operator
-      genSMBJoinWork(currJoinWork, newSMBJoinOp);
+      genSMBJoinWork(currJoinWork.getMapWork(), newSMBJoinOp);
       return currJoinWork;
     } catch (Exception e) {
       e.printStackTrace();
@@ -174,24 +176,25 @@ public class SortMergeJoinTaskDispatcher
       throws UnsupportedEncodingException, SemanticException {
     // deep copy a new mapred work from xml
     InputStream in = new ByteArrayInputStream(xml.getBytes("UTF-8"));
-    MapredWork newWork = Utilities.deserializeMapRedWork(in, physicalContext.getConf());
+    MapredWork newWork = Utilities.deserializeObject(in);
     // create a mapred task for this work
     MapRedTask newTask = (MapRedTask) TaskFactory.get(newWork, physicalContext
         .getParseContext().getConf());
     // generate the map join operator; already checked the map join
     MapJoinOperator newMapJoinOp =
         getMapJoinOperator(newTask, newWork, smbJoinOp, joinTree, bigTablePosition);
+
     // The reducer needs to be restored - Consider a query like:
     // select count(*) FROM bucket_big a JOIN bucket_small b ON a.key = b.key;
     // The reducer contains a groupby, which needs to be restored.
-    Operator<? extends OperatorDesc> reducer = newWork.getReducer();
+    ReduceWork rWork = newWork.getReduceWork();
 
     // create the local work for this plan
     String bigTableAlias =
         MapJoinProcessor.genLocalWorkForMapJoin(newWork, newMapJoinOp, bigTablePosition);
 
     // restore the reducer
-    newWork.setReducer(reducer);
+    newWork.setReduceWork(rWork);
     return new ObjectPair<MapRedTask, String>(newTask, bigTableAlias);
   }
 
@@ -259,10 +262,10 @@ public class SortMergeJoinTaskDispatcher
     MapredWork currJoinWork = convertSMBWorkToJoinWork(currWork, originalSMBJoinOp);
     SMBMapJoinOperator newSMBJoinOp = getSMBMapJoinOp(currJoinWork);
 
-    currWork.setOpParseCtxMap(parseCtx.getOpParseCtx());
-    currWork.setJoinTree(joinTree);
-    currJoinWork.setOpParseCtxMap(parseCtx.getOpParseCtx());
-    currJoinWork.setJoinTree(joinTree);
+    currWork.getMapWork().setOpParseCtxMap(parseCtx.getOpParseCtx());
+    currWork.getMapWork().setJoinTree(joinTree);
+    currJoinWork.getMapWork().setOpParseCtxMap(parseCtx.getOpParseCtx());
+    currJoinWork.getMapWork().setJoinTree(joinTree);
 
     // create conditional work list and task list
     List<Serializable> listWorks = new ArrayList<Serializable>();
@@ -272,7 +275,7 @@ public class SortMergeJoinTaskDispatcher
     HashMap<String, Task<? extends Serializable>> aliasToTask =
         new HashMap<String, Task<? extends Serializable>>();
     // Note that pathToAlias will behave as if the original plan was a join plan
-    HashMap<String, ArrayList<String>> pathToAliases = currJoinWork.getPathToAliases();
+    HashMap<String, ArrayList<String>> pathToAliases = currJoinWork.getMapWork().getPathToAliases();
 
     // generate a map join task for the big table
     SMBJoinDesc originalSMBJoinDesc = originalSMBJoinOp.getConf();
@@ -289,7 +292,7 @@ public class SortMergeJoinTaskDispatcher
     HashMap<String, Long> aliasToSize = new HashMap<String, Long>();
     Configuration conf = context.getConf();
     try {
-      long aliasTotalKnownInputSize = getTotalKnownInputSize(context, currJoinWork,
+      long aliasTotalKnownInputSize = getTotalKnownInputSize(context, currJoinWork.getMapWork(),
           pathToAliases, aliasToSize);
 
       String xml = currJoinWork.toXML();
@@ -339,8 +342,8 @@ public class SortMergeJoinTaskDispatcher
     listWorks.add(currTask.getWork());
     listTasks.add(currTask);
     // clear JoinTree and OP Parse Context
-    currWork.setOpParseCtxMap(null);
-    currWork.setJoinTree(null);
+    currWork.getMapWork().setOpParseCtxMap(null);
+    currWork.getMapWork().setJoinTree(null);
 
     // create conditional task and insert conditional task into task tree
     ConditionalWork cndWork = new ConditionalWork(listWorks);
@@ -417,9 +420,9 @@ public class SortMergeJoinTaskDispatcher
   }
 
   private SMBMapJoinOperator getSMBMapJoinOp(MapredWork work) throws SemanticException {
-    if (work != null) {
-      Operator<? extends OperatorDesc> reducer = work.getReducer();
-      for (Operator<? extends OperatorDesc> op : work.getAliasToWork().values()) {
+    if (work != null && work.getReduceWork() != null) {
+      Operator<? extends OperatorDesc> reducer = work.getReduceWork().getReducer();
+      for (Operator<? extends OperatorDesc> op : work.getMapWork().getAliasToWork().values()) {
         SMBMapJoinOperator smbMapJoinOp = getSMBMapJoinOp(op, reducer);
         if (smbMapJoinOp != null) {
           return smbMapJoinOp;

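The hunks above track the split of MapredWork into a container holding a MapWork and an optional ReduceWork: path/alias mappings, the join tree, and the op parse-context map now live on the MapWork, while the reducer hangs off the ReduceWork. A minimal sketch of the resulting access pattern, using only the accessors visible in this diff (the container shape is inferred from those calls, not from the full class):

    // Sketch: MapredWork as a map-work/reduce-work container after the split.
    MapredWork work = Utilities.deserializeObject(in);  // replaces deserializeMapRedWork
    MapWork mapWork = work.getMapWork();                // pathToAliases, aliasToWork, joinTree, ...
    ReduceWork reduceWork = work.getReduceWork();       // null for map-only plans
    if (reduceWork != null) {
      Operator<? extends OperatorDesc> reducer = reduceWork.getReducer();
    }
    work.setReduceWork(reduceWork);                     // restore after local-work generation

The same MapWork-level accessors account for the mechanical changes in IndexWhereProcessor and IndexWhereTaskDispatcher below.
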
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereProcessor.java Mon Jul 29 21:08:03 2013
@@ -31,10 +31,10 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.metastore.api.Index;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
 import org.apache.hadoop.hive.ql.hooks.ReadEntity;
 import org.apache.hadoop.hive.ql.index.HiveIndexHandler;
 import org.apache.hadoop.hive.ql.index.HiveIndexQueryContext;
@@ -49,6 +49,7 @@ import org.apache.hadoop.hive.ql.optimiz
 import org.apache.hadoop.hive.ql.parse.ParseContext;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.MapWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.TableScanDesc;
@@ -162,7 +163,7 @@ public class IndexWhereProcessor impleme
       HiveIndexQueryContext queryContext = queryContexts.get(chosenIndex);
 
       // prepare the map reduce job to use indexing
-      MapredWork work = currentTask.getWork();
+      MapWork work = currentTask.getWork().getMapWork();
       work.setInputformat(queryContext.getIndexInputFormat());
       work.addIndexIntermediateFile(queryContext.getIndexIntermediateFile());
       // modify inputs based on index query
@@ -204,7 +205,7 @@ public class IndexWhereProcessor impleme
 
     // check the size
     try {
-      ContentSummary inputSummary = Utilities.getInputSummary(pctx.getContext(), task.getWork(), null);
+      ContentSummary inputSummary = Utilities.getInputSummary(pctx.getContext(), task.getWork().getMapWork(), null);
       long inputSize = inputSummary.getLength();
       if (!indexHandler.checkQuerySize(inputSize, pctx.getConf())) {
         queryContext.setQueryTasks(null);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/physical/index/IndexWhereTaskDispatcher.java Mon Jul 29 21:08:03 2013
@@ -93,7 +93,7 @@ public class IndexWhereTaskDispatcher im
     GraphWalker ogw = new DefaultGraphWalker(dispatcher);
     ArrayList<Node> topNodes = new ArrayList<Node>();
     if (task.getWork() instanceof MapredWork) {
-      topNodes.addAll(((MapredWork)task.getWork()).getAliasToWork().values());
+      topNodes.addAll(((MapredWork)task.getWork()).getMapWork().getAliasToWork().values());
     } else {
       return null;
     }

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ppr/PartitionPruner.java Mon Jul 29 21:08:03 2013
@@ -217,7 +217,7 @@ public class PartitionPruner implements 
               LOG.info(ErrorMsg.INVALID_JDO_FILTER_EXPRESSION.getMsg("by condition '"
                   + message + "'"));
               pruneBySequentialScan(tab, true_parts, unkn_parts, denied_parts,
-                  prunerExpr, rowObjectInspector);
+                  prunerExpr, rowObjectInspector, conf);
             }
           }
         }
@@ -300,10 +300,11 @@ public class PartitionPruner implements 
    * @param denied_parts pruned out partitions.
    * @param prunerExpr the SQL predicate that involves partition columns.
    * @param rowObjectInspector object inspector used by the evaluator
+   * @param conf Hive configuration object; cannot be null.
    * @throws Exception
    */
   static private void pruneBySequentialScan(Table tab, Set<Partition> true_parts, Set<Partition> unkn_parts,
-      Set<Partition> denied_parts, ExprNodeDesc prunerExpr, StructObjectInspector rowObjectInspector)
+      Set<Partition> denied_parts, ExprNodeDesc prunerExpr, StructObjectInspector rowObjectInspector, HiveConf conf)
       throws Exception {
 
     List<String> trueNames = null;
@@ -320,6 +321,7 @@ public class PartitionPruner implements 
     List<String> partCols = new ArrayList<String>(pCols.size());
     List<String> values = new ArrayList<String>(pCols.size());
     Object[] objectWithPart = new Object[2];
+    String defaultPartitionName = conf.getVar(HiveConf.ConfVars.DEFAULTPARTITIONNAME);
 
     for (FieldSchema pCol : pCols) {
       partCols.add(pCol.getName());
@@ -344,11 +346,17 @@ public class PartitionPruner implements 
       Boolean r = (Boolean) PartExprEvalUtils.evaluateExprOnPart(handle, objectWithPart);
 
       if (r == null) {
-        if (unknNames == null) {
-          unknNames = new LinkedList<String>();
+        // Reject default partitions if we couldn't determine whether we should include them or not.
+        // Note that the predicate only contains the partition column parts of the original predicate.
+        if (values.contains(defaultPartitionName)) {
+          LOG.debug("skipping default/bad partition: " + partName);
+        } else {
+          if (unknNames == null) {
+            unknNames = new LinkedList<String>();
+          }
+          unknNames.add(partName);
+          LOG.debug("retained unknown partition: " + partName);
         }
-        unknNames.add(partName);
-        LOG.debug("retained unknown partition: " + partName);
       } else if (Boolean.TRUE.equals(r)) {
         if (trueNames == null) {
           trueNames = new LinkedList<String>();

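pruneBySequentialScan now receives the HiveConf so it can look up the configured default partition name (HiveConf.ConfVars.DEFAULTPARTITIONNAME, i.e. hive.exec.default.partition.name, "__HIVE_DEFAULT_PARTITION__" by default). When the partition predicate evaluates to null, a partition whose column values contain that default name is pruned outright instead of being retained as unknown, since the placeholder value gives the evaluator nothing meaningful to compare against. A self-contained sketch of the decision (the method name is illustrative):

    // Sketch: decide whether a partition whose predicate evaluated to null
    // is kept as "unknown" (re-checked at execution time) or pruned now.
    static boolean keepAsUnknown(List<String> partValues, String defaultPartitionName) {
      // a default/bad partition carries the placeholder value, so a null
      // predicate result means it should be rejected, not scanned
      return !partValues.contains(defaultPartitionName);
    }
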
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/BaseSemanticAnalyzer.java Mon Jul 29 21:08:03 2013
@@ -427,6 +427,8 @@ public abstract class BaseSemanticAnalyz
     }
   }
 
+  private static final int[] multiplier = new int[] {1000, 100, 10, 1};
+
   @SuppressWarnings("nls")
   public static String unescapeSQLString(String b) {
 
@@ -452,6 +454,18 @@ public abstract class BaseSemanticAnalyz
         continue;
       }
 
+      if (currentChar == '\\' && (i + 6 < b.length()) && b.charAt(i + 1) == 'u') {
+        int code = 0;
+        int base = i + 2;
+        for (int j = 0; j < 4; j++) {
+          int digit = Character.digit(b.charAt(j + base), 16);
+          code += digit * multiplier[j];
+        }
+        sb.append((char)code);
+        i += 5;
+        continue;
+      }
+
       if (currentChar == '\\' && (i + 4 < b.length())) {
         char i1 = b.charAt(i + 1);
         char i2 = b.charAt(i + 2);

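The new branch in unescapeSQLString handles \uXXXX escapes ahead of the existing backslash branch shown below it. Two details are worth noting: each of the four digits is parsed base-16 via Character.digit(..., 16), but the digits are then combined with the decimal place values {1000, 100, 10, 1}, so "\u0065" decodes to char 0*1000 + 0*100 + 6*10 + 5 = 65, i.e. 'A', whereas a conventional hex decode would use place values 4096, 256, 16, 1 and map "\u0041" to 'A'. Also, the bounds check i + 6 < b.length() requires one character after the escape, so an escape that ends the string falls through to the later branches. A standalone rendering of the committed logic:

    // Faithful rendering of the new branch: hex digits combined with
    // decimal place values, exactly as committed.
    static char decodeEscape(String s, int i) {  // s.charAt(i)=='\\', s.charAt(i+1)=='u'
      final int[] multiplier = {1000, 100, 10, 1};
      int code = 0;
      for (int j = 0; j < 4; j++) {
        code += Character.digit(s.charAt(i + 2 + j), 16) * multiplier[j];
      }
      return (char) code;  // decodeEscape("\\u0065", 0) == 'A'
    }
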
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Jul 29 21:08:03 2013
@@ -156,8 +156,8 @@ public class DDLSemanticAnalyzer extends
   }
 
   public static String getTypeName(int token) throws SemanticException {
-    // date and datetime types aren't currently supported
-    if (token == HiveParser.TOK_DATE || token == HiveParser.TOK_DATETIME) {
+    // datetime type isn't currently supported
+    if (token == HiveParser.TOK_DATETIME) {
       throw new SemanticException(ErrorMsg.UNSUPPORTED_TYPE.getMsg());
     }
     return TokenToTypeName.get(token);

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ExplainSemanticAnalyzer.java Mon Jul 29 21:08:03 2013
@@ -40,26 +40,31 @@ public class ExplainSemanticAnalyzer ext
     super(conf);
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
-    ctx.setExplain(true);
-
-    // Create a semantic analyzer for the query
-    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast
-        .getChild(0));
-    sem.analyze((ASTNode) ast.getChild(0), ctx);
-    sem.validate();
 
     boolean extended = false;
     boolean formatted = false;
     boolean dependency = false;
+    boolean logical = false;
     if (ast.getChildCount() == 2) {
       int explainOptions = ast.getChild(1).getType();
       formatted = (explainOptions == HiveParser.KW_FORMATTED);
       extended = (explainOptions == HiveParser.KW_EXTENDED);
       dependency = (explainOptions == HiveParser.KW_DEPENDENCY);
+      logical = (explainOptions == HiveParser.KW_LOGICAL);
     }
 
+    ctx.setExplain(true);
+    ctx.setExplainLogical(logical);
+
+    // Create a semantic analyzer for the query
+    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast
+        .getChild(0));
+    sem.analyze((ASTNode) ast.getChild(0), ctx);
+    sem.validate();
+
     ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
     List<Task<? extends Serializable>> tasks = sem.getRootTasks();
     Task<? extends Serializable> fetchTask = sem.getFetchTask();
@@ -72,14 +77,21 @@ public class ExplainSemanticAnalyzer ext
       tasks.add(fetchTask);
     }
 
+    ParseContext pCtx = null;
+    if (sem instanceof SemanticAnalyzer) {
+      pCtx = ((SemanticAnalyzer)sem).getParseContext();
+    }
+
     Task<? extends Serializable> explTask =
         TaskFactory.get(new ExplainWork(ctx.getResFile().toString(),
+        pCtx,
         tasks,
         ((ASTNode) ast.getChild(0)).toStringTree(),
         sem.getInputs(),
         extended,
         formatted,
-        dependency),
+        dependency,
+        logical),
       conf);
 
     fieldList = explTask.getResultSchema();

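Two things change here besides the new LOGICAL option: option parsing now runs before the query is analyzed, so ctx.setExplain(true) and ctx.setExplainLogical(logical) are visible during semantic analysis, and ExplainWork now carries the ParseContext (when the analyzer is a SemanticAnalyzer) plus the logical flag, so the explain task can print the operator-level plan rather than the task tree. A hypothetical invocation of the new option would be EXPLAIN LOGICAL <query>. A condensed sketch of the reordered flow:

    // Sketch: flags must be set on the Context before analysis so the
    // analyzer can see them; only then is the query analyzed.
    boolean logical = (ast.getChildCount() == 2
        && ast.getChild(1).getType() == HiveParser.KW_LOGICAL);
    ctx.setExplain(true);
    ctx.setExplainLogical(logical);
    BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, (ASTNode) ast.getChild(0));
    sem.analyze((ASTNode) ast.getChild(0), ctx);
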
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/FromClauseParser.g Mon Jul 29 21:08:03 2013
@@ -112,13 +112,13 @@ joinToken
 @init { gParent.msgs.push("join type specifier"); }
 @after { gParent.msgs.pop(); }
     :
-      KW_JOIN                     -> TOK_JOIN
-    | KW_INNER  KW_JOIN            -> TOK_JOIN
-    | KW_CROSS KW_JOIN            -> TOK_CROSSJOIN
-    | KW_LEFT  KW_OUTER KW_JOIN   -> TOK_LEFTOUTERJOIN
-    | KW_RIGHT KW_OUTER KW_JOIN   -> TOK_RIGHTOUTERJOIN
-    | KW_FULL  KW_OUTER KW_JOIN   -> TOK_FULLOUTERJOIN
-    | KW_LEFT  KW_SEMI  KW_JOIN   -> TOK_LEFTSEMIJOIN
+      KW_JOIN                      -> TOK_JOIN
+    | KW_INNER KW_JOIN             -> TOK_JOIN
+    | KW_CROSS KW_JOIN             -> TOK_CROSSJOIN
+    | KW_LEFT  (KW_OUTER)? KW_JOIN -> TOK_LEFTOUTERJOIN
+    | KW_RIGHT (KW_OUTER)? KW_JOIN -> TOK_RIGHTOUTERJOIN
+    | KW_FULL  (KW_OUTER)? KW_JOIN -> TOK_FULLOUTERJOIN
+    | KW_LEFT KW_SEMI KW_JOIN      -> TOK_LEFTSEMIJOIN
     ;
 
 lateralView
@@ -176,8 +176,8 @@ tableSample
 tableSource
 @init { gParent.msgs.push("table source"); }
 @after { gParent.msgs.pop(); }
-    : tabname=tableName (ts=tableSample)? (KW_AS? alias=identifier)?
-    -> ^(TOK_TABREF $tabname $ts? $alias?)
+    : tabname=tableName (props=tableProperties)? (ts=tableSample)? (KW_AS? alias=Identifier)?
+    -> ^(TOK_TABREF $tabname $props? $ts? $alias?)
     ;
 
 tableName

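joinToken now treats KW_OUTER as optional, so LEFT JOIN, RIGHT JOIN, and FULL JOIN parse to the same tokens as their explicit OUTER forms, and tableSource additionally accepts table properties before the sample and alias (note the alias also moves from the identifier rule to the bare Identifier token, which appears to exclude non-reserved keywords as bare aliases). Illustrative queries the relaxed rule now accepts, written as plain strings with made-up table names:

    // Each of these is now equivalent to its explicit OUTER form.
    String[] newlyAccepted = {
        "SELECT * FROM a LEFT JOIN b ON a.key = b.key",   // TOK_LEFTOUTERJOIN
        "SELECT * FROM a RIGHT JOIN b ON a.key = b.key",  // TOK_RIGHTOUTERJOIN
        "SELECT * FROM a FULL JOIN b ON a.key = b.key",   // TOK_FULLOUTERJOIN
    };
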
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/GenMapRedWalker.java Mon Jul 29 21:08:03 2013
@@ -20,7 +20,6 @@ package org.apache.hadoop.hive.ql.parse;
 
 import java.util.List;
 
-import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
 import org.apache.hadoop.hive.ql.lib.DefaultGraphWalker;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
@@ -52,12 +51,11 @@ public class GenMapRedWalker extends Def
 
     // maintain the stack of operators encountered
     opStack.push(nd);
-    dispatch(nd, opStack);
+    Boolean result = dispatchAndReturn(nd, opStack);
 
-    // kids of reduce sink operator need not be traversed again
-    if ((children == null)
-        || ((nd instanceof ReduceSinkOperator) && (getDispatchedList()
-        .containsAll(children)))) {
+    // kids of reduce sink operator or mapjoin operators merged into root task
+    // need not be traversed again
+    if (children == null || result == Boolean.FALSE) {
       opStack.pop();
       return;
     }

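The walker no longer special-cases ReduceSinkOperator; dispatchAndReturn surfaces the node processor's return value, and any processor can cut off traversal below its node by returning Boolean.FALSE, which per the updated comment now also covers map-join operators merged into the root task. A hypothetical processor body illustrating the contract (the signature is the standard NodeProcessor one from org.apache.hadoop.hive.ql.lib):

    // Returning Boolean.FALSE tells GenMapRedWalker not to visit the
    // children of the current node; anything else lets the walk continue.
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      // ... generate or extend the map-red task for this subtree ...
      boolean subtreeDone = (nd instanceof ReduceSinkOperator);  // illustrative condition
      return subtreeDone ? Boolean.FALSE : Boolean.TRUE;
    }
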
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Mon Jul 29 21:08:03 2013
@@ -161,6 +161,7 @@ KW_EXTENDED: 'EXTENDED';
 KW_FORMATTED: 'FORMATTED';
 KW_PRETTY: 'PRETTY';
 KW_DEPENDENCY: 'DEPENDENCY';
+KW_LOGICAL: 'LOGICAL';
 KW_SERDE: 'SERDE';
 KW_WITH: 'WITH';
 KW_DEFERRED: 'DEFERRED';

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Mon Jul 29 21:08:03 2013
@@ -106,6 +106,7 @@ TOK_BOOLEAN;
 TOK_FLOAT;
 TOK_DOUBLE;
 TOK_DATE;
+TOK_DATELITERAL;
 TOK_DATETIME;
 TOK_TIMESTAMP;
 TOK_STRING;
@@ -558,7 +559,7 @@ statement
 explainStatement
 @init { msgs.push("explain statement"); }
 @after { msgs.pop(); }
-	: KW_EXPLAIN (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY)? execStatement
+	: KW_EXPLAIN (explainOptions=KW_EXTENDED|explainOptions=KW_FORMATTED|explainOptions=KW_DEPENDENCY|explainOptions=KW_LOGICAL)? execStatement
       -> ^(TOK_EXPLAIN execStatement $explainOptions?)
 	;
 

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IdentifiersParser.g Mon Jul 29 21:08:03 2013
@@ -222,6 +222,7 @@ constant
 @after { gParent.msgs.pop(); }
     :
     Number
+    | dateLiteral
     | StringLiteral
     | stringLiteralSequence
     | BigintLiteral
@@ -244,6 +245,11 @@ charSetStringLiteral
     csName=CharSetName csLiteral=CharSetLiteral -> ^(TOK_CHARSETLITERAL $csName $csLiteral)
     ;
 
+dateLiteral
+    :
+    KW_DATE StringLiteral -> ^(TOK_DATELITERAL StringLiteral)
+    ;
+
 expression
 @init { gParent.msgs.push("expression specification"); }
 @after { gParent.msgs.pop(); }
@@ -254,6 +260,7 @@ expression
 atomExpression
     :
     KW_NULL -> TOK_NULL
+    | dateLiteral
     | constant
     | function
     | castExpression
@@ -512,5 +519,5 @@ identifier
     
 nonReserved
     :
-    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_INNER
+    KW_TRUE | KW_FALSE | KW_LIKE | KW_EXISTS | KW_ASC | KW_DESC | KW_ORDER | KW_GROUP | KW_BY | KW_AS | KW_INSERT | KW_OVERWRITE | KW_OUTER | KW_LEFT | KW_RIGHT | KW_FULL | KW_PARTITION | KW_PARTITIONS | KW_TABLE | KW_TABLES | KW_COLUMNS | KW_INDEX | KW_INDEXES | KW_REBUILD | KW_FUNCTIONS | KW_SHOW | KW_MSCK | KW_REPAIR | KW_DIRECTORY | KW_LOCAL | KW_USING | KW_CLUSTER | KW_DISTRIBUTE | KW_SORT | KW_UNION | KW_LOAD | KW_EXPORT | KW_IMPORT | KW_DATA | KW_INPATH | KW_IS | KW_NULL | KW_CREATE | KW_EXTERNAL | KW_ALTER | KW_CHANGE | KW_FIRST | KW_AFTER | KW_DESCRIBE | KW_DROP | KW_RENAME | KW_IGNORE | KW_PROTECTION | KW_TO | KW_COMMENT | KW_BOOLEAN | KW_TINYINT | KW_SMALLINT | KW_INT | KW_BIGINT | KW_FLOAT | KW_DOUBLE | KW_DATE | KW_DATETIME | KW_TIMESTAMP | KW_DECIMAL | KW_STRING | KW_ARRAY | KW_STRUCT | KW_UNIONTYPE | KW_PARTITIONED | KW_CLUSTERED | KW_SORTED | KW_INTO | KW_BUCKETS | KW_ROW | KW_ROWS | KW_FORMAT | KW_DELIMITED | KW_FIELDS | KW_TERMINATED | KW_ESCAPED | KW_COLLECTION | KW_ITEMS | KW_KEYS | KW_KEY_TYPE | KW_LINES | KW_STORED | KW_FILEFORMAT | KW_SEQUENCEFILE | KW_TEXTFILE | KW_RCFILE | KW_ORCFILE | KW_INPUTFORMAT | KW_OUTPUTFORMAT | KW_INPUTDRIVER | KW_OUTPUTDRIVER | KW_OFFLINE | KW_ENABLE | KW_DISABLE | KW_READONLY | KW_NO_DROP | KW_LOCATION | KW_BUCKET | KW_OUT | KW_OF | KW_PERCENT | KW_ADD | KW_REPLACE | KW_RLIKE | KW_REGEXP | KW_TEMPORARY | KW_EXPLAIN | KW_FORMATTED | KW_PRETTY | KW_DEPENDENCY | KW_LOGICAL | KW_SERDE | KW_WITH | KW_DEFERRED | KW_SERDEPROPERTIES | KW_DBPROPERTIES | KW_LIMIT | KW_SET | KW_UNSET | KW_TBLPROPERTIES | KW_IDXPROPERTIES | KW_VALUE_TYPE | KW_ELEM_TYPE | KW_MAPJOIN | KW_STREAMTABLE | KW_HOLD_DDLTIME | KW_CLUSTERSTATUS | KW_UTC | KW_UTCTIMESTAMP | KW_LONG | KW_DELETE | KW_PLUS | KW_MINUS | KW_FETCH | KW_INTERSECT | KW_VIEW | KW_IN | KW_DATABASES | KW_MATERIALIZED | KW_SCHEMA | KW_SCHEMAS | KW_GRANT | KW_REVOKE | KW_SSL | KW_UNDO | KW_LOCK | KW_LOCKS | KW_UNLOCK | KW_SHARED | KW_EXCLUSIVE | KW_PROCEDURE | KW_UNSIGNED | KW_WHILE | KW_READ | KW_READS | KW_PURGE | KW_RANGE | KW_ANALYZE | KW_BEFORE | KW_BETWEEN | KW_BOTH | KW_BINARY | KW_CONTINUE | KW_CURSOR | KW_TRIGGER | KW_RECORDREADER | KW_RECORDWRITER | KW_SEMI | KW_LATERAL | KW_TOUCH | KW_ARCHIVE | KW_UNARCHIVE | KW_COMPUTE | KW_STATISTICS | KW_USE | KW_OPTION | KW_CONCATENATE | KW_SHOW_DATABASE | KW_UPDATE | KW_RESTRICT | KW_CASCADE | KW_SKEWED | KW_ROLLUP | KW_CUBE | KW_DIRECTORIES | KW_FOR | KW_GROUPING | KW_SETS | KW_TRUNCATE | KW_NOSCAN | KW_USER | KW_ROLE | KW_INNER
     ;

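Taken together with the DDLSemanticAnalyzer change above (TOK_DATE is no longer rejected) and the new TOK_DATELITERAL token in HiveParser.g, these rules let DATE appear both as a column type and as a typed literal of the form DATE '...'. Illustrative statements, with made-up table and column names:

    // The dateLiteral rule (KW_DATE StringLiteral -> TOK_DATELITERAL) admits
    // literals like DATE '2013-07-29'; the DDL change admits the DATE type.
    String ddl   = "CREATE TABLE events (id INT, event_day DATE)";
    String query = "SELECT id FROM events WHERE event_day = DATE '2013-07-29'";
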
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/IndexUpdater.java Mon Jul 29 21:08:03 2013
@@ -40,7 +40,6 @@ import org.apache.commons.logging.LogFac
 import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.ql.exec.FilterOperator;
-import org.apache.hadoop.hive.ql.exec.MapRedTask;
 import org.apache.hadoop.hive.ql.exec.TableScanOperator;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.Utilities;

Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java Mon Jul 29 21:08:03 2013
@@ -75,6 +75,7 @@ import org.apache.hadoop.hive.ql.plan.PT
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFrameDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowFunctionDef;
 import org.apache.hadoop.hive.ql.plan.PTFDesc.WindowTableFunctionDef;
+import org.apache.hadoop.hive.ql.plan.PTFDeserializer;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
@@ -128,18 +129,19 @@ public class PTFTranslator {
       UnparseTranslator unparseT)
       throws SemanticException {
     init(semAly, hCfg, inputRR, unparseT);
-    this.ptfInvocation = qSpec;
+    ptfInvocation = qSpec;
     ptfDesc = new PTFDesc();
     ptfDesc.setLlInfo(llInfo);
     translatePTFChain();
     return ptfDesc;
   }
 
-  public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg, RowResolver inputRR,
+  public PTFDesc translate(WindowingSpec wdwSpec, SemanticAnalyzer semAly, HiveConf hCfg,
+      RowResolver inputRR,
       UnparseTranslator unparseT)
       throws SemanticException {
     init(semAly, hCfg, inputRR, unparseT);
-    this.windowingSpec = wdwSpec;
+    windowingSpec = wdwSpec;
     ptfDesc = new PTFDesc();
     ptfDesc.setLlInfo(llInfo);
     WindowTableFunctionDef wdwTFnDef = new WindowTableFunctionDef();
@@ -153,12 +155,12 @@ public class PTFTranslator {
     WindowingTableFunctionResolver tFn = (WindowingTableFunctionResolver)
         FunctionRegistry.getTableFunctionResolver(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
     if (tFn == null) {
-      throw new SemanticException(String.format("INternal Error: Unknown Table Function %s",
+      throw new SemanticException(String.format("Internal Error: Unknown Table Function %s",
           FunctionRegistry.WINDOWING_TABLE_FUNCTION));
     }
     wdwTFnDef.setName(FunctionRegistry.WINDOWING_TABLE_FUNCTION);
     wdwTFnDef.setResolverClassName(tFn.getClass().getName());
-    wdwTFnDef.setAlias("ptf_" + 1 );
+    wdwTFnDef.setAlias("ptf_" + 1);
     wdwTFnDef.setExpressionTreeString(null);
     wdwTFnDef.setTransformsRawInput(false);
     tFn.initialize(hCfg, ptfDesc, wdwTFnDef);
@@ -168,7 +170,7 @@ public class PTFTranslator {
     wdwTFnDef.setRawInputShape(inpShape);
 
     PartitioningSpec partiSpec = wdwSpec.getQueryPartitioningSpec();
-    if ( partiSpec == null ) {
+    if (partiSpec == null) {
       throw new SemanticException(
           "Invalid use of Windowing: there is no Partitioning associated with Windowing");
     }
@@ -182,10 +184,10 @@ public class PTFTranslator {
      * process Wdw functions
      */
     ArrayList<WindowFunctionDef> windowFunctions = new ArrayList<WindowFunctionDef>();
-    if ( wdwSpec.getWindowExpressions() != null ) {
-      for(WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
-        if ( expr instanceof WindowFunctionSpec) {
-          WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec)expr);
+    if (wdwSpec.getWindowExpressions() != null) {
+      for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
+        if (expr instanceof WindowFunctionSpec) {
+          WindowFunctionDef wFnDef = translate(wdwTFnDef, (WindowFunctionSpec) expr);
           windowFunctions.add(wFnDef);
         }
       }
@@ -195,13 +197,13 @@ public class PTFTranslator {
     /*
      * set outputFromWdwFnProcessing
      */
-    if ( windowFunctions.size() > 0 ) {
+    if (windowFunctions.size() > 0) {
       ArrayList<String> aliases = new ArrayList<String>();
       ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
-      for(WindowFunctionDef wFnDef : windowFunctions) {
+      for (WindowFunctionDef wFnDef : windowFunctions) {
         aliases.add(wFnDef.getAlias());
-        if ( wFnDef.isPivotResult() ) {
-          fieldOIs.add(((ListObjectInspector)wFnDef.getOI()).getListElementObjectInspector());
+        if (wFnDef.isPivotResult()) {
+          fieldOIs.add(((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector());
         } else {
           fieldOIs.add(wFnDef.getOI());
         }
@@ -223,16 +225,15 @@ public class PTFTranslator {
      */
     ShapeDetails wdwOutShape = wdwTFnDef.getOutputFromWdwFnProcessing();
     ArrayList<WindowExpressionDef> windowExpressions = new ArrayList<WindowExpressionDef>();
-    if ( wdwSpec.getWindowExpressions() != null ) {
-      for(WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
-        if ( !(expr instanceof WindowFunctionSpec) ) {
+    if (wdwSpec.getWindowExpressions() != null) {
+      for (WindowExpressionSpec expr : wdwSpec.getWindowExpressions()) {
+        if (!(expr instanceof WindowFunctionSpec)) {
           try {
             PTFExpressionDef eDef = buildExpressionDef(wdwOutShape, expr.getExpression());
             WindowExpressionDef wdwEDef = new WindowExpressionDef(eDef);
             wdwEDef.setAlias(expr.getAlias());
             windowExpressions.add(wdwEDef);
-          }
-          catch(HiveException he) {
+          } catch (HiveException he) {
             throw new SemanticException(he);
           }
         }
@@ -243,10 +244,10 @@ public class PTFTranslator {
     /*
      * set outputOI
      */
-    if ( windowExpressions.size() > 0 ) {
+    if (windowExpressions.size() > 0) {
       ArrayList<String> aliases = new ArrayList<String>();
       ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
-      for(WindowExpressionDef wEDef : windowExpressions) {
+      for (WindowExpressionDef wEDef : windowExpressions) {
         aliases.add(wEDef.getAlias());
         fieldOIs.add(wEDef.getOI());
       }
@@ -267,7 +268,7 @@ public class PTFTranslator {
      * If we have windowExpressions then we convert to Std. Object to process;
      * we just stream these rows; no need to put in an output Partition.
      */
-    if ( windowExpressions.size() > 0 ) {
+    if (windowExpressions.size() > 0) {
       StructObjectInspector oi = (StructObjectInspector)
           ObjectInspectorUtils.getStandardObjectInspector(wdwTFnDef.getOutputShape().getOI());
       wdwTFnDef.getOutputShape().setOI(oi);
@@ -280,17 +281,17 @@ public class PTFTranslator {
 
     Stack<PTFInputSpec> ptfChain = new Stack<PTFInvocationSpec.PTFInputSpec>();
     PTFInputSpec currentSpec = ptfInvocation.getFunction();
-    while (currentSpec != null ) {
+    while (currentSpec != null) {
       ptfChain.push(currentSpec);
       currentSpec = currentSpec.getInput();
     }
 
     int inputNum = 0;
     PTFInputDef currentDef = null;
-    while ( !ptfChain.isEmpty() ) {
+    while (!ptfChain.isEmpty()) {
       currentSpec = ptfChain.pop();
 
-      if ( currentSpec instanceof PTFQueryInputSpec) {
+      if (currentSpec instanceof PTFQueryInputSpec) {
         currentDef = translate((PTFQueryInputSpec) currentSpec, inputNum);
       }
       else {
@@ -300,7 +301,7 @@ public class PTFTranslator {
       }
       inputNum++;
     }
-    ptfDesc.setFuncDef((PartitionedTableFunctionDef)currentDef);
+    ptfDesc.setFuncDef((PartitionedTableFunctionDef) currentDef);
   }
 
   private PTFQueryInputDef translate(PTFQueryInputSpec spec,
@@ -342,8 +343,7 @@ public class PTFTranslator {
         PTFExpressionDef argDef = null;
         try {
           argDef = buildExpressionDef(inpDef.getOutputShape(), expr);
-        }
-        catch(HiveException he) {
+        } catch (HiveException he) {
           throw new SemanticException(he);
         }
         def.addArg(argDef);
@@ -356,7 +356,7 @@ public class PTFTranslator {
     def.setCarryForwardNames(tFn.carryForwardNames());
     tFn.setupRawInputOI();
 
-    if ( tFn.transformsRawInput() ) {
+    if (tFn.transformsRawInput()) {
       StructObjectInspector rawInOutOI = tEval.getRawInputOI();
       ArrayList<String> rawInOutColNames = tFn.getRawInputColumnNames();
       RowResolver rawInRR = buildRowResolverForPTF(def.getName(),
@@ -418,8 +418,7 @@ public class PTFTranslator {
         PTFExpressionDef argDef = null;
         try {
           argDef = buildExpressionDef(inpShape, expr);
-        }
-        catch(HiveException he) {
+        } catch (HiveException he) {
           throw new SemanticException(he);
         }
         def.addArg(argDef);
@@ -432,24 +431,23 @@ public class PTFTranslator {
     }
 
     WindowSpec wdwSpec = spec.getWindowSpec();
-    if ( wdwSpec != null ) {
+    if (wdwSpec != null) {
       String desc = spec.toString();
 
       WindowFrameDef wdwFrame = translate(spec.getName(), inpShape, wdwSpec);
-      if (!wFnInfo.isSupportsWindow() )
+      if (!wFnInfo.isSupportsWindow())
       {
         BoundarySpec start = wdwSpec.getWindowFrame().getStart();
-        if ( start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT ) {
+        if (start.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
           throw new SemanticException(
-          String.format("Expecting left window frame boundary for " +
-              "function %s to be unbounded. Found : %d", desc, start.getAmt())
-              );
+              String.format("Expecting left window frame boundary for " +
+                  "function %s to be unbounded. Found : %d", desc, start.getAmt()));
         }
         BoundarySpec end = wdwSpec.getWindowFrame().getEnd();
-        if ( end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT ) {
+        if (end.getAmt() != BoundarySpec.UNBOUNDED_AMOUNT) {
           throw new SemanticException(
               String.format("Expecting right window frame boundary for " +
-              "function %s to be unbounded. Found : %d", desc, start.getAmt()));
+                  "function %s to be unbounded. Found : %d", desc, start.getAmt()));
         }
       }
       def.setWindowFrame(wdwFrame);
@@ -457,8 +455,7 @@ public class PTFTranslator {
 
     try {
       setupWdwFnEvaluator(def);
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
       throw new SemanticException(he);
     }
 
@@ -470,7 +467,7 @@ public class PTFTranslator {
       throws SemanticException {
 
     applyConstantPartition(spec);
-    if ( spec.getPartition() == null ) {
+    if (spec.getPartition() == null) {
       return;
     }
     PartitionDef partDef = translate(def.getRawInputShape(), spec.getPartition());
@@ -483,30 +480,28 @@ public class PTFTranslator {
    * If this the first PPTF in the chain and there is no partition specified
    * then assume the user wants to include the entire input in 1 partition.
    */
-  private static void applyConstantPartition( PartitionedTableFunctionSpec spec) {
-    if ( spec.getPartition() != null ) {
+  private static void applyConstantPartition(PartitionedTableFunctionSpec spec) {
+    if (spec.getPartition() != null) {
       return;
     }
     PTFInputSpec iSpec = spec.getInput();
-    if ( iSpec instanceof PTFInputSpec ) {
-        PartitionSpec partSpec = new PartitionSpec();
-        PartitionExpression partExpr = new PartitionExpression();
-        partExpr.setExpression(new ASTNode(new CommonToken(HiveParser.Number, "0")));
-        partSpec.addExpression(partExpr);
-        spec.setPartition(partSpec);
+    if (iSpec instanceof PTFInputSpec) {
+      PartitionSpec partSpec = new PartitionSpec();
+      PartitionExpression partExpr = new PartitionExpression();
+      partExpr.setExpression(new ASTNode(new CommonToken(HiveParser.Number, "0")));
+      partSpec.addExpression(partExpr);
+      spec.setPartition(partSpec);
     }
   }
 
   private PartitionDef translate(ShapeDetails inpShape, PartitionSpec spec)
-      throws SemanticException
-  {
+      throws SemanticException {
     if (spec == null || spec.getExpressions() == null || spec.getExpressions().size() == 0) {
       return null;
     }
 
     PartitionDef pDef = new PartitionDef();
-    for (PartitionExpression pExpr : spec.getExpressions())
-    {
+    for (PartitionExpression pExpr : spec.getExpressions()) {
       PTFExpressionDef expDef = translate(inpShape, pExpr);
       pDef.addExpression(expDef);
     }
@@ -514,17 +509,16 @@ public class PTFTranslator {
   }
 
   private PTFExpressionDef translate(ShapeDetails inpShape,
-      PartitionExpression pExpr) throws SemanticException
-  {
+      PartitionExpression pExpr) throws SemanticException {
     PTFExpressionDef expDef = null;
     try {
       expDef = buildExpressionDef(inpShape, pExpr.getExpression());
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
       throw new SemanticException(he);
     }
     PTFTranslator.validateComparable(expDef.getOI(),
-        String.format("Partition Expression %s is not a comparable expression", pExpr.getExpression().toStringTree()));
+        String.format("Partition Expression %s is not a comparable expression", pExpr
+            .getExpression().toStringTree()));
     return expDef;
   }
 
@@ -548,8 +542,7 @@ public class PTFTranslator {
 
   private OrderExpressionDef translate(ShapeDetails inpShape,
       OrderExpression oExpr)
-          throws SemanticException
-  {
+      throws SemanticException {
     OrderExpressionDef oexpDef = new OrderExpressionDef();
     oexpDef.setOrder(oExpr.getOrder());
     try {
@@ -558,8 +551,7 @@ public class PTFTranslator {
       oexpDef.setExprEvaluator(expDef.getExprEvaluator());
       oexpDef.setExprNode(expDef.getExprNode());
       oexpDef.setOI(expDef.getOI());
-    }
-    catch(HiveException he) {
+    } catch (HiveException he) {
       throw new SemanticException(he);
     }
     PTFTranslator.validateComparable(oexpDef.getOI(),
@@ -573,23 +565,21 @@ public class PTFTranslator {
     /*
      * Since we componentize Windowing, no need to translate
      * the Partition & Order specs of individual WFns.
-    */
+     */
     return translate(inpShape, spec.getWindowFrame());
   }
 
   private WindowFrameDef translate(ShapeDetails inpShape,
       WindowFrameSpec spec)
       throws SemanticException {
-    if (spec == null)
-    {
+    if (spec == null) {
       return null;
     }
 
     BoundarySpec s = spec.getStart();
     BoundarySpec e = spec.getEnd();
     int cmp = s.compareTo(e);
-    if (cmp > 0)
-    {
+    if (cmp > 0) {
       throw new SemanticException(String.format(
           "Window range invalid, start boundary is greater than end boundary: %s", spec));
     }
@@ -602,8 +592,7 @@ public class PTFTranslator {
 
   private BoundaryDef translate(ShapeDetails inpShape, BoundarySpec bndSpec)
       throws SemanticException {
-    if (bndSpec instanceof ValueBoundarySpec)
-    {
+    if (bndSpec instanceof ValueBoundarySpec) {
       ValueBoundarySpec vBndSpec = (ValueBoundarySpec) bndSpec;
       ValueBoundaryDef vbDef = new ValueBoundaryDef();
       vbDef.setAmt(vBndSpec.getAmt());
@@ -612,32 +601,27 @@ public class PTFTranslator {
       PTFExpressionDef exprDef = null;
       try {
         exprDef = buildExpressionDef(inpShape, vBndSpec.getExpression());
-      }
-      catch(HiveException he) {
+      } catch (HiveException he) {
         throw new SemanticException(he);
       }
       PTFTranslator.validateValueBoundaryExprType(exprDef.getOI());
       vbDef.setExpressionDef(exprDef);
       return vbDef;
     }
-    else if (bndSpec instanceof RangeBoundarySpec)
-    {
+    else if (bndSpec instanceof RangeBoundarySpec) {
       RangeBoundarySpec rBndSpec = (RangeBoundarySpec) bndSpec;
       RangeBoundaryDef rbDef = new RangeBoundaryDef();
       rbDef.setAmt(rBndSpec.getAmt());
       rbDef.setDirection(rBndSpec.getDirection());
       return rbDef;
-    }
-    else if (bndSpec instanceof CurrentRowSpec)
-    {
+    } else if (bndSpec instanceof CurrentRowSpec) {
       CurrentRowDef cbDef = new CurrentRowDef();
       return cbDef;
     }
     throw new SemanticException("Unknown Boundary: " + bndSpec);
   }
 
-  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException
-  {
+  static void setupWdwFnEvaluator(WindowFunctionDef def) throws HiveException {
     ArrayList<PTFExpressionDef> args = def.getArgs();
     ArrayList<ObjectInspector> argOIs = new ArrayList<ObjectInspector>();
     ObjectInspector[] funcArgOIs = null;
@@ -650,7 +634,8 @@ public class PTFTranslator {
       funcArgOIs = argOIs.toArray(funcArgOIs);
     }
 
-    GenericUDAFEvaluator wFnEval = FunctionRegistry.getGenericWindowingEvaluator(def.getName(), argOIs,
+    GenericUDAFEvaluator wFnEval = FunctionRegistry.getGenericWindowingEvaluator(def.getName(),
+        argOIs,
         def.isDistinct(), def.isStar());
     ObjectInspector OI = wFnEval.init(GenericUDAFEvaluator.Mode.COMPLETE, funcArgOIs);
     def.setWFnEval(wFnEval);
@@ -658,10 +643,8 @@ public class PTFTranslator {
   }
 
   private static void validateValueBoundaryExprType(ObjectInspector OI)
-      throws SemanticException
-  {
-    if (!OI.getCategory().equals(Category.PRIMITIVE))
-    {
+      throws SemanticException {
+    if (!OI.getCategory().equals(Category.PRIMITIVE)) {
       throw new SemanticException(
           String.format(
               "Value Boundary expression must be of primitve type. Found: %s",
@@ -691,12 +674,12 @@ public class PTFTranslator {
 
   }
 
-  private ShapeDetails setupTableFnShape(String fnName, ShapeDetails inpShape, StructObjectInspector OI, ArrayList<String> columnNames, RowResolver rr)
+  private ShapeDetails setupTableFnShape(String fnName, ShapeDetails inpShape,
+      StructObjectInspector OI, ArrayList<String> columnNames, RowResolver rr)
       throws SemanticException {
     if (fnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION)
         || fnName.equals(
-            FunctionRegistry.NOOP_MAP_TABLE_FUNCTION))
-    {
+            FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
       return setupShapeForNoop(inpShape, OI, columnNames, rr);
     }
     return setupShape(OI, columnNames, rr);
@@ -711,9 +694,8 @@ public class PTFTranslator {
 
     try {
       serde = PTFTranslator.createLazyBinarySerDe(hCfg, OI, serdePropsMap);
-      shp.setOI((StructObjectInspector)serde.getObjectInspector());
-    }
-    catch(SerDeException se) {
+      shp.setOI((StructObjectInspector) serde.getObjectInspector());
+    } catch (SerDeException se) {
       throw new SemanticException(se);
     }
 
@@ -781,22 +763,20 @@ public class PTFTranslator {
 
     if (numOfPartColumns != 0 && numOfPartColumns != partCols.size()) {
       List<String> partitionColumnNames = new ArrayList<String>();
-      for(PartitionExpression partitionExpression : partCols) {
+      for (PartitionExpression partitionExpression : partCols) {
         ASTNode column = partitionExpression.getExpression();
-        if(column != null && column.getChildCount() > 0) {
+        if (column != null && column.getChildCount() > 0) {
           partitionColumnNames.add(column.getChild(0).getText());
         }
       }
       throw new SemanticException(
           String.format(
-                  "all partition columns %s must be in order clause or none should be specified",
-                  partitionColumnNames.toString()));
+              "all partition columns %s must be in order clause or none should be specified",
+              partitionColumnNames.toString()));
     }
     ArrayList<OrderExpression> combinedOrdExprs = new ArrayList<OrderExpression>();
-    if (numOfPartColumns == 0)
-    {
-      for (PartitionExpression partCol : partCols)
-      {
+    if (numOfPartColumns == 0) {
+      for (PartitionExpression partCol : partCols) {
         OrderExpression orderCol = new OrderExpression(partCol);
         combinedOrdExprs.add(orderCol);
       }
@@ -811,8 +791,7 @@ public class PTFTranslator {
    */
 
   protected static final ArrayList<String> RANKING_FUNCS = new ArrayList<String>();
-  static
-  {
+  static {
     RANKING_FUNCS.add("rank");
     RANKING_FUNCS.add("dense_rank");
     RANKING_FUNCS.add("percent_rank");
@@ -822,17 +801,13 @@ public class PTFTranslator {
   private void setupRankingArgs(WindowTableFunctionDef wdwTFnDef,
       WindowFunctionDef wFnDef,
       WindowFunctionSpec wSpec)
-      throws SemanticException
-  {
-    if (wSpec.getArgs().size() > 0)
-    {
+      throws SemanticException {
+    if (wSpec.getArgs().size() > 0) {
       throw new SemanticException("Ranking Functions can take no arguments");
     }
-
     OrderDef oDef = wdwTFnDef.getOrder();
     ArrayList<OrderExpressionDef> oExprs = oDef.getExpressions();
-    for (OrderExpressionDef oExpr : oExprs)
-    {
+    for (OrderExpressionDef oExpr : oExprs) {
       wFnDef.addArg(oExpr);
     }
   }
@@ -841,8 +816,7 @@ public class PTFTranslator {
    * Expr translation helper methods
    */
   public PTFExpressionDef buildExpressionDef(ShapeDetails inpShape, ASTNode arg)
-      throws HiveException
-  {
+      throws HiveException {
     PTFExpressionDef argDef = new PTFExpressionDef();
 
     ExprNodeDesc exprNode = semAly.genExprNodeDesc(arg, inpShape.getRr(),
@@ -860,8 +834,7 @@ public class PTFTranslator {
   private ObjectInspector initExprNodeEvaluator(ExprNodeEvaluator exprEval,
       ExprNodeDesc exprNode,
       ShapeDetails inpShape)
-      throws HiveException
-  {
+      throws HiveException {
     ObjectInspector outOI;
     outOI = exprEval.initialize(inpShape.getOI());
 
@@ -892,10 +865,10 @@ public class PTFTranslator {
    */
 
   protected static SerDe createLazyBinarySerDe(Configuration cfg,
-      StructObjectInspector oi, Map<String,String> serdePropsMap) throws SerDeException {
+      StructObjectInspector oi, Map<String, String> serdePropsMap) throws SerDeException {
     serdePropsMap = serdePropsMap == null ? new LinkedHashMap<String, String>() : serdePropsMap;
 
-    addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
+    PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap);
 
     SerDe serDe = new LazyBinarySerDe();
     Properties p = new Properties();
@@ -909,33 +882,6 @@ public class PTFTranslator {
   }
 
   @SuppressWarnings({"unchecked"})
-  public static void addOIPropertiestoSerDePropsMap(StructObjectInspector OI,
-      Map<String,String> serdePropsMap) {
-
-    if ( serdePropsMap == null ) {
-      return;
-    }
-
-    ArrayList<? extends Object>[] tInfo = getTypeMap(OI);
-
-    ArrayList<String> columnNames = (ArrayList<String>) tInfo[0];
-    ArrayList<TypeInfo> fields = (ArrayList<TypeInfo>) tInfo[1];
-    StringBuilder cNames = new StringBuilder();
-    StringBuilder cTypes = new StringBuilder();
-
-    for (int i = 0; i < fields.size(); i++)
-    {
-      cNames.append(i > 0 ? "," : "");
-      cTypes.append(i > 0 ? "," : "");
-      cNames.append(columnNames.get(i));
-      cTypes.append(fields.get(i).getTypeName());
-    }
-
-    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS,
-        cNames.toString());
-    serdePropsMap.put(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMN_TYPES,
-        cTypes.toString());
-  }
 
   private static ArrayList<? extends Object>[] getTypeMap(
       StructObjectInspector oi) {
@@ -943,14 +889,14 @@ public class PTFTranslator {
         .getTypeInfoFromObjectInspector(oi);
     ArrayList<String> fnames = t.getAllStructFieldNames();
     ArrayList<TypeInfo> fields = t.getAllStructFieldTypeInfos();
-    return new ArrayList<?>[]
-    { fnames, fields };
+    return new ArrayList<?>[] {fnames, fields};
   }
 
   /**
    * For each column on the input RR, construct a StructField for it
    * OI is constructed using the list of input column names and
    * their corresponding OIs.
+   *
    * @param rr
    * @return
    */
@@ -972,20 +918,16 @@ public class PTFTranslator {
   }
 
   protected static void validateComparable(ObjectInspector OI, String errMsg)
-      throws SemanticException
-  {
-    if (!ObjectInspectorUtils.compareSupported(OI))
-    {
+      throws SemanticException {
+    if (!ObjectInspectorUtils.compareSupported(OI)) {
       throw new SemanticException(errMsg);
     }
   }
 
   private static void addInputColumnsToList(ShapeDetails shape,
-      ArrayList<String> fieldNames, ArrayList<ObjectInspector> fieldOIs)
-  {
+      ArrayList<String> fieldNames, ArrayList<ObjectInspector> fieldOIs) {
     StructObjectInspector OI = shape.getOI();
-    for (StructField f : OI.getAllStructFieldRefs())
-    {
+    for (StructField f : OI.getAllStructFieldRefs()) {
       fieldNames.add(f.getFieldName());
       fieldOIs.add(f.getFieldObjectInspector());
     }
@@ -999,15 +941,14 @@ public class PTFTranslator {
       StructObjectInspector rowObjectInspector,
       ArrayList<String> outputColNames, RowResolver inputRR) throws SemanticException {
 
-    if ( tbFnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION) ||
-        tbFnName.equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION) ) {
+    if (tbFnName.equals(FunctionRegistry.NOOP_TABLE_FUNCTION) ||
+        tbFnName.equals(FunctionRegistry.NOOP_MAP_TABLE_FUNCTION)) {
       return buildRowResolverForNoop(tabAlias, rowObjectInspector, inputRR);
     }
 
     RowResolver rwsch = new RowResolver();
     List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
-    for (int i = 0; i < fields.size(); i++)
-    {
+    for (int i = 0; i < fields.size(); i++) {
       ColumnInfo colInfo = new ColumnInfo(fields.get(i).getFieldName(),
           TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
               .getFieldObjectInspector()),
@@ -1019,14 +960,14 @@ public class PTFTranslator {
   }
 
   protected RowResolver buildRowResolverForWindowing(WindowTableFunctionDef def,
-      boolean addWdwExprs)  throws SemanticException {
+      boolean addWdwExprs) throws SemanticException {
     RowResolver rr = new RowResolver();
     HashMap<String, WindowExpressionSpec> aliasToExprMap = windowingSpec.getAliasToWdwExpr();
     /*
      * add Window Expressions
      */
-    if ( addWdwExprs ) {
-      for(WindowExpressionDef wEDef : def.getWindowExpressions() ) {
+    if (addWdwExprs) {
+      for (WindowExpressionDef wEDef : def.getWindowExpressions()) {
         ASTNode ast = aliasToExprMap.get(wEDef.getAlias()).getExpression();
         ColumnInfo cInfo = new ColumnInfo(wEDef.getAlias(),
             TypeInfoUtils.getTypeInfoFromObjectInspector(wEDef.getOI()),
@@ -1039,11 +980,11 @@ public class PTFTranslator {
     /*
      * add Window Functions
      */
-    for(WindowFunctionDef wFnDef : def.getWindowFunctions() ) {
+    for (WindowFunctionDef wFnDef : def.getWindowFunctions()) {
       ASTNode ast = aliasToExprMap.get(wFnDef.getAlias()).getExpression();
       ObjectInspector wFnOI = null;
-      if ( wFnDef.isPivotResult() ) {
-        wFnOI = ((ListObjectInspector)wFnDef.getOI()).getListElementObjectInspector();
+      if (wFnDef.isPivotResult()) {
+        wFnOI = ((ListObjectInspector) wFnDef.getOI()).getListElementObjectInspector();
       }
       else {
         wFnOI = wFnDef.getOI();
@@ -1068,10 +1009,9 @@ public class PTFTranslator {
       }
       ASTNode inExpr = null;
       inExpr = PTFTranslator.getASTNode(inpCInfo, inpRR);
-      if ( inExpr != null ) {
+      if (inExpr != null) {
         rr.putExpression(inExpr, cInfo);
-      }
-      else {
+      } else {
         rr.put(cInfo.getTabAlias(), colAlias, cInfo);
       }
     }
@@ -1081,13 +1021,11 @@ public class PTFTranslator {
 
   protected static RowResolver buildRowResolverForNoop(String tabAlias,
       StructObjectInspector rowObjectInspector,
-      RowResolver inputRowResolver) throws SemanticException
-  {
+      RowResolver inputRowResolver) throws SemanticException {
     LOG.info("QueryTranslationInfo::getRowResolver invoked on ObjectInspector");
     RowResolver rwsch = new RowResolver();
     List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
-    for (int i = 0; i < fields.size(); i++)
-    {
+    for (int i = 0; i < fields.size(); i++) {
       StructField field = fields.get(i);
       String internalName = field.getFieldName();
       String[] tabColAlias = inputRowResolver == null ? null : inputRowResolver
@@ -1099,8 +1037,7 @@ public class PTFTranslator {
 
       if (tabColAlias != null) {
         inpColInfo = inputRowResolver.get(colTabAlias, colAlias);
-      }
-      else {
+      } else {
         /*
          * for the Virtual columns:
          * - the internalName is UPPER Case and the alias is lower case
@@ -1115,8 +1052,7 @@ public class PTFTranslator {
       if (inpColInfo != null) {
         colInfo = new ColumnInfo(inpColInfo);
         colInfo.setTabAlias(tabAlias);
-      }
-      else {
+      } else {
         colInfo = new ColumnInfo(fields.get(i).getFieldName(),
             TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i)
                 .getFieldObjectInspector()),
@@ -1130,8 +1066,7 @@ public class PTFTranslator {
 
       if (expr != null) {
         rwsch.putExpression(expr, colInfo);
-      }
-      else {
+      } else {
         rwsch.put(tabAlias, colAlias, colInfo);
       }
     }
@@ -1142,9 +1077,9 @@ public class PTFTranslator {
    * If the cInfo is for an ASTNode, this function returns the ASTNode that it is for.
    */
   public static ASTNode getASTNode(ColumnInfo cInfo, RowResolver rr) throws SemanticException {
-    for(Map.Entry<String, ASTNode> entry : rr.getExpressionMap().entrySet()) {
+    for (Map.Entry<String, ASTNode> entry : rr.getExpressionMap().entrySet()) {
       ASTNode expr = entry.getValue();
-      if ( rr.getExpression(expr).equals(cInfo)) {
+      if (rr.getExpression(expr).equals(cInfo)) {
         return expr;
       }
     }
@@ -1160,26 +1095,27 @@ public class PTFTranslator {
 
   /** Do the recursive work for visit */
   private static void _visit(Object t, Object parent, int childIndex, ContextVisitor visitor) {
-    if ( t==null ) {
+    if (t == null) {
       return;
     }
     visitor.visit(t, parent, childIndex, null);
     int n = ParseDriver.adaptor.getChildCount(t);
-    for (int i=0; i<n; i++) {
+    for (int i = 0; i < n; i++) {
       Object child = ParseDriver.adaptor.getChild(t, i);
       _visit(child, t, i, visitor);
     }
   }
 
-  public static ArrayList<PTFInvocationSpec> componentize(PTFInvocationSpec ptfInvocation) throws SemanticException {
+  public static ArrayList<PTFInvocationSpec> componentize(PTFInvocationSpec ptfInvocation)
+      throws SemanticException {
 
     ArrayList<PTFInvocationSpec> componentInvocations = new ArrayList<PTFInvocationSpec>();
 
     Stack<PTFInputSpec> ptfChain = new Stack<PTFInvocationSpec.PTFInputSpec>();
     PTFInputSpec spec = ptfInvocation.getFunction();
     while (spec instanceof PartitionedTableFunctionSpec) {
-        ptfChain.push(spec);
-        spec = spec.getInput();
+      ptfChain.push(spec);
+      spec = spec.getInput();
     }
 
     PartitionedTableFunctionSpec prevFn = (PartitionedTableFunctionSpec) ptfChain.pop();
@@ -1187,12 +1123,12 @@ public class PTFTranslator {
     PartitionSpec partSpec = prevFn.getPartition();
     OrderSpec orderSpec = prevFn.getOrder();
 
-    if ( partSpec == null ) {
-      //oops this should have been caught before trying to componentize
+    if (partSpec == null) {
+      // oops this should have been caught before trying to componentize
       throw new SemanticException(
           "No Partitioning specification specified at start of a PTFChain");
     }
-    if ( orderSpec == null ) {
+    if (orderSpec == null) {
       orderSpec = new OrderSpec(partSpec);
       prevFn.setOrder(orderSpec);
     }
@@ -1256,33 +1192,28 @@ public class PTFTranslator {
      */
     Map<ExprNodeDesc, List<ExprNodeGenericFuncDesc>> mapTopExprToLLFunExprs;
 
-    private void addLeadLagExpr(ExprNodeGenericFuncDesc llFunc)
-    {
+    private void addLeadLagExpr(ExprNodeGenericFuncDesc llFunc) {
       leadLagExprs = leadLagExprs == null ? new ArrayList<ExprNodeGenericFuncDesc>() : leadLagExprs;
       leadLagExprs.add(llFunc);
     }
 
-    public List<ExprNodeGenericFuncDesc> getLeadLagExprs()
-    {
+    public List<ExprNodeGenericFuncDesc> getLeadLagExprs() {
       return leadLagExprs;
     }
 
-    public void addLLFuncExprForTopExpr(ExprNodeDesc topExpr, ExprNodeGenericFuncDesc llFuncExpr)
-    {
+    public void addLLFuncExprForTopExpr(ExprNodeDesc topExpr, ExprNodeGenericFuncDesc llFuncExpr) {
       addLeadLagExpr(llFuncExpr);
       mapTopExprToLLFunExprs = mapTopExprToLLFunExprs == null ?
           new HashMap<ExprNodeDesc, List<ExprNodeGenericFuncDesc>>() : mapTopExprToLLFunExprs;
       List<ExprNodeGenericFuncDesc> funcList = mapTopExprToLLFunExprs.get(topExpr);
-      if (funcList == null)
-      {
+      if (funcList == null) {
         funcList = new ArrayList<ExprNodeGenericFuncDesc>();
         mapTopExprToLLFunExprs.put(topExpr, funcList);
       }
       funcList.add(llFuncExpr);
     }
 
-    public List<ExprNodeGenericFuncDesc> getLLFuncExprsInTopExpr(ExprNodeDesc topExpr)
-    {
+    public List<ExprNodeGenericFuncDesc> getLLFuncExprsInTopExpr(ExprNodeDesc topExpr) {
       if (mapTopExprToLLFunExprs == null) {
         return null;
       }
@@ -1291,8 +1222,7 @@ public class PTFTranslator {
   }
 
   public static void validateNoLeadLagInValueBoundarySpec(ASTNode node)
-      throws SemanticException
-  {
+      throws SemanticException {
     String errMsg = "Lead/Lag not allowed in ValueBoundary Spec";
     TreeWizard tw = new TreeWizard(ParseDriver.adaptor, HiveParser.tokenNames);
     ValidateNoLeadLag visitor = new ValidateNoLeadLag(errMsg);
@@ -1300,37 +1230,30 @@ public class PTFTranslator {
     visitor.checkValid();
   }
 
-  public static class ValidateNoLeadLag implements
-      ContextVisitor
-  {
+  public static class ValidateNoLeadLag implements ContextVisitor {
     String errMsg;
     boolean throwError = false;
     ASTNode errorNode;
 
-    public ValidateNoLeadLag(String errMsg)
-    {
+    public ValidateNoLeadLag(String errMsg) {
       this.errMsg = errMsg;
     }
 
     @SuppressWarnings("rawtypes")
     @Override
-    public void visit(Object t, Object parent, int childIndex, Map labels)
-    {
+    public void visit(Object t, Object parent, int childIndex, Map labels) {
       ASTNode expr = (ASTNode) t;
       ASTNode nameNode = (ASTNode) expr.getChild(0);
       if (nameNode.getText().equals(FunctionRegistry.LEAD_FUNC_NAME)
           || nameNode.getText()
-              .equals(FunctionRegistry.LAG_FUNC_NAME))
-      {
+              .equals(FunctionRegistry.LAG_FUNC_NAME)) {
         throwError = true;
         errorNode = expr;
       }
     }
 
-    void checkValid() throws SemanticException
-    {
-      if (throwError)
-      {
+    void checkValid() throws SemanticException {
+      if (throwError) {
         throw new SemanticException(errMsg + errorNode.toStringTree());
       }
     }

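Note on the PTFTranslator hunks above: besides the brace/whitespace cleanup, the local addOIPropertiestoSerDePropsMap helper is deleted and its one caller (createLazyBinarySerDe) now delegates to a copy in PTFDeserializer. The sketch below reconstructs what that relocated helper does, based on the removed lines: it flattens a struct ObjectInspector's column names and types into the two serde properties that LazyBinarySerDe reads. It inlines the getTypeMap step for brevity, so treat it as an illustration of the behavior, not the canonical PTFDeserializer source.

    import java.util.List;
    import java.util.Map;
    import org.apache.hadoop.hive.serde.serdeConstants;
    import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
    import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;

    // Reconstructed sketch of the relocated helper (would live in a utility class).
    public static void addOIPropertiestoSerDePropsMap(StructObjectInspector oi,
        Map<String, String> serdePropsMap) {
      if (serdePropsMap == null) {
        return;
      }
      // Derive the field names/types from the ObjectInspector, as getTypeMap did.
      StructTypeInfo t =
          (StructTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
      List<String> columnNames = t.getAllStructFieldNames();
      List<TypeInfo> fieldTypes = t.getAllStructFieldTypeInfos();

      StringBuilder cNames = new StringBuilder();
      StringBuilder cTypes = new StringBuilder();
      for (int i = 0; i < fieldTypes.size(); i++) {
        if (i > 0) {
          cNames.append(",");
          cTypes.append(",");
        }
        cNames.append(columnNames.get(i));
        cTypes.append(fieldTypes.get(i).getTypeName());
      }

      // The comma-joined lists become the standard column-name/column-type
      // serde properties consumed when the LazyBinarySerDe is initialized.
      serdePropsMap.put(serdeConstants.LIST_COLUMNS, cNames.toString());
      serdePropsMap.put(serdeConstants.LIST_COLUMN_TYPES, cTypes.toString());
    }
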
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseContext.java Mon Jul 29 21:08:03 2013
@@ -79,6 +79,7 @@ public class ParseContext {
   private HashMap<TableScanOperator, Table> topToTable;
   private Map<FileSinkOperator, Table> fsopToTable;
   private List<ReduceSinkOperator> reduceSinkOperatorsAddedByEnforceBucketingSorting;
+  private HashMap<TableScanOperator, Map<String, String>> topToProps;
   private HashMap<String, SplitSample> nameToSplitSample;
   private List<LoadTableDesc> loadTableWork;
   private List<LoadFileDesc> loadFileWork;
@@ -170,6 +171,7 @@ public class ParseContext {
       Map<JoinOperator, QBJoinTree> joinContext,
       Map<SMBMapJoinOperator, QBJoinTree> smbMapJoinContext,
       HashMap<TableScanOperator, Table> topToTable,
+      HashMap<TableScanOperator, Map<String, String>> topToProps,
       Map<FileSinkOperator, Table> fsopToTable,
       List<LoadTableDesc> loadTableWork, List<LoadFileDesc> loadFileWork,
       Context ctx, HashMap<String, String> idToTableNameMap, int destTableId,
@@ -193,6 +195,7 @@ public class ParseContext {
     this.smbMapJoinContext = smbMapJoinContext;
     this.topToTable = topToTable;
     this.fsopToTable = fsopToTable;
+    this.topToProps = topToProps;
     this.loadFileWork = loadFileWork;
     this.loadTableWork = loadTableWork;
     this.opParseCtx = opParseCtx;
@@ -336,6 +339,21 @@ public class ParseContext {
   }
 
   /**
+   * @return the topToProps
+   */
+  public HashMap<TableScanOperator, Map<String, String>> getTopToProps() {
+    return topToProps;
+  }
+
+  /**
+   * @param topToProps
+   *          the topToProps to set
+   */
+  public void setTopToProps(HashMap<TableScanOperator, Map<String, String>> topToProps) {
+    this.topToProps = topToProps;
+  }
+
+  /**
    * @return the topOps
    */
   public HashMap<String, Operator<? extends OperatorDesc>> getTopOps() {

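The ParseContext change threads a new topToProps map (TableScanOperator to per-table properties) through the constructor alongside topToTable, with a matching getter/setter. A minimal sketch of how downstream code might consume it; 'pctx' and 'ts' are placeholder names for a ParseContext and a TableScanOperator reached during plan traversal, not identifiers introduced by this commit:

    // Assumes the usual imports from java.util plus
    // org.apache.hadoop.hive.ql.exec.TableScanOperator and
    // org.apache.hadoop.hive.ql.parse.ParseContext.
    static Map<String, String> propsForScan(ParseContext pctx, TableScanOperator ts) {
      HashMap<TableScanOperator, Map<String, String>> topToProps = pctx.getTopToProps();
      // Null-safe: a ParseContext built without the new argument may leave it unset.
      return topToProps == null ? null : topToProps.get(ts);
    }
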
Modified: hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=1508202&r1=1508201&r2=1508202&view=diff
==============================================================================
--- hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hive/branches/tez/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Mon Jul 29 21:08:03 2013
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.parse;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.logging.Log;
@@ -43,6 +44,7 @@ public class QB {
   private int numSelDi = 0;
   private HashMap<String, String> aliasToTabs;
   private HashMap<String, QBExpr> aliasToSubq;
+  private HashMap<String, Map<String, String>> aliasToProps;
   private List<String> aliases;
   private QBParseInfo qbp;
   private QBMetaData qbm;
@@ -81,6 +83,7 @@ public class QB {
   public QB(String outer_id, String alias, boolean isSubQ) {
     aliasToTabs = new HashMap<String, String>();
     aliasToSubq = new HashMap<String, QBExpr>();
+    aliasToProps = new HashMap<String, Map<String, String>>();
     aliases = new ArrayList<String>();
     if (alias != null) {
       alias = alias.toLowerCase();
@@ -142,6 +145,10 @@ public class QB {
     aliasToSubq.put(alias.toLowerCase(), qbexpr);
   }
 
+  public void setTabProps(String alias, Map<String, String> props) {
+    aliasToProps.put(alias.toLowerCase(), props);
+  }
+
   public void addAlias(String alias) {
     if (!aliases.contains(alias.toLowerCase())) {
       aliases.add(alias.toLowerCase());
@@ -188,6 +195,10 @@ public class QB {
     return aliasToTabs.get(alias.toLowerCase());
   }
 
+  public Map<String, String> getTabPropsForAlias(String alias) {
+    return aliasToProps.get(alias.toLowerCase());
+  }
+
   public void rewriteViewToSubq(String alias, String viewName, QBExpr qbexpr) {
     alias = alias.toLowerCase();
     String tableName = aliasToTabs.remove(alias);
@@ -298,4 +309,4 @@ public class QB {
   }
 
 
-}
\ No newline at end of file
+}
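
QB mirrors that change at the query-block level: the new aliasToProps map stores per-alias table properties, and both setTabProps and getTabPropsForAlias lower-case the alias, matching how aliasToTabs and aliasToSubq already behave. A hedged usage sketch (the property key and value are made up for illustration; the QB constructor arguments follow the signature shown in the hunk above):

    import java.util.HashMap;
    import java.util.Map;

    QB qb = new QB(null, "q1", false);
    Map<String, String> props = new HashMap<String, String>();
    props.put("example.key", "example.value");          // illustrative only
    qb.setTabProps("Src", props);                       // stored under "src"
    Map<String, String> back = qb.getTabPropsForAlias("SRC"); // lookup lower-cases too
    assert back == props;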