Posted to commits@hive.apache.org by px...@apache.org on 2015/08/18 23:44:45 UTC

hive git commit: backport HIVE-9228: Problem with subquery using windowing functions (Navis via Ashutosh Chauhan)

Repository: hive
Updated Branches:
  refs/heads/branch-1.0 d10dee334 -> 09c9324c7


backport HIVE-9228: Problem with subquery using windowing functions (Navis via Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/09c9324c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/09c9324c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/09c9324c

Branch: refs/heads/branch-1.0
Commit: 09c9324c7f1f5ded2f45e8c6d94e55fa850a4016
Parents: d10dee3
Author: Pengcheng Xiong <px...@apache.org>
Authored: Tue Aug 18 14:44:30 2015 -0700
Committer: Pengcheng Xiong <px...@apache.org>
Committed: Tue Aug 18 14:44:30 2015 -0700

----------------------------------------------------------------------
 .../ql/optimizer/ColumnPrunerProcFactory.java   | 58 +++++++++++++-------
 .../hadoop/hive/ql/parse/RowResolver.java       | 11 ++++
 .../hadoop/hive/ql/parse/SemanticAnalyzer.java  | 17 +++---
 .../clientpositive/windowing_windowspec.q       |  4 ++
 .../results/clientpositive/subquery_notin.q.out | 12 ++--
 .../subquery_unqualcolumnrefs.q.out             |  2 +-
 .../clientpositive/tez/vectorized_ptf.q.out     | 18 +++---
 .../results/clientpositive/vectorized_ptf.q.out | 44 ++++++++-------
 .../clientpositive/windowing_windowspec.q.out   | 17 ++++++
 9 files changed, 120 insertions(+), 63 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
index 3287021..bacfdc9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/ColumnPrunerProcFactory.java
@@ -259,39 +259,54 @@ public final class ColumnPrunerProcFactory {
         return super.process(nd, stack, cppCtx, nodeOutputs);
       }
 
-      WindowTableFunctionDef def = (WindowTableFunctionDef) conf.getFuncDef();
-      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();
-
       List<String> prunedCols = cppCtx.getPrunedColList(op.getChildOperators().get(0));
-      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
-      prunedCols = new ArrayList<String>(prunedCols);
-      prunedColumnsList(prunedCols, def);
-      RowResolver oldRR = cppCtx.getOpToParseCtxMap().get(op).getRowResolver();
-      RowResolver newRR = buildPrunedRR(prunedCols, oldRR, sig);
-      cppCtx.getPrunedColLists().put(op, prunedInputList(prunedCols, def));
-      cppCtx.getOpToParseCtxMap().get(op).setRowResolver(newRR);
+
+      WindowTableFunctionDef def = null;
+      if (conf.forWindowing()) {
+        def = (WindowTableFunctionDef) conf.getFuncDef();
+        prunedCols = Utilities.mergeUniqElems(getWindowFunctionColumns(def), prunedCols);
+        prunedCols = prunedColumnsList(prunedCols, def);
+      }
+
+      RowSchema oldRS = op.getSchema();
+      ArrayList<ColumnInfo> sig = buildPrunedRR(prunedCols, oldRS);
       op.getSchema().setSignature(sig);
+
+      prunedCols = def == null ? prunedCols : prunedInputList(prunedCols, def);
+      cppCtx.getPrunedColLists().put(op, prunedCols);
       return null;
     }
 
-    private static RowResolver buildPrunedRR(List<String> prunedCols,
-        RowResolver oldRR, ArrayList<ColumnInfo> sig) throws SemanticException{
-      RowResolver newRR = new RowResolver();
+    private static ArrayList<ColumnInfo> buildPrunedRR(List<String> prunedCols,
+        RowSchema oldRS) throws SemanticException{
+      ArrayList<ColumnInfo> sig = new ArrayList<ColumnInfo>();
       HashSet<String> prunedColsSet = new HashSet<String>(prunedCols);
-      for(ColumnInfo cInfo : oldRR.getRowSchema().getSignature()) {
+      for(ColumnInfo cInfo : oldRS.getSignature()) {
         if ( prunedColsSet.contains(cInfo.getInternalName())) {
-          String[] nm = oldRR.reverseLookup(cInfo.getInternalName());
-          newRR.put(nm[0], nm[1], cInfo);
           sig.add(cInfo);
         }
       }
-      return newRR;
+      return sig;
+    }
+
+    // always should be in this order (see PTFDeserializer#initializeWindowing)
+    private List<String> getWindowFunctionColumns(WindowTableFunctionDef tDef) {
+      List<String> columns = new ArrayList<String>();
+      if (tDef.getWindowFunctions() != null) {
+        for (WindowFunctionDef wDef : tDef.getWindowFunctions()) {
+          columns.add(wDef.getAlias());
+        }
+      }
+      return columns;
     }
 
     /*
      * add any input columns referenced in WindowFn args or expressions.
      */
-    private void prunedColumnsList(List<String> prunedCols, WindowTableFunctionDef tDef) {
+    private ArrayList<String> prunedColumnsList(List<String> prunedCols, 
+        WindowTableFunctionDef tDef) {
+      //we create a copy of prunedCols to create a list of pruned columns for PTFOperator
+      ArrayList<String> mergedColList = new ArrayList<String>(prunedCols);
       if ( tDef.getWindowFunctions() != null ) {
         for(WindowFunctionDef wDef : tDef.getWindowFunctions() ) {
           if ( wDef.getArgs() == null) {
@@ -299,22 +314,23 @@ public final class ColumnPrunerProcFactory {
           }
           for(PTFExpressionDef arg : wDef.getArgs()) {
             ExprNodeDesc exprNode = arg.getExprNode();
-            Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
+            Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
           }
         }
       }
      if(tDef.getPartition() != null){
          for(PTFExpressionDef col : tDef.getPartition().getExpressions()){
            ExprNodeDesc exprNode = col.getExprNode();
-           Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
+           Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
          }
        }
        if(tDef.getOrder() != null){
          for(PTFExpressionDef col : tDef.getOrder().getExpressions()){
            ExprNodeDesc exprNode = col.getExprNode();
-           Utilities.mergeUniqElems(prunedCols, exprNode.getCols());
+           Utilities.mergeUniqElems(mergedColList, exprNode.getCols());
          }
        }
+      return mergedColList;
     }
 
     /*

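The ColumnPrunerProcFactory change above does two things: it makes the windowing-specific work conditional on conf.forWindowing(), and it merges the window-function output aliases (_wcol0, _wcol1, ...) into the pruned-column list before the operator's schema signature is rebuilt, so the PTF operator keeps the columns it unconditionally produces even when the enclosing query never references them. The comment in the patch records the ordering contract: PTFDeserializer#initializeWindowing expects the window-function columns to come first. A minimal, self-contained sketch of that merge semantics (mergeUniq below is an illustrative stand-in for Hive's Utilities.mergeUniqElems; the column names are made up):

    import java.util.ArrayList;
    import java.util.List;

    public class PrunedColsMergeSketch {
      // Order-preserving, de-duplicating merge: all of 'base' first,
      // then any element of 'extra' not already present.
      static List<String> mergeUniq(List<String> base, List<String> extra) {
        List<String> merged = new ArrayList<>(base);
        for (String col : extra) {
          if (!merged.contains(col)) {
            merged.add(col);
          }
        }
        return merged;
      }

      public static void main(String[] args) {
        // Aliases produced by the window functions; the PTF operator
        // always emits these, so they must survive pruning.
        List<String> windowCols = List.of("_wcol0");
        // Columns the child operator still needs after its own pruning.
        List<String> prunedCols = List.of("_col1", "_col2");
        // Window columns lead, matching the order the patch preserves.
        System.out.println(mergeUniq(windowCols, prunedCols));
        // prints: [_wcol0, _col1, _col2]
      }
    }
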
http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
index 469dc9f..2c52a43 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/RowResolver.java
@@ -458,4 +458,15 @@ public class RowResolver implements Serializable{
     }
     return combinedRR;
   }
+
+  public RowResolver duplicate() {
+    RowResolver resolver = new RowResolver();
+    resolver.rowSchema = new RowSchema(rowSchema);
+    resolver.rslvMap.putAll(rslvMap);
+    resolver.invRslvMap.putAll(invRslvMap);
+    resolver.altInvRslvMap.putAll(altInvRslvMap);
+    resolver.expressionMap.putAll(expressionMap);
+    resolver.isExprResolver = isExprResolver;
+    return resolver;
+  }
 }

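RowResolver.duplicate() exists so that a caller can hand a newly spliced-in operator its own resolver instead of a reference to its parent's: the top-level maps are copied with putAll and the schema goes through RowSchema's copy constructor, so re-keying the copy does not disturb the original resolver's maps (the ColumnInfo objects themselves remain shared). A plain-Java sketch of the aliasing hazard this avoids, with ordinary maps standing in for the resolver state:

    import java.util.HashMap;
    import java.util.Map;

    public class DuplicateSketch {
      public static void main(String[] args) {
        Map<String, String> parentView = new HashMap<>();
        parentView.put("_col1", "s");

        // Shared reference: a downstream rewrite corrupts the parent's view.
        Map<String, String> sharedChild = parentView;
        sharedChild.remove("_col1");
        System.out.println(parentView);   // {} -- parent corrupted

        // Copied state, as duplicate() does with putAll: safe to rewrite.
        parentView.put("_col1", "s");
        Map<String, String> copiedChild = new HashMap<>(parentView);
        copiedChild.remove("_col1");
        System.out.println(parentView);   // {_col1=s} -- parent intact
      }
    }
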
http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 5deda9d..6dc1e0e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -5122,7 +5122,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // insert a select operator here used by the ColumnPruner to reduce
     // the data to shuffle
-    Operator select = insertSelectAllPlanForGroupBy(selectInput);
+    Operator select = genSelectAllDesc(selectInput);
 
     // Generate ReduceSinkOperator
     ReduceSinkOperator reduceSinkOperatorInfo =
@@ -8395,8 +8395,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
     return type;
   }
 
-  private Operator insertSelectAllPlanForGroupBy(Operator input)
-      throws SemanticException {
+  private Operator genSelectAllDesc(Operator input) throws SemanticException {
     OpParseContext inputCtx = opParseCtx.get(input);
     RowResolver inputRR = inputCtx.getRowResolver();
     ArrayList<ColumnInfo> columns = inputRR.getColumnInfos();
@@ -8410,9 +8409,10 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       columnNames.add(col.getInternalName());
       columnExprMap.put(col.getInternalName(), new ExprNodeColumnDesc(col));
     }
+    RowResolver outputRR = inputRR.duplicate();
     Operator output = putOpInsertMap(OperatorFactory.getAndMakeChild(
-        new SelectDesc(colList, columnNames, true), new RowSchema(inputRR
-            .getColumnInfos()), input), inputRR);
+        new SelectDesc(colList, columnNames, true), 
+        outputRR.getRowSchema(), input), outputRR);
     output.setColumnExprMap(columnExprMap);
     return output;
   }
@@ -8857,9 +8857,9 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
                   throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.
                       getMsg());
                 }
-                // insert a select operator here used by the ColumnPruner to reduce
-                // the data to shuffle
-                curr = insertSelectAllPlanForGroupBy(curr);
+              // insert a select operator here used by the ColumnPruner to reduce
+              // the data to shuffle
+              curr = genSelectAllDesc(curr);
                 if (conf.getBoolVar(HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) {
                   if (!conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) {
                     curr = genGroupByPlanMapAggrNoSkew(dest, qb, curr);
@@ -12198,6 +12198,7 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
       input = putOpInsertMap(OperatorFactory.getAndMakeChild(ptfDesc,
           new RowSchema(ptfOpRR.getColumnInfos()),
           input), ptfOpRR);
+      input = genSelectAllDesc(input);
       rr = ptfOpRR;
     }
 

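The SemanticAnalyzer change renames insertSelectAllPlanForGroupBy to genSelectAllDesc, builds its output over a duplicate() of the input RowResolver, and, in the PTF path, splices the pass-through select in immediately after the PTF operator (input = genSelectAllDesc(input)). That extra node is what the ColumnPruner narrows when an outer query, as in HIVE-9228, projects only non-windowed columns out of a windowing subquery; it is also the new Select Operator visible in the updated vectorized_ptf.q.out plans below. A simplified fragment of what genSelectAllDesc assembles, grounded in the patch above (surrounding method setup omitted):

    // One identity expression per input column; the trailing boolean on
    // SelectDesc marks this as a select-star, so it is essentially free
    // at runtime until the pruner trims its column list.
    ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>();
    ArrayList<String> columnNames = new ArrayList<String>();
    for (ColumnInfo col : inputRR.getColumnInfos()) {
      colList.add(new ExprNodeColumnDesc(col));   // pass-through column ref
      columnNames.add(col.getInternalName());
    }
    SelectDesc selectStar = new SelectDesc(colList, columnNames, true);
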
http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/queries/clientpositive/windowing_windowspec.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/windowing_windowspec.q b/ql/src/test/queries/clientpositive/windowing_windowspec.q
index 2055e9d..202eb74 100644
--- a/ql/src/test/queries/clientpositive/windowing_windowspec.q
+++ b/ql/src/test/queries/clientpositive/windowing_windowspec.q
@@ -36,3 +36,7 @@ select f, sum(f) over (partition by ts order by f rows between 2 preceding and 1
 select s, i, round(avg(d) over (partition by s order by i) / 10.0 , 2) from over10k limit 7;
 
 select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i) limit 7;
+
+set hive.cbo.enable=false;
+-- HIVE-9228 
+select s, i from ( select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i)) X limit 7;

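The new qfile test pins down the HIVE-9228 shape directly: the outer block selects only s and i from a subquery whose third column is a window aggregate, with CBO disabled so the fixed non-CBO planning path is exercised. To reproduce it outside the qfile harness, a hypothetical JDBC loop along these lines should work (the connection URL and the presence of the over10k table are assumptions; the two statements are taken from the test):

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class Hive9228Check {
      public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection(
                 "jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
          stmt.execute("set hive.cbo.enable=false");
          ResultSet rs = stmt.executeQuery(
              "select s, i from ( select s, i, round((avg(d) over w1 + 10.0)"
              + " - (avg(d) over w1 - 10.0),2) from over10k"
              + " window w1 as (partition by s order by i)) X limit 7");
          while (rs.next()) {
            // Expected output matches the seven (s, i) rows recorded in
            // the windowing_windowspec.q.out diff below.
            System.out.println(rs.getString(1) + "\t" + rs.getInt(2));
          }
        }
      }
    }
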
http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/results/clientpositive/subquery_notin.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_notin.q.out b/ql/src/test/results/clientpositive/subquery_notin.q.out
index c5cfe15..d42e947 100644
--- a/ql/src/test/results/clientpositive/subquery_notin.q.out
+++ b/ql/src/test/results/clientpositive/subquery_notin.q.out
@@ -384,7 +384,7 @@ POSTHOOK: Input: default@src
 199	val_199
 199	val_199
 2	val_2
-Warning: Shuffle Join JOIN[26][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[28][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: -- non agg, corr
 explain
 select p_mfgr, b.p_name, p_size 
@@ -588,7 +588,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join JOIN[26][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[28][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: select p_mfgr, b.p_name, p_size 
 from part b 
 where b.p_name not in 
@@ -627,7 +627,7 @@ Manufacturer#4	almond azure aquamarine papaya violet	12
 Manufacturer#5	almond antique blue firebrick mint	31
 Manufacturer#5	almond aquamarine dodger light gainsboro	46
 Manufacturer#5	almond azure blanched chiffon midnight	23
-Warning: Shuffle Join JOIN[34][tables = [part, sq_1_notin_nullcheck]] in Stage 'Stage-6:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[36][tables = [part, sq_1_notin_nullcheck]] in Stage 'Stage-6:MAPRED' is a cross product
 PREHOOK: query: -- agg, non corr
 explain
 select p_name, p_size 
@@ -874,7 +874,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join JOIN[34][tables = [part, sq_1_notin_nullcheck]] in Stage 'Stage-7:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[36][tables = [part, sq_1_notin_nullcheck]] in Stage 'Stage-7:MAPRED' is a cross product
 PREHOOK: query: select p_name, p_size 
 from 
 part where part.p_size not in 
@@ -921,7 +921,7 @@ almond aquamarine sandy cyan gainsboro	18
 almond aquamarine yellow dodger mint	7
 almond azure aquamarine papaya violet	12
 almond azure blanched chiffon midnight	23
-Warning: Shuffle Join JOIN[34][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[36][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: -- agg, corr
 explain
 select p_mfgr, p_name, p_size 
@@ -1196,7 +1196,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join JOIN[34][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[36][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: select p_mfgr, p_name, p_size 
 from part b where b.p_size not in 
   (select min(p_size) 

http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out b/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
index 027c195..f926d20 100644
--- a/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
+++ b/ql/src/test/results/clientpositive/subquery_unqualcolumnrefs.q.out
@@ -775,7 +775,7 @@ STAGE PLANS:
       Processor Tree:
         ListSink
 
-Warning: Shuffle Join JOIN[26][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
+Warning: Shuffle Join JOIN[28][tables = [b, sq_1_notin_nullcheck]] in Stage 'Stage-1:MAPRED' is a cross product
 PREHOOK: query: -- non agg, corr
 explain
 select p_mfgr, b.p_name, p_size 

http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
index 5fcfeb9..c00e03e 100644
--- a/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/tez/vectorized_ptf.q.out
@@ -5309,14 +5309,18 @@ STAGE PLANS:
                 Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
                 PTF Operator
                   Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
-                  Reduce Output Operator
-                    key expressions: _col2 (type: string), _col2 (type: string), _col1 (type: string)
-                    sort order: +++
-                    Map-reduce partition columns: _col2 (type: string)
+                  Select Operator
+                    expressions: _col1 (type: string), _col2 (type: string), _col5 (type: int), _wcol0 (type: bigint)
+                    outputColumnNames: _col1, _col2, _col5, _wcol0
                     Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
-                    tag: -1
-                    value expressions: _wcol0 (type: bigint), _col1 (type: string), _col2 (type: string), _col5 (type: int)
-                    auto parallelism: true
+                    Reduce Output Operator
+                      key expressions: _col2 (type: string), _col2 (type: string), _col1 (type: string)
+                      sort order: +++
+                      Map-reduce partition columns: _col2 (type: string)
+                      Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+                      tag: -1
+                      value expressions: _wcol0 (type: bigint), _col1 (type: string), _col2 (type: string), _col5 (type: int)
+                      auto parallelism: true
         Reducer 4 
             Needs Tagging: false
             Reduce Operator Tree:

http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/results/clientpositive/vectorized_ptf.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/vectorized_ptf.q.out b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
index da8a575..1cfd7d7 100644
--- a/ql/src/test/results/clientpositive/vectorized_ptf.q.out
+++ b/ql/src/test/results/clientpositive/vectorized_ptf.q.out
@@ -6122,23 +6122,27 @@ STAGE PLANS:
           Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
           PTF Operator
             Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
-            File Output Operator
-              compressed: false
-              GlobalTableId: 0
+            Select Operator
+              expressions: _col1 (type: string), _col2 (type: string), _col5 (type: int), _wcol0 (type: bigint)
+              outputColumnNames: _col1, _col2, _col5, _wcol0
+              Statistics: Num rows: 26 Data size: 16042 Basic stats: COMPLETE Column stats: NONE
+              File Output Operator
+                compressed: false
+                GlobalTableId: 0
 #### A masked pattern was here ####
-              NumFilesPerFileSink: 1
-              table:
-                  input format: org.apache.hadoop.mapred.SequenceFileInputFormat
-                  output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
-                  properties:
-                    columns _wcol0,_col1,_col2,_col5
-                    columns.types bigint,string,string,int
-                    escape.delim \
-                    serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-                  serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
-              TotalFiles: 1
-              GatherStats: false
-              MultiFileSpray: false
+                NumFilesPerFileSink: 1
+                table:
+                    input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                    properties:
+                      columns _col1,_col2,_col5,_wcol0
+                      columns.types string,string,int,bigint
+                      escape.delim \
+                      serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                    serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
+                TotalFiles: 1
+                GatherStats: false
+                MultiFileSpray: false
 
   Stage: Stage-4
     Map Reduce
@@ -6162,8 +6166,8 @@ STAGE PLANS:
             input format: org.apache.hadoop.mapred.SequenceFileInputFormat
             output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
             properties:
-              columns _wcol0,_col1,_col2,_col5
-              columns.types bigint,string,string,int
+              columns _col1,_col2,_col5,_wcol0
+              columns.types string,string,int,bigint
               escape.delim \
               serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
             serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
@@ -6171,8 +6175,8 @@ STAGE PLANS:
               input format: org.apache.hadoop.mapred.SequenceFileInputFormat
               output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
               properties:
-                columns _wcol0,_col1,_col2,_col5
-                columns.types bigint,string,string,int
+                columns _col1,_col2,_col5,_wcol0
+                columns.types string,string,int,bigint
                 escape.delim \
                 serialization.lib org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe
               serde: org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe

http://git-wip-us.apache.org/repos/asf/hive/blob/09c9324c/ql/src/test/results/clientpositive/windowing_windowspec.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/windowing_windowspec.q.out b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
index de4ae97..66b0b52 100644
--- a/ql/src/test/results/clientpositive/windowing_windowspec.q.out
+++ b/ql/src/test/results/clientpositive/windowing_windowspec.q.out
@@ -938,3 +938,20 @@ alice allen	65609	20.0
 alice allen	65662	20.0
 alice allen	65670	20.0
 alice allen	65720	20.0
+PREHOOK: query: -- HIVE-9228 
+select s, i from ( select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i)) X limit 7
+PREHOOK: type: QUERY
+PREHOOK: Input: default@over10k
+#### A masked pattern was here ####
+POSTHOOK: query: -- HIVE-9228 
+select s, i from ( select s, i, round((avg(d) over  w1 + 10.0) - (avg(d) over w1 - 10.0),2) from over10k window w1 as (partition by s order by i)) X limit 7
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@over10k
+#### A masked pattern was here ####
+alice allen	65545
+alice allen	65557
+alice allen	65600
+alice allen	65609
+alice allen	65662
+alice allen	65670
+alice allen	65720