Posted to commits@hive.apache.org by am...@apache.org on 2013/05/09 10:32:48 UTC

svn commit: r1480557 [2/2] - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ java/org/apache/hadoop/hive/ql/cube/processors/ test/org/apache/hadoop/hive/ql/cube/parse/ test/or...

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java Thu May  9 08:32:48 2013
@@ -1,5 +1,26 @@
 package org.apache.hadoop.hive.ql.cube.parse;
 
+import static org.apache.hadoop.hive.ql.parse.HiveParser.DIVIDE;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.DOT;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.EQUAL;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.GREATERTHAN;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.GREATERTHANOREQUALTO;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Identifier;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_AND;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_LIKE;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.KW_OR;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.LESSTHAN;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.LESSTHANOREQUALTO;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.MINUS;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.MOD;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.NOTEQUAL;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.Number;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.PLUS;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.STAR;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.StringLiteral;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_FUNCTION;
+import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SELECT;
+
 import java.lang.reflect.Field;
 import java.util.Collections;
 import java.util.HashMap;
@@ -12,8 +33,6 @@ import java.util.Set;
 import org.antlr.runtime.tree.Tree;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.ParseUtils;
@@ -27,6 +46,7 @@ public class HQLParser {
   public static class TreeNode {
     final TreeNode parent;
     final ASTNode node;
+
     public TreeNode(TreeNode parent, ASTNode node) {
       this.parent = parent;
       this.node = node;
@@ -96,6 +116,7 @@ public class HQLParser {
 
   /**
    * Debug function for printing query AST to stdout
+   *
    * @param node
    * @param level
    */
@@ -111,7 +132,7 @@ public class HQLParser {
 
     System.out.print(node.getText() + " [" + tokenMapping.get(
         node.getToken().getType()) + "]");
-    System.out.print(" (l"+level + "c" + child + ")");
+    System.out.print(" (l" + level + "c" + child + ")");
 
     if (node.getChildCount() > 0) {
       System.out.println(" {");
@@ -119,7 +140,7 @@ public class HQLParser {
       for (int i = 0; i < node.getChildCount(); i++) {
         Tree tree = node.getChild(i);
         if (tree instanceof ASTNode) {
-          printAST(tokenMapping, (ASTNode) tree, level + 1, i+1);
+          printAST(tokenMapping, (ASTNode) tree, level + 1, i + 1);
         } else {
           System.out.println("NON ASTNode");
         }
@@ -154,13 +175,14 @@ public class HQLParser {
 
   /**
    * Find a node in the tree rooted at root, given the path of type of tokens
-   *  from the root's children to the desired node
+   * from the root's children to the desired node
    *
    * @param root
-   * @param path starts at the level of root's children
+   * @param path
+   *          starts at the level of root's children
    * @return
    */
-  public static ASTNode findNodeByPath (ASTNode root, int... path) {
+  public static ASTNode findNodeByPath(ASTNode root, int... path) {
     for (int i = 0; i < path.length; i++) {
       int type = path[i];
       boolean hasChildWithType = false;
@@ -194,6 +216,7 @@ public class HQLParser {
 
   /**
    * Breadth first traversal of AST
+   *
    * @param root
    * @param visitor
    */
@@ -213,16 +236,18 @@ public class HQLParser {
       visitor.visit(node);
       ASTNode astNode = node.getNode();
       for (int i = 0; i < astNode.getChildCount(); i++) {
-        queue.offer(new TreeNode (node, (ASTNode)astNode.getChild(i)) );
+        queue.offer(new TreeNode(node, (ASTNode) astNode.getChild(i)));
       }
     }
   }
 
   /**
    * Recursively reconstruct query string given a query AST
+   *
    * @param root
-   * @param buf preallocated builder where the reconstructed string will
-   *  be written
+   * @param buf
+   *          preallocated builder where the reconstructed string will
+   *          be written
    */
   public static void toInfixString(ASTNode root, StringBuilder buf) {
     if (root == null) {
@@ -230,30 +255,30 @@ public class HQLParser {
     }
     int rootType = root.getToken().getType();
     // Operand, print contents
-    if (Identifier == rootType|| Number == rootType ||
+    if (Identifier == rootType || Number == rootType ||
         StringLiteral == rootType) {
       buf.append(' ').append(root.getText()).append(' ');
     } else if (BINARY_OPERATORS.contains(
-          Integer.valueOf(root.getToken().getType()))) {
-        buf.append("(");
-        toInfixString((ASTNode)root.getChild(0), buf);
-        buf.append(' ').append(root.getText()).append(' ');
-        toInfixString((ASTNode) root.getChild(1), buf);
-        buf.append(")");
+        Integer.valueOf(root.getToken().getType()))) {
+      buf.append("(");
+      toInfixString((ASTNode) root.getChild(0), buf);
+      buf.append(' ').append(root.getText()).append(' ');
+      toInfixString((ASTNode) root.getChild(1), buf);
+      buf.append(")");
     } else if (TOK_FUNCTION == root.getToken().getType()) {
-        String fname = ((ASTNode) root.getChild(0)).getText();
-        buf.append(fname).append("(");
-        for (int i = 1; i < root.getChildCount(); i++) {
-          toInfixString((ASTNode) root.getChild(i), buf);
-          if (i != root.getChildCount() -1) {
-            buf.append(", ");
-          }
+      String fname = ((ASTNode) root.getChild(0)).getText();
+      buf.append(fname).append("(");
+      for (int i = 1; i < root.getChildCount(); i++) {
+        toInfixString((ASTNode) root.getChild(i), buf);
+        if (i != root.getChildCount() - 1) {
+          buf.append(", ");
         }
-        buf.append(")");
+      }
+      buf.append(")");
     } else if (TOK_SELECT == rootType) {
       for (int i = 0; i < root.getChildCount(); i++) {
         toInfixString((ASTNode) root.getChild(i), buf);
-        if (i != root.getChildCount() -1) {
+        if (i != root.getChildCount() - 1) {
           buf.append(", ");
         }
       }
@@ -266,7 +291,7 @@ public class HQLParser {
 
   public static void main(String[] args) throws Exception {
     ASTNode ast = parseHQL("select * from default_table "
-    		);
+        );
 
     printAST(getHiveTokenMapping(), ast, 0, 0);
   }
@@ -276,4 +301,4 @@ public class HQLParser {
     toInfixString(tree, buf);
     return buf.toString();
   }
-}
\ No newline at end of file
+}
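
For context, here is how the HQLParser helpers touched above fit together.
This sketch is not part of the commit: the query string and the
TOK_INSERT -> TOK_SELECT path are illustrative of a typical Hive query AST,
and it assumes the helpers are accessible from the caller's package.

    import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_INSERT;
    import static org.apache.hadoop.hive.ql.parse.HiveParser.TOK_SELECT;

    import org.apache.hadoop.hive.ql.parse.ASTNode;

    public class HQLParserSketch {
      public static void main(String[] args) throws Exception {
        // Parse a query string into a Hive AST.
        ASTNode ast = HQLParser.parseHQL("select c1, f(c2) from some_table");

        // Dump the tree with token names, for debugging.
        HQLParser.printAST(HQLParser.getHiveTokenMapping(), ast, 0, 0);

        // Follow token types starting at the root's children to reach
        // the select list (TOK_QUERY -> TOK_INSERT -> TOK_SELECT).
        ASTNode select = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_SELECT);

        // Reconstruct the select expressions as an infix string.
        StringBuilder buf = new StringBuilder();
        HQLParser.toInfixString(select, buf);
        System.out.println(buf.toString());
      }
    }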

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java Thu May  9 08:32:48 2013
@@ -21,7 +21,7 @@ public class JoinResolver implements Con
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws SemanticException {
-    resolveJoins((CubeQueryContext)cubeql);
+    resolveJoins((CubeQueryContext) cubeql);
   }
 
   public void resolveJoins(CubeQueryContext cubeql) throws SemanticException {
@@ -34,7 +34,8 @@ public class JoinResolver implements Con
     }
   }
 
-  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree, CubeQueryContext cubeql)
+  private QBJoinTree genJoinTree(QB qb, ASTNode joinParseTree,
+      CubeQueryContext cubeql)
       throws SemanticException {
     QBJoinTree joinTree = new QBJoinTree();
     JoinCond[] condn = new JoinCond[1];
@@ -75,7 +76,7 @@ public class JoinResolver implements Con
       String alias = left.getChildCount() == 1 ? tableName
           : SemanticAnalyzer.unescapeIdentifier(
               left.getChild(left.getChildCount() - 1)
-              .getText().toLowerCase());
+                  .getText().toLowerCase());
       joinTree.setLeftAlias(alias);
       String[] leftAliases = new String[1];
       leftAliases[0] = alias;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java Thu May  9 08:32:48 2013
@@ -17,32 +17,34 @@ public class LeastDimensionResolver impl
   @Override
   public void rewriteContext(CubeQueryContext cubeql)
       throws SemanticException {
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactTables().isEmpty()) {
+    if (cubeql.getCube() != null && !cubeql.getCandidateFactTables()
+        .isEmpty()) {
       Map<CubeFactTable, Integer> dimWeightMap =
           new HashMap<CubeFactTable, Integer>();
 
       for (CubeFactTable fact : cubeql.getCandidateFactTables()) {
         dimWeightMap.put(fact, getDimensionWeight(cubeql, fact));
-     }
+      }
 
-     int minWeight = Collections.min(dimWeightMap.values());
+      int minWeight = Collections.min(dimWeightMap.values());
 
-     for (Iterator<CubeFactTable> i =
-         cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
-       CubeFactTable fact = i.next();
-       if (dimWeightMap.get(fact) > minWeight) {
-         System.out.println("Removing fact:" + fact +
-             " from candidate fact tables as it has more dimension weight:"
-             +  dimWeightMap.get(fact) + " minimum:"
-             + minWeight);
-         i.remove();
-       }
-     }
+      for (Iterator<CubeFactTable> i =
+          cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+        CubeFactTable fact = i.next();
+        if (dimWeightMap.get(fact) > minWeight) {
+          System.out.println("Removing fact:" + fact +
+              " from candidate fact tables as it has more dimension weight:"
+              + dimWeightMap.get(fact) + " minimum:"
+              + minWeight);
+          i.remove();
+        }
+      }
     }
   }
 
-  private Integer getDimensionWeight(CubeQueryContext cubeql, CubeFactTable fact) {
-    //TODO get the dimension weight associated with the fact wrt query
+  private Integer getDimensionWeight(CubeQueryContext cubeql,
+      CubeFactTable fact) {
+    // TODO get the dimension weight associated with the fact wrt query
     return 0;
   }
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java Thu May  9 08:32:48 2013
@@ -19,13 +19,14 @@ public class LeastPartitionResolver impl
   @Override
   public void rewriteContext(CubeQueryContext cubeql)
       throws SemanticException {
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactTables().isEmpty()) {
+    if (cubeql.getCube() != null && !cubeql.getCandidateFactTables().isEmpty())
+    {
       Map<CubeFactTable, Integer> numPartitionsMap =
           new HashMap<CubeFactTable, Integer>();
 
       for (CubeFactTable fact : cubeql.getCandidateFactTables()) {
-         numPartitionsMap.put(fact, getTotalPartitions(
-             cubeql.getFactPartitionMap().get(fact)));
+        numPartitionsMap.put(fact, getTotalPartitions(
+            cubeql.getFactPartitionMap().get(fact)));
       }
 
       int minPartitions = Collections.min(numPartitionsMap.values());
@@ -36,7 +37,7 @@ public class LeastPartitionResolver impl
         if (numPartitionsMap.get(fact) > minPartitions) {
           System.out.println("Removing fact:" + fact +
               " from candidate fact tables as it requires more partitions to" +
-              " be queried:" +  numPartitionsMap.get(fact) + " minimum:"
+              " be queried:" + numPartitionsMap.get(fact) + " minimum:"
               + minPartitions);
           i.remove();
         }
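
LeastDimensionResolver and LeastPartitionResolver, both reformatted above,
share one pruning idiom: score each candidate fact table, take the minimum
score, and drop every candidate above it. A self-contained sketch of that
idiom follows; the types and names are simplified placeholders, not the
patch's code.

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Map;

    public class MinCostPruning {
      // Drop every candidate whose cost exceeds the minimum. Mirrors the
      // i.remove()-while-iterating pattern in the resolvers; costs must be
      // non-empty (the resolvers guard with
      // !getCandidateFactTables().isEmpty()).
      static <T> void pruneToMinimum(List<T> candidates,
          Map<T, Integer> costs) {
        int min = Collections.min(costs.values());
        for (Iterator<T> i = candidates.iterator(); i.hasNext();) {
          T c = i.next();
          if (costs.get(c) > min) {
            System.out.println("Removing " + c + " with cost "
                + costs.get(c) + ", minimum is " + min);
            i.remove();
          }
        }
      }

      public static void main(String[] args) {
        List<String> facts = new ArrayList<String>(
            Arrays.asList("factA", "factB", "factC"));
        Map<String, Integer> cost = new HashMap<String, Integer>();
        cost.put("factA", 3);
        cost.put("factB", 1);
        cost.put("factC", 1);
        pruneToMinimum(facts, cost);
        System.out.println(facts); // prints [factB, factC]
      }
    }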

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java Thu May  9 08:32:48 2013
@@ -28,7 +28,8 @@ public class PartitionResolver implement
 
       Calendar cal = Calendar.getInstance();
       cal.setTime(fromDate);
-      for (Iterator<CubeFactTable> i = cubeql.getCandidateFactTables().iterator(); i.hasNext();) {
+      for (Iterator<CubeFactTable> i = cubeql.getCandidateFactTables()
+          .iterator(); i.hasNext();) {
         CubeFactTable fact = i.next();
         Map<UpdatePeriod, List<String>> partitionColMap =
             new HashMap<UpdatePeriod, List<String>>();
@@ -45,7 +46,7 @@ public class PartitionResolver implement
 
   private boolean getPartitions(CubeFactTable fact, Date fromDate, Date toDate,
       Map<UpdatePeriod, List<String>> partitionColMap, CubeQueryContext cubeql)
-          throws SemanticException {
+      throws SemanticException {
     if (fromDate.equals(toDate) || fromDate.after(toDate)) {
       return true;
     }
@@ -70,6 +71,6 @@ public class PartitionResolver implement
       parts.addAll(partitions);
     }
     return (getPartitions(fact, fromDate, ceilFromDate, partitionColMap, cubeql)
-        && getPartitions(fact, floorToDate, toDate, partitionColMap, cubeql));
+    && getPartitions(fact, floorToDate, toDate, partitionColMap, cubeql));
   }
 }
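
The getPartitions recursion reformatted above covers the requested
[fromDate, toDate) range with the coarsest update period that fits, then
recurses on the uncovered leading and trailing edges (the ceilFromDate and
floorToDate boundaries). A self-contained sketch of that interval-splitting
scheme, using plain day indices instead of Calendar math; the class name
and period sizes are illustrative, not the patch's.

    import java.util.ArrayList;
    import java.util.List;

    public class RangeCoverSketch {
      // Available update periods, coarsest first (in days, illustrative).
      static final int[] PERIOD_DAYS = {30, 1};

      // Cover [from, to) with whole periods of the coarsest size that
      // fits, then recurse on the remainders, as getPartitions does with
      // ceilFromDate/floorToDate.
      static void cover(int from, int to, List<String> parts) {
        if (from >= to) {
          return; // matches the fromDate >= toDate base case
        }
        for (int p : PERIOD_DAYS) {
          int ceilFrom = ((from + p - 1) / p) * p; // first boundary >= from
          int floorTo = (to / p) * p;              // last boundary <= to
          if (ceilFrom < floorTo) {
            for (int d = ceilFrom; d < floorTo; d += p) {
              parts.add(p + "-day partition starting day " + d);
            }
            cover(from, ceilFrom, parts); // leading remainder
            cover(floorTo, to, parts);    // trailing remainder
            return;
          }
        }
        // No period fits (cannot happen while a 1-day period is listed).
      }

      public static void main(String[] args) {
        List<String> parts = new ArrayList<String>();
        cover(25, 95, parts); // two 30-day blocks plus daily edges
        for (String p : parts) {
          System.out.println(p);
        }
      }
    }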

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java Thu May  9 08:32:48 2013
@@ -26,10 +26,10 @@ public class StorageTableResolver implem
 
     // resolve fact tables
     Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
-        new HashMap<CubeFactTable, Map<UpdatePeriod,List<String>>>();
+        new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
     Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartMap =
         cubeql.getFactPartitionMap();
-    //Find candidate tables wrt supported storages
+    // Find candidate tables wrt supported storages
     for (CubeFactTable fact : factPartMap.keySet()) {
       Map<UpdatePeriod, List<String>> storageTableMap =
           new HashMap<UpdatePeriod, List<String>>();
@@ -54,26 +54,27 @@ public class StorageTableResolver implem
     }
     cubeql.setFactStorageMap(factStorageMap);
 
-    //resolve dimension tables
+    // resolve dimension tables
     Map<CubeDimensionTable, List<String>> dimStorageMap =
         new HashMap<CubeDimensionTable, List<String>>();
     for (CubeDimensionTable dim : cubeql.getDimensionTables()) {
-        List<String> storageTables = new ArrayList<String>();
-        dimStorageMap.put(dim, storageTables);
-        for (String storage : dim.getStorages()) {
-          if (cubeql.isStorageSupported(storage)) {
-            String tableName = MetastoreUtil.getDimStorageTableName(
-                dim.getName(), Storage.getPrefix(storage));
-            storageTables.add(tableName);
-            if (dim.hasStorageSnapshots(storage)) {
-              storageTableToWhereClause.put(tableName,
-                getWherePartClause(dim.getName(), Storage.getPartitionsForLatest()));
-            }
-          } else {
-            System.out.println("Storage:" + storage + " is not supported");
+      List<String> storageTables = new ArrayList<String>();
+      dimStorageMap.put(dim, storageTables);
+      for (String storage : dim.getStorages()) {
+        if (cubeql.isStorageSupported(storage)) {
+          String tableName = MetastoreUtil.getDimStorageTableName(
+              dim.getName(), Storage.getPrefix(storage));
+          storageTables.add(tableName);
+          if (dim.hasStorageSnapshots(storage)) {
+            storageTableToWhereClause.put(tableName,
+                getWherePartClause(dim.getName(), Storage
+                    .getPartitionsForLatest()));
           }
+        } else {
+          System.out.println("Storage:" + storage + " is not supported");
         }
       }
+    }
     cubeql.setDimStorageMap(dimStorageMap);
     cubeql.setStorageTableToWhereClause(storageTableToWhereClause);
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java Thu May  9 08:32:48 2013
@@ -11,7 +11,8 @@ public abstract class ValidationRule {
     this.conf = conf;
   }
 
-  public abstract boolean validate(CubeQueryContext ctx) throws SemanticException;
+  public abstract boolean validate(CubeQueryContext ctx)
+      throws SemanticException;
 
   public String getErrorMessage() {
     return error;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java Thu May  9 08:32:48 2013
@@ -50,7 +50,7 @@ public class CubeDriver extends Driver {
     return super.compile(query);
   }
 
-  String compileCubeQuery(String query)
+  protected String compileCubeQuery(String query)
       throws SemanticException, ParseException, IOException {
     System.out.println("Query :" + query);
     ctx = new Context(getConf());
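
The widened visibility lets subclasses and same-package tests drive
compilation directly, as TestCubeDriver below does, e.g. (query text taken
from the tests):

    CubeDriver driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
    String hql = driver.compileCubeQuery("select SUM(msr2) from testCube"
        + " where time_range_in('NOW - 2DAYS', 'NOW')");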

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java Thu May  9 08:32:48 2013
@@ -77,12 +77,13 @@ public class CubeTestSetup {
     cubeDimensions.add(new BaseDimension(new FieldSchema("dim1", "string",
         "basedim")));
     // Added for ambiguity test
-    cubeDimensions.add(new BaseDimension(new FieldSchema("ambigdim1", "string", "used in testColumnAmbiguity")));
+    cubeDimensions.add(new BaseDimension(new FieldSchema("ambigdim1", "string",
+        "used in testColumnAmbiguity")));
     cubeDimensions.add(new ReferencedDimension(
-            new FieldSchema("dim2", "string", "ref dim"),
-            new TableReference("testdim2", "id")));
+        new FieldSchema("dim2", "string", "ref dim"),
+        new TableReference("testdim2", "id")));
     cubeDimensions.add(new InlineDimension(
-            new FieldSchema("region", "string", "region dim"), regions));
+        new FieldSchema("region", "string", "region dim"), regions));
     cube = new Cube(cubeName, cubeMeasures, cubeDimensions);
     client.createCube(cubeName, cubeMeasures, cubeDimensions);
   }
@@ -98,7 +99,8 @@ public class CubeTestSetup {
     // add dimensions of the cube
     factColumns.add(new FieldSchema("zipcode","int", "zip"));
     factColumns.add(new FieldSchema("cityid","int", "city id"));
-    factColumns.add(new FieldSchema("ambigdim1", "string", "used in testColumnAmbiguity"));
+    factColumns.add(new FieldSchema("ambigdim1", "string", "used in" +
+        " testColumnAmbiguity"));
 
     Map<Storage, List<UpdatePeriod>> storageAggregatePeriods =
         new HashMap<Storage, List<UpdatePeriod>>();
@@ -119,7 +121,8 @@ public class CubeTestSetup {
         storageAggregatePeriods);
   }
 
-  private void createCubeFactOnlyHourly(CubeMetastoreClient client) throws HiveException {
+  private void createCubeFactOnlyHourly(CubeMetastoreClient client)
+      throws HiveException {
     String factName = "testFact2";
     List<FieldSchema> factColumns = new ArrayList<FieldSchema>(
         cubeMeasures.size());
@@ -145,7 +148,8 @@ public class CubeTestSetup {
         storageAggregatePeriods);
   }
 
-  private void createCubeFactMonthly(CubeMetastoreClient client) throws HiveException {
+  private void createCubeFactMonthly(CubeMetastoreClient client)
+      throws HiveException {
     String factName = "testFactMonthly";
     List<FieldSchema> factColumns = new ArrayList<FieldSchema>(
         cubeMeasures.size());
@@ -180,8 +184,10 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("name", "string", "field1"));
     dimColumns.add(new FieldSchema("stateid", "int", "state id"));
     dimColumns.add(new FieldSchema("zipcode", "int", "zip code"));
-    dimColumns.add(new FieldSchema("ambigdim1", "string", "used in testColumnAmbiguity"));
-    dimColumns.add(new FieldSchema("ambigdim2", "string", "used in testColumnAmbiguity"));
+    dimColumns.add(new FieldSchema("ambigdim1", "string", "used in" +
+        " testColumnAmbiguity"));
+    dimColumns.add(new FieldSchema("ambigdim2", "string", "used in " +
+        "testColumnAmbiguity"));
     Map<String, TableReference> dimensionReferences =
         new HashMap<String, TableReference>();
     dimensionReferences.put("stateid", new TableReference("statetable", "id"));
@@ -231,7 +237,8 @@ public class CubeTestSetup {
     dimColumns.add(new FieldSchema("name", "string", "field1"));
     dimColumns.add(new FieldSchema("capital", "string", "field2"));
     dimColumns.add(new FieldSchema("region", "string", "region name"));
-    dimColumns.add(new FieldSchema("ambigdim2", "string", "used in testColumnAmbiguity"));
+    dimColumns.add(new FieldSchema("ambigdim2", "string", "used in" +
+        " testColumnAmbiguity"));
     Storage hdfsStorage = new HDFSStorage("C1",
         TextInputFormat.class.getCanonicalName(),
         HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
@@ -255,7 +262,8 @@ public class CubeTestSetup {
 
     Map<String, TableReference> dimensionReferences =
         new HashMap<String, TableReference>();
-    dimensionReferences.put("countryid", new TableReference("countrytable", "id"));
+    dimensionReferences.put("countryid", new TableReference("countrytable",
+        "id"));
 
     Storage hdfsStorage = new HDFSStorage("C1",
         TextInputFormat.class.getCanonicalName(),

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java Thu May  9 08:32:48 2013
@@ -19,7 +19,7 @@ public class TestCubeSemanticAnalyzer {
   }
 
   String queries[] = { "SELECT t1.c1 rsalias0, f(t1.c2) rsalias1," +
-  		" (t2.c3 + t2.c4) rsalias2, avg(fc5/fc6) * fc7 " +
+      " (t2.c3 + t2.c4) rsalias2, avg(fc5/fc6) * fc7 " +
       " FROM facttab t1" +
       " WHERE ( fc1='foo' and fc2 = 250 or sin(fc3)=1.0 ) " +
       " and time_range_in('NOW-7DAYS', 'NOW')" +
@@ -44,7 +44,8 @@ public class TestCubeSemanticAnalyzer {
 
   @Test
   public void testSimpleQuery() throws Exception {
-    astRoot = HQLParser.parseHQL("select SUM(msr2) from testCube where time_range_in('NOW - 2DAYS', 'NOW')");
+    astRoot = HQLParser.parseHQL("select SUM(msr2) from testCube where" +
+        " time_range_in('NOW - 2DAYS', 'NOW')");
     analyzer.analyzeInternal(astRoot);
     CubeQueryContext cubeql = analyzer.getQueryContext();
     //System.out.println("cube hql:" + cubeql.toHQL());

Modified: hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java (original)
+++ hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java Thu May  9 08:32:48 2013
@@ -46,10 +46,12 @@ public class TestCubeDriver {
   }
 
   public static String HOUR_FMT = "yyyy-MM-dd HH";
-  public static final SimpleDateFormat HOUR_PARSER = new SimpleDateFormat(HOUR_FMT);
+  public static final SimpleDateFormat HOUR_PARSER = new SimpleDateFormat(
+      HOUR_FMT);
 
   public static String MONTH_FMT = "yyyy-MM";
-  public static final SimpleDateFormat MONTH_PARSER = new SimpleDateFormat(MONTH_FMT);
+  public static final SimpleDateFormat MONTH_PARSER = new SimpleDateFormat(
+      MONTH_FMT);
 
   public static String getDateUptoHours(Date dt) {
     return HOUR_PARSER.format(dt);
@@ -63,8 +65,8 @@ public class TestCubeDriver {
   public void testQueryWithNow() throws Exception {
     Throwable th = null;
     try {
-      String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
-          " where time_range_in('NOW - 2DAYS', 'NOW')");
+      String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube"
+          + " where time_range_in('NOW - 2DAYS', 'NOW')");
     } catch (SemanticException e) {
       th = e;
       e.printStackTrace();
@@ -89,9 +91,12 @@ public class TestCubeDriver {
 
   @Test
   public void testCubeWhereQuery() throws Exception {
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
-    //String expected = " sum( testcube.msr2 ) FROM  C1_testfact_HOURLY testcube  WHERE " + whereClause(HOURLY) + " UNION " +
-    // SELECT sum( testcube.msr2 ) FROM  C1_testfact_DAILY testcube  WHERE + whereClause(DAILY)
+    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" +
+        getDateUptoHours(now));
+    //String expected = " sum( testcube.msr2 ) FROM  C1_testfact_HOURLY
+    //testcube  WHERE " + whereClause(HOURLY) + " UNION " +
+    // SELECT sum( testcube.msr2 ) FROM  C1_testfact_DAILY testcube
+    //WHERE + whereClause(DAILY)
 
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
@@ -109,7 +114,8 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
-    hqlQuery = driver.compileCubeQuery("select statetable.name, SUM(msr2) from testCube"
+    hqlQuery = driver.compileCubeQuery("select statetable.name, SUM(msr2) from"
+        + " testCube"
         + " join citytable on testCube.cityid = citytable.id"
         + " left outer join statetable on statetable.id = citytable.stateid"
         + " right outer join ziptable on citytable.zipcode = ziptable.code"
@@ -133,9 +139,11 @@ public class TestCubeDriver {
     cal.add(Calendar.DAY_OF_MONTH, -2);
     Date twodaysBack = cal.getTime();
     System.out.println("Test twodaysBack:" + twodaysBack);
-    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" + getDateUptoHours(now));
+    System.out.println("Test from:" + getDateUptoHours(twodaysBack) + " to:" +
+        getDateUptoHours(now));
     //String expected = "select SUM(testCube.msr2) from "
-    String hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from testCube"
+    String hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from" +
+        " testCube"
         + " join citytable on testCube.cityid = citytable.id"
         + " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
@@ -153,7 +161,8 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
-    hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from testCube"
+    hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from" +
+        " testCube"
         + " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
@@ -163,7 +172,8 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')"
         + " group by round(zipcode)");
 
-    hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from testCube"
+    hqlQuery = driver.compileCubeQuery("select round(cityid), SUM(msr2) from" +
+        " testCube"
         + " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')"
         + " group by zipcode");
@@ -195,27 +205,30 @@ public class TestCubeDriver {
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     try {
-    hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from testCube" +
-        " join citytable" +
-        " where time_range_in('" + getDateUptoHours(twodaysBack)
-        + "','" + getDateUptoHours(now) + "')" +
-        " group by name");
-    System.out.println("cube hql:" + hqlQuery);
+      hqlQuery = driver.compileCubeQuery("select name, SUM(msr2) from testCube"
+          + " join citytable" +
+          " where time_range_in('" + getDateUptoHours(twodaysBack)
+          + "','" + getDateUptoHours(now) + "')" +
+          " group by name");
+      System.out.println("cube hql:" + hqlQuery);
     } catch (SemanticException e) {
       e.printStackTrace();
     }
-    hqlQuery = driver.compileCubeQuery("select SUM(mycube.msr2) from testCube mycube" +
+    hqlQuery = driver.compileCubeQuery("select SUM(mycube.msr2) from" +
+        " testCube mycube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
 
-    hqlQuery = driver.compileCubeQuery("select SUM(testCube.msr2) from testCube" +
+    hqlQuery = driver.compileCubeQuery("select SUM(testCube.msr2) from" +
+        " testCube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
 
-    hqlQuery = driver.compileCubeQuery("select mycube.msr2 m2 from testCube mycube" +
+    hqlQuery = driver.compileCubeQuery("select mycube.msr2 m2 from testCube" +
+        " mycube" +
         " where time_range_in('" + getDateUptoHours(twodaysBack)
         + "','" + getDateUptoHours(now) + "')");
     System.out.println("cube hql:" + hqlQuery);
@@ -228,7 +241,8 @@ public class TestCubeDriver {
 
   @Test
   public void testCubeWhereQueryForMonth() throws Exception {
-    System.out.println("Test from:" + getDateUptoHours(twoMonthsBack) + " to:" + getDateUptoHours(now));
+    System.out.println("Test from:" + getDateUptoHours(twoMonthsBack) + " to:" +
+        getDateUptoHours(now));
     String hqlQuery = driver.compileCubeQuery("select SUM(msr2) from testCube" +
         " where time_range_in('" + getDateUptoHours(twoMonthsBack)
         + "','" + getDateUptoHours(now) + "')");
@@ -245,7 +259,8 @@ public class TestCubeDriver {
 
   @Test
   public void testDimensionQueryWithMultipleStorages() throws Exception {
-    String hqlQuery = driver.compileCubeQuery("select name, stateid from citytable");
+    String hqlQuery = driver.compileCubeQuery("select name, stateid from" +
+        " citytable");
     System.out.println("cube hql:" + hqlQuery);
 
     hqlQuery = driver.compileCubeQuery("select name, c.stateid from citytable c");
@@ -270,16 +285,19 @@ public class TestCubeDriver {
 
   @Test
   public void testLimitQueryOnDimension() throws Exception {
-    String hqlQuery = driver.compileCubeQuery("select name, stateid from citytable limit 100");
+    String hqlQuery = driver.compileCubeQuery("select name, stateid from" +
+        " citytable limit 100");
     System.out.println("cube hql:" + hqlQuery);
     //Assert.assertEquals(queries[1], cubeql.toHQL());
     conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C2");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable limit 100");
+    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable " +
+        "limit 100");
     System.out.println("cube hql:" + hqlQuery);
     conf.set(HiveConf.ConfVars.HIVE_DRIVER_SUPPORTED_STORAGES.toString(), "C1");
     driver = new CubeDriver(new HiveConf(conf, HiveConf.class));
-    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable limit 100");
+    hqlQuery = driver.compileCubeQuery("select name, stateid from citytable" +
+        " limit 100");
     System.out.println("cube hql:" + hqlQuery);
   }
 
@@ -288,7 +306,8 @@ public class TestCubeDriver {
     String timeRange = " where  time_range_in('2013-05-01', '2013-05-03')";
     System.out.println("#$AGGREGATE_RESOLVER_ TIME_RANGE:" + timeRange);
     String q1 = "SELECT cityid, testCube.msr2 from testCube " + timeRange;
-    String q2 = "SELECT cityid, testCube.msr2 * testCube.msr2 from testCube " + timeRange;
+    String q2 = "SELECT cityid, testCube.msr2 * testCube.msr2 from testCube "
+        + timeRange;
     String q3 = "SELECT cityid, sum(testCube.msr2) from testCube " + timeRange;
     String q4 = "SELECT cityid, sum(testCube.msr2) from testCube "  + timeRange
         + " having testCube.msr2 > 100";
@@ -297,7 +316,8 @@ public class TestCubeDriver {
     String q6 = "SELECT cityid, testCube.msr2 from testCube " + timeRange
         + " having testCube.msr2 > 100 AND testCube.msr2 < 100";
     String q7 = "SELECT cityid, sum(testCube.msr2) from testCube " + timeRange
-        + " having (testCube.msr2 > 100) OR (testcube.msr2 < 100 AND SUM(testcube.msr3) > 1000)";
+        + " having (testCube.msr2 > 100) OR (testcube.msr2 < 100 AND " +
+        "SUM(testcube.msr3) > 1000)";
 
     String tests[] = {q1, q2, q3, q4, q5, q6, q7};
 
@@ -312,7 +332,8 @@ public class TestCubeDriver {
 
     String q8 = "SELECT cityid, testCube.noAggrMsr FROM testCube " + timeRange;
     try {
-      // Should throw exception in aggregate resolver because noAggrMsr does not have a default aggregate defined.s
+      // Should throw exception in aggregate resolver because noAggrMsr does
+      //not have a default aggregate defined.s
       String hql = driver.compileCubeQuery(q8);
       Assert.assertTrue("Should not reach here", false);
     } catch (SemanticException exc) {
@@ -353,7 +374,8 @@ public class TestCubeDriver {
     try {
       // this query should go through
       String q1Hql =
-          driver.compileCubeQuery("SELECT cityid, msr2 from testCube " + timeRange);
+          driver.compileCubeQuery("SELECT cityid, msr2 from testCube " +
+              timeRange);
     } catch (SemanticException exc) {
       exc.printStackTrace();
       Assert.assertTrue("Exception not expected here", false);
@@ -362,7 +384,8 @@ public class TestCubeDriver {
     try {
       // this query should through exception because invalidMsr is invalid
       String q2Hql =
-          driver.compileCubeQuery("SELECT cityid, invalidMsr from testCube " + timeRange);
+          driver.compileCubeQuery("SELECT cityid, invalidMsr from testCube " +
+              timeRange);
       Assert.assertTrue("Should not reach here", false);
     } catch (SemanticException exc) {
       exc.printStackTrace();
@@ -375,7 +398,8 @@ public class TestCubeDriver {
     String timeRange1 = " where  time_range_in('2013-05-01', '2013-05-03')";
 
     try {
-      String hql = driver.compileCubeQuery("SELECT cityid, testCube.msr2 from testCube " + timeRange1);
+      String hql = driver.compileCubeQuery("SELECT cityid, testCube.msr2 from" +
+          " testCube " + timeRange1);
     } catch (SemanticException exc) {
       exc.printStackTrace(System.out);
       Assert.assertTrue("Exception not expected here", false);
@@ -385,7 +409,8 @@ public class TestCubeDriver {
     String timeRange2 = " where  time_range_in('2013-05-03', '2013-05-01')";
     try {
       // this should throw exception because from date is after to date
-      String hql = driver.compileCubeQuery("SELECT cityid, testCube.msr2 from testCube " + timeRange2);
+      String hql = driver.compileCubeQuery("SELECT cityid, testCube.msr2 from" +
+          " testCube " + timeRange2);
       Assert.assertTrue("Should not reach here", false);
     } catch (SemanticException exc) {
       exc.printStackTrace(System.out);