You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by am...@apache.org on 2013/05/09 10:32:48 UTC

svn commit: r1480557 [1/2] - in /hive/branches/HIVE-4115/ql/src: java/org/apache/hadoop/hive/ql/cube/metadata/ java/org/apache/hadoop/hive/ql/cube/parse/ java/org/apache/hadoop/hive/ql/cube/processors/ test/org/apache/hadoop/hive/ql/cube/parse/ test/org/apache/hadoop/hive/ql/cube/processors/

Author: amareshwari
Date: Thu May  9 08:32:48 2013
New Revision: 1480557

URL: http://svn.apache.org/r1480557
Log:
Fix checkstyle errors

Modified:
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HierarchicalDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/HQLParser.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/JoinResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastDimensionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/LeastPartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/PartitionResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/StorageTableResolver.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/ValidationRule.java
    hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/processors/CubeDriver.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/CubeTestSetup.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/parse/TestCubeSemanticAnalyzer.java
    hive/branches/HIVE-4115/ql/src/test/org/apache/hadoop/hive/ql/cube/processors/TestCubeDriver.java

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/BaseDimension.java Thu May  9 08:32:48 2013
@@ -37,7 +37,7 @@ public class BaseDimension extends CubeD
     final int prime = 31;
     int result = super.hashCode();
     result = prime * result + ((getType() == null) ? 0 :
-      getType().toLowerCase().hashCode());
+        getType().toLowerCase().hashCode());
     return result;
   }
 
@@ -46,7 +46,7 @@ public class BaseDimension extends CubeD
     if (!super.equals(obj)) {
       return false;
     }
-    BaseDimension other = (BaseDimension)obj;
+    BaseDimension other = (BaseDimension) obj;
     if (this.getType() == null) {
       if (other.getType() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ColumnMeasure.java Thu May  9 08:32:48 2013
@@ -35,5 +35,4 @@ public final class ColumnMeasure extends
     }
     return true;
   }
-
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Cube.java Thu May  9 08:32:48 2013
@@ -113,9 +113,9 @@ public final class Cube extends Abstract
       try {
         Class<?> clazz = Class.forName(className);
         Constructor<?> constructor;
-          constructor = clazz.getConstructor(String.class, Map.class);
+        constructor = clazz.getConstructor(String.class, Map.class);
         measure = (CubeMeasure) constructor.newInstance(new Object[]
-           {measureName, props});
+        {measureName, props});
       } catch (ClassNotFoundException e) {
         throw new IllegalArgumentException("Invalid measure", e);
       } catch (SecurityException e) {
@@ -148,9 +148,9 @@ public final class Cube extends Abstract
       try {
         Class<?> clazz = Class.forName(className);
         Constructor<?> constructor;
-          constructor = clazz.getConstructor(String.class, Map.class);
+        constructor = clazz.getConstructor(String.class, Map.class);
         dim = (CubeDimension) constructor.newInstance(new Object[]
-           {dimName, props});
+        {dimName, props});
       } catch (ClassNotFoundException e) {
         throw new IllegalArgumentException("Invalid dimension", e);
       } catch (SecurityException e) {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimension.java Thu May  9 08:32:48 2013
@@ -19,13 +19,13 @@ public abstract class CubeDimension impl
     final int prime = 31;
     int result = 1;
     result = prime * result + ((getName() == null) ? 0 :
-      getName().toLowerCase().hashCode());
+        getName().toLowerCase().hashCode());
     return result;
   }
 
   @Override
   public boolean equals(Object obj) {
-    CubeDimension other = (CubeDimension)obj;
+    CubeDimension other = (CubeDimension) obj;
     if (this.getName() == null) {
       if (other.getName() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeDimensionTable.java Thu May  9 08:32:48 2013
@@ -73,7 +73,7 @@ public final class CubeDimensionTable ex
       {
         if (entry.getValue() != null) {
           props.put(MetastoreUtil.getDimensionDumpPeriodKey(name, entry.getKey()),
-            entry.getValue().name());
+              entry.getValue().name());
         }
       }
     }
@@ -82,8 +82,7 @@ public final class CubeDimensionTable ex
   public static void addDimensionReferenceProperties(Map<String, String> props,
       Map<String, TableReference> dimensionReferences) {
     if (dimensionReferences != null) {
-      for (Map.Entry<String, TableReference> entry :
-        dimensionReferences.entrySet()) {
+      for (Map.Entry<String, TableReference> entry : dimensionReferences.entrySet()) {
         props.put(MetastoreUtil.getDimensionSrcReferenceKey(entry.getKey()),
             MetastoreUtil.getDimensionDestReference(entry.getValue()));
       }
@@ -134,7 +133,7 @@ public final class CubeDimensionTable ex
     if (!super.equals(obj)) {
       return false;
     }
-    CubeDimensionTable other = (CubeDimensionTable)obj;
+    CubeDimensionTable other = (CubeDimensionTable) obj;
 
     if (this.getDimensionReferences() == null) {
       if (other.getDimensionReferences() != null) {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeFactTable.java Thu May  9 08:32:48 2013
@@ -61,8 +61,7 @@ public final class CubeFactTable extends
     if (updatePeriods != null) {
       props.put(MetastoreUtil.getFactStorageListKey(name),
           MetastoreUtil.getStr(updatePeriods.keySet()));
-      for (Map.Entry<String, List<UpdatePeriod>> entry :
-        updatePeriods.entrySet()) {
+      for (Map.Entry<String, List<UpdatePeriod>> entry : updatePeriods.entrySet()) {
         props.put(MetastoreUtil.getFactUpdatePeriodKey(name, entry.getKey()),
             MetastoreUtil.getNamedStr(entry.getValue()));
       }
@@ -98,7 +97,7 @@ public final class CubeFactTable extends
       return false;
     }
 
-    CubeFactTable other = (CubeFactTable)obj;
+    CubeFactTable other = (CubeFactTable) obj;
     if (this.getUpdatePeriods() == null) {
       if (other.getUpdatePeriods() != null) {
         return false;
@@ -158,9 +157,15 @@ public final class CubeFactTable extends
       if (UpdatePeriod.YEARLY == i || UpdatePeriod.QUARTERLY == i || UpdatePeriod.MONTHLY == i) {
         int intervals = 0;
         switch (i) {
-        case YEARLY:  intervals = DateUtil.getYearsBetween(from, to); break;
-        case QUARTERLY: intervals = DateUtil.getQuartersBetween(from, to); break;
-        case MONTHLY: intervals = DateUtil.getMonthsBetween(from, to); break;
+        case YEARLY:
+          intervals = DateUtil.getYearsBetween(from, to);
+          break;
+        case QUARTERLY:
+          intervals = DateUtil.getQuartersBetween(from, to);
+          break;
+        case MONTHLY:
+          intervals = DateUtil.getMonthsBetween(from, to);
+          break;
         }
 
         if (intervals > 0) {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMeasure.java Thu May  9 08:32:48 2013
@@ -117,15 +117,15 @@ public abstract class CubeMeasure implem
     final int prime = 31;
     int result = 1;
     result = prime * result + ((getName() == null) ? 0 :
-      getName().toLowerCase().hashCode());
+        getName().toLowerCase().hashCode());
     result = prime * result + ((getType() == null) ? 0 :
-      getType().toLowerCase().hashCode());
+        getType().toLowerCase().hashCode());
     result = prime * result + ((unit == null) ? 0 :
-      unit.toLowerCase().hashCode());
+        unit.toLowerCase().hashCode());
     result = prime * result + ((aggregate == null) ? 0 :
-      aggregate.toLowerCase().hashCode());
+        aggregate.toLowerCase().hashCode());
     result = prime * result + ((formatString == null) ? 0 :
-      formatString.toLowerCase().hashCode());
+        formatString.toLowerCase().hashCode());
     return result;
   }
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/CubeMetastoreClient.java Thu May  9 08:32:48 2013
@@ -85,7 +85,7 @@ public class CubeMetastoreClient {
 
   private void createFactStorage(String factName, Storage storage,
       StorageDescriptor parentSD, List<UpdatePeriod> updatePeriods)
-          throws HiveException {
+      throws HiveException {
     for (UpdatePeriod updatePeriod : updatePeriods) {
       createFactStorageUpdatePeriod(factName, storage, parentSD, updatePeriod);
     }
@@ -93,7 +93,7 @@ public class CubeMetastoreClient {
 
   private void createFactStorageUpdatePeriod(String factName, Storage storage,
       StorageDescriptor parentSD, UpdatePeriod updatePeriod)
-          throws HiveException {
+      throws HiveException {
     String storageTblName = MetastoreUtil.getFactStorageTableName(factName,
         updatePeriod, storage.getPrefix());
     createStorage(storageTblName, storage, parentSD);
@@ -101,7 +101,7 @@ public class CubeMetastoreClient {
 
   private void createDimStorage(String dimName, Storage storage,
       StorageDescriptor parentSD)
-          throws HiveException {
+      throws HiveException {
     String storageTblName = MetastoreUtil.getDimStorageTableName(dimName,
         storage.getPrefix());
     createStorage(storageTblName, storage, parentSD);
@@ -133,8 +133,7 @@ public class CubeMetastoreClient {
     if (storageAggregatePeriods != null) {
       Map<String, List<UpdatePeriod>> updatePeriods =
           new HashMap<String, List<UpdatePeriod>>();
-      for (Map.Entry<Storage, List<UpdatePeriod>> entry :
-        storageAggregatePeriods.entrySet()) {
+      for (Map.Entry<Storage, List<UpdatePeriod>> entry : storageAggregatePeriods.entrySet()) {
         updatePeriods.put(entry.getKey().getName(), entry.getValue());
       }
       return updatePeriods;
@@ -156,7 +155,7 @@ public class CubeMetastoreClient {
   public void createCubeFactTable(String cubeName, String factName,
       List<FieldSchema> columns,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
-          throws HiveException {
+      throws HiveException {
     CubeFactTable factTable = new CubeFactTable(cubeName, factName, columns,
         getUpdatePeriods(storageAggregatePeriods));
     createCubeTable(factTable, storageAggregatePeriods);
@@ -165,7 +164,7 @@ public class CubeMetastoreClient {
   public void createCubeDimensionTable(String dimName,
       List<FieldSchema> columns,
       Map<String, TableReference> dimensionReferences, Set<Storage> storages)
-          throws HiveException {
+      throws HiveException {
     CubeDimensionTable dimTable = new CubeDimensionTable(dimName, columns,
         dimensionReferences);
     createCubeTable(dimTable, storages);
@@ -174,8 +173,10 @@ public class CubeMetastoreClient {
   private Map<String, UpdatePeriod> getDumpPeriods(
       Map<Storage, UpdatePeriod> storageDumpPeriods) {
     if (storageDumpPeriods != null) {
-      Map<String, UpdatePeriod> updatePeriods = new HashMap<String, UpdatePeriod>();
-      for (Map.Entry<Storage, UpdatePeriod> entry : storageDumpPeriods.entrySet()) {
+      Map<String, UpdatePeriod> updatePeriods =
+          new HashMap<String, UpdatePeriod>();
+      for (Map.Entry<Storage, UpdatePeriod> entry : storageDumpPeriods
+          .entrySet()) {
         updatePeriods.put(entry.getKey().getName(), entry.getValue());
       }
       return updatePeriods;
@@ -188,7 +189,7 @@ public class CubeMetastoreClient {
       List<FieldSchema> columns,
       Map<String, TableReference> dimensionReferences,
       Map<Storage, UpdatePeriod> dumpPeriods)
-          throws HiveException {
+      throws HiveException {
     // add date partitions for storages with dumpPeriods
     addDatePartitions(dumpPeriods);
     CubeDimensionTable dimTable = new CubeDimensionTable(dimName, columns,
@@ -206,7 +207,7 @@ public class CubeMetastoreClient {
 
   public void createCubeTable(CubeFactTable factTable,
       Map<Storage, List<UpdatePeriod>> storageAggregatePeriods)
-          throws HiveException {
+      throws HiveException {
     // create virtual cube table in metastore
     StorageDescriptor sd = createCubeHiveTable(factTable);
 
@@ -236,33 +237,33 @@ public class CubeMetastoreClient {
 
   public void addStorage(CubeFactTable table, Storage storage,
       List<UpdatePeriod> updatePeriods) throws HiveException {
-    //TODO add the update periods to cube table properties
+    // TODO add the update periods to cube table properties
     createFactStorage(table.getName(), storage, getCubeTableSd(table),
         updatePeriods);
   }
 
   public void addStorageUpdatePeriod(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod) throws HiveException {
-    //TODO add the update periods to cube table properties
-    createFactStorageUpdatePeriod(table.getName(),storage,
+    // TODO add the update periods to cube table properties
+    createFactStorageUpdatePeriod(table.getName(), storage,
         getStorageSD(storage, getCubeTableSd(table)), updatePeriod);
   }
 
   public void addColumn(AbstractCubeTable table, FieldSchema column) {
-    //TODO
+    // TODO
   }
 
   public void addDimensionReference(AbstractCubeTable srcTable, String srcCol,
       TableReference reference) {
-    //TODO
+    // TODO
   }
 
-  //public void addMeasure(CubeFactTable table, Measure measure) {
-  //TODO
-  //}
+  // public void addMeasure(CubeFactTable table, Measure measure) {
+  // TODO
+  // }
 
   public void addUpdatePeriod(CubeFactTable table, UpdatePeriod updatePeriod) {
-    //TODO
+    // TODO
   }
 
   public static List<String> getPartitionValues(Table tbl,
@@ -281,7 +282,7 @@ public class CubeMetastoreClient {
 
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp)
-          throws HiveException {
+      throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         table.getName(), updatePeriod, storage.getPrefix());
     addPartition(storageTableName, storage, getPartitionSpec(updatePeriod,
@@ -291,7 +292,7 @@ public class CubeMetastoreClient {
   public void addPartition(CubeFactTable table, Storage storage,
       UpdatePeriod updatePeriod, Date partitionTimestamp,
       Map<String, String> partSpec)
-          throws HiveException {
+      throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         table.getName(), updatePeriod, storage.getPrefix());
     partSpec.putAll(getPartitionSpec(updatePeriod,
@@ -341,22 +342,24 @@ public class CubeMetastoreClient {
 
   boolean factPartitionExists(CubeFactTable fact,
       Storage storage, UpdatePeriod updatePeriod,
-      Date partitionTimestamp, Map<String, String> partSpec) throws HiveException {
+      Date partitionTimestamp, Map<String, String> partSpec)
+      throws HiveException {
     String storageTableName = MetastoreUtil.getFactStorageTableName(
         fact.getName(), updatePeriod, storage.getPrefix());
-    return partitionExists(storageTableName, updatePeriod, partitionTimestamp, partSpec);
+    return partitionExists(storageTableName, updatePeriod, partitionTimestamp,
+        partSpec);
   }
 
   boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
       Date partitionTimestamp)
-          throws HiveException {
+      throws HiveException {
     return partitionExists(storageTableName,
         getPartitionSpec(updatePeriod, partitionTimestamp));
   }
 
   boolean partitionExists(String storageTableName, UpdatePeriod updatePeriod,
       Date partitionTimestamp, Map<String, String> partSpec)
-          throws HiveException {
+      throws HiveException {
     partSpec.putAll(getPartitionSpec(updatePeriod, partitionTimestamp));
     return partitionExists(storageTableName, partSpec);
   }
@@ -377,7 +380,8 @@ public class CubeMetastoreClient {
     String storageTableName = MetastoreUtil.getDimStorageTableName(
         dim.getName(), storage.getPrefix());
     return partitionExists(storageTableName,
-        dim.getSnapshotDumpPeriods().get(storage.getName()), partitionTimestamp);
+        dim.getSnapshotDumpPeriods().get(storage.getName()),
+        partitionTimestamp);
   }
 
   boolean latestPartitionExists(CubeDimensionTable dim,
@@ -395,7 +399,7 @@ public class CubeMetastoreClient {
     return getHiveTable(tableName);
   }
 
-  private Table getTable(String tableName)  throws HiveException {
+  private Table getTable(String tableName) throws HiveException {
     Table tbl;
     try {
       tbl = getClient().getTable(tableName.toLowerCase());
@@ -411,7 +415,7 @@ public class CubeMetastoreClient {
     return isFactTable(tbl);
   }
 
-   boolean isFactTable(Table tbl) {
+  boolean isFactTable(Table tbl) {
     String tableType = tbl.getParameters().get(
         MetastoreConstants.TABLE_TYPE_KEY);
     return CubeTableType.FACT.name().equals(tableType);

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ExprMeasure.java Thu May  9 08:32:48 2013
@@ -38,7 +38,7 @@ public final class ExprMeasure extends C
     final int prime = 31;
     int result = super.hashCode();
     result = prime * result + ((getExpr() == null) ? 0 :
-      getExpr().toLowerCase().hashCode());
+        getExpr().toLowerCase().hashCode());
     return result;
   }
 
@@ -47,7 +47,7 @@ public final class ExprMeasure extends C
     if (!super.equals(obj)) {
       return false;
     }
-    ExprMeasure other = (ExprMeasure)obj;
+    ExprMeasure other = (ExprMeasure) obj;
     if (this.getExpr() == null) {
       if (other.getExpr() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HDFSStorage.java Thu May  9 08:32:48 2013
@@ -93,7 +93,7 @@ public class HDFSStorage extends Storage
 
   public HDFSStorage(Table table) {
     super("HDFS", TableType.EXTERNAL_TABLE);
-    //TODO
+    // TODO
   }
 
   private HDFSStorage(String name, String inputFormat, String outputFormat,
@@ -176,12 +176,12 @@ public class HDFSStorage extends Storage
       }
     }
     client.createPartition(storageTbl, partSpec,
-        location, getTableParameters(),  inputFormat, outputFormat, -1,
+        location, getTableParameters(), inputFormat, outputFormat, -1,
         storageTbl.getCols(), serdeClassName, serdeParameters, null, null);
     if (makeLatest) {
       // symlink this partition to latest
       client.createPartition(storageTbl, getLatestPartSpec(),
-          location, getTableParameters(),  inputFormat, outputFormat, -1,
+          location, getTableParameters(), inputFormat, outputFormat, -1,
           storageTbl.getCols(), serdeClassName, serdeParameters, null, null);
     }
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HierarchicalDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HierarchicalDimension.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HierarchicalDimension.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/HierarchicalDimension.java Thu May  9 08:32:48 2013
@@ -24,7 +24,7 @@ public class HierarchicalDimension exten
   @Override
   public void addProperties(Map<String, String> props) {
     super.addProperties(props);
-    for (int i =0; i < hierarchy.size(); i++) {
+    for (int i = 0; i < hierarchy.size(); i++) {
       CubeDimension dim = hierarchy.get(i);
       props.put(MetastoreUtil.getHierachyElementKeyName(getName(), i),
           getHierarchyElement(dim));
@@ -63,7 +63,7 @@ public class HierarchicalDimension exten
         Constructor<?> constructor;
         constructor = clazz.getConstructor(String.class, Map.class);
         dim = (CubeDimension) constructor.newInstance(new Object[]
-            {dimName, props});
+        {dimName, props});
       } catch (ClassNotFoundException e) {
         throw new IllegalArgumentException("Invalid Dimension", e);
       } catch (SecurityException e) {
@@ -89,7 +89,7 @@ public class HierarchicalDimension exten
     final int prime = 31;
     int result = super.hashCode();
     result = prime * result + ((getHierarchy() == null) ? 0 :
-      getHierarchy().hashCode());
+        getHierarchy().hashCode());
     return result;
   }
 
@@ -98,7 +98,7 @@ public class HierarchicalDimension exten
     if (!super.equals(obj)) {
       return false;
     }
-    HierarchicalDimension other = (HierarchicalDimension)obj;
+    HierarchicalDimension other = (HierarchicalDimension) obj;
     if (this.getHierarchy() == null) {
       if (other.getHierarchy() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/InlineDimension.java Thu May  9 08:32:48 2013
@@ -30,7 +30,8 @@ public class InlineDimension extends Bas
 
   public InlineDimension(String name, Map<String, String> props) {
     super(name, props);
-    String valueStr = props.get(MetastoreUtil.getInlineDimensionValuesKey(name));
+    String valueStr = props.get(MetastoreUtil.getInlineDimensionValuesKey(
+        name));
     this.values = Arrays.asList(valueStr.split(","));
   }
 
@@ -39,7 +40,7 @@ public class InlineDimension extends Bas
     final int prime = 31;
     int result = super.hashCode();
     result = prime * result + ((getValues() == null) ? 0 :
-      getValues().hashCode());
+        getValues().hashCode());
     return result;
   }
 
@@ -48,7 +49,7 @@ public class InlineDimension extends Bas
     if (!super.equals(obj)) {
       return false;
     }
-    InlineDimension other = (InlineDimension)obj;
+    InlineDimension other = (InlineDimension) obj;
     if (this.getValues() == null) {
       if (other.getValues() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/MetastoreUtil.java Thu May  9 08:32:48 2013
@@ -35,9 +35,9 @@ public class MetastoreUtil implements Me
     return virtualName.substring(type.name().length() + 1);
   }
 
-  /////////////////////////
+  // ///////////////////////
   // Dimension properties//
-  /////////////////////////
+  // ///////////////////////
   public static final String getDimTypePropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + TYPE_SFX;
   }
@@ -45,6 +45,7 @@ public class MetastoreUtil implements Me
   public static String getHierachyElementKeyPFX(String dimName) {
     return getDimensionKeyPrefix(dimName) + HIERARCHY_SFX;
   }
+
   public static String getHierachyElementKeyName(String dimName, int index) {
     return getHierachyElementKeyPFX(dimName) + index;
   }
@@ -84,20 +85,20 @@ public class MetastoreUtil implements Me
 
   public static String getDimensionDumpPeriodKey(String name, String storage) {
     return getDimensionKeyPrefix(name) + "." + storage.toLowerCase() +
-        DUMP_PERIOD_SFX ;
+        DUMP_PERIOD_SFX;
   }
 
   public static String getDimensionStorageListKey(String name) {
-    return getDimensionKeyPrefix(name) +  STORAGE_LIST_SFX;
+    return getDimensionKeyPrefix(name) + STORAGE_LIST_SFX;
   }
 
   public static final String getDimensionClassPropertyKey(String dimName) {
     return getDimensionKeyPrefix(dimName) + CLASS_SFX;
   }
 
-  ////////////////////////////
-  // Measure properties  ///
-  ///////////////////////////
+  // //////////////////////////
+  // Measure properties ///
+  // /////////////////////////
   public static final String getMeasurePrefix(String measureName) {
     return MEASURE_KEY_PFX + measureName.toLowerCase();
   }
@@ -126,9 +127,9 @@ public class MetastoreUtil implements Me
     return getMeasurePrefix(measureName) + EXPR_SFX;
   }
 
-  ////////////////////////////
-  // Cube properties  ///
-  ///////////////////////////
+  // //////////////////////////
+  // Cube properties ///
+  // /////////////////////////
   public static final String getCubePrefix(String cubeName) {
     return CUBE_KEY_PFX + cubeName.toLowerCase();
   }
@@ -141,11 +142,11 @@ public class MetastoreUtil implements Me
     return getCubePrefix(cubeName) + DIMENSIONS_LIST_SFX;
   }
 
-  ////////////////////////////
-  // Fact propertes  ///
-  ///////////////////////////
+  // //////////////////////////
+  // Fact properties ///
+  // /////////////////////////
   public static String getFactStorageListKey(String name) {
-    return getFactKeyPrefix(name) +  STORAGE_LIST_SFX;
+    return getFactKeyPrefix(name) + STORAGE_LIST_SFX;
   }
 
   public static String getFactKeyPrefix(String factName) {
@@ -153,7 +154,7 @@ public class MetastoreUtil implements Me
   }
 
   public static String getFactUpdatePeriodKey(String name, String storage) {
-    return getFactKeyPrefix(name) +  "." + storage.toLowerCase()
+    return getFactKeyPrefix(name) + "." + storage.toLowerCase()
         + UPDATE_PERIOD_SFX;
   }
 
@@ -161,16 +162,16 @@ public class MetastoreUtil implements Me
     return getFactKeyPrefix(name) + CUBE_NAME_SFX;
   }
 
-  ////////////////////////////
-  // Utils  ///
-  ///////////////////////////
+  // //////////////////////////
+  // Utils ///
+  // /////////////////////////
   public static <E extends Named> String getNamedStr(Collection<E> set) {
     if (set == null || set.isEmpty()) {
       return "";
     }
     StringBuilder valueStr = new StringBuilder();
     Iterator<E> it = set.iterator();
-    for (int i = 0; i < (set.size()-1) ; i++) {
+    for (int i = 0; i < (set.size() - 1); i++) {
       valueStr.append(it.next().getName());
       valueStr.append(",");
     }
@@ -184,7 +185,7 @@ public class MetastoreUtil implements Me
     }
     StringBuilder valueStr = new StringBuilder();
     Iterator<?> it = set.iterator();
-    for (int i = 0; i < (set.size()-1) ; i++) {
+    for (int i = 0; i < (set.size() - 1); i++) {
       valueStr.append(it.next().toString());
       valueStr.append(",");
     }
@@ -198,7 +199,7 @@ public class MetastoreUtil implements Me
     }
     StringBuilder valueStr = new StringBuilder();
     Iterator<String> it = set.iterator();
-    for (int i = 0; i < (set.size()-1) ; i++) {
+    for (int i = 0; i < (set.size() - 1); i++) {
       valueStr.append(it.next());
       valueStr.append(",");
     }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/ReferencedDimension.java Thu May  9 08:32:48 2013
@@ -34,7 +34,7 @@ public class ReferencedDimension extends
     final int prime = 31;
     int result = super.hashCode();
     result = prime * result + ((getReference() == null) ? 0 :
-      getReference().hashCode());
+        getReference().hashCode());
     return result;
   }
 
@@ -43,7 +43,7 @@ public class ReferencedDimension extends
     if (!super.equals(obj)) {
       return false;
     }
-    ReferencedDimension other = (ReferencedDimension)obj;
+    ReferencedDimension other = (ReferencedDimension) obj;
     if (this.getReference() == null) {
       if (other.getReference() != null) {
         return false;

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/Storage.java Thu May  9 08:32:48 2013
@@ -15,12 +15,13 @@ import org.apache.hadoop.hive.serde.serd
 public abstract class Storage implements Named {
 
   private final TableType tableType;
-  private final Map<String, String> tableParameters = new HashMap<String, String>();
+  private final Map<String, String> tableParameters =
+      new HashMap<String, String>();
   private final List<FieldSchema> partCols = new ArrayList<FieldSchema>();
   protected Map<String, String> serdeParameters = new HashMap<String, String>();
   private final String name;
 
-  protected Storage(String name,TableType type) {
+  protected Storage(String name, TableType type) {
     this.tableType = type;
     this.name = name;
   }
@@ -65,7 +66,7 @@ public abstract class Storage implements
 
   public abstract void addPartition(String storageTableName,
       Map<String, String> partSpec, HiveConf conf, boolean makeLatest)
-          throws HiveException;
+      throws HiveException;
 
 
   public static String getDatePartitionKey() {
@@ -74,7 +75,8 @@ public abstract class Storage implements
 
   private static Map<String, String> latestSpec = new HashMap<String, String>();
   static {
-    latestSpec.put(getDatePartitionKey(), StorageConstants.LATEST_PARTITION_VALUE);
+    latestSpec.put(getDatePartitionKey(),
+        StorageConstants.LATEST_PARTITION_VALUE);
   }
 
   public static Map<String, String> getLatestPartSpec() {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/TableReference.java Thu May  9 08:32:48 2013
@@ -34,6 +34,7 @@ public class TableReference {
     this.destColumn = destColumn;
   }
 
+  @Override
   public boolean equals(Object obj) {
     if (this == obj) {
       return true;
@@ -62,6 +63,7 @@ public class TableReference {
     return true;
   }
 
+  @Override
   public String toString() {
     return destTable + "." + destColumn;
   }
@@ -70,7 +72,8 @@ public class TableReference {
   public int hashCode() {
     final int prime = 31;
     int result = 1;
-    result = prime * result + ((destColumn == null) ? 0 : destColumn.hashCode());
+    result = prime * result + ((destColumn == null) ? 0 :
+        destColumn.hashCode());
     result = prime * result + ((destTable == null) ? 0 : destTable.hashCode());
     return result;
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/metadata/UpdatePeriod.java Thu May  9 08:32:48 2013
@@ -7,15 +7,15 @@ import java.util.Date;
 import org.apache.hadoop.hive.ql.cube.parse.DateUtil;
 
 
-public enum UpdatePeriod implements Named{
-  SECONDLY (Calendar.SECOND, 1000, "yyyy-MM-dd-HH-mm-ss"),
-  MINUTELY (Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
-  HOURLY (Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
-  DAILY (Calendar.DAY_OF_MONTH, 24 * HOURLY.weight(), "yyyy-MM-dd"),
-  WEEKLY (Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
-  MONTHLY (Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
-  QUARTERLY (Calendar.MONTH, 3 * MONTHLY.weight(), "yyyy-M"),
-  YEARLY (Calendar.YEAR, 12 * MONTHLY.weight(), "yyyy");
+public enum UpdatePeriod implements Named {
+  SECONDLY(Calendar.SECOND, 1000, "yyyy-MM-dd-HH-mm-ss"),
+  MINUTELY(Calendar.MINUTE, 60 * SECONDLY.weight(), "yyyy-MM-dd-HH-mm"),
+  HOURLY(Calendar.HOUR_OF_DAY, 60 * MINUTELY.weight(), "yyyy-MM-dd-HH"),
+  DAILY(Calendar.DAY_OF_MONTH, 24 * HOURLY.weight(), "yyyy-MM-dd"),
+  WEEKLY(Calendar.WEEK_OF_YEAR, 7 * DAILY.weight(), "yyyy-'W'ww-u"),
+  MONTHLY(Calendar.MONTH, 30 * DAILY.weight(), "yyyy-MM"),
+  QUARTERLY(Calendar.MONTH, 3 * MONTHLY.weight(), "yyyy-M"),
+  YEARLY(Calendar.YEAR, 12 * MONTHLY.weight(), "yyyy");
 
   public static final long MIN_INTERVAL = SECONDLY.weight();
   private final int calendarField;
@@ -49,7 +49,8 @@ public enum UpdatePeriod implements Name
     return name();
   }
 
-  public static class UpdatePeriodComparator implements Comparator<UpdatePeriod> {
+  public static class UpdatePeriodComparator
+      implements Comparator<UpdatePeriod> {
     @Override
     public int compare(UpdatePeriod o1, UpdatePeriod o2) {
       if (o1 == null && o2 != null) {

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AggregateResolver.java Thu May  9 08:32:48 2013
@@ -13,23 +13,20 @@ import org.apache.hadoop.hive.ql.parse.B
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.log4j.Logger;
+
 /**
- * <p>Replace select and having columns with default aggregate functions on
- * them, if default aggregate is defined and if there isn't already an
- * aggregate function specified on the columns.</p>
- *
- * <p>For example, if query is like - <pre>select dim1.name, fact.msr1, fact.msr1 * sin(fact.msr2)/cos(fact.msr2),
- * sum(fact.msr4) ...</pre>
- * Then we will replace fact.msr1 with sum(fact.msr1) given that 'sum' has been
- * set as the default aggregate function for msr1.</p>
- *
- * <p>We will also wrap expressions of measures into a default aggregate function.
- * For example the expression 'fact.msr1 * sin(fact.msr2)/cos(fact.msr2)' will become
- *  sum(fact.msr1 * sin(fact.msr2)/cos(fact.msr2)), if sum is the default aggregate function.</p>
+ * <p>
+ * Replace select and having columns with default aggregate functions on them, if default aggregate
+ * is defined and if there isn't already an aggregate function specified on the columns.
+ * </p>
  *
- * <p>Expressions which already contain aggregate sub-expressions will not be changed.</p>
+ * <p>
+ * Expressions which already contain aggregate sub-expressions will not be changed.
+ * </p>
  *
- * <p>At this point it's assumed that aliases have been added to all columns.</p>
+ * <p>
+ * At this point it's assumed that aliases have been added to all columns.
+ * </p>
  */
 public class AggregateResolver implements ContextRewriter {
   public static final Logger LOG = Logger.getLogger(AggregateResolver.class);
@@ -60,15 +57,18 @@ public class AggregateResolver implement
     System.out.println("New having after aggregate resolver: " + rewritHaving);
   }
 
-  private void validateAggregates(CubeQueryContext cubeql, ASTNode node, boolean insideAggregate,
-      boolean insideArithExpr, boolean insideNonAggrFn) throws SemanticException {
+  private void validateAggregates(CubeQueryContext cubeql, ASTNode node,
+      boolean insideAggregate,
+      boolean insideArithExpr, boolean insideNonAggrFn)
+          throws SemanticException {
     if (node == null) {
       return;
     }
 
     int nodeType = node.getToken().getType();
     if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
-      // Found a column ref. If this is a measure, it should be inside an aggregate if its part of
+      // Found a column ref. If this is a measure, it should be inside an
+      // aggregate if it's part of
       // an arithmetic expression or an argument of a non-aggregate function
       String msrname = getColName(node);
       if (cubeql.isCubeMeasure(msrname) &&
@@ -81,27 +81,32 @@ public class AggregateResolver implement
       // Not allowed - msr1 + msr2 * msr3 <- Not inside aggregate
       // Not allowed - sum(msr1) + msr2 <- Aggregate only on one measure
       // count of measures within aggregates must be equal to count of measures
-      // if both counts are equal and zero, then this node should be inside aggregate
+      // if both counts are equal and zero, then this node should be inside
+      // aggregate
       int measuresInAggregates = countMeasuresInAggregates(cubeql, node, false);
       int measuresInTree = countMeasures(cubeql, node);
 
       if (measuresInAggregates == measuresInTree) {
         if (measuresInAggregates == 0 && !insideAggregate) {
           // (msr1 + msr2)
-          throw new SemanticException("Invalid projection expression: " + HQLParser.getString(node));
+          throw new SemanticException("Invalid projection expression: "
+              + HQLParser.getString(node));
         } else if (insideAggregate) {
           // sum(sum(msr1) + sum(msr2))
-          throw new SemanticException("Invalid projection expression: " + HQLParser.getString(node));
+          throw new SemanticException("Invalid projection expression: "
+              + HQLParser.getString(node));
         }
       } else {
-        throw new SemanticException("Invalid projection expression: " + HQLParser.getString(node));
+        throw new SemanticException("Invalid projection expression: "
+            + HQLParser.getString(node));
       }
     } else {
       boolean isArithmetic = HQLParser.isArithmeticOp(nodeType);
       boolean isAggregate = isAggregateAST(node);
       boolean isNonAggrFn = nodeType == HiveParser.TOK_FUNCTION && !isAggregate;
       for (int i = 0; i < node.getChildCount(); i++) {
-        validateAggregates(cubeql, (ASTNode) node.getChild(i), isAggregate, isArithmetic, isNonAggrFn);
+        validateAggregates(cubeql, (ASTNode) node.getChild(i), isAggregate,
+            isArithmetic, isNonAggrFn);
       }
     }
   }
@@ -124,7 +129,8 @@ public class AggregateResolver implement
     }
   }
 
-  private int countMeasuresInAggregates(CubeQueryContext cubeql, ASTNode node, boolean inAggregate) {
+  private int countMeasuresInAggregates(CubeQueryContext cubeql, ASTNode node,
+      boolean inAggregate) {
     int nodeType = node.getToken().getType();
     if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
       String msrname = getColName(node);
@@ -136,14 +142,16 @@ public class AggregateResolver implement
     } else {
       int count = 0;
       for (int i = 0; i < node.getChildCount(); i++) {
-        boolean isAggr = isAggregateAST((ASTNode)node.getChild(i));
-        count += countMeasuresInAggregates(cubeql, (ASTNode) node.getChild(i), isAggr);
+        boolean isAggr = isAggregateAST((ASTNode) node.getChild(i));
+        count += countMeasuresInAggregates(cubeql, (ASTNode) node.getChild(i),
+            isAggr);
       }
       return count;
     }
   }
 
-  private String resolveForSelect(CubeQueryContext cubeql, String exprTree) throws SemanticException {
+  private String resolveForSelect(CubeQueryContext cubeql, String exprTree)
+      throws SemanticException {
     // Aggregate resolver needs cube to be resolved first
     assert cubeql.getCube() != null;
 
@@ -163,9 +171,11 @@ public class AggregateResolver implement
 
       if (!cubeql.isAggregateExpr(token)) {
         if (cubeql.isCubeMeasure(token)) {
-          // Take care of brackets added around col names in HQLParsrer.getString
-          if (token.startsWith("(") && token.endsWith(")") && token.length() > 2) {
-            token = token.substring(1, token.length() -1);
+          // Take care of brackets added around col names
+          // in HQLParser.getString
+          if (token.startsWith("(") && token.endsWith(")")
+              && token.length() > 2) {
+            token = token.substring(1, token.length() - 1);
           }
 
           String splits[] = StringUtils.split(token, ".");
@@ -179,16 +189,20 @@ public class AggregateResolver implement
             String msrAggregate = measure.getAggregate();
 
             if (StringUtils.isNotBlank(msrAggregate)) {
-              exprTokens[i] = msrAggregate + "( " + token + ")" + (hasAlias ? " " + tokenAlias : "");
+              exprTokens[i] = msrAggregate + "( " + token + ")" + (hasAlias ?
+                  " " + tokenAlias : "");
               exprTokens[i] = exprTokens[i].toLowerCase();
-              // Add this expression to aggregate expr set so that group by resolver can skip
+              // Add this expression to aggregate expr set so that group by
+              // resolver can skip
               // over expressions changed during aggregate resolver.
               cubeql.addAggregateExpr(exprTokens[i]);
             } else {
-              throw new SemanticException("Default aggregate is not set for measure: " + msrName);
+              throw new SemanticException("Default aggregate is not set for" +
+                  " measure: " + msrName);
             }
           } else {
-            // should not be here, since if it is a measure, we should get a cube measure object
+            // should not be here, since if it is a measure, we should get a
+            // cube measure object
             throw new SemanticException("Measure not found for " + msrName);
           }
         }
@@ -199,8 +213,10 @@ public class AggregateResolver implement
   }
 
   // We need to traverse the AST for Having clause.
-  // We need to skip any columns that are inside an aggregate UDAF or inside an arithmetic expression
-  private String resolveForHaving(CubeQueryContext cubeql) throws SemanticException {
+  // We need to skip any columns that are inside an aggregate UDAF or
+  // inside an arithmetic expression
+  private String resolveForHaving(CubeQueryContext cubeql)
+      throws SemanticException {
     ASTNode havingTree = cubeql.getHavingAST();
     String havingTreeStr = cubeql.getHavingTree();
 
@@ -215,14 +231,16 @@ public class AggregateResolver implement
     return HQLParser.getString(havingTree);
   }
 
-  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws SemanticException {
+  private void transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node,
+      int nodePos) throws SemanticException {
     if (parent == null || node == null) {
       return;
     }
     int nodeType = node.getToken().getType();
 
-    if (! (isAggregateAST(node) || HQLParser.isArithmeticOp(nodeType))) {
-      if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
+    if (!(isAggregateAST(node) || HQLParser.isArithmeticOp(nodeType))) {
+      if (nodeType == HiveParser.TOK_TABLE_OR_COL ||
+          nodeType == HiveParser.DOT) {
         // Leaf node
         ASTNode wrapped = wrapAggregate(cubeql, node);
         if (wrapped != node) {
@@ -244,8 +262,8 @@ public class AggregateResolver implement
         || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
       assert (node.getChildCount() != 0);
       if (node.getChild(0).getType() == HiveParser.Identifier) {
-        String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0)
-            .getText());
+        String functionName = BaseSemanticAnalyzer.unescapeIdentifier(
+            node.getChild(0).getText());
         if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
           return true;
         }
@@ -255,8 +273,10 @@ public class AggregateResolver implement
     return false;
   }
 
-  // Wrap an aggregate function around the node if its a measure, leave it unchanged otherwise
-  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node) throws SemanticException {
+  // Wrap an aggregate function around the node if it's a measure, leave it
+  // unchanged otherwise
+  private ASTNode wrapAggregate(CubeQueryContext cubeql, ASTNode node)
+      throws SemanticException {
 
     String tabname = null;
     String colname = null;
@@ -273,19 +293,22 @@ public class AggregateResolver implement
       tabname = tabident.getText();
     }
 
-    String msrname = StringUtils.isBlank(tabname) ? colname : tabname + "." + colname;
+    String msrname = StringUtils.isBlank(tabname) ? colname : tabname + "."
+        + colname;
 
     if (cubeql.isCubeMeasure(msrname)) {
       CubeMeasure measure = cubeql.getCube().getMeasureByName(colname);
       String aggregateFn = measure.getAggregate();
 
       if (StringUtils.isBlank(aggregateFn)) {
-        throw new SemanticException("Default aggregate is not set for measure: " + colname);
+        throw new SemanticException("Default aggregate is not set for measure: "
+            + colname);
       }
       ASTNode fnroot = new ASTNode(new CommonToken(HiveParser.TOK_FUNCTION));
       fnroot.setParent(node.getParent());
 
-      ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier, aggregateFn));
+      ASTNode fnIdentNode = new ASTNode(new CommonToken(HiveParser.Identifier,
+          aggregateFn));
       fnIdentNode.setParent(fnroot);
       fnroot.addChild(fnIdentNode);
 

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/AliasReplacer.java Thu May  9 08:32:48 2013
@@ -17,7 +17,7 @@ public class AliasReplacer implements Co
     // If tab1 is already aliased say with t1, col1 is changed as t1.col1
     // replace the columns in select, groupby, having, orderby by
     // prepending the table alias to the col
-    //sample select trees
+    // sample select trees
     // 1: (TOK_SELECT (TOK_SELEXPR (TOK_TABLE_OR_COL key))
     // (TOK_SELEXPR (TOK_FUNCTION count (TOK_TABLE_OR_COL value))))
     // 2: (TOK_SELECT (TOK_SELEXPR (. (TOK_TABLE_OR_COL src) key))

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryContext.java Thu May  9 08:32:48 2013
@@ -59,30 +59,40 @@ public class CubeQueryContext {
   private Cube cube;
   private List<String> cubeMeasureNames;
   private List<String> cubeDimNames;
-  protected Set<CubeDimensionTable> dimensions = new HashSet<CubeDimensionTable>();
-  private final Map<String, AbstractCubeTable> cubeTbls = new HashMap<String, AbstractCubeTable>();
+  protected Set<CubeDimensionTable> dimensions =
+      new HashSet<CubeDimensionTable>();
+  private final Map<String, AbstractCubeTable> cubeTbls =
+      new HashMap<String, AbstractCubeTable>();
   private final Map<AbstractCubeTable, List<String>> cubeTabToCols =
       new HashMap<AbstractCubeTable, List<String>>();
-  protected Set<CubeFactTable> candidateFactTables = new HashSet<CubeFactTable>();
+  protected Set<CubeFactTable> candidateFactTables =
+      new HashSet<CubeFactTable>();
 
   // fields queried
-  private final Map<String, List<String>> tblAliasToColumns = new HashMap<String, List<String>>();
+  private final Map<String, List<String>> tblAliasToColumns =
+      new HashMap<String, List<String>>();
   private final Set<String> cubeColumnsQueried = new HashSet<String>();
-  private final Map<String, String> columnToTabAlias = new HashMap<String, String>();
-  private final Map<CubeQueryExpr, Set<String>> exprToCols = new HashMap<CubeQueryExpr, Set<String>>();
-  private final Map<CubeQueryExpr, Set<String>> queryExprToExprs = new HashMap<CubeQueryExpr, Set<String>>();
+  private final Map<String, String> columnToTabAlias =
+      new HashMap<String, String>();
+  private final Map<CubeQueryExpr, Set<String>> exprToCols =
+      new HashMap<CubeQueryExpr, Set<String>>();
+  private final Map<CubeQueryExpr, Set<String>> queryExprToExprs =
+      new HashMap<CubeQueryExpr, Set<String>>();
   private final Map<String, String> exprToAlias = new HashMap<String, String>();
   private final Set<String> aggregateCols = new HashSet<String>();
   private final Set<String> aggregateExprs = new HashSet<String>();
-  private final Map<QBJoinTree, String> joinConds = new HashMap<QBJoinTree, String>();
+  private final Map<QBJoinTree, String> joinConds =
+      new HashMap<QBJoinTree, String>();
 
   // storage specific
-  protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factPartitionMap =
+  protected Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
+  factPartitionMap =
       new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
   private List<String> supportedStorages;
   private boolean allStoragesSupported;
-  private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>> factStorageMap =
-      new HashMap<CubeFactTable, Map<UpdatePeriod,List<String>>>();
+  private final Map<CubeFactTable, Map<UpdatePeriod, List<String>>>
+  factStorageMap =
+      new HashMap<CubeFactTable, Map<UpdatePeriod, List<String>>>();
   private final Map<CubeDimensionTable, List<String>> dimStorageMap =
       new HashMap<CubeDimensionTable, List<String>>();
   private final Map<String, String> storageTableToWhereClause =
@@ -113,8 +123,8 @@ public class CubeQueryContext {
           qb.getParseInfo().getWhrForClause(clauseName)).toLowerCase();
     }
     if (qb.getParseInfo().getHavingForClause(clauseName) != null) {
-      this.havingTree = HQLParser.getString(qb.getParseInfo().getHavingForClause(
-          clauseName)).toLowerCase();
+      this.havingTree = HQLParser.getString(qb.getParseInfo()
+          .getHavingForClause(clauseName)).toLowerCase();
       this.havingAST = qb.getParseInfo().getHavingForClause(
           clauseName);
     }
@@ -152,12 +162,13 @@ public class CubeQueryContext {
     try {
       CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
       List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
-      for (String alias :  tabAliases) {
+      for (String alias : tabAliases) {
         String tblName = qb.getTabNameForAlias(alias);
         if (client.isCube(tblName)) {
           if (cube != null) {
             if (cube.getName() != tblName) {
-              throw new SemanticException("More than one cube accessed in query");
+              throw new SemanticException("More than one cube accessed in" +
+                  " query");
             }
           }
           cube = client.getCube(tblName);
@@ -191,7 +202,8 @@ public class CubeQueryContext {
 
   private String getClause() {
     if (clauseName == null) {
-      TreeSet<String> ks = new TreeSet<String>(qb.getParseInfo().getClauseNames());
+      TreeSet<String> ks = new TreeSet<String>(
+          qb.getParseInfo().getClauseNames());
       clauseName = ks.first();
     }
     return clauseName;
@@ -213,7 +225,7 @@ public class CubeQueryContext {
     if (TOK_FUNCTION == whereTree.getChild(0).getType()) {
       // expect only time range
       timenode = HQLParser.findNodeByPath(whereTree, TOK_FUNCTION);
-    } else if (KW_AND == whereTree.getChild(0).getType()){
+    } else if (KW_AND == whereTree.getChild(0).getType()) {
       // expect time condition as the right sibling of KW_AND
       timenode = HQLParser.findNodeByPath(whereTree, KW_AND, TOK_FUNCTION);
     }
@@ -245,14 +257,15 @@ public class CubeQueryContext {
     assert timeFrom != null && timeTo != null;
 
     if (timeFrom.after(timeTo)) {
-      throw new SemanticException("From date: " + fromDateRaw + " is after to date:" + toDateRaw);
+      throw new SemanticException("From date: " + fromDateRaw
+          + " is after to date:" + toDateRaw);
     }
   }
 
   private void extractColumns() throws SemanticException {
-    //columnAliases = new ArrayList<String>();
+    // columnAliases = new ArrayList<String>();
 
-    // Check if its 'select *  from...'
+    // Check if its 'select * from...'
     ASTNode selTree = qb.getParseInfo().getSelForClause(clauseName);
     if (selTree.getChildCount() == 1) {
       ASTNode star = HQLParser.findNodeByPath(selTree, TOK_SELEXPR,
@@ -265,7 +278,8 @@ public class CubeQueryContext {
       if (star != null) {
         int starType = star.getToken().getType();
         if (TOK_FUNCTIONSTAR == starType || TOK_ALLCOLREF == starType) {
-          throw new SemanticException("Selecting allColumns is not yet supported");
+          throw new SemanticException("Selecting allColumns is not yet " +
+              "supported");
         }
       }
     }
@@ -276,7 +290,8 @@ public class CubeQueryContext {
       getColsForTree(getExprTree(expr), columns, tblAliasToColumns, exprToAlias);
     }
 
-    for (ASTNode aggrTree : qb.getParseInfo().getAggregationExprsForClause(clauseName).values()) {
+    for (ASTNode aggrTree : qb.getParseInfo().getAggregationExprsForClause(
+        clauseName).values()) {
       getColsForTree(aggrTree, aggregateCols, null, null);
       String aggr = HQLParser.getString(aggrTree);
       aggregateExprs.add(aggr.toLowerCase());
@@ -291,18 +306,26 @@ public class CubeQueryContext {
 
   private ASTNode getExprTree(CubeQueryExpr expr) {
     switch (expr) {
-    case SELECT : return qb.getParseInfo().getSelForClause(clauseName);
-    case WHERE : return qb.getParseInfo().getWhrForClause(clauseName);
-    case HAVING : return qb.getParseInfo().getHavingForClause(clauseName);
-    case GROUPBY : return qb.getParseInfo().getGroupByForClause(clauseName);
-    case ORDERBY : qb.getParseInfo().getOrderByForClause(clauseName);
-    case JOIN : return qb.getParseInfo().getJoinExpr();
-    default : return null;
+    case SELECT:
+      return qb.getParseInfo().getSelForClause(clauseName);
+    case WHERE:
+      return qb.getParseInfo().getWhrForClause(clauseName);
+    case HAVING:
+      return qb.getParseInfo().getHavingForClause(clauseName);
+    case GROUPBY:
+      return qb.getParseInfo().getGroupByForClause(clauseName);
+    case ORDERBY:
+      return qb.getParseInfo().getOrderByForClause(clauseName);
+    case JOIN:
+      return qb.getParseInfo().getJoinExpr();
+    default:
+      return null;
     }
   }
 
   private static void getColsForTree(ASTNode tree, final Set<String> columns,
-      final Map<String, List<String>> tblToCols, final Map<String,String> exprToAlias) {
+      final Map<String, List<String>> tblToCols,
+      final Map<String, String> exprToAlias) {
     if (tree == null) {
       return;
     }
@@ -369,6 +392,7 @@ public class CubeQueryContext {
       }
     });
   }
+
   private void extractTabAliasForCol() throws SemanticException {
     List<String> columns = tblAliasToColumns.get(DEFAULT_TABLE);
     if (columns == null) {
@@ -379,25 +403,28 @@ public class CubeQueryContext {
       if (cube != null) {
         List<String> cols = cubeTabToCols.get(cube);
         if (cols.contains(col.toLowerCase())) {
-          columnToTabAlias.put(col.toLowerCase(), getAliasForTabName(cube.getName()));
+          columnToTabAlias.put(col.toLowerCase(), getAliasForTabName(
+              cube.getName()));
           cubeColumnsQueried.add(col);
           inCube = true;
         }
       }
-      for (CubeDimensionTable dim: dimensions) {
+      for (CubeDimensionTable dim : dimensions) {
         if (cubeTabToCols.get(dim).contains(col.toLowerCase())) {
           if (!inCube) {
             String prevDim = columnToTabAlias.get(col.toLowerCase());
             if (prevDim != null && !prevDim.equals(dim.getName())) {
               throw new SemanticException("Ambiguous column:" + col
-                  + " in dimensions '" + prevDim + "' and '" + dim.getName()+"'");
+                  + " in dimensions '" + prevDim + "' and '"
+                  + dim.getName() + "'");
             }
             columnToTabAlias.put(col.toLowerCase(), dim.getName());
             break;
           } else {
             // throw error because column is in both cube and dimension table
             throw new SemanticException("Ambiguous column:" + col
-                + " in cube: " + cube.getName() + " and dimension: " + dim.getName());
+                + " in cube: " + cube.getName() + " and dimension: "
+                + dim.getName());
           }
         }
       }
@@ -412,7 +439,8 @@ public class CubeQueryContext {
     if (cube != null) {
       // go over the columns accessed in the query and find out which tables
       // can answer the query
-      for (Iterator<CubeFactTable> i = candidateFactTables.iterator(); i.hasNext();) {
+      for (Iterator<CubeFactTable> i = candidateFactTables.iterator();
+          i.hasNext();) {
         CubeFactTable fact = i.next();
         List<String> factCols = cubeTabToCols.get(fact);
         for (String col : cubeColumnsQueried) {
@@ -471,6 +499,7 @@ public class CubeQueryContext {
     }
     return tabName;
   }
+
   public void print() {
     StringBuilder builder = new StringBuilder();
     builder.append("ASTNode:" + ast.dump() + "\n");
@@ -501,38 +530,49 @@ public class CubeQueryContext {
     builder.append("\n hints: " + parseInfo.getHints());
     builder.append("\n aliasToSrc: ");
     for (String alias : tabAliases) {
-      builder.append("\n\t" + alias +": " + parseInfo.getSrcForAlias(alias).dump());
+      builder.append("\n\t" + alias + ": " + parseInfo.getSrcForAlias(alias)
+          .dump());
     }
     TreeSet<String> clauses = new TreeSet<String>(parseInfo.getClauseNames());
     for (String clause : clauses) {
-      builder.append("\n\t" + clause + ": " + parseInfo.getClauseNamesForDest());
+      builder.append("\n\t" + clause + ": " + parseInfo
+          .getClauseNamesForDest());
     }
     String clause = clauses.first();
     if (parseInfo.getWhrForClause(clause) != null) {
-      builder.append("\n whereexpr: " + parseInfo.getWhrForClause(clause).dump());
+      builder.append("\n whereexpr: " + parseInfo.getWhrForClause(clause)
+          .dump());
     }
     if (parseInfo.getGroupByForClause(clause) != null) {
-      builder.append("\n groupby expr: " + parseInfo.getGroupByForClause(clause).dump());
+      builder.append("\n groupby expr: " + parseInfo.getGroupByForClause(clause)
+          .dump());
     }
     if (parseInfo.getSelForClause(clause) != null) {
-      builder.append("\n sel expr: " + parseInfo.getSelForClause(clause).dump());
+      builder.append("\n sel expr: " + parseInfo.getSelForClause(clause)
+          .dump());
     }
     if (parseInfo.getHavingForClause(clause) != null) {
-      builder.append("\n having expr: " + parseInfo.getHavingForClause(clause).dump());
+      builder.append("\n having expr: " + parseInfo.getHavingForClause(clause)
+          .dump());
     }
     if (parseInfo.getDestLimit(clause) != null) {
       builder.append("\n limit: " + parseInfo.getDestLimit(clause));
     }
-    if (parseInfo.getAllExprToColumnAlias() != null && !parseInfo.getAllExprToColumnAlias().isEmpty()) {
+    if (parseInfo.getAllExprToColumnAlias() != null
+        && !parseInfo.getAllExprToColumnAlias().isEmpty()) {
       builder.append("\n exprToColumnAlias:");
-      for (Map.Entry<ASTNode, String> entry : parseInfo.getAllExprToColumnAlias().entrySet()) {
-        builder.append("\n\t expr: " + entry.getKey().dump() + " ColumnAlias: " + entry.getValue());
+      for (Map.Entry<ASTNode, String> entry : parseInfo
+          .getAllExprToColumnAlias().entrySet()) {
+        builder.append("\n\t expr: " + entry.getKey().dump()
+            + " ColumnAlias: " + entry.getValue());
       }
     }
     if (parseInfo.getAggregationExprsForClause(clause) != null) {
       builder.append("\n aggregateexprs:");
-      for (Map.Entry<String, ASTNode> entry : parseInfo.getAggregationExprsForClause(clause).entrySet()) {
-        builder.append("\n\t key: " + entry.getKey() + " expr: " + entry.getValue().dump());
+      for (Map.Entry<String, ASTNode> entry : parseInfo
+          .getAggregationExprsForClause(clause).entrySet()) {
+        builder.append("\n\t key: " + entry.getKey() + " expr: " +
+            entry.getValue().dump());
       }
     }
     if (parseInfo.getDistinctFuncExprsForClause(clause) != null) {
@@ -542,7 +582,7 @@ public class CubeQueryContext {
       }
     }
 
-    if(qb.getQbJoinTree() != null) {
+    if (qb.getQbJoinTree() != null) {
       builder.append("\n\n JoinTree");
       QBJoinTree joinTree = qb.getQbJoinTree();
       printJoinTree(joinTree, builder);
@@ -554,13 +594,13 @@ public class CubeQueryContext {
     builder.append("leftAlias:" + joinTree.getLeftAlias());
     if (joinTree.getLeftAliases() != null) {
       builder.append("\n leftAliases:");
-      for (String alias: joinTree.getLeftAliases()) {
+      for (String alias : joinTree.getLeftAliases()) {
         builder.append("\n\t " + alias);
       }
     }
     if (joinTree.getRightAliases() != null) {
       builder.append("\n rightAliases:");
-      for (String alias: joinTree.getRightAliases()) {
+      for (String alias : joinTree.getRightAliases()) {
         builder.append("\n\t " + alias);
       }
     }
@@ -571,7 +611,7 @@ public class CubeQueryContext {
     }
     if (joinTree.getBaseSrc() != null) {
       builder.append("\n baseSrcs:");
-      for (String src: joinTree.getBaseSrc()) {
+      for (String src : joinTree.getBaseSrc()) {
         builder.append("\n\t " + src);
       }
     }
@@ -580,7 +620,7 @@ public class CubeQueryContext {
     builder.append("\n mapSideJoin: " + joinTree.isMapSideJoin());
     if (joinTree.getJoinCond() != null) {
       builder.append("\n joinConds:");
-      for (JoinCond cond: joinTree.getJoinCond()) {
+      for (JoinCond cond : joinTree.getJoinCond()) {
         builder.append("\n\t left: " + cond.getLeft() + " right: " +
             cond.getRight() + " type:" + cond.getJoinType() +
             " preserved:" + cond.getPreserved());
@@ -614,16 +654,19 @@ public class CubeQueryContext {
 
   public ASTNode getFromTree() {
     if (cube != null) {
-      return qb.getParseInfo().getSrcForAlias(getAliasForTabName(cube.getName()));
+      return qb.getParseInfo().getSrcForAlias(getAliasForTabName(
+          cube.getName()));
     }
-    return qb.getParseInfo().getSrcForAlias(qb.getTabAliases().iterator().next());
+    return qb.getParseInfo().getSrcForAlias(qb.getTabAliases().iterator()
+        .next());
   }
 
   public Integer getLimitValue() {
     return qb.getParseInfo().getDestLimit(getClause());
   }
 
-  public Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getFactPartitionMap() {
+  public
+  Map<CubeFactTable, Map<UpdatePeriod, List<String>>> getFactPartitionMap() {
     return factPartitionMap;
   }
 
@@ -664,7 +707,8 @@ public class CubeQueryContext {
     return queryFormat.toString();
   }
 
-  private Object[] getQueryTreeStrings(String factStorageTable) throws SemanticException {
+  private Object[] getQueryTreeStrings(String factStorageTable)
+      throws SemanticException {
     List<String> qstrs = new ArrayList<String>();
     qstrs.add(getSelectTree());
     String whereString = getWhereTree(factStorageTable);
@@ -691,42 +735,49 @@ public class CubeQueryContext {
     String fromString = null;
     if (joinTree == null) {
       if (cube != null) {
-        fromString = storageTableToQuery.get(cube) + " " + getAliasForTabName(cube.getName());
+        fromString = storageTableToQuery.get(cube) + " " + getAliasForTabName(
+            cube.getName());
       } else {
         CubeDimensionTable dim = dimensions.iterator().next();
-        fromString = storageTableToQuery.get(dim) + " " + getAliasForTabName(dim.getName());
+        fromString = storageTableToQuery.get(dim) + " " + getAliasForTabName(
+            dim.getName());
       }
     } else {
       StringBuilder builder = new StringBuilder();
-      /*printJoinTree(qb.getQbJoinTree(), builder);
-      System.out.println(builder.toString());
-      builder = new StringBuilder();*/
+      /*
+       * printJoinTree(qb.getQbJoinTree(), builder);
+       * System.out.println(builder.toString());
+       * builder = new StringBuilder();
+       */
       getQLString(qb.getQbJoinTree(), builder);
       fromString = builder.toString();
     }
     return fromString;
   }
 
-  private void getQLString(QBJoinTree joinTree, StringBuilder builder) throws SemanticException {
-    if (joinTree.getBaseSrc()[0] == null){
+  private void getQLString(QBJoinTree joinTree, StringBuilder builder)
+      throws SemanticException {
+    if (joinTree.getBaseSrc()[0] == null) {
       if (joinTree.getJoinSrc() != null) {
         getQLString(joinTree.getJoinSrc(), builder);
       }
     } else { // (joinTree.getBaseSrc()[0] != null){
       String tblName = joinTree.getBaseSrc()[0].toLowerCase();
-      builder.append(storageTableToQuery.get(cubeTbls.get(tblName)) + " " + getAliasForTabName(tblName));
+      builder.append(storageTableToQuery.get(cubeTbls.get(tblName))
+          + " " + getAliasForTabName(tblName));
     }
     if (joinTree.getJoinCond() != null) {
       builder.append(getString(joinTree.getJoinCond()[0].getJoinType()));
       builder.append("JOIN ");
     }
-    if (joinTree.getBaseSrc()[1] == null){
+    if (joinTree.getBaseSrc()[1] == null) {
       if (joinTree.getJoinSrc() != null) {
         getQLString(joinTree.getJoinSrc(), builder);
       }
     } else { // (joinTree.getBaseSrc()[1] != null){
       String tblName = joinTree.getBaseSrc()[1].toLowerCase();
-      builder.append(storageTableToQuery.get(cubeTbls.get(tblName)) + " " + getAliasForTabName(tblName));
+      builder.append(storageTableToQuery.get(cubeTbls.get(tblName))
+          + " " + getAliasForTabName(tblName));
     }
 
     String joinCond = joinConds.get(joinTree);
@@ -740,12 +791,18 @@ public class CubeQueryContext {
 
   private String getString(JoinType joinType) {
     switch (joinType) {
-    case INNER: return " INNER ";
-    case LEFTOUTER: return " LEFT OUTER ";
-    case RIGHTOUTER: return " RIGHT OUTER ";
-    case FULLOUTER: return " FULL OUTER ";
-    case UNIQUE: return " UNIQUE ";
-    case LEFTSEMI: return " LEFT SEMI ";
+    case INNER:
+      return " INNER ";
+    case LEFTOUTER:
+      return " LEFT OUTER ";
+    case RIGHTOUTER:
+      return " RIGHT OUTER ";
+    case FULLOUTER:
+      return " FULL OUTER ";
+    case UNIQUE:
+      return " UNIQUE ";
+    case LEFTSEMI:
+      return " LEFT SEMI ";
     }
     return null;
   }
@@ -764,12 +821,13 @@ public class CubeQueryContext {
     appendWhereClause(whereWithoutTimerange, whereClause);
   }
 
-  private void appendWhereClause(CubeDimensionTable dim, StringBuilder whereString,
+  private void appendWhereClause(CubeDimensionTable dim,
+      StringBuilder whereString,
       boolean hasMore) {
     String storageTable = dimStorageMap.get(dim).get(0);
     storageTableToQuery.put(dim, storageTable);
     String whereClause = storageTableToWhereClause.get(storageTable);
-    if ( whereClause != null) {
+    if (whereClause != null) {
       appendWhereClause(whereString, whereClause, hasMore);
     }
   }
@@ -827,7 +885,8 @@ public class CubeQueryContext {
     }
 
     if (fact != null) {
-      Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(fact);
+      Map<UpdatePeriod, List<String>> storageTableMap = factStorageMap.get(
+          fact);
       Map<UpdatePeriod, List<String>> partColMap = factPartitionMap.get(fact);
 
       StringBuilder query = new StringBuilder();
@@ -920,7 +979,7 @@ public class CubeQueryContext {
   public boolean isCubeMeasure(String col) {
     // Take care of brackets added around col names in HQLParsrer.getString
     if (col.startsWith("(") && col.endsWith(")") && col.length() > 2) {
-      col = col.substring(1, col.length() -1);
+      col = col.substring(1, col.length() - 1);
     }
 
     String[] split = StringUtils.split(col, ".");
@@ -948,8 +1007,9 @@ public class CubeQueryContext {
   }
 
   public boolean hasAggregates() {
-    return !aggregateExprs.isEmpty() || (cube !=null);
+    return !aggregateExprs.isEmpty() || (cube != null);
   }
+
   public String getAlias(String expr) {
     return exprToAlias.get(expr);
   }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeQueryRewriter.java Thu May  9 08:32:48 2013
@@ -23,14 +23,14 @@ public class CubeQueryRewriter {
   }
 
   private void setupPhase1Rewriters() {
-    //Resolve joins and generate base join tree
+    // Resolve joins and generate base join tree
     phase1Rewriters.add(new JoinResolver(conf));
-    //Resolve aggregations and generate base select tree
+    // Resolve aggregations and generate base select tree
     phase1Rewriters.add(new AggregateResolver(conf));
     phase1Rewriters.add(new GroupbyResolver(conf));
-    //Rewrite base trees (groupby, having, orderby, limit) using aliases
+    // Rewrite base trees (groupby, having, orderby, limit) using aliases
     phase1Rewriters.add(new AliasReplacer(conf));
-    //Resolve partition columns and table names
+    // Resolve partition columns and table names
     phase1Rewriters.add(new PartitionResolver(conf));
   }
 
@@ -38,16 +38,16 @@ public class CubeQueryRewriter {
     phase2Rewriters.add(new StorageTableResolver(conf));
     phase2Rewriters.add(new LeastPartitionResolver(conf));
     phase2Rewriters.add(new LeastDimensionResolver(conf));
-   // phase2Rewriters.add(new WhereConditionRewriter(conf));
+    // phase2Rewriters.add(new WhereConditionRewriter(conf));
   }
 
   public CubeQueryContext rewritePhase1(ASTNode astnode)
       throws SemanticException, ParseException {
-      CubeSemanticAnalyzer analyzer =  new CubeSemanticAnalyzer(
-          new HiveConf(conf, HiveConf.class));
-      analyzer.analyzeInternal(astnode);
-      CubeQueryContext ctx = analyzer.getQueryContext();
-      rewrite(phase1Rewriters, ctx);
+    CubeSemanticAnalyzer analyzer = new CubeSemanticAnalyzer(
+        new HiveConf(conf, HiveConf.class));
+    analyzer.analyzeInternal(astnode);
+    CubeQueryContext ctx = analyzer.getQueryContext();
+    rewrite(phase1Rewriters, ctx);
     return ctx;
   }
 
@@ -65,9 +65,4 @@ public class CubeQueryRewriter {
       rewriter.rewriteContext(ctx);
     }
   }
-
-  public static void main(String[] args) throws SemanticException, ParseException {
-   // CubeQueryRewriter writer = new CubeQueryRewriter(new Configuration());
-   // writer.rewritePhase1("select * from cube");
-  }
 }

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/CubeSemanticAnalyzer.java Thu May  9 08:32:48 2013
@@ -12,7 +12,8 @@ import org.apache.hadoop.hive.ql.parse.S
 
 public class CubeSemanticAnalyzer extends SemanticAnalyzer {
   private final HiveConf conf;
-  private final List<ValidationRule> validationRules = new ArrayList<ValidationRule>();
+  private final List<ValidationRule> validationRules =
+      new ArrayList<ValidationRule>();
   private CubeQueryContext cubeQl;
 
   public CubeSemanticAnalyzer(HiveConf conf) throws SemanticException {
@@ -38,23 +39,23 @@ public class CubeSemanticAnalyzer extend
       throw new SemanticException("Create table/view is not allowed");
     }
 
-    //analyzing from the ASTNode.
+    // analyzing from the ASTNode.
     if (!doPhase1(ast, qb, initPhase1Ctx())) {
       // if phase1Result false return
       return;
     }
     cubeQl = new CubeQueryContext(ast, qb, conf);
-    //cubeQl.init();
-    //validate();
+    // cubeQl.init();
+    // validate();
 
     // TODO Move this to a validation Rule
-    //QBParseInfo qbp = qb.getParseInfo();
-    //TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
-    //if (ks.size() > 1) {
-    //  throw new SemanticException("nested/sub queries not allowed yet");
-    //}
-    //Operator sinkOp = genPlan(qb);
-    //System.out.println(sinkOp.toString());
+    // QBParseInfo qbp = qb.getParseInfo();
+    // TreeSet<String> ks = new TreeSet<String>(qbp.getClauseNames());
+    // if (ks.size() > 1) {
+    // throw new SemanticException("nested/sub queries not allowed yet");
+    // }
+    // Operator sinkOp = genPlan(qb);
+    // System.out.println(sinkOp.toString());
   }
 
   @Override

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/DateUtil.java Thu May  9 08:32:48 2013
@@ -12,8 +12,6 @@ import org.apache.hadoop.hive.ql.cube.me
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.log4j.Logger;
 
-
-
 public class DateUtil {
   public static final Logger LOG = Logger.getLogger(DateUtil.class);
 
@@ -21,10 +19,11 @@ public class DateUtil {
    * NOW -> new java.util.Date()
    * NOW-7DAY -> a date one week earlier
    * NOW (+-) <NUM>UNIT
-   * or Hardcoded dates in  DD-MM-YYYY hh:mm:ss,sss
+   * or Hardcoded dates in DD-MM-YYYY hh:mm:ss,sss
    */
   public static final String RELATIVE = "(now){1}";
-  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE, Pattern.CASE_INSENSITIVE);
+  public static final Pattern P_RELATIVE = Pattern.compile(RELATIVE,
+      Pattern.CASE_INSENSITIVE);
 
   public static final String WSPACE = "\\s+";
   public static final Pattern P_WSPACE = Pattern.compile(WSPACE);
@@ -36,17 +35,19 @@ public class DateUtil {
   public static final Pattern P_QUANTITY = Pattern.compile(QUANTITY);
 
   public static final String UNIT = "year|month|week|day|hour|minute|second";
-  public static final Pattern P_UNIT = Pattern.compile(UNIT, Pattern.CASE_INSENSITIVE);
+  public static final Pattern P_UNIT = Pattern.compile(UNIT,
+      Pattern.CASE_INSENSITIVE);
 
 
   public static final String RELDATE_VALIDATOR_STR = RELATIVE
       + "(" + WSPACE + ")?"
-      + "((" + SIGNAGE +")"
+      + "((" + SIGNAGE + ")"
       + "(" + WSPACE + ")?"
       + "(" + QUANTITY + ")(" + UNIT + ")){0,1}"
-      +"(s?)";
+      + "(s?)";
 
-  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
+  public static final Pattern RELDATE_VALIDATOR = Pattern.compile(
+      RELDATE_VALIDATOR_STR, Pattern.CASE_INSENSITIVE);
 
   public static String YEAR_FMT = "[0-9]{4}";
   public static String MONTH_FMT = YEAR_FMT + "-[0-9]{2}";
@@ -55,7 +56,8 @@ public class DateUtil {
   public static String MINUTE_FMT = HOUR_FMT + ":[0-9]{2}";
   public static String SECOND_FMT = MINUTE_FMT + ":[0-9]{2}";
   public static final String ABSDATE_FMT = "yyyy-MM-dd HH:mm:ss,SSS";
-  public static final SimpleDateFormat ABSDATE_PARSER = new SimpleDateFormat(ABSDATE_FMT);
+  public static final SimpleDateFormat ABSDATE_PARSER =
+      new SimpleDateFormat(ABSDATE_FMT);
 
   public static String formatDate(Date dt) {
     return ABSDATE_PARSER.format(dt);
@@ -97,7 +99,8 @@ public class DateUtil {
     }
   }
 
-  private static Date resolveRelativeDate(String str, Date now) throws HiveException {
+  private static Date resolveRelativeDate(String str, Date now)
+      throws HiveException {
     if (!(str == null || str.isEmpty())) {
       throw new HiveException("date value cannot be null or empty:" + str);
     }
@@ -111,7 +114,7 @@ public class DateUtil {
     Matcher qtyMatcher = P_QUANTITY.matcher(raw);
     int qty = 1;
     if (qtyMatcher.find() && true) {
-      qty =  Integer.parseInt(qtyMatcher.group());
+      qty = Integer.parseInt(qtyMatcher.group());
     }
 
     Matcher signageMatcher = P_SIGNAGE.matcher(raw);
@@ -143,7 +146,7 @@ public class DateUtil {
       } else if ("second".equals(unit)) {
         calendar.add(Calendar.SECOND, qty);
       } else {
-        throw new HiveException("invalid time unit: "+ unit);
+        throw new HiveException("invalid time unit: " + unit);
       }
     }
 
@@ -155,37 +158,37 @@ public class DateUtil {
     cal.setTime(fromDate);
     boolean hasFraction = false;
     switch (interval) {
-    case YEARLY :
+    case YEARLY:
       if (cal.get(Calendar.MONTH) != 1) {
         hasFraction = true;
         break;
       }
-    case MONTHLY :
+    case MONTHLY:
       if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
         hasFraction = true;
         break;
       }
-    case WEEKLY :
+    case WEEKLY:
       if (cal.get(Calendar.DAY_OF_WEEK) != 1) {
         hasFraction = true;
         break;
       }
-    case DAILY :
+    case DAILY:
       if (cal.get(Calendar.HOUR_OF_DAY) != 0) {
         hasFraction = true;
         break;
       }
-    case HOURLY :
+    case HOURLY:
       if (cal.get(Calendar.MINUTE) != 0) {
         hasFraction = true;
         break;
       }
-    case MINUTELY :
+    case MINUTELY:
       if (cal.get(Calendar.SECOND) != 0) {
         hasFraction = true;
         break;
       }
-    case SECONDLY :
+    case SECONDLY:
       if (cal.get(Calendar.MILLISECOND) != 0) {
         hasFraction = true;
         break;
@@ -204,19 +207,19 @@ public class DateUtil {
     Calendar cal = Calendar.getInstance();
     cal.setTime(toDate);
     switch (interval) {
-    case YEARLY :
+    case YEARLY:
       cal.set(Calendar.MONTH, 1);
-    case MONTHLY :
+    case MONTHLY:
       cal.set(Calendar.DAY_OF_MONTH, 1);
-    case DAILY :
+    case DAILY:
       cal.set(Calendar.HOUR_OF_DAY, 0);
-    case HOURLY :
+    case HOURLY:
       cal.set(Calendar.MINUTE, 0);
-    case MINUTELY :
+    case MINUTELY:
       cal.set(Calendar.SECOND, 0);
-    case SECONDLY :
+    case SECONDLY:
       break;
-    case WEEKLY :
+    case WEEKLY:
       cal.set(Calendar.DAY_OF_WEEK, 1);
       cal.set(Calendar.HOUR_OF_DAY, 0);
       cal.set(Calendar.MINUTE, 0);

Modified: hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java
URL: http://svn.apache.org/viewvc/hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java?rev=1480557&r1=1480556&r2=1480557&view=diff
==============================================================================
--- hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java (original)
+++ hive/branches/HIVE-4115/ql/src/java/org/apache/hadoop/hive/ql/cube/parse/GroupbyResolver.java Thu May  9 08:32:48 2013
@@ -35,7 +35,7 @@ public class GroupbyResolver implements 
       if (cubeql.hasAggregates()) {
         String alias = cubeql.getAlias(expr);
         if (alias != null) {
-          expr = expr.substring(0, (expr.length()-alias.length())).trim();
+          expr = expr.substring(0, (expr.length() - alias.length())).trim();
         }
         if (!groupByExprs.contains(expr)) {
           if (!cubeql.isAggregateExpr(expr)) {