You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by jc...@apache.org on 2019/07/17 16:34:31 UTC

[hive] branch master updated: HIVE-21977: Clean up DescTableOperation (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)

This is an automated email from the ASF dual-hosted git repository.

jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new fafa46a  HIVE-21977: Clean up DescTableOperation (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)
fafa46a is described below

commit fafa46a3cfeb4c95efcf7ea86dd227965163587b
Author: Miklos Gergely <mg...@hortonworks.com>
AuthorDate: Wed Jul 17 09:34:00 2019 -0700

    HIVE-21977: Clean up DescTableOperation (Miklos Gergely, reviewed by Jesus Camacho Rodriguez)
    
    Close apache/hive#720
---
 .../hive/ql/ddl/table/info/DescTableDesc.java      |  30 +-
 .../hive/ql/ddl/table/info/DescTableOperation.java | 361 +++++++++++----------
 .../hadoop/hive/ql/metadata/CheckConstraint.java   |   4 +
 .../hadoop/hive/ql/metadata/DefaultConstraint.java |   4 +
 .../hadoop/hive/ql/metadata/ForeignKeyInfo.java    |   4 +
 .../hadoop/hive/ql/metadata/NotNullConstraint.java |   3 +
 .../hadoop/hive/ql/metadata/PartitionIterable.java |  16 +-
 .../hadoop/hive/ql/metadata/PrimaryKeyInfo.java    |   3 +
 .../org/apache/hadoop/hive/ql/metadata/Table.java  |  29 +-
 .../hadoop/hive/ql/metadata/UniqueConstraint.java  |   4 +
 .../metadata/formatting/JsonMetaDataFormatter.java |  40 +--
 .../metadata/formatting/MetaDataFormatUtils.java   |  39 +--
 .../ql/metadata/formatting/MetaDataFormatter.java  |  56 +---
 .../metadata/formatting/TextMetaDataFormatter.java |  78 +++--
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |   6 +-
 .../results/clientpositive/alterColumnStats.q.out  |  28 +-
 .../clientpositive/alter_table_column_stats.q.out  | 140 ++------
 .../clientpositive/autoColumnStats_10.q.out        |  84 +----
 ...mn_names_with_leading_and_trailing_spaces.q.out |  14 +-
 .../results/clientpositive/compustat_avro.q.out    |  14 +-
 .../results/clientpositive/describe_syntax.q.out   |  42 +--
 .../clientpositive/display_colstats_tbllvl.q.out   |  56 +---
 ql/src/test/results/clientpositive/inputddl6.q.out |   1 +
 .../clientpositive/llap/autoColumnStats_10.q.out   |  84 +----
 ...mn_names_with_leading_and_trailing_spaces.q.out |  14 +-
 .../temp_table_display_colstats_tbllvl.q.out       |  56 +---
 26 files changed, 431 insertions(+), 779 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
index b4b726a..3f0c699 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableDesc.java
@@ -43,18 +43,18 @@ public class DescTableDesc implements DDLDesc, Serializable {
 
   private final String resFile;
   private final String tableName;
-  private final Map<String, String> partSpec;
-  private final String colPath;
-  private final boolean isExt;
+  private final Map<String, String> partitionSpec;
+  private final String columnPath;
+  private final boolean isExtended;
   private final boolean isFormatted;
 
-  public DescTableDesc(Path resFile, String tableName, Map<String, String> partSpec, String colPath, boolean isExt,
-      boolean isFormatted) {
+  public DescTableDesc(Path resFile, String tableName, Map<String, String> partitionSpec, String columnPath,
+      boolean isExtended, boolean isFormatted) {
     this.resFile = resFile.toString();
     this.tableName = tableName;
-    this.partSpec = partSpec;
-    this.colPath = colPath;
-    this.isExt = isExt;
+    this.partitionSpec = partitionSpec;
+    this.columnPath = columnPath;
+    this.isExtended = isExtended;
     this.isFormatted = isFormatted;
   }
 
@@ -69,18 +69,22 @@ public class DescTableDesc implements DDLDesc, Serializable {
   }
 
   @Explain(displayName = "partition", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public Map<String, String> getPartSpec() {
-    return partSpec;
+  public Map<String, String> getPartitionSpec() {
+    return partitionSpec;
   }
 
   public String getColumnPath() {
-    return colPath;
+    return columnPath;
   }
 
-  public boolean isExt() {
-    return isExt;
+  @Explain(displayName = "extended", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isExtended() {
+    return isExtended;
   }
 
+  @Explain(displayName = "formatted", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public boolean isFormatted() {
     return isFormatted;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
index 2c6e35f..d48ae04 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/table/info/DescTableOperation.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hive.ql.ddl.table.info;
 import java.io.DataOutputStream;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -40,24 +39,21 @@ import org.apache.hadoop.hive.metastore.api.AggrStats;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsData;
 import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.MetaException;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
 import org.apache.hadoop.hive.ql.ddl.DDLUtils;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.lockmgr.LockException;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.ddl.DDLOperation;
-import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
-import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
-import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.PartitionIterable;
-import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
-import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.plan.ColStatistics;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
@@ -66,7 +62,8 @@ import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.IOUtils;
+
+import avro.shaded.com.google.common.collect.Lists;
 
/**
 * Operation process of describing a table.
 
   @Override
   public int execute() throws Exception {
-    String colPath = desc.getColumnPath();
-    String tableName = desc.getTableName();
+    Table table = getTable();
+    Partition part = getPartition(table);
+
+    try (DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context)) {
+      LOG.debug("DDLTask: got data for {}", desc.getTableName());
+
+      List<FieldSchema> cols = new ArrayList<>();
+      List<ColumnStatisticsObj> colStats = new ArrayList<>();
+
+      Deserializer deserializer = getDeserializer(table);
+
+      if (desc.getColumnPath() == null) {
+        getColumnsNoColumnPath(table, part, cols);
+      } else {
+        if (desc.isFormatted()) {
+          getColumnDataColPathSpecified(table, part, cols, colStats, deserializer);
+        } else {
+          cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
+        }
+      }
+      fixDecimalColumnTypeName(cols);
+
+      setConstraintsAndStorageHandlerInfo(table);
+      handleMaterializedView(table);
+      // In case the query is served by HiveServer2, don't pad it with spaces,
+      // as HiveServer2 output is consumed by JDBC/ODBC clients.
+      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
+      context.getFormatter().describeTable(outStream, desc.getColumnPath(), desc.getTableName(), table, part, cols,
+          desc.isFormatted(), desc.isExtended(), isOutputPadded, colStats);
+
+      LOG.debug("DDLTask: written data for {}", desc.getTableName());
 
-    // describe the table - populate the output stream
-    Table tbl = context.getDb().getTable(tableName, false);
-    if (tbl == null) {
-      throw new HiveException(ErrorMsg.INVALID_TABLE, tableName);
+    } catch (SQLException e) {
+      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, desc.getTableName());
+    }
+
+    return 0;
+  }
+
+  private Table getTable() throws HiveException {
+    Table table = context.getDb().getTable(desc.getTableName(), false);
+    if (table == null) {
+      throw new HiveException(ErrorMsg.INVALID_TABLE, desc.getTableName());
     }
+    return table;
+  }
+
+  private Partition getPartition(Table table) throws HiveException {
     Partition part = null;
-    if (desc.getPartSpec() != null) {
-      part = context.getDb().getPartition(tbl, desc.getPartSpec(), false);
+    if (desc.getPartitionSpec() != null) {
+      part = context.getDb().getPartition(table, desc.getPartitionSpec(), false);
       if (part == null) {
         throw new HiveException(ErrorMsg.INVALID_PARTITION,
-            StringUtils.join(desc.getPartSpec().keySet(), ','), tableName);
+            StringUtils.join(desc.getPartitionSpec().keySet(), ','), desc.getTableName());
       }
-      tbl = part.getTable();
+    }
+    return part;
+  }
+
+  private Deserializer getDeserializer(Table table) throws SQLException {
+    Deserializer deserializer = table.getDeserializer(true);
+    if (deserializer instanceof AbstractSerDe) {
+      String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors();
+      if (StringUtils.isNotEmpty(errorMsgs)) {
+        throw new SQLException(errorMsgs);
+      }
+    }
+    return deserializer;
+  }
+
+  private void getColumnsNoColumnPath(Table table, Partition partition, List<FieldSchema> cols) throws HiveException {
+    cols.addAll(partition == null || table.getTableType() == TableType.VIRTUAL_VIEW ?
+        table.getCols() : partition.getCols());
+    if (!desc.isFormatted()) {
+      cols.addAll(table.getPartCols());
     }
 
-    DataOutputStream outStream = DDLUtils.getOutputStream(new Path(desc.getResFile()), context);
-    try {
-      LOG.debug("DDLTask: got data for {}", tableName);
+    if (table.isPartitioned() && partition == null) {
+      // No partition specified for partitioned table, lets fetch all.
+      Map<String, String> tblProps = table.getParameters() == null ?
+          new HashMap<String, String>() : table.getParameters();
 
-      List<FieldSchema> cols = null;
-      List<ColumnStatisticsObj> colStats = null;
+      Map<String, Long> valueMap = new HashMap<>();
+      Map<String, Boolean> stateMap = new HashMap<>();
+      for (String stat : StatsSetupConst.SUPPORTED_STATS) {
+        valueMap.put(stat, 0L);
+        stateMap.put(stat, true);
+      }
 
-      Deserializer deserializer = tbl.getDeserializer(true);
-      if (deserializer instanceof AbstractSerDe) {
-        String errorMsgs = ((AbstractSerDe) deserializer).getConfigurationErrors();
-        if (errorMsgs != null && !errorMsgs.isEmpty()) {
-          throw new SQLException(errorMsgs);
+      PartitionIterable partitions = new PartitionIterable(context.getDb(), table, null,
+          MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX));
+      int numParts = 0;
+      for (Partition p : partitions) {
+        Map<String, String> partitionProps = p.getParameters();
+        Boolean state = StatsSetupConst.areBasicStatsUptoDate(partitionProps);
+        for (String stat : StatsSetupConst.SUPPORTED_STATS) {
+          stateMap.put(stat, stateMap.get(stat) && state);
+          if (partitionProps != null && partitionProps.get(stat) != null) {
+            valueMap.put(stat, valueMap.get(stat) + Long.parseLong(partitionProps.get(stat)));
+          }
         }
+        numParts++;
       }
+      tblProps.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(numParts));
 
-      if (colPath.equals(tableName)) {
-        cols = (part == null || tbl.getTableType() == TableType.VIRTUAL_VIEW) ?
-            tbl.getCols() : part.getCols();
+      for (String stat : StatsSetupConst.SUPPORTED_STATS) {
+        StatsSetupConst.setBasicStatsState(tblProps, Boolean.toString(stateMap.get(stat)));
+        tblProps.put(stat, valueMap.get(stat).toString());
+      }
+      table.setParameters(tblProps);
+    }
+  }
 
-        if (!desc.isFormatted()) {
-          cols.addAll(tbl.getPartCols());
-        }
+  private void getColumnDataColPathSpecified(Table table, Partition part, List<FieldSchema> cols,
+      List<ColumnStatisticsObj> colStats, Deserializer deserializer)
+      throws SemanticException, HiveException, MetaException {
+    // when column name is specified in describe table DDL, colPath will be table_name.column_name
+    String colName = desc.getColumnPath().split("\\.")[1];
+    List<String> colNames = Lists.newArrayList(colName.toLowerCase());
 
-        if (tbl.isPartitioned() && part == null) {
-          // No partitioned specified for partitioned table, lets fetch all.
-          Map<String, String> tblProps = tbl.getParameters() == null ?
-              new HashMap<String, String>() : tbl.getParameters();
-          Map<String, Long> valueMap = new HashMap<>();
-          Map<String, Boolean> stateMap = new HashMap<>();
-          for (String stat : StatsSetupConst.SUPPORTED_STATS) {
-            valueMap.put(stat, 0L);
-            stateMap.put(stat, true);
-          }
-          PartitionIterable parts = new PartitionIterable(context.getDb(), tbl, null,
-              context.getConf().getIntVar(HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX));
-          int numParts = 0;
-          for (Partition partition : parts) {
-            Map<String, String> props = partition.getParameters();
-            Boolean state = StatsSetupConst.areBasicStatsUptoDate(props);
-            for (String stat : StatsSetupConst.SUPPORTED_STATS) {
-              stateMap.put(stat, stateMap.get(stat) && state);
-              if (props != null && props.get(stat) != null) {
-                valueMap.put(stat, valueMap.get(stat) + Long.parseLong(props.get(stat)));
-              }
-            }
-            numParts++;
-          }
-          for (String stat : StatsSetupConst.SUPPORTED_STATS) {
-            StatsSetupConst.setBasicStatsState(tblProps, Boolean.toString(stateMap.get(stat)));
-            tblProps.put(stat, valueMap.get(stat).toString());
-          }
-          tblProps.put(StatsSetupConst.NUM_PARTITIONS, Integer.toString(numParts));
-          tbl.setParameters(tblProps);
-        }
-      } else {
-        if (desc.isFormatted()) {
-          // when column name is specified in describe table DDL, colPath will
-          // will be table_name.column_name
-          String colName = colPath.split("\\.")[1];
-          String[] dbTab = Utilities.getDbTableName(tableName);
-          List<String> colNames = new ArrayList<String>();
-          colNames.add(colName.toLowerCase());
-          if (null == part) {
-            if (tbl.isPartitioned()) {
-              Map<String, String> tblProps = tbl.getParameters() == null ?
-                  new HashMap<String, String>() : tbl.getParameters();
-              if (tbl.isPartitionKey(colNames.get(0))) {
-                FieldSchema partCol = tbl.getPartColByName(colNames.get(0));
-                cols = Collections.singletonList(partCol);
-                PartitionIterable parts = new PartitionIterable(context.getDb(), tbl, null,
-                    context.getConf().getIntVar(HiveConf.ConfVars.METASTORE_BATCH_RETRIEVE_MAX));
-                ColumnInfo ci = new ColumnInfo(partCol.getName(),
-                    TypeInfoUtils.getTypeInfoFromTypeString(partCol.getType()), null, false);
-                ColStatistics cs = StatsUtils.getColStatsForPartCol(ci, parts, context.getConf());
-                ColumnStatisticsData data = new ColumnStatisticsData();
-                ColStatistics.Range r = cs.getRange();
-                StatObjectConverter.fillColumnStatisticsData(partCol.getType(), data, r == null ? null : r.minValue,
-                    r == null ? null : r.maxValue, r == null ? null : r.minValue, r == null ? null : r.maxValue,
-                    r == null ? null : r.minValue.toString(), r == null ? null : r.maxValue.toString(),
-                    cs.getNumNulls(), cs.getCountDistint(), null, cs.getAvgColLen(), cs.getAvgColLen(),
-                    cs.getNumTrues(), cs.getNumFalses());
-                ColumnStatisticsObj cso = new ColumnStatisticsObj(partCol.getName(), partCol.getType(), data);
-                colStats = Collections.singletonList(cso);
-                StatsSetupConst.setColumnStatsState(tblProps, colNames);
-              } else {
-                cols = Hive.getFieldsFromDeserializer(colPath, deserializer);
-                List<String> parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(),
-                    (short) -1);
-                AggrStats aggrStats = context.getDb().getAggrColStatsFor(
-                    dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false);
-                colStats = aggrStats.getColStats();
-                if (parts.size() == aggrStats.getPartsFound()) {
-                  StatsSetupConst.setColumnStatsState(tblProps, colNames);
-                } else {
-                  StatsSetupConst.removeColumnStatsState(tblProps, colNames);
-                }
-              }
-              tbl.setParameters(tblProps);
-            } else {
-              cols = Hive.getFieldsFromDeserializer(colPath, deserializer);
-              colStats = context.getDb().getTableColumnStatistics(
-                  dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, false);
-            }
-          } else {
-            List<String> partitions = new ArrayList<String>();
-            partitions.add(part.getName());
-            cols = Hive.getFieldsFromDeserializer(colPath, deserializer);
-            colStats = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(),
-                dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName());
-          }
+    String[] dbTab = Utilities.getDbTableName(desc.getTableName());
+    if (null == part) {
+      if (table.isPartitioned()) {
+        Map<String, String> tableProps = table.getParameters() == null ?
+            new HashMap<String, String>() : table.getParameters();
+        if (table.isPartitionKey(colNames.get(0))) {
+          getColumnDataForPartitionKeyColumn(table, cols, colStats, colNames, tableProps);
         } else {
-          cols = Hive.getFieldsFromDeserializer(colPath, deserializer);
+          getColumnsForNotPartitionKeyColumn(cols, colStats, deserializer, colNames, dbTab, tableProps);
         }
+        table.setParameters(tableProps);
+      } else {
+        cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
+        colStats.addAll(
+            context.getDb().getTableColumnStatistics(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, false));
       }
-      PrimaryKeyInfo pkInfo = null;
-      ForeignKeyInfo fkInfo = null;
-      UniqueConstraint ukInfo = null;
-      NotNullConstraint nnInfo = null;
-      DefaultConstraint dInfo = null;
-      CheckConstraint cInfo = null;
-      StorageHandlerInfo storageHandlerInfo = null;
-      if (desc.isExt() || desc.isFormatted()) {
-        pkInfo = context.getDb().getPrimaryKeys(tbl.getDbName(), tbl.getTableName());
-        fkInfo = context.getDb().getForeignKeys(tbl.getDbName(), tbl.getTableName());
-        ukInfo = context.getDb().getUniqueConstraints(tbl.getDbName(), tbl.getTableName());
-        nnInfo = context.getDb().getNotNullConstraints(tbl.getDbName(), tbl.getTableName());
-        dInfo = context.getDb().getDefaultConstraints(tbl.getDbName(), tbl.getTableName());
-        cInfo = context.getDb().getCheckConstraints(tbl.getDbName(), tbl.getTableName());
-        storageHandlerInfo = context.getDb().getStorageHandlerInfo(tbl);
-      }
-      fixDecimalColumnTypeName(cols);
-      // Information for materialized views
-      if (tbl.isMaterializedView()) {
-        final String validTxnsList = context.getDb().getConf().get(ValidTxnList.VALID_TXNS_KEY);
-        if (validTxnsList != null) {
-          List<String> tablesUsed = new ArrayList<>(tbl.getCreationMetadata().getTablesUsed());
-          ValidTxnWriteIdList currentTxnWriteIds =
-              SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList);
-          long defaultTimeWindow = HiveConf.getTimeVar(context.getDb().getConf(),
-              HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW, TimeUnit.MILLISECONDS);
-          tbl.setOutdatedForRewriting(Hive.isOutdatedMaterializedView(tbl,
-              currentTxnWriteIds, defaultTimeWindow, tablesUsed, false));
-        }
+    } else {
+      List<String> partitions = new ArrayList<String>();
+      partitions.add(part.getName());
+      cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
+      List<ColumnStatisticsObj> partitionColStat = context.getDb().getPartitionColumnStatistics(dbTab[0].toLowerCase(),
+          dbTab[1].toLowerCase(), partitions, colNames, false).get(part.getName());
+      if (partitionColStat != null) {
+        colStats.addAll(partitionColStat);
       }
-      // In case the query is served by HiveServer2, don't pad it with spaces,
-      // as HiveServer2 output is consumed by JDBC/ODBC clients.
-      boolean isOutputPadded = !SessionState.get().isHiveServerQuery();
-      context.getFormatter().describeTable(outStream, colPath, tableName, tbl, part,
-          cols, desc.isFormatted(), desc.isExt(), isOutputPadded,
-          colStats, pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo,
-          storageHandlerInfo);
+    }
+  }
 
-      LOG.debug("DDLTask: written data for {}", tableName);
+  private void getColumnDataForPartitionKeyColumn(Table table, List<FieldSchema> cols,
+      List<ColumnStatisticsObj> colStats, List<String> colNames, Map<String, String> tableProps)
+      throws HiveException, MetaException {
+    FieldSchema partCol = table.getPartColByName(colNames.get(0));
+    cols.add(partCol);
+    PartitionIterable parts = new PartitionIterable(context.getDb(), table, null,
+        MetastoreConf.getIntVar(context.getConf(), MetastoreConf.ConfVars.BATCH_RETRIEVE_MAX));
+    ColumnInfo ci = new ColumnInfo(partCol.getName(),
+        TypeInfoUtils.getTypeInfoFromTypeString(partCol.getType()), null, false);
+    ColStatistics cs = StatsUtils.getColStatsForPartCol(ci, parts, context.getConf());
+    ColumnStatisticsData data = new ColumnStatisticsData();
+    ColStatistics.Range r = cs.getRange();
+    StatObjectConverter.fillColumnStatisticsData(partCol.getType(), data, r == null ? null : r.minValue,
+        r == null ? null : r.maxValue, r == null ? null : r.minValue, r == null ? null : r.maxValue,
+        r == null ? null : r.minValue.toString(), r == null ? null : r.maxValue.toString(),
+        cs.getNumNulls(), cs.getCountDistint(), null, cs.getAvgColLen(), cs.getAvgColLen(),
+        cs.getNumTrues(), cs.getNumFalses());
+    ColumnStatisticsObj cso = new ColumnStatisticsObj(partCol.getName(), partCol.getType(), data);
+    colStats.add(cso);
+    StatsSetupConst.setColumnStatsState(tableProps, colNames);
+  }
 
-    } catch (SQLException e) {
-      throw new HiveException(e, ErrorMsg.GENERIC_ERROR, tableName);
-    } finally {
-      IOUtils.closeStream(outStream);
+  private void getColumnsForNotPartitionKeyColumn(List<FieldSchema> cols, List<ColumnStatisticsObj> colStats,
+      Deserializer deserializer, List<String> colNames, String[] dbTab, Map<String, String> tableProps)
+      throws HiveException {
+    cols.addAll(Hive.getFieldsFromDeserializer(desc.getColumnPath(), deserializer));
+    List<String> parts = context.getDb().getPartitionNames(dbTab[0].toLowerCase(), dbTab[1].toLowerCase(),
+        (short) -1);
+    AggrStats aggrStats = context.getDb().getAggrColStatsFor(
+        dbTab[0].toLowerCase(), dbTab[1].toLowerCase(), colNames, parts, false);
+    colStats.addAll(aggrStats.getColStats());
+    if (parts.size() == aggrStats.getPartsFound()) {
+      StatsSetupConst.setColumnStatsState(tableProps, colNames);
+    } else {
+      StatsSetupConst.removeColumnStatsState(tableProps, colNames);
     }
-
-    return 0;
   }
 
   /**
@@ -274,4 +276,31 @@ public class DescTableOperation extends DDLOperation<DescTableDesc> {
       }
     }
   }
+
+  private void setConstraintsAndStorageHandlerInfo(Table table) throws HiveException {
+    if (desc.isExtended() || desc.isFormatted()) {
+      table.setPrimaryKeyInfo(context.getDb().getPrimaryKeys(table.getDbName(), table.getTableName()));
+      table.setForeignKeyInfo(context.getDb().getForeignKeys(table.getDbName(), table.getTableName()));
+      table.setUniqueKeyInfo(context.getDb().getUniqueConstraints(table.getDbName(), table.getTableName()));
+      table.setNotNullConstraint(context.getDb().getNotNullConstraints(table.getDbName(), table.getTableName()));
+      table.setDefaultConstraint(context.getDb().getDefaultConstraints(table.getDbName(), table.getTableName()));
+      table.setCheckConstraint(context.getDb().getCheckConstraints(table.getDbName(), table.getTableName()));
+      table.setStorageHandlerInfo(context.getDb().getStorageHandlerInfo(table));
+    }
+  }
+
+  private void handleMaterializedView(Table table) throws LockException {
+    if (table.isMaterializedView()) {
+      String validTxnsList = context.getDb().getConf().get(ValidTxnList.VALID_TXNS_KEY);
+      if (validTxnsList != null) {
+        List<String> tablesUsed = new ArrayList<>(table.getCreationMetadata().getTablesUsed());
+        ValidTxnWriteIdList currentTxnWriteIds =
+            SessionState.get().getTxnMgr().getValidWriteIds(tablesUsed, validTxnsList);
+        long defaultTimeWindow = HiveConf.getTimeVar(context.getDb().getConf(),
+            HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW, TimeUnit.MILLISECONDS);
+        table.setOutdatedForRewriting(Hive.isOutdatedMaterializedView(table,
+            currentTxnWriteIds, defaultTimeWindow, tablesUsed, false));
+      }
+    }
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
index af45788..5eb986e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/CheckConstraint.java
@@ -118,4 +118,8 @@ public class CheckConstraint implements Serializable {
     sb.append("]");
     return sb.toString();
   }
+
+  public static boolean isCheckConstraintNotEmpty(CheckConstraint info) {
+    return info != null && !info.getCheckConstraints().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
index 59df3da..c101f3d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultConstraint.java
@@ -118,4 +118,8 @@ public class DefaultConstraint implements Serializable {
     sb.append("]");
     return sb.toString();
   }
+
+  public static boolean isCheckConstraintNotEmpty(DefaultConstraint info) {
+    return info != null && !info.getDefaultConstraints().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
index 9ae14cd..f2c978a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/ForeignKeyInfo.java
@@ -133,4 +133,8 @@ public class ForeignKeyInfo implements Serializable {
     sb.append("]");
     return sb.toString();
   }
+
+  public static boolean isForeignKeyInfoNotEmpty(ForeignKeyInfo info) {
+    return info != null && !info.getForeignKeys().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
index ffd42f2..8b50b7c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/NotNullConstraint.java
@@ -83,4 +83,7 @@ public class NotNullConstraint implements Serializable {
     return sb.toString();
   }
 
+  public static boolean isNotNullConstraintNotEmpty(NotNullConstraint info) {
+    return info != null && !info.getNotNullConstraints().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java
index e635670..79c329d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PartitionIterable.java
@@ -92,11 +92,11 @@ public class PartitionIterable implements Iterable<Partition> {
       }
 
       private void getNextBatch() {
-        int batch_counter = 0;
+        int batchCounter = 0;
         List<String> nameBatch = new ArrayList<String>();
-        while (batch_counter < batch_size && partitionNamesIter.hasNext()){
+        while (batchCounter < batchSize && partitionNamesIter.hasNext()){
           nameBatch.add(partitionNamesIter.next());
-          batch_counter++;
+          batchCounter++;
         }
         try {
           batchIter = db.getPartitionsByNames(table, nameBatch, getColStats).iterator();
@@ -128,7 +128,7 @@ public class PartitionIterable implements Iterable<Partition> {
   private Table table = null;
   private Map<String, String> partialPartitionSpec = null;
   private List<String> partitionNames = null;
-  private int batch_size;
+  private int batchSize;
   private boolean getColStats = false;
 
   /**
@@ -146,8 +146,8 @@ public class PartitionIterable implements Iterable<Partition> {
    * a Hive object and a table object, and a partial partition spec.
    */
   public PartitionIterable(Hive db, Table table, Map<String, String> partialPartitionSpec,
-                           int batch_size) throws HiveException {
-    this(db, table, partialPartitionSpec, batch_size, false);
+                           int batchSize) throws HiveException {
+    this(db, table, partialPartitionSpec, batchSize, false);
   }
 
   /**
@@ -155,12 +155,12 @@ public class PartitionIterable implements Iterable<Partition> {
    * a Hive object and a table object, and a partial partition spec.
    */
   public PartitionIterable(Hive db, Table table, Map<String, String> partialPartitionSpec,
-                           int batch_size, boolean getColStats) throws HiveException {
+                           int batchSize, boolean getColStats) throws HiveException {
     this.currType = Type.LAZY_FETCH_PARTITIONS;
     this.db = db;
     this.table = table;
     this.partialPartitionSpec = partialPartitionSpec;
-    this.batch_size = batch_size;
+    this.batchSize = batchSize;
     this.getColStats = getColStats;
 
     if (this.partialPartitionSpec == null){
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
index c50bd7d..f9348c6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/PrimaryKeyInfo.java
@@ -104,4 +104,7 @@ public class PrimaryKeyInfo implements Serializable {
     return sb.toString();
   }
 
+  public static boolean isPrimaryKeyInfoNotEmpty(PrimaryKeyInfo info) {
+    return info != null && !info.getColNames().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index aba3035..33a4505 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -103,6 +103,7 @@ public class Table implements Serializable {
   private Path path;
 
   private transient HiveStorageHandler storageHandler;
+  private transient StorageHandlerInfo storageHandlerInfo;
 
   private transient TableSpec tableSpec;
 
@@ -117,6 +118,8 @@ public class Table implements Serializable {
   private transient ForeignKeyInfo fki;
   private transient UniqueConstraint uki;
   private transient NotNullConstraint nnc;
+  private transient DefaultConstraint dc;
+  private transient CheckConstraint cc;
 
   /**
    * Used only for serialization.
@@ -321,6 +324,14 @@ public class Table implements Serializable {
     return storageHandler;
   }
 
+  public StorageHandlerInfo getStorageHandlerInfo() {
+    return storageHandlerInfo;
+  }
+
+  public void setStorageHandlerInfo(StorageHandlerInfo storageHandlerInfo) {
+    this.storageHandlerInfo = storageHandlerInfo;
+  }
+
   final public Class<? extends InputFormat> getInputFormatClass() {
     if (inputFormatClass == null) {
       try {
@@ -1115,7 +1126,7 @@ public class Table implements Serializable {
     return outdatedForRewritingMaterializedView;
   }
 
-  /* These are only populated during optimization */
+  /* These are only populated during optimization and describing */
   public PrimaryKeyInfo getPrimaryKeyInfo() {
     return pki;
   }
@@ -1148,6 +1159,22 @@ public class Table implements Serializable {
     this.nnc = nnc;
   }
 
+  public DefaultConstraint getDefaultConstraint() {
+    return dc;
+  }
+
+  public void setDefaultConstraint(DefaultConstraint dc) {
+    this.dc = dc;
+  }
+
+  public CheckConstraint getCheckConstraint() {
+    return cc;
+  }
+
+  public void setCheckConstraint(CheckConstraint cc) {
+    this.cc = cc;
+  }
+
 
   public ColumnStatistics getColStats() {
     return tTable.isSetColStats() ? tTable.getColStats() : null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
index 24817a2..1fbe76d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/UniqueConstraint.java
@@ -108,4 +108,8 @@ public class UniqueConstraint implements Serializable {
     sb.append("]");
     return sb.toString();
   }
+
+  public static boolean isUniqueConstraintNotEmpty(UniqueConstraint info) {
+    return info != null && !info.getUniqueConstraints().isEmpty();
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
index f28d68f..78578db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/JsonMetaDataFormatter.java
@@ -33,7 +33,6 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang.StringUtils;
-import org.apache.hadoop.hive.ql.session.SessionState;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileStatus;
@@ -55,7 +54,6 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
-import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
 import org.codehaus.jackson.JsonGenerator;
@@ -186,13 +184,9 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
    * Describe table.
    */
   @Override
-  public void describeTable(DataOutputStream out, String colPath,
-      String tableName, Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats,
-      PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo,
-      UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo,
-      CheckConstraint cInfo, StorageHandlerInfo storageHandlerInfo) throws HiveException {
+  public void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part,
+      List<FieldSchema> cols, boolean isFormatted, boolean isExt, boolean isOutputPadded,
+      List<ColumnStatisticsObj> colStats) throws HiveException {
     MapBuilder builder = MapBuilder.create();
     builder.put("columns", makeColsUnformatted(cols));
 
@@ -203,26 +197,26 @@ public class JsonMetaDataFormatter implements MetaDataFormatter {
       else {
         builder.put("tableInfo", tbl.getTTable());
       }
-      if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
-        builder.put("primaryKeyInfo", pkInfo);
+      if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo())) {
+        builder.put("primaryKeyInfo", tbl.getPrimaryKeyInfo());
       }
-      if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
-        builder.put("foreignKeyInfo", fkInfo);
+      if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo())) {
+        builder.put("foreignKeyInfo", tbl.getForeignKeyInfo());
       }
-      if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) {
-        builder.put("uniqueConstraintInfo", ukInfo);
+      if (UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo())) {
+        builder.put("uniqueConstraintInfo", tbl.getUniqueKeyInfo());
       }
-      if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) {
-        builder.put("notNullConstraintInfo", nnInfo);
+      if (NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint())) {
+        builder.put("notNullConstraintInfo", tbl.getNotNullConstraint());
       }
-      if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
-        builder.put("defaultConstraintInfo", dInfo);
+      if (DefaultConstraint.isCheckConstraintNotEmpty(tbl.getDefaultConstraint())) {
+        builder.put("defaultConstraintInfo", tbl.getDefaultConstraint());
       }
-      if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) {
-        builder.put("checkConstraintInfo", cInfo);
+      if (CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) {
+        builder.put("checkConstraintInfo", tbl.getCheckConstraint());
       }
-      if(storageHandlerInfo != null) {
-        builder.put("storageHandlerInfo", storageHandlerInfo.toString());
+      if (tbl.getStorageHandlerInfo() != null) {
+        builder.put("storageHandlerInfo", tbl.getStorageHandlerInfo().toString());
       }
     }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
index 2bf7af7..a1ec52f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatUtils.java
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.hive.ql.metadata.formatting;
 
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.hive.common.StatsSetupConst;
 import org.apache.hadoop.hive.common.type.HiveDecimal;
@@ -42,7 +43,6 @@ import org.apache.hadoop.hive.metastore.api.WMPool;
 import org.apache.hadoop.hive.metastore.api.WMPoolTrigger;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMTrigger;
-import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc;
 import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
 import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
 import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
@@ -124,7 +124,7 @@ public final class MetaDataFormatUtils {
 
   static ColumnStatisticsObj getColumnStatisticsObject(String colName,
       String colType, List<ColumnStatisticsObj> colStats) {
-    if (colStats != null && !colStats.isEmpty()) {
+    if (CollectionUtils.isNotEmpty(colStats)) {
       for (ColumnStatisticsObj cso : colStats) {
         if (cso.getColName().equalsIgnoreCase(colName)
             && cso.getColType().equalsIgnoreCase(colType)) {
@@ -135,34 +135,33 @@ public final class MetaDataFormatUtils {
     return null;
   }
 
-  public static String getConstraintsInformation(PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo,
-          UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo) {
+  public static String getConstraintsInformation(Table table) {
     StringBuilder constraintsInfo = new StringBuilder(DEFAULT_STRINGBUILDER_SIZE);
 
     constraintsInfo.append(LINE_DELIM).append("# Constraints").append(LINE_DELIM);
-    if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
+    if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(table.getPrimaryKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Primary Key").append(LINE_DELIM);
-      getPrimaryKeyInformation(constraintsInfo, pkInfo);
+      getPrimaryKeyInformation(constraintsInfo, table.getPrimaryKeyInfo());
     }
-    if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
+    if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(table.getForeignKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Foreign Keys").append(LINE_DELIM);
-      getForeignKeysInformation(constraintsInfo, fkInfo);
+      getForeignKeysInformation(constraintsInfo, table.getForeignKeyInfo());
     }
-    if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) {
+    if (UniqueConstraint.isUniqueConstraintNotEmpty(table.getUniqueKeyInfo())) {
       constraintsInfo.append(LINE_DELIM).append("# Unique Constraints").append(LINE_DELIM);
-      getUniqueConstraintsInformation(constraintsInfo, ukInfo);
+      getUniqueConstraintsInformation(constraintsInfo, table.getUniqueKeyInfo());
     }
-    if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) {
+    if (NotNullConstraint.isNotNullConstraintNotEmpty(table.getNotNullConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Not Null Constraints").append(LINE_DELIM);
-      getNotNullConstraintsInformation(constraintsInfo, nnInfo);
+      getNotNullConstraintsInformation(constraintsInfo, table.getNotNullConstraint());
     }
-    if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
+    if (DefaultConstraint.isCheckConstraintNotEmpty(table.getDefaultConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Default Constraints").append(LINE_DELIM);
-      getDefaultConstraintsInformation(constraintsInfo, dInfo);
+      getDefaultConstraintsInformation(constraintsInfo, table.getDefaultConstraint());
     }
-    if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) {
+    if (CheckConstraint.isCheckConstraintNotEmpty(table.getCheckConstraint())) {
       constraintsInfo.append(LINE_DELIM).append("# Check Constraints").append(LINE_DELIM);
-      getCheckConstraintsInformation(constraintsInfo, cInfo);
+      getCheckConstraintsInformation(constraintsInfo, table.getCheckConstraint());
     }
     return constraintsInfo.toString();
   }
@@ -752,14 +751,6 @@ public final class MetaDataFormatUtils {
     }
   }
 
-  public static String[] getColumnsHeader(List<ColumnStatisticsObj> colStats) {
-    boolean showColStats = false;
-    if (colStats != null) {
-      showColStats = true;
-    }
-    return DescTableDesc.getSchema(showColStats).split("#")[0].split(",");
-  }
-
   public static MetaDataFormatter getFormatter(HiveConf conf) {
     if ("json".equals(conf.get(HiveConf.ConfVars.HIVE_DDL_OUTPUT_FORMAT.varname, "text"))) {
       return new JsonMetaDataFormatter();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
index b7e5ebe..fcce9ec 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java
@@ -31,17 +31,10 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
-import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
-import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
-import org.apache.hadoop.hive.ql.metadata.ForeignKeyInfo;
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.metadata.NotNullConstraint;
 import org.apache.hadoop.hive.ql.metadata.Partition;
-import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo;
-import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.ql.metadata.UniqueConstraint;
 
 /**
  * Interface to format table and index information.  We can format it
@@ -53,83 +46,58 @@ public interface MetaDataFormatter {
    * Write an error message.
    * @param sqlState if {@code null}, will be ignored
    */
-  public void error(OutputStream out, String msg, int errorCode, String sqlState)
+  void error(OutputStream out, String msg, int errorCode, String sqlState)
       throws HiveException;
 
   /**
    * @param sqlState if {@code null}, will be skipped in output
    * @param errorDetail usually string version of some Exception, if {@code null}, will be ignored
    */
-  public void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail)
+  void error(OutputStream out, String errorMessage, int errorCode, String sqlState, String errorDetail)
       throws HiveException;
 
   /**
    * Show a list of tables.
    */
-  public void showTables(DataOutputStream out, Set<String> tables)
+  void showTables(DataOutputStream out, Set<String> tables)
       throws HiveException;
 
   /**
    * Show a list of tables including table types.
    */
-  public void showTablesExtended(DataOutputStream out, List<Table> tables)
+  void showTablesExtended(DataOutputStream out, List<Table> tables)
       throws HiveException;
 
   /**
    * Show a list of materialized views.
    */
-  public void showMaterializedViews(DataOutputStream out, List<Table> materializedViews)
+  void showMaterializedViews(DataOutputStream out, List<Table> materializedViews)
       throws HiveException;
 
   /**
    * Describe table.
-   * @param out
-   * @param colPath
-   * @param tableName
-   * @param tbl
-   * @param part
-   * @param cols
-   * @param isFormatted - describe with formatted keyword
-   * @param isExt
-   * @param isOutputPadded - if true, add spacing and indentation
-   * @param colStats
-   * @param fkInfo  foreign keys information
-   * @param pkInfo  primary key information
-   * @param ukInfo  unique constraint information
-   * @param nnInfo  not null constraint information
-   * @throws HiveException
    */
-  public void describeTable(DataOutputStream out, String colPath,
-      String tableName, Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats,
-      PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo,
-      UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo,
-      StorageHandlerInfo storageHandlerInfo)
-          throws HiveException;
+  void describeTable(DataOutputStream out, String colPath, String tableName, Table tbl, Partition part,
+      List<FieldSchema> cols, boolean isFormatted, boolean isExtended, boolean isOutputPadded,
+      List<ColumnStatisticsObj> colStats) throws HiveException;
 
   /**
    * Show the table status.
    */
-  public void showTableStatus(DataOutputStream out,
-      Hive db,
-      HiveConf conf,
-      List<Table> tbls,
-      Map<String, String> part,
+  void showTableStatus(DataOutputStream out, Hive db, HiveConf conf, List<Table> tbls, Map<String, String> part,
       Partition par)
           throws HiveException;
 
   /**
    * Show the table partitions.
    */
-  public void showTablePartitions(DataOutputStream out,
-      List<String> parts)
+  void showTablePartitions(DataOutputStream out, List<String> parts)
           throws HiveException;
 
   /**
-   * Show the databases
+   * Show the databases.
    */
-  public void showDatabases(DataOutputStream out, List<String> databases)
+  void showDatabases(DataOutputStream out, List<String> databases)
       throws HiveException;
 
   /**
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
index f7704bd..b4cf0bf 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/TextMetaDataFormatter.java
@@ -30,10 +30,10 @@ import java.util.Set;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
-import org.apache.hadoop.hive.ql.metadata.StorageHandlerInfo;
 import org.apache.hive.common.util.HiveStringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringEscapeUtils;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
@@ -47,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.PrincipalType;
 import org.apache.hadoop.hive.metastore.api.WMFullResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
 import org.apache.hadoop.hive.metastore.api.WMValidateResourcePlanResponse;
+import org.apache.hadoop.hive.ql.ddl.table.info.DescTableDesc;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.metadata.CheckConstraint;
 import org.apache.hadoop.hive.ql.metadata.DefaultConstraint;
@@ -201,26 +202,21 @@ class TextMetaDataFormatter implements MetaDataFormatter {
   }
 
   @Override
-  public void describeTable(DataOutputStream outStream,  String colPath,
-      String tableName, Table tbl, Partition part, List<FieldSchema> cols,
-      boolean isFormatted, boolean isExt,
-      boolean isOutputPadded, List<ColumnStatisticsObj> colStats,
-      PrimaryKeyInfo pkInfo, ForeignKeyInfo fkInfo,
-      UniqueConstraint ukInfo, NotNullConstraint nnInfo, DefaultConstraint dInfo, CheckConstraint cInfo,
-      StorageHandlerInfo storageHandlerInfo)
-        throws HiveException {
+  public void describeTable(DataOutputStream outStream,  String colPath, String tableName, Table tbl, Partition part,
+      List<FieldSchema> cols, boolean isFormatted, boolean isExt, boolean isOutputPadded,
+      List<ColumnStatisticsObj> colStats) throws HiveException {
     try {
       List<FieldSchema> partCols = tbl.isPartitioned() ? tbl.getPartCols() : null;
       String output = "";
 
-      boolean isColStatsAvailable = colStats != null;
+      boolean isColStatsAvailable = CollectionUtils.isNotEmpty(colStats);
 
       TextMetaDataTable mdt = new TextMetaDataTable();
       if (isFormatted && !isColStatsAvailable) {
         output = "# ";
       }
       if (isFormatted) {
-        mdt.addRow(MetaDataFormatUtils.getColumnsHeader(colStats));
+        mdt.addRow(DescTableDesc.getSchema(isColStatsAvailable).split("#")[0].split(","));
       }
       for (FieldSchema col : cols) {
         mdt.addRow(MetaDataFormatUtils.extractColumnValues(col, isColStatsAvailable,
@@ -231,11 +227,11 @@ class TextMetaDataFormatter implements MetaDataFormatter {
       }
       output += mdt.renderTable(isOutputPadded);
 
-      if (colPath.equals(tableName)) {
+      if (colPath == null) {
         if ((partCols != null) && !partCols.isEmpty() && showPartColsSeparately) {
           mdt = new TextMetaDataTable();
           output += MetaDataFormatUtils.LINE_DELIM + "# Partition Information" + MetaDataFormatUtils.LINE_DELIM + "# ";
-          mdt.addRow(MetaDataFormatUtils.getColumnsHeader(null));
+          mdt.addRow(DescTableDesc.getSchema(false).split("#")[0].split(","));
           for (FieldSchema col : partCols) {
             mdt.addRow(MetaDataFormatUtils.extractColumnValues(col));
           }
@@ -253,7 +249,7 @@ class TextMetaDataFormatter implements MetaDataFormatter {
       }
       outStream.write(output.getBytes("UTF-8"));
 
-      if (tableName.equals(colPath)) {
+      if (colPath == null) {
         if (isFormatted) {
           if (part != null) {
             output = MetaDataFormatUtils.getPartitionInformation(part);
@@ -262,13 +258,13 @@ class TextMetaDataFormatter implements MetaDataFormatter {
           }
           outStream.write(output.getBytes("UTF-8"));
 
-          if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
-              (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) ||
-              (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) ||
-              (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) ||
-              cInfo != null && !cInfo.getCheckConstraints().isEmpty() ||
-              dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
-            output = MetaDataFormatUtils.getConstraintsInformation(pkInfo, fkInfo, ukInfo, nnInfo, dInfo, cInfo);
+          if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo()) ||
+              ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo()) ||
+              UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo()) ||
+              NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint()) ||
+              CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint()) ||
+              DefaultConstraint.isCheckConstraintNotEmpty(tbl.getDefaultConstraint())) {
+            output = MetaDataFormatUtils.getConstraintsInformation(tbl);
             outStream.write(output.getBytes("UTF-8"));
           }
         }
@@ -294,44 +290,44 @@ class TextMetaDataFormatter implements MetaDataFormatter {
             outStream.write(separator);
             outStream.write(terminator);
           }
-          if ((pkInfo != null && !pkInfo.getColNames().isEmpty()) ||
-              (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) ||
-              (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) ||
-              (dInfo!= null && !dInfo.getDefaultConstraints().isEmpty()) ||
-              (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) ||
-              (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty())) {
+          if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo()) ||
+              ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo()) ||
+              UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo()) ||
+              NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint()) ||
+              DefaultConstraint.isCheckConstraintNotEmpty(tbl.getDefaultConstraint()) ||
+              CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) {
             outStream.write(("Constraints").getBytes("UTF-8"));
             outStream.write(separator);
-            if (pkInfo != null && !pkInfo.getColNames().isEmpty()) {
-              outStream.write(pkInfo.toString().getBytes("UTF-8"));
+            if (PrimaryKeyInfo.isPrimaryKeyInfoNotEmpty(tbl.getPrimaryKeyInfo())) {
+              outStream.write(tbl.getPrimaryKeyInfo().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
-            if (fkInfo != null && !fkInfo.getForeignKeys().isEmpty()) {
-              outStream.write(fkInfo.toString().getBytes("UTF-8"));
+            if (ForeignKeyInfo.isForeignKeyInfoNotEmpty(tbl.getForeignKeyInfo())) {
+              outStream.write(tbl.getForeignKeyInfo().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
-            if (ukInfo != null && !ukInfo.getUniqueConstraints().isEmpty()) {
-              outStream.write(ukInfo.toString().getBytes("UTF-8"));
+            if (UniqueConstraint.isUniqueConstraintNotEmpty(tbl.getUniqueKeyInfo())) {
+              outStream.write(tbl.getUniqueKeyInfo().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
-            if (nnInfo != null && !nnInfo.getNotNullConstraints().isEmpty()) {
-              outStream.write(nnInfo.toString().getBytes("UTF-8"));
+            if (NotNullConstraint.isNotNullConstraintNotEmpty(tbl.getNotNullConstraint())) {
+              outStream.write(tbl.getNotNullConstraint().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
-            if (dInfo != null && !dInfo.getDefaultConstraints().isEmpty()) {
-              outStream.write(dInfo.toString().getBytes("UTF-8"));
+            if (DefaultConstraint.isCheckConstraintNotEmpty(tbl.getDefaultConstraint())) {
+              outStream.write(tbl.getDefaultConstraint().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
-            if (cInfo != null && !cInfo.getCheckConstraints().isEmpty()) {
-              outStream.write(cInfo.toString().getBytes("UTF-8"));
+            if (CheckConstraint.isCheckConstraintNotEmpty(tbl.getCheckConstraint())) {
+              outStream.write(tbl.getCheckConstraint().toString().getBytes("UTF-8"));
               outStream.write(terminator);
             }
           }
 
-          if (storageHandlerInfo!= null) {
+          if (tbl.getStorageHandlerInfo() != null) {
             outStream.write(("StorageHandlerInfo").getBytes("UTF-8"));
             outStream.write(terminator);
-            outStream.write(storageHandlerInfo.formatAsText().getBytes("UTF-8"));
+            outStream.write(tbl.getStorageHandlerInfo().formatAsText().getBytes("UTF-8"));
             outStream.write(terminator);
           }
         }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index af21fcf..f8d906f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -2363,7 +2363,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
 
       // if this ast has only one child, then no column name specified.
       if (node.getChildCount() == 1) {
-        return tableName;
+        return null;
       }
 
       ASTNode columnNode = null;
@@ -2384,7 +2384,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
               QualifiedNameUtil.getFullyQualifiedName(columnNode);
         }
       } else {
-        return tableName;
+        return null;
       }
     }
 
@@ -2543,7 +2543,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       // will contain tablename.column_name. If column_name is not specified
       // colPath will be equal to tableName. This is how we can differentiate
       // if we are describing a table or column
-      if (!colPath.equalsIgnoreCase(tableName) && isFormatted) {
+      if (colPath != null && isFormatted) {
         showColStats = true;
       }
     }
diff --git a/ql/src/test/results/clientpositive/alterColumnStats.q.out b/ql/src/test/results/clientpositive/alterColumnStats.q.out
index eb8934b..ca8d7b4 100644
--- a/ql/src/test/results/clientpositive/alterColumnStats.q.out
+++ b/ql/src/test/results/clientpositive/alterColumnStats.q.out
@@ -144,18 +144,8 @@ PREHOOK: Input: default@p_n0
 POSTHOOK: query: desc formatted p_n0 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n0
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted p_n0 c2
 PREHOOK: type: DESCTABLE
@@ -163,16 +153,6 @@ PREHOOK: Input: default@p_n0
 POSTHOOK: query: desc formatted p_n0 c2
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n0
-col_name            	c2                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c2                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
diff --git a/ql/src/test/results/clientpositive/alter_table_column_stats.q.out b/ql/src/test/results/clientpositive/alter_table_column_stats.q.out
index f8c91d3..533bcfe 100644
--- a/ql/src/test/results/clientpositive/alter_table_column_stats.q.out
+++ b/ql/src/test/results/clientpositive/alter_table_column_stats.q.out
@@ -368,18 +368,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: alter table testtable1 change col1 col1 string
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -432,18 +422,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col1                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col1                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: describe formatted statsdb1.testtable1 col2
 PREHOOK: type: DESCTABLE
@@ -470,18 +450,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: alter table statsdb1.testtable1 rename to statsdb2.testtable2
 PREHOOK: type: ALTERTABLE_RENAME
@@ -535,18 +505,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name            	col1                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col1                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: describe formatted statsdb2.testtable2 col2
 PREHOOK: type: DESCTABLE
@@ -573,18 +533,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: analyze table testpart0 compute statistics for columns
 PREHOOK: type: ANALYZE_TABLE
@@ -2249,18 +2199,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: alter table testtable1 change col1 col1 string
 PREHOOK: type: ALTERTABLE_RENAMECOL
@@ -2313,18 +2253,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col1                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col1                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: describe formatted statsdb1.testtable1 col2
 PREHOOK: type: DESCTABLE
@@ -2351,18 +2281,8 @@ PREHOOK: Input: statsdb1@testtable1
 POSTHOOK: query: describe formatted statsdb1.testtable1 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb1@testtable1
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: alter table statsdb1.testtable1 rename to statsdb2.testtable2
 PREHOOK: type: ALTERTABLE_RENAME
@@ -2416,18 +2336,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name            	col1                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col1                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: describe formatted statsdb2.testtable2 col2
 PREHOOK: type: DESCTABLE
@@ -2454,18 +2364,8 @@ PREHOOK: Input: statsdb2@testtable2
 POSTHOOK: query: describe formatted statsdb2.testtable2 col4
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: statsdb2@testtable2
-col_name            	col4                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col4                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col2\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: analyze table testpart0 compute statistics for columns
 PREHOOK: type: ANALYZE_TABLE
diff --git a/ql/src/test/results/clientpositive/autoColumnStats_10.q.out b/ql/src/test/results/clientpositive/autoColumnStats_10.q.out
index 8ed8f30..41282bf 100644
--- a/ql/src/test/results/clientpositive/autoColumnStats_10.q.out
+++ b/ql/src/test/results/clientpositive/autoColumnStats_10.q.out
@@ -163,18 +163,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -249,18 +239,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: drop table p_n1
 PREHOOK: type: DROPTABLE
@@ -412,18 +392,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	insert_num          	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+insert_num          	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -431,18 +401,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -498,18 +458,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	insert_num          	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+insert_num          	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -517,16 +467,6 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
diff --git a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
index a9206f7..35b40e9 100644
--- a/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
+++ b/ql/src/test/results/clientpositive/column_names_with_leading_and_trailing_spaces.q.out
@@ -48,18 +48,8 @@ PREHOOK: Input: default@space
 POSTHOOK: query: desc formatted space ` left`
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@space
-col_name            	 left               	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+ left               	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\" left\":\"true\",\" middle \":\"true\",\"right \":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into space values ("1", "2", "3")
 PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/compustat_avro.q.out b/ql/src/test/results/clientpositive/compustat_avro.q.out
index 45fd764..cd7cdab 100644
--- a/ql/src/test/results/clientpositive/compustat_avro.q.out
+++ b/ql/src/test/results/clientpositive/compustat_avro.q.out
@@ -30,18 +30,8 @@ PREHOOK: Input: default@testavro
 POSTHOOK: query: describe formatted testAvro col1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@testavro
-col_name            	col1                	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+col1                	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"col1\":\"true\",\"col2\":\"true\",\"col3\":\"true\",\"col4\":\"true\",\"col5\":\"true\",\"col6\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: analyze table testAvro compute statistics for columns col1,col3
 PREHOOK: type: ANALYZE_TABLE
diff --git a/ql/src/test/results/clientpositive/describe_syntax.q.out b/ql/src/test/results/clientpositive/describe_syntax.q.out
index 79c44de..158d8bd 100644
--- a/ql/src/test/results/clientpositive/describe_syntax.q.out
+++ b/ql/src/test/results/clientpositive/describe_syntax.q.out
@@ -205,18 +205,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name            	key1                	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+key1                	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{}                  	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: DESCRIBE db1.t1 key1
 PREHOOK: type: DESCTABLE
@@ -238,18 +228,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED db1.t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name            	key1                	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+key1                	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{}                  	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: DESCRIBE t1 key1
 PREHOOK: type: DESCTABLE
@@ -271,18 +251,8 @@ PREHOOK: Input: db1@t1
 POSTHOOK: query: DESCRIBE FORMATTED t1 key1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: db1@t1
-col_name            	key1                	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+key1                	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{}                  	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: DESCRIBE t1 PARTITION(ds='4', part='5')
 PREHOOK: type: DESCTABLE
diff --git a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
index 386b7d8..d5ab761 100644
--- a/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
+++ b/ql/src/test/results/clientpositive/display_colstats_tbllvl.q.out
@@ -51,18 +51,8 @@ PREHOOK: Input: default@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@uservisits_web_text_none_n0
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: explain
 analyze table UserVisits_web_text_none_n0 compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
 PREHOOK: type: ANALYZE_TABLE
@@ -350,18 +340,8 @@ PREHOOK: Input: default@empty_tab_n0
 POSTHOOK: query: desc formatted empty_tab_n0 a
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@empty_tab_n0
-col_name            	a                   	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+a                   	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\",\"e\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: explain
 analyze table empty_tab_n0 compute statistics for columns a,b,c,d,e
@@ -546,36 +526,16 @@ PREHOOK: Input: test@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none_n0
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted test.UserVisits_web_text_none_n0 sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: test@uservisits_web_text_none_n0
 POSTHOOK: query: desc formatted test.UserVisits_web_text_none_n0 sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none_n0
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted default.UserVisits_web_text_none_n0 sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@uservisits_web_text_none_n0
diff --git a/ql/src/test/results/clientpositive/inputddl6.q.out b/ql/src/test/results/clientpositive/inputddl6.q.out
index 47cc61f..155c4b3 100644
--- a/ql/src/test/results/clientpositive/inputddl6.q.out
+++ b/ql/src/test/results/clientpositive/inputddl6.q.out
@@ -95,6 +95,7 @@ STAGE PLANS:
       partition:
         ds 2008-04-09
       table: INPUTDDL6
+      extended: true
 
   Stage: Stage-1
     Fetch Operator
diff --git a/ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out b/ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
index 8ed8f30..41282bf 100644
--- a/ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
+++ b/ql/src/test/results/clientpositive/llap/autoColumnStats_10.q.out
@@ -163,18 +163,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -249,18 +239,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"insert_num\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: drop table p_n1
 PREHOOK: type: DROPTABLE
@@ -412,18 +392,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	insert_num          	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+insert_num          	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -431,18 +401,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into p_n1 values (2,11,111)
 PREHOOK: type: QUERY
@@ -498,18 +458,8 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 insert_num
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	insert_num          	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+insert_num          	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted p_n1 c1
 PREHOOK: type: DESCTABLE
@@ -517,16 +467,6 @@ PREHOOK: Input: default@p_n1
 POSTHOOK: query: desc formatted p_n1 c1
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@p_n1
-col_name            	c1                  	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+c1                  	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\"}	 	 	 	 	 	 	 	 	 	 
diff --git a/ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out b/ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
index a9206f7..35b40e9 100644
--- a/ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
+++ b/ql/src/test/results/clientpositive/llap/column_names_with_leading_and_trailing_spaces.q.out
@@ -48,18 +48,8 @@ PREHOOK: Input: default@space
 POSTHOOK: query: desc formatted space ` left`
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@space
-col_name            	 left               	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+ left               	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\" left\":\"true\",\" middle \":\"true\",\"right \":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: insert into space values ("1", "2", "3")
 PREHOOK: type: QUERY
diff --git a/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out b/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
index f255177..2ec8ee6 100644
--- a/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
+++ b/ql/src/test/results/clientpositive/temp_table_display_colstats_tbllvl.q.out
@@ -140,18 +140,8 @@ PREHOOK: Input: default@uservisits_web_text_none
 POSTHOOK: query: desc formatted UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@uservisits_web_text_none
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: explain
 analyze table UserVisits_web_text_none compute statistics for columns sourceIP, avgTimeOnSite, adRevenue
 PREHOOK: type: ANALYZE_TABLE
@@ -482,18 +472,8 @@ PREHOOK: Input: default@empty_tab
 POSTHOOK: query: desc formatted empty_tab a
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: default@empty_tab
-col_name            	a                   	 	 	 	 	 	 	 	 	 	 
-data_type           	int                 	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+a                   	int                 	from deserializer   	 	 	 	 	 	 	 	 	 
 COLUMN_STATS_ACCURATE	{\"BASIC_STATS\":\"true\",\"COLUMN_STATS\":{\"a\":\"true\",\"b\":\"true\",\"c\":\"true\",\"d\":\"true\",\"e\":\"true\"}}	 	 	 	 	 	 	 	 	 	 
 PREHOOK: query: explain
 analyze table empty_tab compute statistics for columns a,b,c,d,e
@@ -678,36 +658,16 @@ PREHOOK: Input: test@uservisits_web_text_none
 POSTHOOK: query: desc formatted UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted test.UserVisits_web_text_none sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: test@uservisits_web_text_none
 POSTHOOK: query: desc formatted test.UserVisits_web_text_none sourceIP
 POSTHOOK: type: DESCTABLE
 POSTHOOK: Input: test@uservisits_web_text_none
-col_name            	sourceIP            	 	 	 	 	 	 	 	 	 	 
-data_type           	string              	 	 	 	 	 	 	 	 	 	 
-min                 	                    	 	 	 	 	 	 	 	 	 	 
-max                 	                    	 	 	 	 	 	 	 	 	 	 
-num_nulls           	                    	 	 	 	 	 	 	 	 	 	 
-distinct_count      	                    	 	 	 	 	 	 	 	 	 	 
-avg_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-max_col_len         	                    	 	 	 	 	 	 	 	 	 	 
-num_trues           	                    	 	 	 	 	 	 	 	 	 	 
-num_falses          	                    	 	 	 	 	 	 	 	 	 	 
-bitVector           	                    	 	 	 	 	 	 	 	 	 	 
-comment             	from deserializer   	 	 	 	 	 	 	 	 	 	 
+# col_name            	data_type           	comment             	 	 	 	 	 	 	 	 	 
+sourceIP            	string              	from deserializer   	 	 	 	 	 	 	 	 	 
 PREHOOK: query: desc formatted default.UserVisits_web_text_none sourceIP
 PREHOOK: type: DESCTABLE
 PREHOOK: Input: default@uservisits_web_text_none