Posted to commits@hive.apache.org by ke...@apache.org on 2012/10/16 19:40:18 UTC

svn commit: r1398896 - in /hive/trunk: ./ eclipse-templates/ ql/src/java/org/apache/hadoop/hive/ql/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/java/org/apache/hadoop/hive/ql/plan/ ql/src/test/queries...

Author: kevinwilfong
Date: Tue Oct 16 17:40:17 2012
New Revision: 1398896

URL: http://svn.apache.org/viewvc?rev=1398896&view=rev
Log:
HIVE-967. Implement "show create table" (Feng Lu via kevinwilfong)
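
For orientation, here is a minimal HiveQL sketch of the new statement. The table definition is lifted from the partitioned test added below, so only the usage is illustrative; the exact generated output appears in the .q.out files in this commit.

  -- Table taken from show_create_table_partitioned.q (added below).
  CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
  COMMENT 'temporary table'
  PARTITIONED BY (value bigint COMMENT 'some value');

  -- New syntax introduced by this commit; prints a reconstructed CREATE TABLE
  -- statement for the table (or a CREATE VIEW statement for a view).
  SHOW CREATE TABLE tmp_showcrt1;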

Added:
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
    hive/trunk/ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q
    hive/trunk/ql/src/test/queries/clientnegative/show_create_table_index.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_alter.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_db_table.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_delimited.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_partitioned.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_serde.q
    hive/trunk/ql/src/test/queries/clientpositive/show_create_table_view.q
    hive/trunk/ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out
    hive/trunk/ql/src/test/results/clientnegative/show_create_table_index.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_delimited.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_serde.q.out
    hive/trunk/ql/src/test/results/clientpositive/show_create_table_view.q.out
Modified:
    hive/trunk/build.xml
    hive/trunk/eclipse-templates/.classpath
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java

Modified: hive/trunk/build.xml
URL: http://svn.apache.org/viewvc/hive/trunk/build.xml?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/build.xml (original)
+++ hive/trunk/build.xml Tue Oct 16 17:40:17 2012
@@ -509,7 +509,6 @@
         <exclude name="**/mockito*.jar"/>
         <exclude name="**/velocity*.jar"/>
         <exclude name="**/antlr-3*.jar"/>
-        <exclude name="**/antlr-2*.jar"/>
       </fileset>
     </copy>
     <copy todir="${target.example.dir}/files" preservelastmodified="true" flatten="true">

Modified: hive/trunk/eclipse-templates/.classpath
URL: http://svn.apache.org/viewvc/hive/trunk/eclipse-templates/.classpath?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/eclipse-templates/.classpath (original)
+++ hive/trunk/eclipse-templates/.classpath Tue Oct 16 17:40:17 2012
@@ -84,6 +84,7 @@
   <classpathentry kind="lib" path="build/ivy/lib/default/jackson-mapper-asl-1.8.8.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/jackson-xc-1.8.8.jar"/>
   <classpathentry kind="lib" path="build/ivy/lib/default/mockito-all-@mockito-all.version@.jar"/>
+  <classpathentry kind="lib" path="build/ivy/lib/default/stringtemplate-@stringtemplate.version@.jar"/>
   <classpathentry kind="lib" path="build/builtins/hive-builtins-@HIVE_VERSION@.jar"/>
   <classpathentry kind="src" path="build/contrib/test/src"/>
   <classpathentry kind="src" path="build/metastore/gen/antlr/gen-java"/>

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java Tue Oct 16 17:40:17 2012
@@ -243,6 +243,8 @@ public enum ErrorMsg {
 
   INVALID_JDO_FILTER_EXPRESSION(10043, "Invalid expression for JDO filter"),
 
+  SHOW_CREATETABLE_INDEX(10144, "SHOW CREATE TABLE does not support tables of type INDEX_TABLE."),
+
   CREATE_SKEWED_TABLE_NO_COLUMN_NAME(10200, "No skewed column name."),
   CREATE_SKEWED_TABLE_NO_COLUMN_VALUE(10201, "No skewed values."),
   CREATE_SKEWED_TABLE_DUPLICATE_COLUMN_NAMES(10202,

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Tue Oct 16 17:40:17 2012
@@ -45,6 +45,9 @@ import java.util.Set;
 import java.util.SortedSet;
 import java.util.TreeSet;
 
+import org.antlr.stringtemplate.StringTemplate;
+import org.apache.commons.lang.StringEscapeUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -73,6 +76,8 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
 import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
 import org.apache.hadoop.hive.metastore.api.Role;
+import org.apache.hadoop.hive.metastore.api.SerDeInfo;
+import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryPlan;
@@ -100,6 +105,7 @@ import org.apache.hadoop.hive.ql.metadat
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
 import org.apache.hadoop.hive.ql.metadata.formatting.TextMetaDataFormatter;
 import org.apache.hadoop.hive.ql.parse.AlterTablePartMergeFilesDesc;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.AlterDatabaseDesc;
 import org.apache.hadoop.hive.ql.plan.AlterIndexDesc;
@@ -130,6 +136,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RevokeDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
@@ -366,6 +373,11 @@ public class DDLTask extends Task<DDLWor
         return showPartitions(db, showParts);
       }
 
+      ShowCreateTableDesc showCreateTbl = work.getShowCreateTblDesc();
+      if (showCreateTbl != null) {
+        return showCreateTable(db, showCreateTbl);
+      }
+
       RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc();
       if (roleDDLDesc != null) {
         return roleDDL(roleDDLDesc);
@@ -1838,6 +1850,239 @@ public class DDLTask extends Task<DDLWor
   }
 
   /**
+   * Write the CREATE TABLE statement for a table to a file.
+   *
+   * @param db
+   *          The database in question.
+   * @param showCreateTbl
+   *          Descriptor naming the table whose CREATE statement is requested.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException
+   *           Throws this exception if an unexpected error occurs.
+   */
+  private int showCreateTable(Hive db, ShowCreateTableDesc showCreateTbl) throws HiveException {
+    // get the create table statement for the table and populate the output
+    final String EXTERNAL = "external";
+    final String LIST_COLUMNS = "columns";
+    final String TBL_COMMENT = "tbl_comment";
+    final String LIST_PARTITIONS = "partitions";
+    final String SORT_BUCKET = "sort_bucket";
+    final String ROW_FORMAT = "row_format";
+    final String TBL_LOCATION = "tbl_location";
+    final String TBL_PROPERTIES = "tbl_properties";
+
+    String tableName = showCreateTbl.getTableName();
+    Table tbl = db.getTable(tableName, false);
+    DataOutput outStream = null;
+    List<String> duplicateProps = new ArrayList<String>();
+    try {
+      Path resFile = new Path(showCreateTbl.getResFile());
+      FileSystem fs = resFile.getFileSystem(conf);
+      outStream = fs.create(resFile);
+
+      if (tbl.isView()) {
+        String createTab_stmt = "CREATE VIEW " + tableName + " AS " + tbl.getViewExpandedText();
+        outStream.writeBytes(createTab_stmt.toString());
+        ((FSDataOutputStream) outStream).close();
+        outStream = null;
+        return 0;
+      }
+
+      StringTemplate createTab_stmt = new StringTemplate("CREATE $" + EXTERNAL + "$ TABLE " +
+          tableName + "(\n" +
+          "$" + LIST_COLUMNS + "$)\n" +
+          "$" + TBL_COMMENT + "$\n" +
+          "$" + LIST_PARTITIONS + "$\n" +
+          "$" + SORT_BUCKET + "$\n" +
+          "$" + ROW_FORMAT + "$\n" +
+          "LOCATION\n" +
+          "$" + TBL_LOCATION + "$\n" +
+          "TBLPROPERTIES (\n" +
+          "$" + TBL_PROPERTIES + "$)\n");
+
+      // For cases where the table is external
+      String tbl_external = "";
+      if (tbl.getTableType() == TableType.EXTERNAL_TABLE) {
+        duplicateProps.add("EXTERNAL");
+        tbl_external = "EXTERNAL";
+      }
+
+      // Columns
+      String tbl_columns = "";
+      List<FieldSchema> cols = tbl.getCols();
+      List<String> columns = new ArrayList<String>();
+      for (FieldSchema col : cols) {
+        String columnDesc = "  " + col.getName() + " " + col.getType();
+        if (col.getComment() != null) {
+          columnDesc = columnDesc + " COMMENT '" + escapeHiveCommand(col.getComment()) + "'";
+        }
+        columns.add(columnDesc);
+      }
+      tbl_columns = StringUtils.join(columns, ", \n");
+
+      // Table comment
+      String tbl_comment = "";
+      String tabComment = tbl.getProperty("comment");
+      if (tabComment != null) {
+        duplicateProps.add("comment");
+        tbl_comment = "COMMENT '" + escapeHiveCommand(tabComment) + "'";
+      }
+
+      // Partitions
+      String tbl_partitions = "";
+      List<FieldSchema> partKeys = tbl.getPartitionKeys();
+      if (partKeys.size() > 0) {
+        tbl_partitions += "PARTITIONED BY ( \n";
+        List<String> partCols = new ArrayList<String>();
+        for (FieldSchema partKey : partKeys) {
+          String partColDesc = "  " + partKey.getName() + " " + partKey.getType();
+          if (partKey.getComment() != null) {
+            partColDesc = partColDesc + " COMMENT '" +
+                escapeHiveCommand(partKey.getComment()) + "'";
+          }
+          partCols.add(partColDesc);
+        }
+        tbl_partitions += StringUtils.join(partCols, ", \n");
+        tbl_partitions += ")";
+      }
+
+      // Clusters (Buckets)
+      String tbl_sort_bucket = "";
+      List<String> buckCols = tbl.getBucketCols();
+      if (buckCols.size() > 0) {
+        duplicateProps.add("SORTBUCKETCOLSPREFIX");
+        tbl_sort_bucket += "CLUSTERED BY ( \n  ";
+        tbl_sort_bucket += StringUtils.join(buckCols, ", \n  ");
+        tbl_sort_bucket += ") \n";
+        List<Order> sortCols = tbl.getSortCols();
+        if (sortCols.size() > 0) {
+          tbl_sort_bucket += "SORTED BY ( \n";
+          // Order
+          List<String> sortKeys = new ArrayList<String>();
+          for (Order sortCol : sortCols) {
+            String sortKeyDesc = "  " + sortCol.getCol() + " ";
+            if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_ASC) {
+              sortKeyDesc = sortKeyDesc + "ASC";
+            }
+            else if (sortCol.getOrder() == BaseSemanticAnalyzer.HIVE_COLUMN_ORDER_DESC) {
+              sortKeyDesc = sortKeyDesc + "DESC";
+            }
+            sortKeys.add(sortKeyDesc);
+          }
+          tbl_sort_bucket += StringUtils.join(sortKeys, ", \n");
+          tbl_sort_bucket += ") \n";
+        }
+        tbl_sort_bucket += "INTO " + tbl.getNumBuckets() + " BUCKETS";
+      }
+
+      // Row format (SerDe)
+      String tbl_row_format = "";
+      StorageDescriptor sd = tbl.getTTable().getSd();
+      SerDeInfo serdeInfo = sd.getSerdeInfo();
+      tbl_row_format += "ROW FORMAT";
+      if (tbl.getStorageHandler() == null) {
+        if (serdeInfo.getParametersSize() > 1) {
+          // There is a "serialization.format" property by default,
+          // even with a delimited row format.
+          // But our result will only cover the following four delimiters.
+          tbl_row_format += " DELIMITED \n";
+          Map<String, String> delims = serdeInfo.getParameters();
+          // Note:
+          // If all four delimiters appear in a CREATE TABLE statement,
+          // they must be kept in the following fixed order,
+          // or the generated statement will fail with a ParseException.
+          if (delims.containsKey(Constants.FIELD_DELIM)) {
+            tbl_row_format += "  FIELDS TERMINATED BY '" +
+                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
+                Constants.FIELD_DELIM))) + "' \n";
+          }
+          if (delims.containsKey(Constants.COLLECTION_DELIM)) {
+            tbl_row_format += "  COLLECTION ITEMS TERMINATED BY '" +
+                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
+                Constants.COLLECTION_DELIM))) + "' \n";
+          }
+          if (delims.containsKey(Constants.MAPKEY_DELIM)) {
+            tbl_row_format += "  MAP KEYS TERMINATED BY '" +
+                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
+                Constants.MAPKEY_DELIM))) + "' \n";
+          }
+          if (delims.containsKey(Constants.LINE_DELIM)) {
+            tbl_row_format += "  LINES TERMINATED BY '" +
+                escapeHiveCommand(StringEscapeUtils.escapeJava(delims.get(
+                Constants.LINE_DELIM))) + "' \n";
+          }
+        }
+        else {
+          tbl_row_format += " SERDE \n  '" +
+              escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n";
+        }
+        tbl_row_format += "STORED AS INPUTFORMAT \n  '" +
+            escapeHiveCommand(sd.getInputFormat()) + "' \n";
+        tbl_row_format += "OUTPUTFORMAT \n  '" +
+            escapeHiveCommand(sd.getOutputFormat()) + "'";
+      }
+      else {
+        duplicateProps.add(org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE);
+        tbl_row_format += " SERDE \n  '" +
+            escapeHiveCommand(serdeInfo.getSerializationLib()) + "' \n";
+        tbl_row_format += "STORED BY \n  '" + escapeHiveCommand(tbl.getParameters().get(
+            org.apache.hadoop.hive.metastore.api.Constants.META_TABLE_STORAGE)) + "' \n";
+        // SerDe Properties
+        if (serdeInfo.getParametersSize() > 0) {
+          tbl_row_format += "WITH SERDEPROPERTIES ( \n";
+          List<String> serdeCols = new ArrayList<String>();
+          for (Map.Entry<String, String> entry : serdeInfo.getParameters().entrySet()) {
+            serdeCols.add("  '" + entry.getKey() + "'='"
+                + escapeHiveCommand(StringEscapeUtils.escapeJava(entry.getValue())) + "'");
+          }
+          tbl_row_format += StringUtils.join(serdeCols, ", \n");
+          tbl_row_format += ")";
+        }
+      }
+      String tbl_location = "  '" + escapeHiveCommand(sd.getLocation()) + "'";
+
+      // Table properties
+      String tbl_properties = "";
+      Map<String, String> properties = tbl.getParameters();
+      if (properties.size() > 0) {
+        List<String> realProps = new ArrayList<String>();
+        for (String key : properties.keySet()) {
+          if (properties.get(key) != null && !duplicateProps.contains(key)) {
+            realProps.add("  '" + key + "'='" +
+                escapeHiveCommand(StringEscapeUtils.escapeJava(properties.get(key))) + "'");
+          }
+        }
+        tbl_properties += StringUtils.join(realProps, ", \n");
+      }
+
+      createTab_stmt.setAttribute(EXTERNAL, tbl_external);
+      createTab_stmt.setAttribute(LIST_COLUMNS, tbl_columns);
+      createTab_stmt.setAttribute(TBL_COMMENT, tbl_comment);
+      createTab_stmt.setAttribute(LIST_PARTITIONS, tbl_partitions);
+      createTab_stmt.setAttribute(SORT_BUCKET, tbl_sort_bucket);
+      createTab_stmt.setAttribute(ROW_FORMAT, tbl_row_format);
+      createTab_stmt.setAttribute(TBL_LOCATION, tbl_location);
+      createTab_stmt.setAttribute(TBL_PROPERTIES, tbl_properties);
+
+      outStream.writeBytes(createTab_stmt.toString());
+      ((FSDataOutputStream) outStream).close();
+      outStream = null;
+    } catch (FileNotFoundException e) {
+      LOG.info("show create table: " + stringifyException(e));
+      return 1;
+    } catch (IOException e) {
+      LOG.info("show create table: " + stringifyException(e));
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    } finally {
+      IOUtils.closeStream((FSDataOutputStream) outStream);
+    }
+
+    return 0;
+  }
+
+  /**
    * Write a list of indexes to a file.
    *
    * @param db
@@ -3602,6 +3847,18 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }
 
+  private String escapeHiveCommand(String str) {
+    StringBuilder sb = new StringBuilder();
+    for (int i = 0; i < str.length(); i ++) {
+      char c = str.charAt(i);
+      if (c == '\'' || c == ';') {
+        sb.append('\\');
+      }
+      sb.append(c);
+    }
+    return sb.toString();
+  }
+
   @Override
   public StageType getType() {
     return StageType.DDL;
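
A note on the delimiter handling added above: the code warns that the four delimiter clauses must keep a fixed order, so showCreateTable() emits them as FIELDS, COLLECTION ITEMS, MAP KEYS, LINES. A sketch of a statement in that order, with names and delimiters taken from the delimited test added below; reordering the clauses (for example, LINES before FIELDS) fails at parse time.

  CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
  ROW FORMAT DELIMITED
    FIELDS TERMINATED BY ','
    COLLECTION ITEMS TERMINATED BY '|'
    MAP KEYS TERMINATED BY '%'
    LINES TERMINATED BY '\n'
  STORED AS TEXTFILE;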

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Tue Oct 16 17:40:17 2012
@@ -100,6 +100,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RevokeDesc;
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
@@ -327,6 +328,10 @@ public class DDLSemanticAnalyzer extends
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowPartitions(ast);
       break;
+    case HiveParser.TOK_SHOW_CREATETABLE:
+      ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
+      analyzeShowCreateTable(ast);
+      break;
     case HiveParser.TOK_SHOWINDEXES:
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeShowIndexes(ast);
@@ -1516,6 +1521,27 @@ public class DDLSemanticAnalyzer extends
     setFetchTask(createFetchTask(showPartsDesc.getSchema()));
   }
 
+  private void analyzeShowCreateTable(ASTNode ast) throws SemanticException {
+    ShowCreateTableDesc showCreateTblDesc;
+    String tableName = getUnescapedName((ASTNode)ast.getChild(0));
+    showCreateTblDesc = new ShowCreateTableDesc(tableName, ctx.getResFile().toString());
+    try {
+      Table tab = db.getTable(tableName, true);
+      if (tab.getTableType() == org.apache.hadoop.hive.metastore.TableType.INDEX_TABLE) {
+        throw new SemanticException(ErrorMsg.SHOW_CREATETABLE_INDEX.getMsg(tableName
+            + " has table type INDEX_TABLE"));
+      }
+      inputs.add(new ReadEntity(tab));
+    } catch (SemanticException e) {
+      throw e;
+    } catch (HiveException e) {
+      throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(tableName));
+    }
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        showCreateTblDesc), conf));
+    setFetchTask(createFetchTask(showCreateTblDesc.getSchema()));
+  }
+
   private void analyzeShowDatabases(ASTNode ast) throws SemanticException {
     ShowDatabasesDesc showDatabasesDesc;
     if (ast.getChildCount() == 1) {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Tue Oct 16 17:40:17 2012
@@ -145,6 +145,7 @@ TOK_SHOWTABLES;
 TOK_SHOWCOLUMNS;
 TOK_SHOWFUNCTIONS;
 TOK_SHOWPARTITIONS;
+TOK_SHOW_CREATETABLE;
 TOK_SHOW_TABLESTATUS;
 TOK_SHOW_TBLPROPERTIES;
 TOK_SHOWLOCKS;
@@ -855,6 +856,7 @@ showStatement
     -> ^(TOK_SHOWCOLUMNS $db_name? $tabname)
     | KW_SHOW KW_FUNCTIONS showStmtIdentifier?  -> ^(TOK_SHOWFUNCTIONS showStmtIdentifier?)
     | KW_SHOW KW_PARTITIONS Identifier partitionSpec? -> ^(TOK_SHOWPARTITIONS Identifier partitionSpec?)
+    | KW_SHOW KW_CREATE KW_TABLE tabName=tableName -> ^(TOK_SHOW_CREATETABLE $tabName)
     | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM|KW_IN) db_name=Identifier)? KW_LIKE showStmtIdentifier partitionSpec?
     -> ^(TOK_SHOW_TABLESTATUS showStmtIdentifier $db_name? partitionSpec?)
     | KW_SHOW KW_TBLPROPERTIES tblName=Identifier (LPAREN prptyName=StringLiteral RPAREN)? -> ^(TOK_SHOW_TBLPROPERTIES $tblName $prptyName?)
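
Worth noting: the new production binds tableName rather than a bare Identifier (as the SHOW PARTITIONS rule does), so database-qualified names parse as well. A sketch using the names from the db_table test added below:

  CREATE DATABASE tmp_feng;
  CREATE TABLE tmp_feng.tmp_showcrt (key string, value int);
  USE default;
  SHOW CREATE TABLE tmp_feng.tmp_showcrt;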

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java Tue Oct 16 17:40:17 2012
@@ -62,6 +62,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOWCOLUMNS, HiveOperation.SHOWCOLUMNS);
     commandType.put(HiveParser.TOK_SHOW_TABLESTATUS, HiveOperation.SHOW_TABLESTATUS);
     commandType.put(HiveParser.TOK_SHOW_TBLPROPERTIES, HiveOperation.SHOW_TBLPROPERTIES);
+    commandType.put(HiveParser.TOK_SHOW_CREATETABLE, HiveOperation.SHOW_CREATETABLE);
     commandType.put(HiveParser.TOK_SHOWFUNCTIONS, HiveOperation.SHOWFUNCTIONS);
     commandType.put(HiveParser.TOK_SHOWINDEXES, HiveOperation.SHOWINDEXES);
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, HiveOperation.SHOWPARTITIONS);
@@ -161,6 +162,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_SHOWCOLUMNS:
       case HiveParser.TOK_SHOW_TABLESTATUS:
       case HiveParser.TOK_SHOW_TBLPROPERTIES:
+      case HiveParser.TOK_SHOW_CREATETABLE:
       case HiveParser.TOK_SHOWFUNCTIONS:
       case HiveParser.TOK_SHOWPARTITIONS:
       case HiveParser.TOK_SHOWINDEXES:

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Tue Oct 16 17:40:17 2012
@@ -52,6 +52,7 @@ public class DDLWork implements Serializ
   private ShowLocksDesc showLocksDesc;
   private DescFunctionDesc descFunctionDesc;
   private ShowPartitionsDesc showPartsDesc;
+  private ShowCreateTableDesc showCreateTblDesc;
   private DescTableDesc descTblDesc;
   private AddPartitionDesc addPartitionDesc;
   private RenamePartitionDesc renamePartitionDesc;
@@ -314,6 +315,16 @@ public class DDLWork implements Serializ
   }
 
   /**
+   * @param showCreateTblDesc
+   */
+  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
+      ShowCreateTableDesc showCreateTblDesc) {
+    this(inputs, outputs);
+
+    this.showCreateTblDesc = showCreateTblDesc;
+  }
+
+  /**
    * @param addPartitionDesc
    *          information about the partitions we want to add.
    */
@@ -724,6 +735,22 @@ public class DDLWork implements Serializ
   }
 
   /**
+   * @return the showCreateTblDesc
+   */
+  @Explain(displayName = "Show Create Table Operator")
+  public ShowCreateTableDesc getShowCreateTblDesc() {
+    return showCreateTblDesc;
+  }
+
+  /**
+   * @param showCreateTblDesc
+   *          the showCreateTblDesc to set
+   */
+  public void setShowCreateTblDesc(ShowCreateTableDesc showCreateTblDesc) {
+    this.showCreateTblDesc = showCreateTblDesc;
+  }
+
+  /**
    * @return the showIndexesDesc
    */
   @Explain(displayName = "Show Index Operator")

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1398896&r1=1398895&r2=1398896&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java Tue Oct 16 17:40:17 2012
@@ -54,6 +54,7 @@ public enum HiveOperation {
   SHOWCOLUMNS("SHOWCOLUMNS", null, null),
   SHOW_TABLESTATUS("SHOW_TABLESTATUS", null, null),
   SHOW_TBLPROPERTIES("SHOW_TBLPROPERTIES", null, null),
+  SHOW_CREATETABLE("SHOW_CREATETABLE", new Privilege[]{Privilege.SELECT}, null),
   SHOWFUNCTIONS("SHOWFUNCTIONS", null, null),
   SHOWINDEXES("SHOWINDEXES", null, null),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
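
Unlike the neighboring SHOW operations, which pass null, SHOW_CREATETABLE is registered with a SELECT input privilege, so when Hive's built-in authorization checks are enabled the statement is treated like a read of the table. A hedged sketch, assuming the legacy GRANT syntax and the hive.security.authorization.enabled setting of this era:

  -- Hypothetical grant; only needed when authorization checking is enabled.
  GRANT SELECT ON TABLE tmp_showcrt1 TO USER some_user;
  SHOW CREATE TABLE tmp_showcrt1;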

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java (added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowCreateTableDesc.java Tue Oct 16 17:40:17 2012
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+
+/**
+ * ShowCreateTableDesc.
+ *
+ */
+@Explain(displayName = "Show Create Table")
+public class ShowCreateTableDesc extends DDLDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  String resFile;
+  String tableName;
+
+  /**
+   * table name for the result of showcreatetable.
+   */
+  private static final String table = "show_create_table";
+  /**
+   * thrift ddl for the result of showcreatetable.
+   */
+  private static final String schema = "createtab_stmt#string";
+
+  public String getTable() {
+    return table;
+  }
+
+  public String getSchema() {
+    return schema;
+  }
+
+  /**
+   * For serialization use only.
+   */
+  public ShowCreateTableDesc() {
+  }
+
+  /**
+   * @param tableName
+   *          name of the table to show
+   * @param resFile the file the generated statement is written to
+   */
+  public ShowCreateTableDesc(String tableName, String resFile) {
+    this.tableName = tableName;
+    this.resFile = resFile;
+  }
+
+  /**
+   * @return the resFile
+   */
+  @Explain(displayName = "result file", normalExplain = false)
+  public String getResFile() {
+    return resFile;
+  }
+
+  /**
+   * @param resFile
+   *          the resFile to set
+   */
+  public void setResFile(String resFile) {
+    this.resFile = resFile;
+  }
+
+  /**
+   * @return the tableName
+   */
+  @Explain(displayName = "table name")
+  public String getTableName() {
+    return tableName;
+  }
+
+  /**
+   * @param tableName
+   *          the tableName to set
+   */
+  public void setTableName(String tableName) {
+    this.tableName = tableName;
+  }
+}
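
The schema string "createtab_stmt#string" above means the statement's result is fetched as a single string column named createtab_stmt. A hedged CLI sketch, assuming the standard hive.cli.print.header option applies to this fetch task as it does to other queries:

  set hive.cli.print.header=true;
  SHOW CREATE TABLE tmp_showcrt1;
  -- expected: the first output line is the column header, createtab_stmt,
  -- followed by the generated CREATE TABLE text.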

Added: hive/trunk/ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/show_create_table_does_not_exist.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,2 @@
+SHOW CREATE TABLE tmp_nonexist;
+

Added: hive/trunk/ql/src/test/queries/clientnegative/show_create_table_index.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientnegative/show_create_table_index.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientnegative/show_create_table_index.q (added)
+++ hive/trunk/ql/src/test/queries/clientnegative/show_create_table_index.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,6 @@
+CREATE TABLE tmp_showcrt (key int, value string);
+CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD;
+SHOW CREATE TABLE default__tmp_showcrt_tmp_index__;
+DROP INDEX tmp_index on tmp_showcrt;
+DROP TABLE tmp_showcrt;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_alter.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_alter.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_alter.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_alter.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,23 @@
+-- Test SHOW CREATE TABLE on an external, clustered and sorted table. Then test the query again after ALTERs.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS;
+SHOW CREATE TABLE tmp_showcrt1;
+
+-- Add a comment to the table, change the EXTERNAL property, and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE');
+SHOW CREATE TABLE tmp_showcrt1;
+
+-- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE');
+SHOW CREATE TABLE tmp_showcrt1;
+
+-- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE');
+SHOW CREATE TABLE tmp_showcrt1;
+
+-- Alter the storage handler of the table, and test SHOW CREATE TABLE.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler');
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_db_table.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_db_table.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_db_table.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_db_table.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,10 @@
+-- Test SHOW CREATE TABLE on a table name of format "db.table".
+
+CREATE DATABASE tmp_feng comment 'for show create table test';
+SHOW DATABASES;
+CREATE TABLE tmp_feng.tmp_showcrt (key string, value int);
+USE default;
+SHOW CREATE TABLE tmp_feng.tmp_showcrt;
+DROP TABLE tmp_feng.tmp_showcrt;
+DROP DATABASE tmp_feng;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_delimited.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_delimited.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_delimited.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_delimited.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,9 @@
+-- Test SHOW CREATE TABLE on a table with delimiters, stored format, and location.
+
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+LOCATION 'file:${system:test.tmp.dir}/tmp_showcrt1';
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_partitioned.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_partitioned.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_partitioned.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_partitioned.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,8 @@
+-- Test SHOW CREATE TABLE on a table with partitions and column comments.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value');
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_serde.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_serde.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_serde.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_serde.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,19 @@
+-- Test SHOW CREATE TABLE on a table with serde.
+
+-- without a storage handler
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+
+-- with a storage handler and serde properties
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, value boolean)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$');
+SHOW CREATE TABLE tmp_showcrt1;
+DROP TABLE tmp_showcrt1;
+

Added: hive/trunk/ql/src/test/queries/clientpositive/show_create_table_view.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_create_table_view.q?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_create_table_view.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_create_table_view.q Tue Oct 16 17:40:17 2012
@@ -0,0 +1,6 @@
+-- Test SHOW CREATE TABLE on a view name.
+
+CREATE VIEW tmp_copy_src AS SELECT * FROM src;
+SHOW CREATE TABLE tmp_copy_src;
+DROP VIEW tmp_copy_src;
+

Added: hive/trunk/ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/show_create_table_does_not_exist.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1 @@
+FAILED: SemanticException [Error 10001]: Table not found tmp_nonexist

Added: hive/trunk/ql/src/test/results/clientnegative/show_create_table_index.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientnegative/show_create_table_index.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientnegative/show_create_table_index.q.out (added)
+++ hive/trunk/ql/src/test/results/clientnegative/show_create_table_index.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,11 @@
+PREHOOK: query: CREATE TABLE tmp_showcrt (key int, value string)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_showcrt (key int, value string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt
+PREHOOK: query: CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD
+PREHOOK: type: CREATEINDEX
+POSTHOOK: query: CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD
+POSTHOOK: type: CREATEINDEX
+POSTHOOK: Output: default@default__tmp_showcrt_tmp_index__
+FAILED: SemanticException [Error 10144]: SHOW CREATE TABLE does not support tables of type INDEX_TABLE. default__tmp_showcrt_tmp_index__ has table type INDEX_TABLE

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_alter.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,184 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on an external, clustered and sorted table. Then test the query again after ALTERs.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test SHOW CREATE TABLE on an external, clustered and sorted table. Then test the query again after ALTERs.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key smallint, value float)
+CLUSTERED BY (key) SORTED BY (value DESC) INTO 5 BUCKETS
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: -- Add a comment to the table, change the EXTERNAL property, and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: -- Add a comment to the table, change the EXTERNAL property, and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='temporary table', 'EXTERNAL'='FALSE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE  TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'temporary table'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+  'EXTERNAL'='FALSE', 
+#### A masked pattern was here ####
+PREHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: -- Alter the table comment, change the EXTERNAL property back and test SHOW CREATE TABLE on the change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('comment'='changed comment', 'EXTERNAL'='TRUE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: -- Change the 'SORTBUCKETCOLSPREFIX' property and test SHOW CREATE TABLE. The output should not change.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('SORTBUCKETCOLSPREFIX'='FALSE')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler')
+PREHOOK: type: ALTERTABLE_PROPERTIES
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: -- Alter the storage handler of the table, and test SHOW CREATE TABLE.
+ALTER TABLE tmp_showcrt1 SET TBLPROPERTIES ('storage_handler'='org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler')
+POSTHOOK: type: ALTERTABLE_PROPERTIES
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key smallint, 
+  value float)
+COMMENT 'changed comment'
+CLUSTERED BY ( 
+  key) 
+SORTED BY ( 
+  value DESC) 
+INTO 5 BUCKETS
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED BY 
+  'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' 
+WITH SERDEPROPERTIES ( 
+  'serialization.format'='1')
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_db_table.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,51 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on a table name of format "db.table".
+
+CREATE DATABASE tmp_feng comment 'for show create table test'
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: -- Test SHOW CREATE TABLE on a table name of format "db.table".
+
+CREATE DATABASE tmp_feng comment 'for show create table test'
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: SHOW DATABASES
+PREHOOK: type: SHOWDATABASES
+POSTHOOK: query: SHOW DATABASES
+POSTHOOK: type: SHOWDATABASES
+default
+tmp_feng
+PREHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: CREATE TABLE tmp_feng.tmp_showcrt (key string, value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: tmp_feng@tmp_showcrt
+PREHOOK: query: USE default
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: USE default
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: SHOW CREATE TABLE tmp_feng.tmp_showcrt
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: tmp_feng@tmp_showcrt
+POSTHOOK: query: SHOW CREATE TABLE tmp_feng.tmp_showcrt
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: tmp_feng@tmp_showcrt
+CREATE  TABLE tmp_feng.tmp_showcrt(
+  key string, 
+  value int)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_feng.tmp_showcrt
+PREHOOK: type: DROPTABLE
+POSTHOOK: query: DROP TABLE tmp_feng.tmp_showcrt
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Output: tmp_feng@tmp_showcrt
+PREHOOK: query: DROP DATABASE tmp_feng
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: DROP DATABASE tmp_feng
+POSTHOOK: type: DROPDATABASE

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_delimited.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_delimited.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_delimited.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_delimited.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,46 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on a table with delimiters, stored format, and location.
+
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+#### A masked pattern was here ####
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test SHOW CREATE TABLE on a table with delimiters, stored format, and location.
+
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' COLLECTION ITEMS TERMINATED BY '|' MAP KEYS TERMINATED BY '\045' LINES TERMINATED BY '\n'
+STORED AS textfile
+#### A masked pattern was here ####
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE  TABLE tmp_showcrt1(
+  key int, 
+  value string, 
+  newvalue bigint)
+ROW FORMAT DELIMITED 
+  FIELDS TERMINATED BY ',' 
+  COLLECTION ITEMS TERMINATED BY '|' 
+  MAP KEYS TERMINATED BY '%' 
+  LINES TERMINATED BY '\n' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_partitioned.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,43 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on a table with partitions and column comments.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value')
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test SHOW CREATE TABLE on a table with partitions and column comments.
+
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY (value bigint COMMENT 'some value')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key string, 
+  newvalue boolean COMMENT 'a new value')
+COMMENT 'temporary table'
+PARTITIONED BY ( 
+  value bigint COMMENT 'some value')
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.mapred.TextInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_serde.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_serde.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_serde.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_serde.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,89 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on a table with serde.
+
+-- without a storage handler
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- Test SHOW CREATE TABLE on a table with serde.
+
+-- without a storage handler
+CREATE TABLE tmp_showcrt1 (key int, value string, newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
+OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE  TABLE tmp_showcrt1(
+  key int, 
+  value string, 
+  newvalue bigint)
+COMMENT 'temporary table'
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' 
+STORED AS INPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.RCFileInputFormat' 
+OUTPUTFORMAT 
+  'org.apache.hadoop.hive.ql.io.RCFileOutputFormat'
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: -- with a storage handler and serde properties
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, value boolean)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$')
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: -- with a storage handler and serde properties
+CREATE EXTERNAL TABLE tmp_showcrt1 (key string, value boolean)
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe'
+STORED BY 'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler'
+WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'='$')
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: default@tmp_showcrt1
+PREHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_showcrt1
+POSTHOOK: query: SHOW CREATE TABLE tmp_showcrt1
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_showcrt1
+CREATE EXTERNAL TABLE tmp_showcrt1(
+  key string, 
+  value boolean)
+ROW FORMAT SERDE 
+  'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe' 
+STORED BY 
+  'org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler' 
+WITH SERDEPROPERTIES ( 
+  'serialization.format'='$', 
+  'field.delim'=',')
+LOCATION
+#### A masked pattern was here ####
+TBLPROPERTIES (
+#### A masked pattern was here ####
+PREHOOK: query: DROP TABLE tmp_showcrt1
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tmp_showcrt1
+PREHOOK: Output: default@tmp_showcrt1
+POSTHOOK: query: DROP TABLE tmp_showcrt1
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tmp_showcrt1
+POSTHOOK: Output: default@tmp_showcrt1

Added: hive/trunk/ql/src/test/results/clientpositive/show_create_table_view.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_create_table_view.q.out?rev=1398896&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_create_table_view.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_create_table_view.q.out Tue Oct 16 17:40:17 2012
@@ -0,0 +1,26 @@
+PREHOOK: query: -- Test SHOW CREATE TABLE on a view name.
+
+CREATE VIEW tmp_copy_src AS SELECT * FROM src
+PREHOOK: type: CREATEVIEW
+#### A masked pattern was here ####
+POSTHOOK: query: -- Test SHOW CREATE TABLE on a view name.
+
+CREATE VIEW tmp_copy_src AS SELECT * FROM src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Output: default@tmp_copy_src
+#### A masked pattern was here ####
+PREHOOK: query: SHOW CREATE TABLE tmp_copy_src
+PREHOOK: type: SHOW_CREATETABLE
+PREHOOK: Input: default@tmp_copy_src
+POSTHOOK: query: SHOW CREATE TABLE tmp_copy_src
+POSTHOOK: type: SHOW_CREATETABLE
+POSTHOOK: Input: default@tmp_copy_src
+CREATE VIEW tmp_copy_src AS SELECT `src`.`key`, `src`.`value` FROM `default`.`src`
+PREHOOK: query: DROP VIEW tmp_copy_src
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@tmp_copy_src
+PREHOOK: Output: default@tmp_copy_src
+POSTHOOK: query: DROP VIEW tmp_copy_src
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@tmp_copy_src
+POSTHOOK: Output: default@tmp_copy_src