svn commit: r901074 [1/3] - in /hadoop/hive/trunk: ./ metastore/if/ metastore/src/gen-cpp/ metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/ metastore/src/gen-php/ metastore/src/gen-py/hive_metastore/ metastore/src/java/org/apache/hadoop...

Author: zshao
Date: Wed Jan 20 06:07:34 2010
New Revision: 901074

URL: http://svn.apache.org/viewvc?rev=901074&view=rev
Log:
HIVE-972. Support views. (John Sichi via zshao)

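For orientation, the syntax introduced here (see the Hive.g additions below)
can be exercised end to end through the Driver. A minimal, hypothetical smoke
test, assuming the era's Driver.run(String) entry point; the HiveQL statements
and table names are illustrative, not taken from the patch:

    import org.apache.hadoop.hive.conf.HiveConf;
    import org.apache.hadoop.hive.ql.Driver;

    public class ViewSmokeTest {
      public static void main(String[] args) throws Exception {
        HiveConf conf = new HiveConf(ViewSmokeTest.class);
        Driver driver = new Driver(conf);
        // New DDL paths added by this patch:
        driver.run("CREATE VIEW v(key_plus_one) AS SELECT key + 1 FROM src");
        driver.run("SELECT * FROM v");  // the view is expanded during analysis
        driver.run("DROP VIEW v");      // DROP TABLE v would now be rejected
      }
    }
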
Added:
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/UnparseTranslator.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/createViewDesc.java
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/alter_view_failure.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_view_failure1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_view_failure2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_view_failure3.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_view_failure4.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/create_view_failure5.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/drop_view_failure1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/drop_view_failure2.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/insert_view_failure.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/invalidate_view1.q
    hadoop/hive/trunk/ql/src/test/queries/clientnegative/load_view_failure.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_big_view.q
    hadoop/hive/trunk/ql/src/test/queries/clientpositive/create_view.q
    hadoop/hive/trunk/ql/src/test/results/clientnegative/alter_view_failure.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_view_failure1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_view_failure2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_view_failure3.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_view_failure4.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/create_view_failure5.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/drop_view_failure2.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/insert_view_failure.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/invalidate_view1.q.out
    hadoop/hive/trunk/ql/src/test/results/clientnegative/load_view_failure.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_big_view.q.out
    hadoop/hive/trunk/ql/src/test/results/clientpositive/create_view.q.out
Modified:
    hadoop/hive/trunk/CHANGES.txt
    hadoop/hive/trunk/metastore/if/hive_metastore.thrift
    hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.cpp
    hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.h
    hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
    hadoop/hive/trunk/metastore/src/gen-php/hive_metastore_types.php
    hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py
    hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
    hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hadoop/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java
    hadoop/hive/trunk/metastore/src/model/package.jdo
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckCtx.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/TypeCheckProcFactory.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
    hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/dropTableDesc.java
    hadoop/hive/trunk/ql/src/test/results/clientnegative/invalid_tbl_name.q.out
    hadoop/hive/trunk/ql/src/test/results/compiler/errors/invalid_create_table.q.out

Modified: hadoop/hive/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/CHANGES.txt?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/CHANGES.txt (original)
+++ hadoop/hive/trunk/CHANGES.txt Wed Jan 20 06:07:34 2010
@@ -9,6 +9,8 @@
     HIVE-964. Handle skew join
     (Yongqiang He via namit)
 
+    HIVE-972. Support views. (John Sichi via zshao)
+
   IMPROVEMENTS
 
     HIVE-983. Function from_unixtime takes long.

Modified: hadoop/hive/trunk/metastore/if/hive_metastore.thrift
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/if/hive_metastore.thrift?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/if/hive_metastore.thrift (original)
+++ hadoop/hive/trunk/metastore/if/hive_metastore.thrift Wed Jan 20 06:07:34 2010
@@ -72,7 +72,9 @@
   6: i32    retention,                // retention time
   7: StorageDescriptor sd,            // storage descriptor of the table
   8: list<FieldSchema> partitionKeys, // partition keys of the table. only primitive types are supported
-  9: map<string, string> parameters   // to store comments or any other user level parameters
+  9: map<string, string> parameters,   // to store comments or any other user level parameters
+  10: string viewOriginalText,         // original view text, null for non-view
+  11: string viewExpandedText          // expanded view text, null for non-view
 }
 
 struct Partition {

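Because both new fields are optional and written only when non-null, existing
metastore entries round-trip unchanged (older readers hit the default
skip(ftype) branch). A client of the regenerated Java bindings can therefore
tell views and base tables apart with a probe like this hypothetical helper,
relying on the convention this patch establishes (both fields null for a base
table, both non-null for a view):

    import org.apache.hadoop.hive.metastore.api.Table;

    public final class MetastoreViewCheck {
      // Testing either view-text field is sufficient; Table.isView() further
      // down in this commit asserts that the two are set (or unset) together.
      public static boolean isView(Table t) {
        return t.isSetViewExpandedText();
      }
    }
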
Modified: hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.cpp
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.cpp?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.cpp (original)
+++ hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.cpp Wed Jan 20 06:07:34 2010
@@ -728,8 +728,8 @@
   return xfer;
 }
 
-const char* Table::ascii_fingerprint = "9AED95F9B0977E7C06642265B26C8B36";
-const uint8_t Table::binary_fingerprint[16] = {0x9A,0xED,0x95,0xF9,0xB0,0x97,0x7E,0x7C,0x06,0x64,0x22,0x65,0xB2,0x6C,0x8B,0x36};
+const char* Table::ascii_fingerprint = "CBD4F726F025A868EEB3BDC4028F3D66";
+const uint8_t Table::binary_fingerprint[16] = {0xCB,0xD4,0xF7,0x26,0xF0,0x25,0xA8,0x68,0xEE,0xB3,0xBD,0xC4,0x02,0x8F,0x3D,0x66};
 
 uint32_t Table::read(apache::thrift::protocol::TProtocol* iprot) {
 
@@ -850,6 +850,22 @@
           xfer += iprot->skip(ftype);
         }
         break;
+      case 10:
+        if (ftype == apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->viewOriginalText);
+          this->__isset.viewOriginalText = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
+      case 11:
+        if (ftype == apache::thrift::protocol::T_STRING) {
+          xfer += iprot->readString(this->viewExpandedText);
+          this->__isset.viewExpandedText = true;
+        } else {
+          xfer += iprot->skip(ftype);
+        }
+        break;
       default:
         xfer += iprot->skip(ftype);
         break;
@@ -909,6 +925,12 @@
     xfer += oprot->writeMapEnd();
   }
   xfer += oprot->writeFieldEnd();
+  xfer += oprot->writeFieldBegin("viewOriginalText", apache::thrift::protocol::T_STRING, 10);
+  xfer += oprot->writeString(this->viewOriginalText);
+  xfer += oprot->writeFieldEnd();
+  xfer += oprot->writeFieldBegin("viewExpandedText", apache::thrift::protocol::T_STRING, 11);
+  xfer += oprot->writeString(this->viewExpandedText);
+  xfer += oprot->writeFieldEnd();
   xfer += oprot->writeFieldStop();
   xfer += oprot->writeStructEnd();
   return xfer;

Modified: hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.h
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.h?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.h (original)
+++ hadoop/hive/trunk/metastore/src/gen-cpp/hive_metastore_types.h Wed Jan 20 06:07:34 2010
@@ -345,10 +345,10 @@
 class Table {
  public:
 
-  static const char* ascii_fingerprint; // = "9AED95F9B0977E7C06642265B26C8B36";
-  static const uint8_t binary_fingerprint[16]; // = {0x9A,0xED,0x95,0xF9,0xB0,0x97,0x7E,0x7C,0x06,0x64,0x22,0x65,0xB2,0x6C,0x8B,0x36};
+  static const char* ascii_fingerprint; // = "CBD4F726F025A868EEB3BDC4028F3D66";
+  static const uint8_t binary_fingerprint[16]; // = {0xCB,0xD4,0xF7,0x26,0xF0,0x25,0xA8,0x68,0xEE,0xB3,0xBD,0xC4,0x02,0x8F,0x3D,0x66};
 
-  Table() : tableName(""), dbName(""), owner(""), createTime(0), lastAccessTime(0), retention(0) {
+  Table() : tableName(""), dbName(""), owner(""), createTime(0), lastAccessTime(0), retention(0), viewOriginalText(""), viewExpandedText("") {
   }
 
   virtual ~Table() throw() {}
@@ -362,9 +362,11 @@
   StorageDescriptor sd;
   std::vector<FieldSchema>  partitionKeys;
   std::map<std::string, std::string>  parameters;
+  std::string viewOriginalText;
+  std::string viewExpandedText;
 
   struct __isset {
-    __isset() : tableName(false), dbName(false), owner(false), createTime(false), lastAccessTime(false), retention(false), sd(false), partitionKeys(false), parameters(false) {}
+    __isset() : tableName(false), dbName(false), owner(false), createTime(false), lastAccessTime(false), retention(false), sd(false), partitionKeys(false), parameters(false), viewOriginalText(false), viewExpandedText(false) {}
     bool tableName;
     bool dbName;
     bool owner;
@@ -374,6 +376,8 @@
     bool sd;
     bool partitionKeys;
     bool parameters;
+    bool viewOriginalText;
+    bool viewExpandedText;
   } __isset;
 
   bool operator == (const Table & rhs) const
@@ -396,6 +400,10 @@
       return false;
     if (!(parameters == rhs.parameters))
       return false;
+    if (!(viewOriginalText == rhs.viewOriginalText))
+      return false;
+    if (!(viewExpandedText == rhs.viewExpandedText))
+      return false;
     return true;
   }
   bool operator != (const Table &rhs) const {

Modified: hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java (original)
+++ hadoop/hive/trunk/metastore/src/gen-javabean/org/apache/hadoop/hive/metastore/api/Table.java Wed Jan 20 06:07:34 2010
@@ -12,7 +12,6 @@
 import java.util.Set;
 import java.util.HashSet;
 import java.util.Collections;
-import org.apache.log4j.Logger;
 
 import org.apache.thrift.*;
 import org.apache.thrift.meta_data.*;
@@ -29,6 +28,8 @@
   private static final TField SD_FIELD_DESC = new TField("sd", TType.STRUCT, (short)7);
   private static final TField PARTITION_KEYS_FIELD_DESC = new TField("partitionKeys", TType.LIST, (short)8);
   private static final TField PARAMETERS_FIELD_DESC = new TField("parameters", TType.MAP, (short)9);
+  private static final TField VIEW_ORIGINAL_TEXT_FIELD_DESC = new TField("viewOriginalText", TType.STRING, (short)10);
+  private static final TField VIEW_EXPANDED_TEXT_FIELD_DESC = new TField("viewExpandedText", TType.STRING, (short)11);
 
   private String tableName;
   public static final int TABLENAME = 1;
@@ -48,6 +49,10 @@
   public static final int PARTITIONKEYS = 8;
   private Map<String,String> parameters;
   public static final int PARAMETERS = 9;
+  private String viewOriginalText;
+  public static final int VIEWORIGINALTEXT = 10;
+  private String viewExpandedText;
+  public static final int VIEWEXPANDEDTEXT = 11;
 
   private final Isset __isset = new Isset();
   private static final class Isset implements java.io.Serializable {
@@ -78,6 +83,10 @@
         new MapMetaData(TType.MAP, 
             new FieldValueMetaData(TType.STRING), 
             new FieldValueMetaData(TType.STRING))));
+    put(VIEWORIGINALTEXT, new FieldMetaData("viewOriginalText", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.STRING)));
+    put(VIEWEXPANDEDTEXT, new FieldMetaData("viewExpandedText", TFieldRequirementType.DEFAULT, 
+        new FieldValueMetaData(TType.STRING)));
   }});
 
   static {
@@ -96,7 +105,9 @@
     int retention,
     StorageDescriptor sd,
     List<FieldSchema> partitionKeys,
-    Map<String,String> parameters)
+    Map<String,String> parameters,
+    String viewOriginalText,
+    String viewExpandedText)
   {
     this();
     this.tableName = tableName;
@@ -111,6 +122,8 @@
     this.sd = sd;
     this.partitionKeys = partitionKeys;
     this.parameters = parameters;
+    this.viewOriginalText = viewOriginalText;
+    this.viewExpandedText = viewExpandedText;
   }
 
   /**
@@ -157,6 +170,12 @@
       }
       this.parameters = __this__parameters;
     }
+    if (other.isSetViewOriginalText()) {
+      this.viewOriginalText = other.viewOriginalText;
+    }
+    if (other.isSetViewExpandedText()) {
+      this.viewExpandedText = other.viewExpandedText;
+    }
   }
 
   @Override
@@ -346,6 +365,40 @@
     return this.parameters != null;
   }
 
+  public String getViewOriginalText() {
+    return this.viewOriginalText;
+  }
+
+  public void setViewOriginalText(String viewOriginalText) {
+    this.viewOriginalText = viewOriginalText;
+  }
+
+  public void unsetViewOriginalText() {
+    this.viewOriginalText = null;
+  }
+
+  // Returns true if field viewOriginalText is set (has been assigned a value) and false otherwise
+  public boolean isSetViewOriginalText() {
+    return this.viewOriginalText != null;
+  }
+
+  public String getViewExpandedText() {
+    return this.viewExpandedText;
+  }
+
+  public void setViewExpandedText(String viewExpandedText) {
+    this.viewExpandedText = viewExpandedText;
+  }
+
+  public void unsetViewExpandedText() {
+    this.viewExpandedText = null;
+  }
+
+  // Returns true if field viewExpandedText is set (has been assigned a value) and false otherwise
+  public boolean isSetViewExpandedText() {
+    return this.viewExpandedText != null;
+  }
+
   public void setFieldValue(int fieldID, Object value) {
     switch (fieldID) {
     case TABLENAME:
@@ -420,6 +473,22 @@
       }
       break;
 
+    case VIEWORIGINALTEXT:
+      if (value == null) {
+        unsetViewOriginalText();
+      } else {
+        setViewOriginalText((String)value);
+      }
+      break;
+
+    case VIEWEXPANDEDTEXT:
+      if (value == null) {
+        unsetViewExpandedText();
+      } else {
+        setViewExpandedText((String)value);
+      }
+      break;
+
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -454,6 +523,12 @@
     case PARAMETERS:
       return getParameters();
 
+    case VIEWORIGINALTEXT:
+      return getViewOriginalText();
+
+    case VIEWEXPANDEDTEXT:
+      return getViewExpandedText();
+
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -480,6 +555,10 @@
       return isSetPartitionKeys();
     case PARAMETERS:
       return isSetParameters();
+    case VIEWORIGINALTEXT:
+      return isSetViewOriginalText();
+    case VIEWEXPANDEDTEXT:
+      return isSetViewExpandedText();
     default:
       throw new IllegalArgumentException("Field " + fieldID + " doesn't exist!");
     }
@@ -579,6 +658,24 @@
         return false;
     }
 
+    boolean this_present_viewOriginalText = true && this.isSetViewOriginalText();
+    boolean that_present_viewOriginalText = true && that.isSetViewOriginalText();
+    if (this_present_viewOriginalText || that_present_viewOriginalText) {
+      if (!(this_present_viewOriginalText && that_present_viewOriginalText))
+        return false;
+      if (!this.viewOriginalText.equals(that.viewOriginalText))
+        return false;
+    }
+
+    boolean this_present_viewExpandedText = true && this.isSetViewExpandedText();
+    boolean that_present_viewExpandedText = true && that.isSetViewExpandedText();
+    if (this_present_viewExpandedText || that_present_viewExpandedText) {
+      if (!(this_present_viewExpandedText && that_present_viewExpandedText))
+        return false;
+      if (!this.viewExpandedText.equals(that.viewExpandedText))
+        return false;
+    }
+
     return true;
   }
 
@@ -688,6 +785,20 @@
             TProtocolUtil.skip(iprot, field.type);
           }
           break;
+        case VIEWORIGINALTEXT:
+          if (field.type == TType.STRING) {
+            this.viewOriginalText = iprot.readString();
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
+        case VIEWEXPANDEDTEXT:
+          if (field.type == TType.STRING) {
+            this.viewExpandedText = iprot.readString();
+          } else { 
+            TProtocolUtil.skip(iprot, field.type);
+          }
+          break;
         default:
           TProtocolUtil.skip(iprot, field.type);
           break;
@@ -755,6 +866,16 @@
       }
       oprot.writeFieldEnd();
     }
+    if (this.viewOriginalText != null) {
+      oprot.writeFieldBegin(VIEW_ORIGINAL_TEXT_FIELD_DESC);
+      oprot.writeString(this.viewOriginalText);
+      oprot.writeFieldEnd();
+    }
+    if (this.viewExpandedText != null) {
+      oprot.writeFieldBegin(VIEW_EXPANDED_TEXT_FIELD_DESC);
+      oprot.writeString(this.viewExpandedText);
+      oprot.writeFieldEnd();
+    }
     oprot.writeFieldStop();
     oprot.writeStructEnd();
   }
@@ -823,6 +944,22 @@
       sb.append(this.parameters);
     }
     first = false;
+    if (!first) sb.append(", ");
+    sb.append("viewOriginalText:");
+    if (this.viewOriginalText == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.viewOriginalText);
+    }
+    first = false;
+    if (!first) sb.append(", ");
+    sb.append("viewExpandedText:");
+    if (this.viewExpandedText == null) {
+      sb.append("null");
+    } else {
+      sb.append(this.viewExpandedText);
+    }
+    first = false;
     sb.append(")");
     return sb.toString();
   }

Modified: hadoop/hive/trunk/metastore/src/gen-php/hive_metastore_types.php
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-php/hive_metastore_types.php?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-php/hive_metastore_types.php (original)
+++ hadoop/hive/trunk/metastore/src/gen-php/hive_metastore_types.php Wed Jan 20 06:07:34 2010
@@ -1087,6 +1087,8 @@
   public $sd = null;
   public $partitionKeys = null;
   public $parameters = null;
+  public $viewOriginalText = null;
+  public $viewExpandedText = null;
 
   public function __construct($vals=null) {
     if (!isset(self::$_TSPEC)) {
@@ -1141,6 +1143,14 @@
             'type' => TType::STRING,
             ),
           ),
+        10 => array(
+          'var' => 'viewOriginalText',
+          'type' => TType::STRING,
+          ),
+        11 => array(
+          'var' => 'viewExpandedText',
+          'type' => TType::STRING,
+          ),
         );
     }
     if (is_array($vals)) {
@@ -1171,6 +1181,12 @@
       if (isset($vals['parameters'])) {
         $this->parameters = $vals['parameters'];
       }
+      if (isset($vals['viewOriginalText'])) {
+        $this->viewOriginalText = $vals['viewOriginalText'];
+      }
+      if (isset($vals['viewExpandedText'])) {
+        $this->viewExpandedText = $vals['viewExpandedText'];
+      }
     }
   }
 
@@ -1281,6 +1297,20 @@
             $xfer += $input->skip($ftype);
           }
           break;
+        case 10:
+          if ($ftype == TType::STRING) {
+            $xfer += $input->readString($this->viewOriginalText);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
+        case 11:
+          if ($ftype == TType::STRING) {
+            $xfer += $input->readString($this->viewExpandedText);
+          } else {
+            $xfer += $input->skip($ftype);
+          }
+          break;
         default:
           $xfer += $input->skip($ftype);
           break;
@@ -1367,6 +1397,16 @@
       }
       $xfer += $output->writeFieldEnd();
     }
+    if ($this->viewOriginalText !== null) {
+      $xfer += $output->writeFieldBegin('viewOriginalText', TType::STRING, 10);
+      $xfer += $output->writeString($this->viewOriginalText);
+      $xfer += $output->writeFieldEnd();
+    }
+    if ($this->viewExpandedText !== null) {
+      $xfer += $output->writeFieldBegin('viewExpandedText', TType::STRING, 11);
+      $xfer += $output->writeString($this->viewExpandedText);
+      $xfer += $output->writeFieldEnd();
+    }
     $xfer += $output->writeFieldStop();
     $xfer += $output->writeStructEnd();
     return $xfer;

Modified: hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py (original)
+++ hadoop/hive/trunk/metastore/src/gen-py/hive_metastore/ttypes.py Wed Jan 20 06:07:34 2010
@@ -704,6 +704,8 @@
    - sd
    - partitionKeys
    - parameters
+   - viewOriginalText
+   - viewExpandedText
   """
 
   thrift_spec = (
@@ -717,9 +719,11 @@
     (7, TType.STRUCT, 'sd', (StorageDescriptor, StorageDescriptor.thrift_spec), None, ), # 7
     (8, TType.LIST, 'partitionKeys', (TType.STRUCT,(FieldSchema, FieldSchema.thrift_spec)), None, ), # 8
     (9, TType.MAP, 'parameters', (TType.STRING,None,TType.STRING,None), None, ), # 9
+    (10, TType.STRING, 'viewOriginalText', None, None, ), # 10
+    (11, TType.STRING, 'viewExpandedText', None, None, ), # 11
   )
 
-  def __init__(self, tableName=None, dbName=None, owner=None, createTime=None, lastAccessTime=None, retention=None, sd=None, partitionKeys=None, parameters=None,):
+  def __init__(self, tableName=None, dbName=None, owner=None, createTime=None, lastAccessTime=None, retention=None, sd=None, partitionKeys=None, parameters=None, viewOriginalText=None, viewExpandedText=None,):
     self.tableName = tableName
     self.dbName = dbName
     self.owner = owner
@@ -729,6 +733,8 @@
     self.sd = sd
     self.partitionKeys = partitionKeys
     self.parameters = parameters
+    self.viewOriginalText = viewOriginalText
+    self.viewExpandedText = viewExpandedText
 
   def read(self, iprot):
     if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
@@ -797,6 +803,16 @@
           iprot.readMapEnd()
         else:
           iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.STRING:
+          self.viewOriginalText = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 11:
+        if ftype == TType.STRING:
+          self.viewExpandedText = iprot.readString();
+        else:
+          iprot.skip(ftype)
       else:
         iprot.skip(ftype)
       iprot.readFieldEnd()
@@ -850,6 +866,14 @@
         oprot.writeString(viter61)
       oprot.writeMapEnd()
       oprot.writeFieldEnd()
+    if self.viewOriginalText != None:
+      oprot.writeFieldBegin('viewOriginalText', TType.STRING, 10)
+      oprot.writeString(self.viewOriginalText)
+      oprot.writeFieldEnd()
+    if self.viewExpandedText != None:
+      oprot.writeFieldBegin('viewExpandedText', TType.STRING, 11)
+      oprot.writeString(self.viewExpandedText)
+      oprot.writeFieldEnd()
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
@@ -1267,9 +1291,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1326,9 +1347,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1385,9 +1403,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1444,9 +1459,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1503,9 +1515,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1562,9 +1571,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1621,9 +1627,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]
@@ -1680,9 +1683,6 @@
     oprot.writeFieldStop()
     oprot.writeStructEnd()
 
-  def __str__(self):
-    return repr(self)
-
   def __repr__(self):
     L = ['%s=%r' % (key, value)
       for key, value in self.__dict__.iteritems()]

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Wed Jan 20 06:07:34 2010
@@ -60,6 +60,12 @@
       throw new InvalidOperationException(newt.getTableName() + " is not a valid object name");
     }
 
+    if (newt.getViewExpandedText() != null) {
+      throw new InvalidOperationException(
+        newt.getTableName()
+        + " is a view, so it cannot be modified via ALTER TABLE");
+    }
+    
     Path srcPath = null;
     FileSystem srcFs = null;
     Path destPath = null;

Modified: hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hadoop/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Wed Jan 20 06:07:34 2010
@@ -569,7 +569,9 @@
         mtbl.getRetention(),
         convertToStorageDescriptor(mtbl.getSd()),
         convertToFieldSchemas(mtbl.getPartitionKeys()),
-        mtbl.getParameters());
+        mtbl.getParameters(),
+        mtbl.getViewOriginalText(),
+        mtbl.getViewExpandedText());
   }
   
   private MTable convertToMTable(Table tbl) throws InvalidObjectException, MetaException {
@@ -589,7 +591,9 @@
         tbl.getLastAccessTime(),
         tbl.getRetention(),
         convertToMFieldSchemas(tbl.getPartitionKeys()),
-        tbl.getParameters());
+        tbl.getParameters(),
+        tbl.getViewOriginalText(),
+        tbl.getViewExpandedText());
   }
   
   private List<MFieldSchema> convertToMFieldSchemas(List<FieldSchema> keys) {

Modified: hadoop/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java (original)
+++ hadoop/hive/trunk/metastore/src/model/org/apache/hadoop/hive/metastore/model/MTable.java Wed Jan 20 06:07:34 2010
@@ -32,7 +32,9 @@
   private int retention;
   private List<MFieldSchema> partitionKeys;
   private Map<String, String> parameters;
-  
+  private String viewOriginalText;
+  private String viewExpandedText;
+
   public MTable() {}
 
   /**
@@ -48,7 +50,8 @@
    */
   public MTable(String tableName, MDatabase database, MStorageDescriptor sd, String owner,
       int createTime, int lastAccessTime, int retention, List<MFieldSchema> partitionKeys,
-      Map<String, String> parameters) {
+      Map<String, String> parameters,
+      String viewOriginalText, String viewExpandedText) {
     this.tableName = tableName;
     this.database = database;
     this.sd = sd;
@@ -58,6 +61,8 @@
     this.retention = retention;
     this.partitionKeys = partitionKeys;
     this.parameters = parameters;
+    this.viewOriginalText = viewOriginalText;
+    this.viewExpandedText = viewExpandedText;
   }
 
   /**
@@ -117,6 +122,34 @@
   }
 
   /**
+   * @return the original view text, or null if this table is not a view
+   */
+  public String getViewOriginalText() {
+    return viewOriginalText;
+  }
+
+  /**
+   * @param viewOriginalText the original view text to set
+   */
+  public void setViewOriginalText(String viewOriginalText) {
+    this.viewOriginalText = viewOriginalText;
+  }
+
+  /**
+   * @return the expanded view text, or null if this table is not a view
+   */
+  public String getViewExpandedText() {
+    return viewExpandedText;
+  }
+
+  /**
+   * @param viewExpandedText the expanded view text to set
+   */
+  public void setViewExpandedText(String viewExpandedText) {
+    this.viewExpandedText = viewExpandedText;
+  }
+
+  /**
    * @return the owner
    */
   public String getOwner() {

Modified: hadoop/hive/trunk/metastore/src/model/package.jdo
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/metastore/src/model/package.jdo?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/metastore/src/model/package.jdo (original)
+++ hadoop/hive/trunk/metastore/src/model/package.jdo Wed Jan 20 06:07:34 2010
@@ -125,6 +125,12 @@
            <column name="PARAM_VALUE" length="767" jdbc-type="VARCHAR"/>
         </value>
       </field>
+      <field name="viewOriginalText">
+        <column name="VIEW_ORIGINAL_TEXT" jdbc-type="LONGVARCHAR"/>
+      </field>
+      <field name="viewExpandedText">
+        <column name="VIEW_EXPANDED_TEXT" jdbc-type="LONGVARCHAR"/>
+      </field>
     </class>
 
     <class name="MSerDeInfo" identity-type="datastore" table="SERDES" detachable="true">

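Note the jdbc-type: LONGVARCHAR presumably maps these columns to an unbounded
text type, so stored view definitions are not subject to the 767-character cap
that applies to PARAM_VALUE above.
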
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Context.java Wed Jan 20 06:07:34 2010
@@ -27,6 +27,8 @@
 import java.util.Random;
 import java.util.ArrayList;
 
+import org.antlr.runtime.TokenRewriteStream;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -60,6 +62,7 @@
   protected int randomid = Math.abs(rand.nextInt());
   protected int pathid = 10000;
   protected boolean explain = false;
+  private TokenRewriteStream tokenRewriteStream;
 
   public Context() {  
   }
@@ -394,5 +397,28 @@
   private static boolean strEquals(String str1, String str2) {
     return org.apache.commons.lang.StringUtils.equals(str1, str2);
   }
+
+  /**
+   * Set the token rewrite stream being used to parse the current top-level SQL
+   * statement.  Note that this should <b>not</b> be used for other parsing
+   * activities; for example, when we encounter a reference to a view, we
+   * switch to a new stream for parsing the stored view definition from the
+   * catalog, but we don't clobber the top-level stream in the context.
+   *
+   * @param tokenRewriteStream the stream being used
+   */
+  public void setTokenRewriteStream(TokenRewriteStream tokenRewriteStream) {
+    assert(this.tokenRewriteStream == null);
+    this.tokenRewriteStream = tokenRewriteStream;
+  }
+
+  /**
+   * @return the token rewrite stream being used to parse the current
+   * top-level SQL statement, or null if it isn't available
+   * (e.g. for parser tests)
+   */
+  public TokenRewriteStream getTokenRewriteStream() {
+    return tokenRewriteStream;
+  }
 }
 

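The rewrite stream matters because view support must persist the exact source
text of the CREATE VIEW body rather than a reconstruction from the AST. A
rough sketch of the idea, assuming the generated HiveLexer and ANTLR 3's
TokenRewriteStream (the token indices are hypothetical, and the real
ParseDriver also wraps the input in a case-insensitive stream, omitted here):

    import org.antlr.runtime.ANTLRStringStream;
    import org.antlr.runtime.TokenRewriteStream;
    import org.apache.hadoop.hive.ql.parse.HiveLexer;

    public class ViewTextSketch {
      // The stream retains every token of the top-level statement, so after
      // parsing, the analyzer can recover (or rewrite) the exact text of the
      // token range that covers the view definition.
      static String originalText(String command, int firstToken, int lastToken) {
        TokenRewriteStream tokens =
            new TokenRewriteStream(new HiveLexer(new ANTLRStringStream(command)));
        return tokens.toString(firstToken, lastToken);
      }
    }
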
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/Driver.java Wed Jan 20 06:07:34 2010
@@ -31,6 +31,7 @@
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Schema;
 import org.apache.hadoop.hive.ql.parse.ParseDriver;
+import org.apache.hadoop.hive.ql.parse.ParseUtils;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.ParseException;
 import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
@@ -269,11 +270,8 @@
       ctx = new Context (conf);
 
       ParseDriver pd = new ParseDriver();
-      ASTNode tree = pd.parse(command);
-
-      while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
-        tree = (ASTNode) tree.getChild(0);
-      }
+      ASTNode tree = pd.parse(command, ctx);
+      tree = ParseUtils.findRootNonNullToken(tree);
 
       BaseSemanticAnalyzer sem = SemanticAnalyzerFactory.get(conf, tree);
       // Do semantic analysis and plan generation
@@ -284,6 +282,7 @@
       sem.validate();
 
       plan = new QueryPlan(command, sem);
+
       return (0);
     } catch (SemanticException e) {
       errorMessage = "FAILED: Error in semantic analysis: " + e.getMessage();

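The hoisted helper just walks down leftmost children until it reaches a node
with a real token. A sketch equivalent to the loop deleted above (the
committed version lives in ParseUtils, whose diff is in another part of this
commit):

    static ASTNode findRootNonNullToken(ASTNode tree) {
      // ANTLR wraps the real root in token-less "nil" nodes; unwrap them.
      while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
        tree = (ASTNode) tree.getChild(0);
      }
      return tree;
    }
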
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Wed Jan 20 06:07:34 2010
@@ -57,6 +57,7 @@
 import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveMetaStoreChecker;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -66,6 +67,7 @@
 import org.apache.hadoop.hive.ql.plan.alterTableDesc;
 import org.apache.hadoop.hive.ql.plan.createTableDesc;
 import org.apache.hadoop.hive.ql.plan.createTableLikeDesc;
+import org.apache.hadoop.hive.ql.plan.createViewDesc;
 import org.apache.hadoop.hive.ql.plan.descFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.descTableDesc;
 import org.apache.hadoop.hive.ql.plan.dropTableDesc;
@@ -135,6 +137,11 @@
         return alterTable(db, alterTbl);
       }
 
+      createViewDesc crtView = work.getCreateViewDesc();
+      if (crtView != null) {
+        return createView(db, crtView);
+      }
+      
       AddPartitionDesc addPartitionDesc = work.getAddPartitionDesc();
       if (addPartitionDesc != null) {
         return addPartition(db, addPartitionDesc);
@@ -204,6 +211,10 @@
     Table tbl = db.getTable(addPartitionDesc.getDbName(),
         addPartitionDesc.getTableName());
 
+    if (tbl.isView()) {
+      throw new HiveException("Cannot use ALTER TABLE on a view");
+    }
+
     if(addPartitionDesc.getLocation() == null) {
       db.createPartition(tbl, addPartitionDesc.getPartSpec());
     } else {
@@ -884,6 +895,10 @@
   private int alterTable(Hive db, alterTableDesc alterTbl) throws HiveException {
     // alter the table
     Table tbl = db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, alterTbl.getOldName());
+
+    if (tbl.isView()) {
+      throw new HiveException("Cannot use ALTER TABLE on a view");
+    }
     Table oldTbl = tbl.copy();
 
     if (alterTbl.getOp() == alterTableDesc.alterTableTypes.RENAME) {
@@ -1084,6 +1099,18 @@
       // drop table is idempotent
     }
 
+    if (tbl != null) {
+      if (tbl.isView()) {
+        if (!dropTbl.getExpectView()) {
+          throw new HiveException("Cannot drop a view with DROP TABLE");
+        } 
+      } else {
+        if (dropTbl.getExpectView()) {
+          throw new HiveException("Cannot drop a base table with DROP VIEW");
+        } 
+      }
+    }
+
     if (dropTbl.getPartSpecs() == null) {
       // drop the table
       db.dropTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, dropTbl.getTableName());
@@ -1258,14 +1285,10 @@
       }
     }
 
-    try {
-      tbl.setOwner(conf.getUser());
-    } catch (IOException e) {
-      console.printError("Unable to get current user: " + e.getMessage(), stringifyException(e));
-      return 1;
+    int rc = setGenericTableAttributes(tbl);
+    if (rc != 0) {
+      return rc;
     }
-    // set create time
-    tbl.getTTable().setCreateTime((int) (System.currentTimeMillis() / 1000));
 
     if (crtTbl.getCols() != null) {
       tbl.setFields(crtTbl.getCols());
@@ -1312,6 +1335,46 @@
     return 0;
   }
 
+
+  /**
+   * Create a new view.
+   *
+   * @param db The database in question.
+   * @param crtView This is the view we're creating.
+   * @return Returns 0 when execution succeeds and above 0 if it fails.
+   * @throws HiveException Throws this exception if an unexpected error occurs.
+   */
+  private int createView(Hive db, createViewDesc crtView) throws HiveException {
+    Table tbl = new Table(crtView.getViewName());
+    tbl.setViewOriginalText(crtView.getViewOriginalText());
+    tbl.setViewExpandedText(crtView.getViewExpandedText());
+    tbl.setFields(crtView.getSchema());
+    if (crtView.getComment() != null) {
+      tbl.setProperty("comment", crtView.getComment());
+    }
+
+    int rc = setGenericTableAttributes(tbl);
+    if (rc != 0) {
+      return rc;
+    }
+
+    db.createTable(tbl, crtView.getIfNotExists());
+    work.getOutputs().add(new WriteEntity(tbl));
+    return 0;
+  }
+
+  private int setGenericTableAttributes(Table tbl) {
+    try {
+      tbl.setOwner(conf.getUser());
+    } catch (IOException e) {
+      console.printError("Unable to get current user: " + e.getMessage(), stringifyException(e));
+      return 1;
+    }
+    // set create time
+    tbl.getTTable().setCreateTime((int) (System.currentTimeMillis() / 1000));
+    return 0;
+  }
+
   public int getType() {
     return StageType.DDL;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java Wed Jan 20 06:07:34 2010
@@ -381,6 +381,13 @@
       Properties p = MetaStoreUtils.getSchema(tTable);
       table.setSchema(p);
       table.setTTable(tTable);
+
+      if (table.isView()) {
+        // Skip the rest, which isn't relevant for a view.
+        table.checkValidity();
+        return table;
+      }
+
       table.setInputFormatClass((Class<? extends InputFormat<WritableComparable, Writable>>)
           Class.forName(table.getSchema().getProperty(org.apache.hadoop.hive.metastore.api.Constants.FILE_INPUT_FORMAT,
                                                       org.apache.hadoop.mapred.SequenceFileInputFormat.class.getName()),

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveUtils.java Wed Jan 20 06:07:34 2010
@@ -30,6 +30,7 @@
   public static final String RBRACKET = "]";
   public static final String LBRACE = "{";
   public static final String RBRACE = "}";
+  public static final String LINE_SEP = System.getProperty("line.separator");
 
 
   public static String escapeString(String str) {
@@ -110,4 +111,14 @@
     }
     return (escape.toString());
   }
+
+  /**
+   * Regenerate an identifier as part of unparsing it back to SQL text.
+   */
+  public static String unparseIdentifier(String identifier) {
+    // In the future, if we support arbitrary characters in
+    // identifiers, then we'll need to escape any backticks
+    // in identifier by doubling them up.
+    return "`" + identifier + "`";
+  }
 }

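For illustration, unparseIdentifier("page_views") returns `page_views`;
quoting every identifier this way keeps the regenerated view text unambiguous
even when a name collides with a reserved word.
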
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java Wed Jan 20 06:07:34 2010
@@ -156,16 +156,18 @@
       throw new HiveException("[" + name + "]: is not a valid table name");
     }
     if (0 == getCols().size()) {
-      throw new HiveException("atleast one column must be specified for the table");
+      throw new HiveException("at least one column must be specified for the table");
     }
-    if (null == getDeserializer()) {
-      throw new HiveException("must specify a non-null serDe");
-    }
-    if (null == getInputFormatClass()) {
-      throw new HiveException("must specify an InputFormat class");
-    }
-    if (null == getOutputFormatClass()) {
-      throw new HiveException("must specify an OutputFormat class");
+    if (!isView()) {
+      if (null == getDeserializer()) {
+        throw new HiveException("must specify a non-null serDe");
+      }
+      if (null == getInputFormatClass()) {
+        throw new HiveException("must specify an InputFormat class");
+      }
+      if (null == getOutputFormatClass()) {
+        throw new HiveException("must specify an OutputFormat class");
+      }
     }
 
     Iterator<FieldSchema> iterCols = getCols().iterator();
@@ -572,6 +574,46 @@
   }
 
   /**
+   * @return the original view text, or null if this table is not a view
+   */
+  public String getViewOriginalText() {
+    return getTTable().getViewOriginalText();
+  }
+
+  /**
+   * @param viewOriginalText the original view text to set
+   */
+  public void setViewOriginalText(String viewOriginalText) {
+    getTTable().setViewOriginalText(viewOriginalText);
+  }
+
+  /**
+   * @return the expanded view text, or null if this table is not a view
+   */
+  public String getViewExpandedText() {
+    return getTTable().getViewExpandedText();
+  }
+
+  /**
+   * @param viewExpandedText the expanded view text to set
+   */
+  public void setViewExpandedText(String viewExpandedText) {
+    getTTable().setViewExpandedText(viewExpandedText);
+  }
+
+  /**
+   * @return whether this table is actually a view
+   */
+  public boolean isView() {
+    // either both attributes (expandedText and originalText) should
+    // be set, or neither
+    boolean hasExpandedText = (getViewExpandedText() != null);
+    boolean hasOriginalText = (getViewOriginalText() != null);
+    assert(hasExpandedText == hasOriginalText);
+    return hasExpandedText;
+  }
+
+  /**
    * Creates a partition name -> value spec map object
    * @param tp Use the information from this partition.
    * @return Partition name to value mapping.

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNode.java Wed Jan 20 06:07:34 2010
@@ -29,6 +29,8 @@
  */
 public class ASTNode extends CommonTree implements Node {
 
+  private ASTNodeOrigin origin;
+  
   public ASTNode() {  
   }
   
@@ -62,6 +64,23 @@
   public String getName() {
     return (new Integer(super.getToken().getType())).toString();
   }
+
+  /**
+   * @return information about the object from which this ASTNode
+   * originated, or null if this ASTNode was not expanded from
+   * an object reference
+   */
+  public ASTNodeOrigin getOrigin() {
+    return origin;
+  }
+
+  /**
+   * Tag this ASTNode with information about the object from which this
+   * node originated.
+   */
+  public void setOrigin(ASTNodeOrigin origin) {
+    this.origin = origin;
+  }
   
   public String dump() {
     StringBuffer sb = new StringBuffer();

Added: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java?rev=901074&view=auto
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java (added)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ASTNodeOrigin.java Wed Jan 20 06:07:34 2010
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.parse;
+
+/**
+ * ASTNodeOrigin contains contextual information about the object from whose
+ * definition a particular ASTNode originated.  For example, suppose a view v
+ * is defined as <code>select x+1 as y from t</code>, and we're processing a
+ * query <code>select v1.y from v as v1</code>, and there's a type-checking
+ * problem with the expression <code>x+1</code> due to an ALTER TABLE on t
+ * subsequent to the creation of v.  Then, when reporting the error, we want to
+ * provide the parser location with respect to the definition of v (rather than
+ * with respect to the top-level query, since that represents a completely
+ * different "parser coordinate system").
+ *
+ *<p>
+ *
+ * So, when expanding the definition of v while analyzing the top-level query,
+ * we tag each ASTNode with a reference to an ASTNodeOrigin describing v
+ * and its usage within the query.
+ */
+public class ASTNodeOrigin {
+  private final String objectType;
+  private final String objectName;
+  private final String objectDefinition;
+  private final String usageAlias;
+  private final ASTNode usageNode;
+
+  public ASTNodeOrigin(
+    String objectType,
+    String objectName,
+    String objectDefinition,
+    String usageAlias,
+    ASTNode usageNode) {
+    this.objectType = objectType;
+    this.objectName = objectName;
+    this.objectDefinition = objectDefinition;
+    this.usageAlias = usageAlias;
+    this.usageNode = usageNode;
+  }
+
+  /**
+   * @return the type of the object from which an ASTNode originated,
+   * e.g. "view".
+   */
+  public String getObjectType() {
+    return objectType;
+  }
+
+  /**
+   * @return the name of the object from which an ASTNode originated,
+   * e.g. "v".
+   */
+  public String getObjectName() {
+    return objectName;
+  }
+
+  /**
+   * @return the definition of the object from which an ASTNode originated,
+   * e.g. <code>select x+1 as y from t</code>.
+   */
+  public String getObjectDefinition() {
+    return objectDefinition;
+  }
+
+  /**
+   * @return the alias of the object from which an ASTNode originated,
+   * e.g. "v1" (this can help with debugging context-dependent expansions)
+   */
+  public String getUsageAlias() {
+    return usageAlias;
+  }
+
+  /**
+   * @return the expression node triggering usage of an object from which an
+   * ASTNode originated, e.g. <code>v as v1</code> (this can help with
+   * debugging context-dependent expansions)
+   */
+  public ASTNode getUsageNode() {
+    return usageNode;
+  }
+}
+
+// End ASTNodeOrigin.java

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Wed Jan 20 06:07:34 2010
@@ -88,7 +88,7 @@
   @Override
   public void analyzeInternal(ASTNode ast) throws SemanticException {
     if (ast.getToken().getType() == HiveParser.TOK_DROPTABLE)
-       analyzeDropTable(ast);
+       analyzeDropTable(ast, false);
     else if (ast.getToken().getType() == HiveParser.TOK_DESCTABLE)
     {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
@@ -110,7 +110,9 @@
     } else if (ast.getToken().getType() == HiveParser.TOK_MSCK) {
       ctx.setResFile(new Path(ctx.getLocalTmpFileURI()));
       analyzeMetastoreCheck(ast);
-    } else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME)
+    } else if (ast.getToken().getType() == HiveParser.TOK_DROPVIEW)
+      analyzeDropTable(ast, true);
+    else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_RENAME)
       analyzeAlterTableRename(ast);
     else if (ast.getToken().getType() == HiveParser.TOK_ALTERTABLE_ADDCOLS)
       analyzeAlterTableModifyCols(ast, alterTableTypes.ADDCOLS);
@@ -142,10 +144,10 @@
     }
   }
 
-  private void analyzeDropTable(ASTNode ast) 
+  private void analyzeDropTable(ASTNode ast, boolean expectView) 
     throws SemanticException {
     String tableName = unescapeIdentifier(ast.getChild(0).getText());
-    dropTableDesc dropTblDesc = new dropTableDesc(tableName);
+    dropTableDesc dropTblDesc = new dropTableDesc(tableName, expectView);
     rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), dropTblDesc), conf));
   }
 

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ErrorMsg.java Wed Jan 20 06:07:34 2010
@@ -20,6 +20,8 @@
 
 import org.antlr.runtime.tree.*;
 
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+
 import java.util.Map;
 import java.util.HashMap;
 import java.util.regex.Pattern;
@@ -72,7 +74,7 @@
   NON_COLLECTION_TYPE("[] not Valid on Non Collection Types"),
   SELECT_DISTINCT_WITH_GROUPBY("SELECT DISTINCT and GROUP BY can not be in the same query"),
   COLUMN_REPEATED_IN_PARTITIONING_COLS("Column repeated in partitioning columns"),
-  DUPLICATE_COLUMN_NAMES("Duplicate column names"),
+  DUPLICATE_COLUMN_NAMES("Duplicate column name:"),
   INVALID_BUCKET_NUMBER("Bucket number should be bigger than zero"),
   COLUMN_REPEATED_IN_CLUSTER_SORT("Same column cannot appear in cluster and sort by"),
   SAMPLE_RESTRICTION("Cannot Sample on More Than Two Columns"),
@@ -121,7 +123,9 @@
   LATERAL_VIEW_WITH_JOIN("Join with a lateral view is not supported"),
   LATERAL_VIEW_INVALID_CHILD("Lateral view AST with invalid child"),
   OUTPUT_SPECIFIED_MULTIPLE_TIMES("The same output cannot be present multiple times: "),
-  INVALID_AS("AS clause has an invalid number of aliases");
+  INVALID_AS("AS clause has an invalid number of aliases"),
+  VIEW_COL_MISMATCH("The number of columns produced by the SELECT clause does not match the number of column names specified by CREATE VIEW"),
+  DML_AGAINST_VIEW("A view cannot be used as target table for LOAD or INSERT");
   private String mesg;
   private String SQLState;
 
@@ -202,7 +206,7 @@
     this.SQLState = SQLState;
   }
 
-  private int getLine(ASTNode tree) {
+  private static int getLine(ASTNode tree) {
     if (tree.getChildCount() == 0) {
       return tree.getToken().getLine();
     }
@@ -210,7 +214,7 @@
     return getLine((ASTNode)tree.getChild(0));
   }
 
-  private int getCharPositionInLine(ASTNode tree) {
+  private static int getCharPositionInLine(ASTNode tree) {
     if (tree.getChildCount() == 0) {
       return tree.getToken().getCharPositionInLine();
     }
@@ -228,7 +232,40 @@
   }
 
   public String getMsg(ASTNode tree) {
-    return "line " + getLine(tree) + ":" + getCharPositionInLine(tree) + " " + mesg + " " + getText(tree);
+    StringBuilder sb = new StringBuilder();
+    renderPosition(sb, tree);
+    sb.append(" ");
+    sb.append(mesg);
+    sb.append(" ");
+    sb.append(getText(tree));
+    renderOrigin(sb, tree.getOrigin());
+    return sb.toString();
+  }
+
+  public static void renderOrigin(StringBuilder sb, ASTNodeOrigin origin) {
+    while (origin != null) {
+      sb.append(" in definition of ");
+      sb.append(origin.getObjectType());
+      sb.append(" ");
+      sb.append(origin.getObjectName());
+      sb.append(" [");
+      sb.append(HiveUtils.LINE_SEP);
+      sb.append(origin.getObjectDefinition());
+      sb.append(HiveUtils.LINE_SEP);
+      sb.append("] used as ");
+      sb.append(origin.getUsageAlias());
+      sb.append(" at ");
+      ASTNode usageNode = origin.getUsageNode();
+      renderPosition(sb, usageNode);
+      origin = usageNode.getOrigin();
+    }
+  }
+
+  private static void renderPosition(StringBuilder sb, ASTNode tree) {
+    sb.append("line ");
+    sb.append(getLine(tree));
+    sb.append(":");
+    sb.append(getCharPositionInLine(tree));
   }
 
   String getMsg(Tree tree) {
@@ -236,7 +273,7 @@
   }
 
   String getMsg(ASTNode tree, String reason) {
-    return "line " + getLine(tree) + ":" + getCharPositionInLine(tree) + " " + mesg + " " + getText(tree) + ": " + reason;
+    return getMsg(tree) + ": " + reason;
   }
 
   String getMsg(Tree tree, String reason) {
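
renderOrigin walks a chain of origins, so an error raised inside a view that
is itself used by another view reports every level of nesting. A minimal
self-contained mimic of that loop (Origin is a stand-in for ASTNodeOrigin;
the real code additionally renders the usage position via renderPosition):

    class OriginChainDemo {
      static class Origin {
        final String objectType, objectName, definition, usageAlias;
        final Origin next; // origin of the node where this object was used
        Origin(String t, String n, String d, String a, Origin next) {
          objectType = t; objectName = n; definition = d;
          usageAlias = a; this.next = next;
        }
      }
      static String render(Origin origin) {
        StringBuilder sb = new StringBuilder();
        while (origin != null) {
          sb.append(" in definition of ").append(origin.objectType)
            .append(" ").append(origin.objectName)
            .append(" [\n").append(origin.definition)
            .append("\n] used as ").append(origin.usageAlias);
          origin = origin.next; // views referencing views chain here
        }
        return sb.toString();
      }
      public static void main(String[] args) {
        Origin v1 = new Origin("VIEW", "v1", "SELECT key FROM src", "t", null);
        Origin v2 = new Origin("VIEW", "v2", "SELECT t.key FROM v1 t", "u", v1);
        System.out.println("<some error>" + render(v2));
      }
    }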

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Wed Jan 20 06:07:34 2010
@@ -130,6 +130,8 @@
 TOK_CHARSETLITERAL;
 TOK_CREATEFUNCTION;
 TOK_DROPFUNCTION;
+TOK_CREATEVIEW;
+TOK_DROPVIEW;
 TOK_EXPLAIN;
 TOK_TABLESERIALIZER;
 TOK_TABLEPROPERTIES;
@@ -201,12 +203,14 @@
 ddlStatement
 @init { msgs.push("ddl statement"); }
 @after { msgs.pop(); }
-    : createStatement
-    | dropStatement
+    : createTableStatement
+    | dropTableStatement
     | alterStatement
     | descStatement
     | showStatement
     | metastoreCheck
+    | createViewStatement
+    | dropViewStatement
     | createFunctionStatement
     | dropFunctionStatement
     ;
@@ -218,8 +222,8 @@
     -> ^(TOK_IFNOTEXISTS)
     ;
 
-createStatement
-@init { msgs.push("create statement"); }
+createTableStatement
+@init { msgs.push("create table statement"); }
 @after { msgs.pop(); }
     : KW_CREATE (ext=KW_EXTERNAL)? KW_TABLE ifNotExists? name=Identifier
       (  like=KW_LIKE likeName=Identifier
@@ -246,7 +250,7 @@
         )
     ;
 
-dropStatement
+dropTableStatement
 @init { msgs.push("drop statement"); }
 @after { msgs.pop(); }
     : KW_DROP KW_TABLE Identifier  -> ^(TOK_DROPTABLE Identifier)
@@ -412,6 +416,29 @@
     -> ^(TOK_DROPFUNCTION Identifier)
     ;
 
+createViewStatement
+@init { 
+    msgs.push("create view statement");
+}
+@after { msgs.pop(); }
+    : KW_CREATE KW_VIEW ifNotExists? name=Identifier
+        (LPAREN columnNameCommentList RPAREN)? tableComment?
+        KW_AS 
+        selectStatement 
+    -> ^(TOK_CREATEVIEW $name ifNotExists? 
+         columnNameCommentList?
+         tableComment?
+         selectStatement
+        )
+    ;
+
+dropViewStatement
+@init { msgs.push("drop view statement"); }
+@after { msgs.pop(); }
+    : KW_DROP KW_VIEW Identifier
+    -> ^(TOK_DROPVIEW Identifier)
+    ;
+
 showStmtIdentifier
 @init { msgs.push("identifier for show statement"); }
 @after { msgs.pop(); }
@@ -598,6 +625,19 @@
     ->                  ^(TOK_TABSORTCOLNAMEDESC Identifier)
     ;
 
+columnNameCommentList
+@init { msgs.push("column name comment list"); }
+@after { msgs.pop(); }
+    : columnNameComment (COMMA columnNameComment)* -> ^(TOK_TABCOLNAME columnNameComment+)
+    ;
+
+columnNameComment
+@init { msgs.push("column name comment"); }
+@after { msgs.pop(); }
+    : colName=Identifier (KW_COMMENT comment=StringLiteral)? 
+    -> ^(TOK_TABCOL $colName TOK_NULL $comment?)
+    ;
+
 columnRefOrder
 @init { msgs.push("column order"); }
 @after { msgs.pop(); }
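
As a worked example of the new productions (assuming the usual HiveQL
Identifier and StringLiteral forms), a statement such as

    CREATE VIEW IF NOT EXISTS v (abc COMMENT 'first', xyz) COMMENT 'demo'
    AS SELECT key, value FROM src

parses to a TOK_CREATEVIEW node whose children are the view name,
TOK_IFNOTEXISTS, the TOK_TABCOLNAME column list (each entry a TOK_TABCOL
carrying TOK_NULL in place of a type), the table comment, and the select
statement; DROP VIEW v likewise produces TOK_DROPVIEW over the identifier.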

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/LoadSemanticAnalyzer.java Wed Jan 20 06:07:34 2010
@@ -176,6 +176,10 @@
 
     // initialize destination table/partition
     tableSpec ts = new tableSpec(db, conf, (ASTNode) table_t);
+
+    if (ts.tableHandle.isView()) {
+      throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
+    }
     URI toURI = (ts.partHandle != null) ? ts.partHandle.getDataLocation() : ts.tableHandle.getDataLocation();
 
     List<FieldSchema> parts = ts.tableHandle.getTTable().getPartitionKeys();
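
With this guard in place, a statement such as

    LOAD DATA LOCAL INPATH '/tmp/kv.txt' INTO TABLE v

where v is a view now fails up front with the DML_AGAINST_VIEW message,
before the analyzer tries to resolve a data location for the target (the
path and names here are illustrative only).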

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseDriver.java Wed Jan 20 06:07:34 2010
@@ -27,6 +27,8 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
+import org.apache.hadoop.hive.ql.Context;
+
 public class ParseDriver {
     
   static final private Log LOG = LogFactory.getLog("hive.ql.parse.ParseDriver");
@@ -344,10 +346,29 @@
   };
   
   public ASTNode parse(String command) throws ParseException {
+    return parse(command, null);
+  }
+
+  /**
+   * Parses a command, optionally assigning the parser's token stream to
+   * the given context.
+   *
+   * @param command command to parse
+   *
+   * @param ctx context with which to associate this parser's
+   * token stream, or null if either no context is available
+   * or the context already has an existing stream
+   *
+   * @return parsed AST
+   */
+  public ASTNode parse(String command, Context ctx) throws ParseException {
     LOG.info("Parsing command: " + command);
       
     HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
-    TokenStream tokens = new TokenRewriteStream(lexer);
+    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
+    if (ctx != null) {
+      ctx.setTokenRewriteStream(tokens);
+    }
     HiveParserX parser = new HiveParserX(tokens);
     parser.setTreeAdaptor(adaptor);
     HiveParser.statement_return r = null;
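
Capturing the TokenRewriteStream in the Context is what later lets CREATE
VIEW recover the user's original query text verbatim and layer targeted
rewrites on top of it. A stand-alone mimic of those two operations, with
plain strings in place of ANTLR tokens (the real class is ANTLR's
TokenRewriteStream, not this sketch):

    import java.util.*;

    class MiniRewriteStream {
      private final List<String> tokens;
      private final Map<Integer, String> repl = new HashMap<Integer, String>();
      MiniRewriteStream(List<String> tokens) { this.tokens = tokens; }
      void replace(int i, String text) { repl.put(i, text); }  // layered rewrite
      String toString(int start, int stop) {  // original text plus any rewrites
        StringBuilder sb = new StringBuilder();
        for (int i = start; i <= stop; ++i) {
          sb.append(repl.containsKey(i) ? repl.get(i) : tokens.get(i));
        }
        return sb.toString();
      }
      public static void main(String[] args) {
        MiniRewriteStream s = new MiniRewriteStream(Arrays.asList(
            "SELECT", " ", "key", " ", "FROM", " ", "src"));
        System.out.println(s.toString(0, 6)); // SELECT key FROM src
        s.replace(2, "src.key");              // qualify the column reference
        System.out.println(s.toString(0, 6)); // SELECT src.key FROM src
      }
    }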

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ParseUtils.java Wed Jan 20 06:07:34 2010
@@ -24,19 +24,35 @@
  */
 public class ParseUtils {
 	
-	/**
-	 * Tests whether the parse tree node is a join token
-	 * 
-	 * @param node The parse tree node
-	 * @return boolean
-	 */
-	public static boolean isJoinToken(ASTNode node) {
-		if ((node.getToken().getType() == HiveParser.TOK_JOIN)
-				|| (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
-				|| (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
-				|| (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN))
-			return true;
+  /**
+   * Tests whether the parse tree node is a join token
+   * 
+   * @param node The parse tree node
+   * @return boolean
+   */
+  public static boolean isJoinToken(ASTNode node) {
+    if ((node.getToken().getType() == HiveParser.TOK_JOIN)
+      || (node.getToken().getType() == HiveParser.TOK_LEFTOUTERJOIN)
+      || (node.getToken().getType() == HiveParser.TOK_RIGHTOUTERJOIN)
+      || (node.getToken().getType() == HiveParser.TOK_FULLOUTERJOIN))
+      return true;
 
-		return false;
-	}
+    return false;
+  }
+
+  /**
+   * Performs a descent of the leftmost branch of a tree, stopping
+   * when either a node with a non-null token is found or the leaf
+   * level is encountered.
+   *
+   * @param tree candidate node from which to start searching
+   *
+   * @return node at which descent stopped
+   */
+  public static ASTNode findRootNonNullToken(ASTNode tree) {
+    while ((tree.getToken() == null) && (tree.getChildCount() > 0)) {
+      tree = (ASTNode) tree.getChild(0);
+    }
+    return tree;
+  }
 }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QB.java Wed Jan 20 06:07:34 2010
@@ -145,6 +145,13 @@
     return aliasToTabs.get(alias.toLowerCase());
   }
 
+  public void rewriteViewToSubq(String alias, String viewName, QBExpr qbexpr) {
+    alias = alias.toLowerCase();
+    String tableName = aliasToTabs.remove(alias);
+    assert (viewName.equals(tableName));
+    aliasToSubq.put(alias, qbexpr);
+  }
+
   public QBJoinTree getQbJoinTree() {
     return qbjoin;
   }

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/QBMetaData.java Wed Jan 20 06:07:34 2010
@@ -37,7 +37,6 @@
   public static final int DEST_REDUCE = 4;
   public static final int DEST_LOCAL_FILE = 5;
 
-  private ArrayList<Class<?>> outTypes;
   private HashMap<String, Table> aliasToTable;
   private HashMap<String, Table> nameToDestTable;
   private HashMap<String, Partition> nameToDestPartition;
@@ -48,7 +47,6 @@
   private static final Log LOG = LogFactory.getLog(QBMetaData.class.getName());
   
   public QBMetaData() {
-    this.outTypes = new ArrayList<Class<?>>();
     this.aliasToTable = new HashMap<String, Table>();
     this.nameToDestTable = new HashMap<String, Table>();
     this.nameToDestPartition = new HashMap<String, Partition>();
@@ -56,14 +54,6 @@
     this.nameToDestType = new HashMap<String, Integer>();
   }
 
-  public ArrayList<Class<?>> getOutputTypes() {
-    return this.outTypes;
-  }
-
-  public void addOutputType(Class<?> cls) {
-    this.outTypes.add(cls);
-  }
-
   // All getXXX needs toLowerCase() because they are directly called from SemanticAnalyzer
   // All setXXX does not need it because they are called from QB which already lowercases
   // the aliases.

Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=901074&r1=901073&r2=901074&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Wed Jan 20 06:07:34 2010
@@ -75,6 +75,7 @@
 import org.apache.hadoop.hive.ql.lib.Rule;
 import org.apache.hadoop.hive.ql.lib.RuleRegExp;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
@@ -102,6 +103,7 @@
 import org.apache.hadoop.hive.ql.plan.aggregationDesc;
 import org.apache.hadoop.hive.ql.plan.createTableDesc;
 import org.apache.hadoop.hive.ql.plan.createTableLikeDesc;
+import org.apache.hadoop.hive.ql.plan.createViewDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeColumnDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeConstantDesc;
 import org.apache.hadoop.hive.ql.plan.exprNodeDesc;
@@ -171,6 +173,9 @@
   private HashMap<TableScanOperator, sampleDesc> opToSamplePruner;
   Map<GroupByOperator, Set<String>> groupOpToInputTables;
   Map<String, PrunedPartitionList> prunedPartitions;
+  private createViewDesc createVwDesc;
+  private ASTNode viewSelect;
+  private UnparseTranslator unparseTranslator;
 
   private static class Phase1Ctx {
     String dest;
@@ -195,6 +200,7 @@
     this.listMapJoinOpsNoReducer = new ArrayList<MapJoinOperator>();
     this.groupOpToInputTables = new HashMap<GroupByOperator, Set<String>>();
     prunedPartitions = new HashMap<String, PrunedPartitionList> ();
+    unparseTranslator = new UnparseTranslator();
   }
 
   @Override
@@ -305,6 +311,11 @@
         String functionName = unescapeIdentifier(expressionTree.getChild(0).getText());
         if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
           aggregations.put(expressionTree.toStringTree(), expressionTree);
+          FunctionInfo fi = FunctionRegistry.getFunctionInfo(functionName);
+          if (!fi.isNative()) {
+            unparseTranslator.addIdentifierTranslation(
+              (ASTNode) expressionTree.getChild(0));
+          }
           return;
         }
       }
@@ -386,6 +397,12 @@
           unescapeIdentifier(sampleClause.getChild(1).getText()),
           sampleCols)
       );
+      if (unparseTranslator.isEnabled()) {
+        for (ASTNode sampleCol : sampleCols) {
+          unparseTranslator.addIdentifierTranslation(
+            (ASTNode) sampleCol.getChild(0));
+        }
+      }
     }
     // Insert this map into the stats
     String table_name = unescapeIdentifier(tabref.getChild(0).getText());
@@ -393,6 +410,12 @@
 
     qb.getParseInfo().setSrcForAlias(alias, tableTree);
 
+    unparseTranslator.addIdentifierTranslation(tableTree);
+    if (aliasIndex != 0) {
+      unparseTranslator.addIdentifierTranslation(
+        (ASTNode) tabref.getChild(aliasIndex));
+    }
+    
     return alias;
   }
 
@@ -417,6 +440,9 @@
     // Insert this map into the stats
     qb.setSubqAlias(alias, qbexpr);
 
+    unparseTranslator.addIdentifierTranslation(
+      (ASTNode) subq.getChild(1));
+
     return alias;
   }
 
@@ -679,8 +705,11 @@
 
       LOG.info("Get metadata for source tables");
 
-      // Go over the tables and populate the related structures
-      for (String alias : qb.getTabAliases()) {
+      // Go over the tables and populate the related structures.
+      // We have to materialize the table alias list since we might
+      // modify it in the middle for view rewrite.
+      List<String> tabAliases = new ArrayList<String>(qb.getTabAliases());
+      for (String alias : tabAliases) {
         String tab_name = qb.getTabNameForAlias(alias);
         Table tab = null;
         try {
@@ -689,6 +718,10 @@
         catch (InvalidTableException ite) {
           throw new SemanticException(ErrorMsg.INVALID_TABLE.getMsg(qb.getParseInfo().getSrcForAlias(alias)));
         }
+        if (tab.isView()) {
+          replaceViewReferenceWithDefinition(qb, tab, tab_name, alias);
+          continue;
+        }
 
         if (!InputFormat.class.isAssignableFrom(tab.getInputFormatClass()))
           throw new SemanticException(ErrorMsg.INVALID_INPUT_FORMAT_TYPE.getMsg(qb.getParseInfo().getSrcForAlias(alias)));
@@ -713,6 +746,9 @@
         switch (ast.getToken().getType()) {
         case HiveParser.TOK_TAB: {
           tableSpec ts = new tableSpec(this.db, conf, ast);
+          if (ts.tableHandle.isView()) {
+            throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
+          }
 
           if (!HiveOutputFormat.class.isAssignableFrom(ts.tableHandle.getOutputFormatClass()))
             throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg(ast));
@@ -758,6 +794,54 @@
     }
   }
 
+  private void replaceViewReferenceWithDefinition(
+    QB qb, Table tab, String tab_name, String alias) throws SemanticException {
+
+    ParseDriver pd = new ParseDriver();
+    ASTNode viewTree;
+    final ASTNodeOrigin viewOrigin =
+      new ASTNodeOrigin(
+        "VIEW",
+        tab.getName(),
+        tab.getViewExpandedText(),
+        alias,
+        qb.getParseInfo().getSrcForAlias(alias));
+    try {
+      String viewText = tab.getViewExpandedText();
+      // Reparse text, passing null for context to avoid clobbering
+      // the top-level token stream.
+      ASTNode tree = pd.parse(viewText, null);
+      tree = ParseUtils.findRootNonNullToken(tree);
+      viewTree = tree;
+      Dispatcher nodeOriginDispatcher = new Dispatcher()
+        {
+          public Object dispatch(
+            Node nd, java.util.Stack<Node> stack, Object... nodeOutputs)
+          {
+            ((ASTNode) nd).setOrigin(viewOrigin);
+            return null;
+          }
+        };
+      GraphWalker nodeOriginTagger =
+        new DefaultGraphWalker(nodeOriginDispatcher);
+      nodeOriginTagger.startWalking(
+        java.util.Collections.<Node>singleton(viewTree), null);
+    } catch (ParseException e) {
+      // A user could encounter this if a stored view definition contains
+      // an old SQL construct which has been eliminated in a later Hive
+      // version, so we need to provide full debugging info to help
+      // with fixing the view definition.
+      LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e));
+      StringBuilder sb = new StringBuilder();
+      sb.append(e.getMessage());
+      ErrorMsg.renderOrigin(sb, viewOrigin);
+      throw new SemanticException(sb.toString(), e);
+    }
+    QBExpr qbexpr = new QBExpr(alias);
+    doPhase1QBExpr(viewTree, qbexpr, qb.getId(), alias);
+    qb.rewriteViewToSubq(alias, tab_name, qbexpr);
+  }
+
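
The anonymous Dispatcher above stamps every node of the re-parsed view body
with the same ASTNodeOrigin, so errors raised deep inside the expansion still
point back at the view definition. A stand-alone sketch of that walk, with
simplified stand-ins for the Hive node types:

    import java.util.*;

    class OriginTagDemo {
      static class Node {
        Object origin;
        final List<Node> children = new ArrayList<Node>();
      }
      // Visit root and every descendant, stamping each with the origin marker.
      static void tagAll(Node root, Object origin) {
        Deque<Node> pending = new ArrayDeque<Node>();
        pending.push(root);
        while (!pending.isEmpty()) {
          Node n = pending.pop();
          n.origin = origin;
          pending.addAll(n.children);
        }
      }
    }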
   private boolean isPresent(String[] list, String elem) {
     for (String s : list)
       if (s.equals(elem))
@@ -775,6 +859,8 @@
     switch (condn.getToken().getType()) {
     case HiveParser.TOK_TABLE_OR_COL:
       String tableOrCol = unescapeIdentifier(condn.getChild(0).getText().toLowerCase());
+      unparseTranslator.addIdentifierTranslation(
+        (ASTNode) condn.getChild(0));
       if (isPresent(joinTree.getLeftAliases(), tableOrCol)) {
         if (!leftAliases.contains(tableOrCol))
           leftAliases.add(tableOrCol);
@@ -793,6 +879,7 @@
       if ( fields != null ) {
         fields.add(unescapeIdentifier(condn.getToken().getText().toLowerCase()));
       }
+      unparseTranslator.addIdentifierTranslation((ASTNode) condn);
       break;
     case HiveParser.Number:
     case HiveParser.StringLiteral:
@@ -1032,6 +1119,7 @@
       throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel, e.getMessage()));
     }
 
+    StringBuilder replacementText = new StringBuilder();
     int matched = 0;
     // This is the tab.* case
     // In this case add all the columns to the fieldList
@@ -1059,10 +1147,23 @@
                      colInfo.getTabAlias(), colInfo.getIsPartitionCol()));
       pos = Integer.valueOf(pos.intValue() + 1);
       matched ++;
+
+      if (unparseTranslator.isEnabled()) {
+        if (replacementText.length() > 0) {
+          replacementText.append(", ");
+        }
+        replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
+        replacementText.append(".");
+        replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
+      }
     }
     if (matched == 0) {
       throw new SemanticException(ErrorMsg.INVALID_COLUMN.getMsg(sel));
     }
+
+    if (unparseTranslator.isEnabled()) {
+      unparseTranslator.addTranslation(sel, replacementText.toString());
+    }
     return pos;
   }
 
@@ -1476,6 +1577,10 @@
         genericUDTF = fi.getGenericUDTF();
       }
       isUDTF = (genericUDTF != null);
+      if (isUDTF && !fi.isNative()) {
+        unparseTranslator.addIdentifierTranslation(
+          (ASTNode) udtfExpr.getChild(0));
+      }
     }
 
     if (isUDTF) {
@@ -1495,11 +1600,14 @@
         switch (selExprChild.getType()) {
         case HiveParser.Identifier:
           udtfColAliases.add(unescapeIdentifier(selExprChild.getText()));
+          unparseTranslator.addIdentifierTranslation(selExprChild);
           break;
         case HiveParser.TOK_TABALIAS:
           assert(selExprChild.getChildCount() == 1);
           udtfTableAlias =
             unescapeIdentifier(selExprChild.getChild(0).getText());
+          unparseTranslator.addIdentifierTranslation(
+            (ASTNode) selExprChild.getChild(0));
           break;
         default:
           assert(false);
@@ -1551,6 +1659,10 @@
         String[] colRef = getColAlias(child, "_C" + i, inputRR);
         tabAlias = colRef[0];
         colAlias = colRef[1];
+        if (hasAsClause) {
+          unparseTranslator.addIdentifierTranslation(
+            (ASTNode) child.getChild(1));
+        }
         // Get rid of TOK_SELEXPR
         expr = (ASTNode)child.getChild(0);
       }
@@ -2004,7 +2116,7 @@
 
     for (Map.Entry<String, ASTNode> entry : aggregationTrees.entrySet()) {
       ASTNode value = entry.getValue();
-      String aggName = value.getChild(0).getText();
+      String aggName = unescapeIdentifier(value.getChild(0).getText());
       ArrayList<exprNodeDesc> aggParameters = new ArrayList<exprNodeDesc>();
       ArrayList<Class<?>> aggClasses = new ArrayList<Class<?>>();
       // 0 is the function name
@@ -5201,6 +5313,15 @@
          return;
     }
 
+    // analyze create view command
+    if (ast.getToken().getType() == HiveParser.TOK_CREATEVIEW) {
+      child = analyzeCreateView(ast, qb);
+      if (child == null) {
+        return;
+      }
+      viewSelect = child;
+    }
+
     // continue analyzing from the child ASTNode.
     doPhase1(child, qb, initPhase1Ctx());
     LOG.info("Completed phase 1 of Semantic Analysis");
@@ -5208,8 +5329,18 @@
     getMetaData(qb);
     LOG.info("Completed getting MetaData in Semantic Analysis");
 
-    genPlan(qb);
+    Operator sinkOp = genPlan(qb);
 
+    if (createVwDesc != null) {
+      saveViewDefinition(sinkOp);
+      // Since we're only creating a view (not executing it), we
+      // don't need to optimize or translate the plan (and in fact, those
+      // procedures can interfere with the view creation).  So
+      // skip the rest of this method.
+      ctx.setResDir(null);
+      ctx.setResFile(null);
+      return;
+    }
 
     ParseContext pCtx = new ParseContext(conf, qb, child, opToPartPruner, topOps,
                                          topSelOps, opParseCtx, joinContext, topToTable,
@@ -5236,9 +5367,85 @@
     return;
   }
 
+  private void saveViewDefinition(Operator sinkOp)
+    throws SemanticException {
+
+    // Save the view schema derived from the sink operator produced
+    // by genPlan.
+    List<FieldSchema> derivedSchema =
+      convertRowSchemaToViewSchema(opParseCtx.get(sinkOp).getRR());
+    validateColumnNameUniqueness(derivedSchema);
+
+    List<FieldSchema> imposedSchema = createVwDesc.getSchema();
+    if (imposedSchema != null) {
+      int explicitColCount = imposedSchema.size();
+      int derivedColCount = derivedSchema.size();
+      if (explicitColCount != derivedColCount) {
+        throw new SemanticException(
+          ErrorMsg.VIEW_COL_MISMATCH.getMsg(viewSelect));
+      }
+    }
+
+    // Preserve the original view definition as specified by the user.
+    String originalText = ctx.getTokenRewriteStream().toString(
+      viewSelect.getTokenStartIndex(),
+      viewSelect.getTokenStopIndex());
+    createVwDesc.setViewOriginalText(originalText);
+
+    // Now expand the view definition with extras such as explicit column
+    // references; this expanded form is what we'll re-parse when the view is
+    // referenced later.
+    unparseTranslator.applyTranslation(ctx.getTokenRewriteStream());
+    String expandedText = ctx.getTokenRewriteStream().toString(
+      viewSelect.getTokenStartIndex(),
+      viewSelect.getTokenStopIndex());
+
+    if (imposedSchema != null) {
+      // Merge the names from the imposed schema into the types
+      // from the derived schema.
+      StringBuilder sb = new StringBuilder();
+      sb.append("SELECT ");
+      int n = derivedSchema.size();
+      for (int i = 0; i < n; ++i) {
+        if (i > 0) {
+          sb.append(", ");
+        }
+        FieldSchema fieldSchema = derivedSchema.get(i);
+        // Modify a copy, not the original
+        fieldSchema = new FieldSchema(fieldSchema);
+        derivedSchema.set(i, fieldSchema);
+        sb.append(HiveUtils.unparseIdentifier(fieldSchema.getName()));
+        sb.append(" AS ");
+        String imposedName = imposedSchema.get(i).getName();
+        sb.append(HiveUtils.unparseIdentifier(imposedName));
+        fieldSchema.setName(imposedName);
+        // We don't currently allow imposition of a type
+        fieldSchema.setComment(imposedSchema.get(i).getComment());
+      }
+      sb.append(" FROM (");
+      sb.append(expandedText);
+      sb.append(") ");
+      sb.append(HiveUtils.unparseIdentifier(createVwDesc.getViewName()));
+      expandedText = sb.toString();
+    }
+    
+    createVwDesc.setSchema(derivedSchema);
+    createVwDesc.setViewExpandedText(expandedText);
+  }
+
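
To make the name merge concrete (identifier quoting from
HiveUtils.unparseIdentifier elided): for CREATE VIEW v (a, b) AS SELECT key,
value FROM src, the derived schema columns are key and value, so the stored
expanded text takes the shape

    SELECT key AS a, value AS b FROM ( <expanded select> ) v

while the schema persisted for the view keeps the derived types under the
imposed names a and b.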
+  private List<FieldSchema> convertRowSchemaToViewSchema(RowResolver rr) {
+    List<FieldSchema> fieldSchemas = new ArrayList<FieldSchema>();
+    for (ColumnInfo colInfo : rr.getColumnInfos()) {
+      String colName = rr.reverseLookup(colInfo.getInternalName())[1];
+      fieldSchemas.add(
+        new FieldSchema(colName, colInfo.getType().getTypeName(), null));
+    }
+    return fieldSchemas;
+  }
+
   /**
-   * Generates and expression node descriptor for the expression passed in the arguments. This
-   * function uses the row resolver and the metadata informatinon that are passed as arguments
+   * Generates an expression node descriptor for the expression passed in the arguments. This
+   * function uses the row resolver and the metadata information that are passed as arguments
    * to resolve the column names to internal names.
    * @param expr The expression
    * @param input The row resolver
@@ -5262,6 +5469,7 @@
 
     // Create the walker, the rules dispatcher and the context.
     TypeCheckCtx tcCtx = new TypeCheckCtx(input);
+    tcCtx.setUnparseTranslator(unparseTranslator);
 
     // create a walker which walks the tree in a DFS manner while maintaining the operator stack. The dispatcher
     // generates the plan from the operator tree
@@ -5294,6 +5502,34 @@
       throw new SemanticException(tcCtx.getError());
     }
 
+    if (!unparseTranslator.isEnabled()) {
+      // Not creating a view, so no need to track view expansions.
+      return desc;
+    }
+
+    for (Map.Entry<Node, Object> entry : nodeOutputs.entrySet()) {
+      if (!(entry.getKey() instanceof ASTNode)) {
+        continue;
+      }
+      if (!(entry.getValue() instanceof exprNodeColumnDesc)) {
+        continue;
+      }
+      ASTNode node = (ASTNode) entry.getKey();
+      exprNodeColumnDesc columnDesc = (exprNodeColumnDesc) entry.getValue();
+      if ((columnDesc.getTabAlias() == null)
+        || (columnDesc.getTabAlias().length() == 0)) {
+        // These aren't real column refs; instead, they are special
+        // internal expressions used in the representation of aggregation.
+        continue;
+      }
+      String [] tmp = input.reverseLookup(columnDesc.getColumn());
+      StringBuilder replacementText = new StringBuilder();
+      replacementText.append(HiveUtils.unparseIdentifier(tmp[0]));
+      replacementText.append(".");
+      replacementText.append(HiveUtils.unparseIdentifier(tmp[1]));
+      unparseTranslator.addTranslation(node, replacementText.toString());
+    }
+    
     return desc;
   }
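
For view expansion, the net effect of these translations is that every plain
column reference in the original text is replaced by its alias-qualified
form, e.g. a bare key resolved against alias src becomes src.key (modulo
whatever quoting HiveUtils.unparseIdentifier applies), so the expanded text
remains unambiguous when it is re-parsed inside a larger query.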
 
@@ -5404,7 +5640,7 @@
       serde = COLUMNAR_SERDE;
     }
 
-    LOG.info("Creating table" + tableName + " positin=" + ast.getCharPositionInLine());
+    LOG.info("Creating table " + tableName + " position=" + ast.getCharPositionInLine());
     int numCh = ast.getChildCount();
 
     /* Check the 1st-level children and do simple semantic checks:
@@ -5604,10 +5840,67 @@
     return null;
   }
 
-  private void validateCreateTable(createTableDesc crtTblDesc) throws SemanticException {
+  private ASTNode analyzeCreateView(ASTNode ast, QB qb)
+    throws SemanticException {
+    String tableName = unescapeIdentifier(ast.getChild(0).getText());
+    List<FieldSchema> cols = null;
+    boolean ifNotExists = false;
+    String comment = null;
+    ASTNode selectStmt = null;
+
+    LOG.info("Creating view " + tableName + " position="
+      + ast.getCharPositionInLine());
+    int numCh = ast.getChildCount();
+    for (int num = 1; num < numCh; num++) {
+      ASTNode child = (ASTNode)ast.getChild(num);
+      switch (child.getToken().getType()) {
+      case HiveParser.TOK_IFNOTEXISTS:
+        ifNotExists = true;
+        break;
+      case HiveParser.TOK_QUERY:
+        selectStmt = child;
+        break;
+      case HiveParser.TOK_TABCOLNAME:
+        cols = getColumns(child);
+        break;
+      case HiveParser.TOK_TABLECOMMENT:
+        comment = unescapeSQLString(child.getChild(0).getText());
+        break;
+      default: assert false;
+      }
+    }
+
+    createVwDesc = new createViewDesc(tableName, cols, comment, ifNotExists);
+    unparseTranslator.enable();
+    rootTasks.add(
+      TaskFactory.get(
+        new DDLWork(getInputs(), getOutputs(), createVwDesc), conf));
+    return selectStmt;
+  }
+
+  private List<String> validateColumnNameUniqueness(
+    List<FieldSchema> fieldSchemas) throws SemanticException {
+
     // no duplicate column names
     // currently, it is a simple n*n algorithm - this can be optimized later if need be
     // but it should not be a major bottleneck as the number of columns is anyway not so big
+    Iterator<FieldSchema> iterCols = fieldSchemas.iterator();
+    List<String> colNames = new ArrayList<String>();
+    while (iterCols.hasNext()) {
+      String colName = iterCols.next().getName();
+      Iterator<String> iter = colNames.iterator();
+      while (iter.hasNext()) {
+        String oldColName = iter.next();
+        if (colName.equalsIgnoreCase(oldColName))
+          throw new SemanticException(
+            ErrorMsg.DUPLICATE_COLUMN_NAMES.getMsg(oldColName));
+      }
+      colNames.add(colName);
+    }
+    return colNames;
+  }
+
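
The factored-out check deliberately keeps the n*n scan; for reference, a
case-folded set gives the same answer in linear time (stand-alone sketch
with plain strings in place of FieldSchema):

    import java.util.*;

    class DupCheckDemo {
      // Returns the first duplicate column name (case-insensitive), or null.
      static String findDuplicate(List<String> colNames) {
        Set<String> seen = new HashSet<String>();
        for (String name : colNames) {
          if (!seen.add(name.toLowerCase())) {
            return name;
          }
        }
        return null;
      }
    }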
+  private void validateCreateTable(createTableDesc crtTblDesc) throws SemanticException {
 
     if((crtTblDesc.getCols() == null) || (crtTblDesc.getCols().size() == 0)) {
       // for now make sure that serde exists
@@ -5626,18 +5919,7 @@
       throw new SemanticException(ErrorMsg.INVALID_OUTPUT_FORMAT_TYPE.getMsg());
     }
 
-    Iterator<FieldSchema> iterCols = crtTblDesc.getCols().iterator();
-    List<String> colNames = new ArrayList<String>();
-    while (iterCols.hasNext()) {
-      String colName = iterCols.next().getName();
-      Iterator<String> iter = colNames.iterator();
-      while (iter.hasNext()) {
-        String oldColName = iter.next();
-        if (colName.equalsIgnoreCase(oldColName))
-          throw new SemanticException(ErrorMsg.DUPLICATE_COLUMN_NAMES.getMsg());
-      }
-      colNames.add(colName);
-    }
+    List<String> colNames = validateColumnNameUniqueness(crtTblDesc.getCols());
 
     if (crtTblDesc.getBucketCols() != null)
     {