Posted to hcatalog-commits@incubator.apache.org by ga...@apache.org on 2012/03/03 20:52:20 UTC

svn commit: r1296700 - in /incubator/hcatalog/trunk: ./ src/java/org/apache/hcatalog/cli/ src/java/org/apache/hcatalog/cli/SemanticAnalysis/ src/java/org/apache/hcatalog/common/ src/test/e2e/hcatalog/drivers/ src/test/e2e/hcatalog/tests/ src/test/org/a...

Author: gates
Date: Sat Mar  3 20:52:19 2012
New Revision: 1296700

URL: http://svn.apache.org/viewvc?rev=1296700&view=rev
Log:
HCATALOG-261 Enable more DDL statements for HCat

Removed:
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AddPartitionHook.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/AlterTableFileFormatHook.java
Modified:
    incubator/hcatalog/trunk/CHANGES.txt
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
    incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hcat.conf
    incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
    incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
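
As a rough illustration of what this change enables, DDL along these lines is now expected to pass through the HCat CLI instead of failing with "Operation not supported" (statements taken from the e2e tests added in this commit; the table names are simply the ones used there):

    alter table studenttab10k touch;
    alter table hcat_altertable_5 clustered by (age) into 1 buckets;
    create table junit_sem_analysis (a int) partitioned by (b string) stored as SEQUENCEFILE;
    create table like_table like junit_sem_analysis;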

Modified: incubator/hcatalog/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/CHANGES.txt?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/CHANGES.txt (original)
+++ incubator/hcatalog/trunk/CHANGES.txt Sat Mar  3 20:52:19 2012
@@ -33,6 +33,8 @@ Trunk (unreleased changes)
 Release 0.4.0 - Unreleased
 
   INCOMPATIBLE CHANGES
+  HCAT-261 Enable more DDL statements for HCat (daijy via gates)
+
   HCAT-252 Rework HBase storage driver into HBase storage handler (rohini via toffer) 
 
   HCAT-265 remove deprecated HCatStorageHandler (toffer)

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/HCatCli.java Sat Mar  3 20:52:19 2012
@@ -44,6 +44,7 @@ import org.apache.hadoop.hive.common.Log
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.CommandNeedRetryException;
+import org.apache.hadoop.hive.ql.processors.DfsProcessor;
 import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hcatalog.cli.SemanticAnalysis.HCatSemanticAnalyzer;
@@ -240,6 +241,8 @@ public class HCatCli {
 
     if(firstToken.equalsIgnoreCase("set")){
       return new SetProcessor().run(cmd.substring(firstToken.length()).trim()).getResponseCode();
+    } else if (firstToken.equalsIgnoreCase("dfs")){
+      return new DfsProcessor(ss.getConf()).run(cmd.substring(firstToken.length()).trim()).getResponseCode();
     }
 
     HCatDriver driver = new HCatDriver();
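
A small usage sketch of the new dispatch in HCatCli (assuming the commands reach the CLI through a script file or -e string, as the e2e tests below do): "set" commands continue to be handled by SetProcessor, while commands starting with "dfs" are now handed to Hive's DfsProcessor instead of being rejected, for example:

    set hive.support.concurrency=true;
    dfs -ls;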

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateDatabaseHook.java Sat Mar  3 20:52:19 2012
@@ -59,9 +59,6 @@ final class CreateDatabaseHook  extends 
 
       switch (child.getToken().getType()) {
 
-      case HiveParser.TOK_QUERY: // CTAS
-        throw new SemanticException("Operation not supported. Create db as Select is not a valid operation.");
-
       case HiveParser.TOK_IFNOTEXISTS:
         try {
           List<String> dbs = db.getDatabasesByPattern(databaseName);

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/CreateTableHook.java Sat Mar  3 20:52:19 2012
@@ -57,8 +57,8 @@ import org.apache.hcatalog.rcfile.RCFile
 
 final class CreateTableHook extends HCatSemanticAnalyzerBase {
 
-    private String inStorageDriver, outStorageDriver, tableName, loader, storer;
-
+    private String tableName;
+    
     @Override
     public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
             ASTNode ast) throws SemanticException {
@@ -78,6 +78,7 @@ final class CreateTableHook extends HCat
         String inputFormat = null, outputFormat = null;
         tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast
                 .getChild(0));
+        boolean likeTable = false;
 
         for (int num = 1; num < numCh; num++) {
             ASTNode child = (ASTNode) ast.getChild(num);
@@ -90,51 +91,21 @@ final class CreateTableHook extends HCat
                             "Select is not a valid operation.");
 
                 case HiveParser.TOK_TABLEBUCKETS:
-                    throw new SemanticException(
-                            "Operation not supported. HCatalog doesn't " +
-                            "allow Clustered By in create table.");
+                    break;
 
                 case HiveParser.TOK_TBLSEQUENCEFILE:
-                    throw new SemanticException(
-                            "Operation not supported. HCatalog doesn't support " +
-                            "Sequence File by default yet. "
-                             + "You may specify it through INPUT/OUTPUT storage drivers.");
+                    inputFormat = HCatConstants.SEQUENCEFILE_INPUT;
+                    outputFormat = HCatConstants.SEQUENCEFILE_OUTPUT;
+                    break;
 
                 case HiveParser.TOK_TBLTEXTFILE:
                     inputFormat      = org.apache.hadoop.mapred.TextInputFormat.class.getName();
                     outputFormat     = org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat.class.getName();
-                    inStorageDriver  = org.apache.hcatalog.pig.drivers.LoadFuncBasedInputDriver.class.getName();
-                    outStorageDriver = org.apache.hcatalog.pig.drivers.StoreFuncBasedOutputDriver.class.getName();
-                    loader = HCatConstants.HCAT_PIG_STORAGE_CLASS;
-                    storer = HCatConstants.HCAT_PIG_STORAGE_CLASS;
 
                     break;
 
                 case HiveParser.TOK_LIKETABLE:
-
-                    String likeTableName;
-                    if (child.getChildCount() > 0
-                            && (likeTableName = BaseSemanticAnalyzer
-                                    .getUnescapedName((ASTNode) ast.getChild(0))) != null) {
-
-                        throw new SemanticException(
-                                "Operation not supported. CREATE TABLE LIKE is not supported.");
-                        // Map<String, String> tblProps;
-                        // try {
-                        // tblProps =
-                        // db.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME,
-                        // likeTableName).getParameters();
-                        // } catch (HiveException he) {
-                        // throw new SemanticException(he);
-                        // }
-                        // if(!(tblProps.containsKey(InitializeInput.HOWL_ISD_CLASS)
-                        // &&
-                        // tblProps.containsKey(InitializeInput.HOWL_OSD_CLASS))){
-                        // throw new
-                        // SemanticException("Operation not supported. Table "+likeTableName+" should have been created through HCat. Seems like its not.");
-                        // }
-                        // return ast;
-                    }
+                    likeTable = true;
                     break;
 
                 case HiveParser.TOK_IFNOTEXISTS:
@@ -175,38 +146,31 @@ final class CreateTableHook extends HCat
                     break;
 
                 case HiveParser.TOK_TABLEFILEFORMAT:
-                    if (child.getChildCount() < 4) {
+                    if (child.getChildCount() < 2) {
                         throw new SemanticException(
                                 "Incomplete specification of File Format. " +
-                                "You must provide InputFormat, OutputFormat, " +
-                                "InputDriver, OutputDriver.");
+                                "You must provide InputFormat, OutputFormat.");
                     }
                     inputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
                             .getChild(0).getText());
                     outputFormat = BaseSemanticAnalyzer.unescapeSQLString(child
                             .getChild(1).getText());
-                    inStorageDriver = BaseSemanticAnalyzer
-                            .unescapeSQLString(child.getChild(2).getText());
-                    outStorageDriver = BaseSemanticAnalyzer
-                            .unescapeSQLString(child.getChild(3).getText());
                     break;
 
                 case HiveParser.TOK_TBLRCFILE:
                     inputFormat = RCFileInputFormat.class.getName();
                     outputFormat = RCFileOutputFormat.class.getName();
-                    inStorageDriver = RCFileInputDriver.class.getName();
-                    outStorageDriver = RCFileOutputDriver.class.getName();
                     break;
 
             }
         }
-
-        if (inputFormat == null || outputFormat == null
-                || inStorageDriver == null || outStorageDriver == null) {
+        
+        if (!likeTable && (inputFormat == null || outputFormat == null)) {
             throw new SemanticException(
                     "STORED AS specification is either incomplete or incorrect.");
         }
 
+
         return ast;
     }
 
@@ -239,9 +203,6 @@ final class CreateTableHook extends HCat
         // first check if we will allow the user to create table.
         String storageHandler = desc.getStorageHandler();
         if (StringUtils.isEmpty(storageHandler)) {
-            tblProps.put(HCatConstants.HCAT_ISD_CLASS, inStorageDriver);
-            tblProps.put(HCatConstants.HCAT_OSD_CLASS, outStorageDriver);
-
         } else {
             try {
                 HCatStorageHandler storageHandlerInst = HCatUtil
@@ -256,12 +217,6 @@ final class CreateTableHook extends HCat
                 throw new SemanticException(e);
             }
         }
-        if (loader!=null) {
-            tblProps.put(HCatConstants.HCAT_PIG_LOADER, loader);
-        }
-        if (storer!=null) {
-            tblProps.put(HCatConstants.HCAT_PIG_STORER, storer);
-        }
 
         if (desc != null) {
           try {
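
With the relaxed checks above, CREATE TABLE variants along these lines should now be accepted (a sketch based on the updated TestSemanticAnalysis cases; the names and format classes are the ones used there):

    create table junit_sem_analysis (a int) partitioned by (b string)
        clustered by (a) into 10 buckets stored as TEXTFILE;
    create table junit_sem_analysis (a int) stored as
        INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat'
        OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileOutputFormat';
    create table like_table like junit_sem_analysis;

Per the updated tests, a bare "create table junit_sem_analysis (a int)" with no STORED AS clause also succeeds now, so Hive's default file format applies.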

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/cli/SemanticAnalysis/HCatSemanticAnalyzer.java Sat Mar  3 20:52:19 2012
@@ -61,59 +61,98 @@ public class HCatSemanticAnalyzer extend
   public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast)
       throws SemanticException {
 
-    this.ast = ast;
-    switch (ast.getToken().getType()) {
+      this.ast = ast;
+      switch (ast.getToken().getType()) {
 
-    // HCat wants to intercept following tokens and special-handle them.
-    case HiveParser.TOK_CREATETABLE:
-      hook = new CreateTableHook();
-      return hook.preAnalyze(context, ast);
-
-    case HiveParser.TOK_CREATEDATABASE:
-      hook = new CreateDatabaseHook();
-      return hook.preAnalyze(context, ast);
-
-    // HCat will allow these operations to be performed since they are DDL statements.
-    case HiveParser.TOK_SHOWDATABASES:
-    case HiveParser.TOK_DROPDATABASE:
-    case HiveParser.TOK_SWITCHDATABASE:
-    case HiveParser.TOK_DESCDATABASE:
-
-    case HiveParser.TOK_DROPTABLE:
-    case HiveParser.TOK_DESCTABLE:
-    case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-    case HiveParser.TOK_ALTERTABLE_RENAME:
-    case HiveParser.TOK_ALTERTABLE_DROPPARTS:
-    case HiveParser.TOK_ALTERTABLE_PROPERTIES:
-    case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-    case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-    case HiveParser.TOK_SHOW_TABLESTATUS:
-    case HiveParser.TOK_SHOWTABLES:
-    case HiveParser.TOK_SHOWPARTITIONS:
-      return ast;
-
-    case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      hook = new AddPartitionHook();
-      return hook.preAnalyze(context, ast);
-
-    case HiveParser.TOK_ALTERTABLE_PARTITION:
-      if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
-        hook = new AlterTableFileFormatHook();
+      // HCat wants to intercept following tokens and special-handle them.
+      case HiveParser.TOK_CREATETABLE:
+        hook = new CreateTableHook();
         return hook.preAnalyze(context, ast);
-      } else {
+
+      case HiveParser.TOK_CREATEDATABASE:
+        hook = new CreateDatabaseHook();
+        return hook.preAnalyze(context, ast);
+      
+      case HiveParser.TOK_ALTERTABLE_PARTITION:
+          if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_FILEFORMAT) {
+            return ast;
+          } else if (((ASTNode)ast.getChild(1)).getToken().getType() == HiveParser.TOK_ALTERTABLE_ALTERPARTS_MERGEFILES){
+              // unsupported
+              throw new SemanticException("Operation not supported.");
+          } else {
+              return ast;
+          }
+      
+      // HCat will allow these operations to be performed.
+      // Database DDL
+      case HiveParser.TOK_SHOWDATABASES:
+      case HiveParser.TOK_DROPDATABASE:
+      case HiveParser.TOK_SWITCHDATABASE:
+      case HiveParser.TOK_DESCDATABASE:
+      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+
+      // Index DDL
+      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
+      case HiveParser.TOK_CREATEINDEX:
+      case HiveParser.TOK_DROPINDEX:
+      case HiveParser.TOK_SHOWINDEXES:
+      
+      // View DDL
+      // "alter view add partition" does not work because of the nature of implementation
+      // of the DDL in hive. Hive will internally invoke another Driver on the select statement, 
+      // and HCat does not let "select" statement through. I cannot find a way to get around it
+      // without modifying hive code. So just leave it unsupported. 
+      //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+      case HiveParser.TOK_ALTERVIEW_RENAME:
+      case HiveParser.TOK_CREATEVIEW:
+      case HiveParser.TOK_DROPVIEW:
+      
+      // Authorization DDL
+      case HiveParser.TOK_CREATEROLE:
+      case HiveParser.TOK_DROPROLE:
+      case HiveParser.TOK_GRANT_ROLE:
+      case HiveParser.TOK_GRANT_WITH_OPTION:
+      case HiveParser.TOK_GRANT:
+      case HiveParser.TOK_REVOKE_ROLE:
+      case HiveParser.TOK_REVOKE:
+      case HiveParser.TOK_SHOW_GRANT:
+      case HiveParser.TOK_SHOW_ROLE_GRANT:
+      
+      // Misc DDL
+      case HiveParser.TOK_LOCKTABLE:
+      case HiveParser.TOK_UNLOCKTABLE:
+      case HiveParser.TOK_SHOWLOCKS:
+      case HiveParser.TOK_DESCFUNCTION:
+      case HiveParser.TOK_SHOWFUNCTIONS:
+      case HiveParser.TOK_EXPLAIN:
+      
+      // Table DDL
+      case HiveParser.TOK_ALTERTABLE_ADDPARTS:
+      case HiveParser.TOK_ALTERTABLE_ADDCOLS:
+      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
+      case HiveParser.TOK_ALTERTABLE_DROPPARTS:
+      case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_RENAME:
+      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
+      case HiveParser.TOK_ALTERTABLE_SERIALIZER:
+      case HiveParser.TOK_ALTERTABLE_TOUCH:
+      case HiveParser.TOK_DESCTABLE:
+      case HiveParser.TOK_DROPTABLE:
+      case HiveParser.TOK_SHOW_TABLESTATUS:
+      case HiveParser.TOK_SHOWPARTITIONS:
+      case HiveParser.TOK_SHOWTABLES:
         return ast;
-      }
 
-    // allow export/import operations
-    case HiveParser.TOK_EXPORT:
-    case HiveParser.TOK_IMPORT:
-      return ast;
-
-    // In all other cases, throw an exception. Its a white-list of allowed operations.
-    default:
-      throw new SemanticException("Operation not supported.");
+      // In all other cases, throw an exception. Its a white-list of allowed operations.
+      default:
+        throw new SemanticException("Operation not supported.");
 
-    }
+      }
   }
 
   @Override
@@ -124,29 +163,69 @@ public class HCatSemanticAnalyzer extend
 
       switch (ast.getToken().getType()) {
 
-      case HiveParser.TOK_DESCTABLE:
-      case HiveParser.TOK_SHOWPARTITIONS:
+      case HiveParser.TOK_CREATETABLE:
+      case HiveParser.TOK_CREATEDATABASE:
+      case HiveParser.TOK_ALTERTABLE_PARTITION:
+      
+      // HCat will allow these operations to be performed.
+      // Database DDL
+      case HiveParser.TOK_SHOWDATABASES:
+      case HiveParser.TOK_DROPDATABASE:
+      case HiveParser.TOK_SWITCHDATABASE:
+      case HiveParser.TOK_DESCDATABASE:
+      case HiveParser.TOK_ALTERDATABASE_PROPERTIES:
+
+      // Index DDL
+      case HiveParser.TOK_ALTERINDEX_PROPERTIES:
+      case HiveParser.TOK_CREATEINDEX:
+      case HiveParser.TOK_DROPINDEX:
+      case HiveParser.TOK_SHOWINDEXES:
+      
+      // View DDL
+      //case HiveParser.TOK_ALTERVIEW_ADDPARTS:
+      case HiveParser.TOK_ALTERVIEW_DROPPARTS:
+      case HiveParser.TOK_ALTERVIEW_PROPERTIES:
+      case HiveParser.TOK_ALTERVIEW_RENAME:
+      case HiveParser.TOK_CREATEVIEW:
+      case HiveParser.TOK_DROPVIEW:
+      
+      // Authorization DDL
+      case HiveParser.TOK_CREATEROLE:
+      case HiveParser.TOK_DROPROLE:
+      case HiveParser.TOK_GRANT_ROLE:
+      case HiveParser.TOK_GRANT_WITH_OPTION:
+      case HiveParser.TOK_GRANT:
+      case HiveParser.TOK_REVOKE_ROLE:
+      case HiveParser.TOK_REVOKE:
+      case HiveParser.TOK_SHOW_GRANT:
+      case HiveParser.TOK_SHOW_ROLE_GRANT:
+      
+      // Misc DDL
+      case HiveParser.TOK_LOCKTABLE:
+      case HiveParser.TOK_UNLOCKTABLE:
+      case HiveParser.TOK_SHOWLOCKS:
+      case HiveParser.TOK_DESCFUNCTION:
+      case HiveParser.TOK_SHOWFUNCTIONS:
+      case HiveParser.TOK_EXPLAIN:
+      
+      // Table DDL
       case HiveParser.TOK_ALTERTABLE_ADDPARTS:
-      case HiveParser.TOK_DROPTABLE:
       case HiveParser.TOK_ALTERTABLE_ADDCOLS:
-      case HiveParser.TOK_ALTERTABLE_RENAME:
+      case HiveParser.TOK_ALTERTABLE_CHANGECOL_AFTER_POSITION:
+      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:
       case HiveParser.TOK_ALTERTABLE_DROPPARTS:
       case HiveParser.TOK_ALTERTABLE_PROPERTIES:
+      case HiveParser.TOK_ALTERTABLE_RENAME:
+      case HiveParser.TOK_ALTERTABLE_RENAMECOL:
+      case HiveParser.TOK_ALTERTABLE_REPLACECOLS:
       case HiveParser.TOK_ALTERTABLE_SERIALIZER:
-      case HiveParser.TOK_ALTERTABLE_SERDEPROPERTIES:
-      case HiveParser.TOK_ALTERTABLE_PARTITION:
-      case HiveParser.TOK_DESCDATABASE:
-      case HiveParser.TOK_SWITCHDATABASE: 
-      case HiveParser.TOK_DROPDATABASE:
-      case HiveParser.TOK_CREATEDATABASE:
-      case HiveParser.TOK_SHOWDATABASES:
+      case HiveParser.TOK_ALTERTABLE_TOUCH:
+      case HiveParser.TOK_DESCTABLE:
+      case HiveParser.TOK_DROPTABLE:
       case HiveParser.TOK_SHOW_TABLESTATUS:
+      case HiveParser.TOK_SHOWPARTITIONS:
       case HiveParser.TOK_SHOWTABLES:
-      case HiveParser.TOK_CREATETABLE: 
-        break;
-
-      case HiveParser.TOK_EXPORT:
-      case HiveParser.TOK_IMPORT:
         break;
 
       default:
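
Taken together with the e2e tests added below, the expanded whitelist means statements like the following are now expected to pass semantic analysis (the object names are the ones used in the new hcat.conf tests), while CTAS, EXPORT/IMPORT, ANALYZE, ARCHIVE, CONCATENATE and ALTER INDEX ... REBUILD remain rejected with "Operation not supported":

    alter database hcat_database_1 set dbproperties ('new.property'='some props');
    create view hcat_view_1_1 as select name, gpa, age from studenttab10k;
    create index hcat_index_1_1 on table hcat_index_1(a) as 'compact' with deferred rebuild;
    grant drop, select on table studenttab10k to role role1 with grant option;
    lock table studenttab10k shared;

(lock/unlock additionally need the ZooKeeper-backed concurrency settings shown in the HCat_Lock test.)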

Modified: incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java (original)
+++ incubator/hcatalog/trunk/src/java/org/apache/hcatalog/common/HCatConstants.java Sat Mar  3 20:52:19 2012
@@ -17,6 +17,9 @@
  */
 package org.apache.hcatalog.common;
 
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+
 public final class HCatConstants {
 
   /** The key for the input storage driver class name */
@@ -30,6 +33,9 @@ public final class HCatConstants {
   public static final String HCAT_RCFILE_ISD_CLASS = "org.apache.hcatalog.rcfile.RCFileInputDriver";
   public static final String HCAT_RCFILE_OSD_CLASS = "org.apache.hcatalog.rcfile.RCFileOutputDriver";
 
+  public static final String SEQUENCEFILE_INPUT = SequenceFileInputFormat.class.getName();
+  public static final String SEQUENCEFILE_OUTPUT = SequenceFileOutputFormat.class.getName();
+  
   public static final String HCAT_PIG_STORAGE_CLASS = "org.apache.pig.builtin.PigStorage";
   public static final String HCAT_PIG_LOADER = "hcat.pig.loader";
   public static final String HCAT_PIG_LOADER_ARGS = "hcat.pig.loader.args";

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/drivers/TestDriverHCat.pm Sat Mar  3 20:52:19 2012
@@ -178,7 +178,7 @@ sub runHCatCmdLine
     print FH $testCmd->{'hcat'} . "\n";
     close(FH);
 
-    Util::runHCatCmdFromFile($testCmd, $log, $hcatfiles[0], $stdoutfile, $stderrfile);
+    Util::runHCatCmdFromFile($testCmd, $log, $hcatfiles[0], $stdoutfile, $stderrfile, 1);
     $result{'rc'} = $? >> 8;
     $result{'stdout'} = `cat $outdir/stdout`;
     $result{'stderr'} = `cat $outdir/stderr`;

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hcat.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hcat.conf?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hcat.conf (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/hcat.conf Sat Mar  3 20:52:19 2012
@@ -113,6 +113,138 @@ show partitions hcat_altertable_1;\,
                                 ,'expected_out_regex' => 'b=2010-10-11'
                                 ,'not_expected_out_regex' => 'b=2010-10-10'
                                 },
+                                {
+                                 'num' => 3
+                                ,'hcat' => q\
+alter table studenttab10k touch;
+\
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 4
+                                ,'hcat' => q\
+drop table if exists hcat_altertable_4;
+create table hcat_altertable_4(name string, age int, gpa double) stored as textfile;
+alter table hcat_altertable_4 set serdeproperties('xyz'='0');
+\
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 5
+                                ,'hcat' => q\
+drop table if exists hcat_altertable_5;
+create table hcat_altertable_5(name string, age int, gpa double) stored as textfile;
+alter table hcat_altertable_5 clustered by (age) into 1 buckets;
+\
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 6
+                                ,'hcat' => q\
+drop table if exists hcat_altertable_6;
+create table hcat_altertable_6(name string, age int, gpa double) stored as textfile;
+alter table hcat_altertable_6 set serde 'org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe';
+\
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_Database',
+                        'tests' => [
+                                {
+                                 'num' => 1
+                                ,'hcat' => q\
+drop database if exists hcat_database_1;
+create database hcat_database_1;
+alter database hcat_database_1 set dbproperties ('new.property'='some props');
+describe database hcat_database_1;
+show databases;
+use hcat_database_1;
+use default;
+drop database hcat_database_1;\
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_View',
+                        'tests' => [
+                                {
+                                 'num' => 1
+                                ,'hcat' => q"
+drop table if exists hcat_view_1;
+drop table if exists hcat_view_1_1;
+drop table if exists hcat_view_1_2;
+create external table hcat_view_1 (name string, age int, gpa double) row format delimited fields terminated by '\t' stored as TEXTFILE location '/user/hcat/tests/data/studenttab10k';
+create view hcat_view_1_1 as select name, gpa, age from studenttab10k;
+create view hcat_view_1_2 partitioned on (age) as select name, gpa, age from studenttab10k;
+alter view hcat_view_1_1 set tblproperties('key'='value');
+show tables;
+describe hcat_view_1_1;
+describe hcat_view_1_2;
+alter view hcat_view_1_2 rename to hcat_view_1_3;
+drop view hcat_view_1_1;
+drop view hcat_view_1_3;"
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_Authorize',
+                        'tests' => [
+                                {
+                                 'num' => 1
+                                ,'hcat' => q\
+create role role1;
+grant drop, select on table studenttab10k to role role1 with grant option;
+show grant role role1 on table studenttab10k;
+revoke drop on table studenttab10k from role role1;
+drop role role1;\
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 2
+                                ,'hcat' => q\
+grant drop, select on table studenttab10k to user root;
+show grant user root on table studenttab10k;
+revoke drop, select on table studenttab10k from user root;\
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_Index',
+                        'tests' => [
+                                {
+                                 'num' => 1
+                                ,'hcat' => q\
+drop table if exists hcat_index_1;
+create table hcat_index_1 (a string) partitioned by (b string) stored as TEXTFILE;
+create index hcat_index_1_1 on table hcat_index_1(a) as 'compact' with deferred rebuild comment 'hcat test';
+alter index hcat_index_1_1 on hcat_index_1 set idxproperties ('prop1'='val1');
+show indexes on hcat_index_1;
+drop index hcat_index_1_1 on hcat_index_1;
+;\
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_Lock',
+                        'tests' => [
+                                {
+                                 'num' => 1,
+                                 'ignore' => 1, # this test need zookeeper setup, to ease the tests, ignore it by default, you may enable it if you have the right zookeeper setup
+                                ,'hcat' => q\
+set hive.support.concurrency=true;
+set hive.zookeeper.quorum=localhost;
+lock table studenttab10k shared;
+show locks;
+unlock table studenttab10k;
+;\
+                                ,'rc'   => 0
+                                },
                         ],
                 }, # end g
                 {
@@ -132,6 +264,90 @@ show tables;\,
                                 ,'rc'   => 0
                                 ,'expected_out_regex' => 'hcat_createtable_1'
                                 },
+                                {
+                                 'num' => 3
+                                ,'hcat' => q\
+show tables in default;\,
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 4
+                                ,'hcat' => q\
+explain select * from studenttab10k;\,
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 5
+                                ,'hcat' => q\
+show functions;\,
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 6
+                                ,'hcat' => q\
+describe function xpath_int;\,
+                                ,'rc'   => 0
+                                },
+                                {
+                                 'num' => 7
+                                ,'hcat' => q\
+dfs -ls;\,
+                                ,'rc'   => 0
+                                },
+                        ],
+                }, # end g
+                {
+                        'name' => 'HCat_Negative',
+                        'tests' => [
+                                {
+                                 'num' => 1
+                                ,'hcat' => q\
+create table hcat_negative_1 as select * from studenttab10k;
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 2
+                                ,'hcat' => q\
+alter index test111 on hcat_test2 rebuild;
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 3
+                                ,'hcat' => q\
+alter table studentparttab30k PARTITION (ds='1') CONCATENATE;
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 4
+                                ,'hcat' => q\
+alter table studentparttab30k archive PARTITION (ds='20110924');
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 5
+                                ,'hcat' => q\
+analyze table studenttab10k compute statistics;
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 6
+                                ,'hcat' => q\
+export table studenttab10k to '111';
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
+                                {
+                                 'num' => 7
+                                ,'hcat' => q\
+import from '111';
+;\
+                                ,'expected_err_regex' => 'Operation not supported'
+                                },
                         ],
                 }, # end g
          ]

Modified: incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf (original)
+++ incubator/hcatalog/trunk/src/test/e2e/hcatalog/tests/pig.conf Sat Mar  3 20:52:19 2012
@@ -237,6 +237,49 @@ store d into 'pig_hbase_2_2' using org.a
                                 }
                         ],
                 }, # end g
+                {
+                        'name' => 'Pig_HCAT_COOP',
+                        'tests' => [
+                                {
+                                 # test if Pig can load the table after various table schema change
+                                 'num' => 1
+                                ,'hcat_prep'=>q:drop table if exists pig_hcat_coop_1;
+create external table pig_hcat_coop_1 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as TEXTFILE;
+alter table pig_hcat_coop_1 add partition (b='1') location '/user/hcat/tests/data/studenttab10k';
+alter table pig_hcat_coop_1 partition(b='1') set fileformat TEXTFILE;
+alter table pig_hcat_coop_1 change gpa registration string;
+alter table pig_hcat_coop_1 add columns (contributions float);
+alter table pig_hcat_coop_1 add partition (b='2') location '/user/hcat/tests/data/votertab10k';
+alter table pig_hcat_coop_1 partition(b='2') set fileformat TEXTFILE;
+alter table pig_hcat_coop_1 replace columns (name string, age int);
+:
+                                ,'pig' => q\
+a = load 'pig_hcat_coop_1' using org.apache.hcatalog.pig.HCatLoader();
+store a into ':OUTPATH:';\,
+				,'sql'   => q\select name, age, '1' from studenttab10k union all select name, age, '2' from votertab10k;\
+                                ,'floatpostprocess' => 1
+                                ,'delimiter' => '	'
+                                },
+                                {
+                                 # test if Pig can load table after fileformat change and table schema change
+                                 'num' => 2
+                                ,'hcat_prep'=>q:drop table if exists pig_hcat_coop_2;
+create external table pig_hcat_coop_2 (name string, age int, gpa double) partitioned by (b string) row format delimited fields terminated by '\t' stored as TEXTFILE;
+alter table pig_hcat_coop_2 add partition (b='1') location '/user/hcat/tests/data/studenttab10k';
+alter table pig_hcat_coop_2 partition(b='1') set fileformat TEXTFILE;
+alter table pig_hcat_coop_2 add partition (b='2') location '/user/hcat/tests/data/all100krc';
+alter table pig_hcat_coop_2 partition(b='2') set fileformat RCFILE;
+alter table pig_hcat_coop_2 replace columns (age int, name string);
+:
+                                ,'pig' => q\
+a = load 'pig_hcat_coop_2' using org.apache.hcatalog.pig.HCatLoader();
+store a into ':OUTPATH:';\,
+				,'sql'   => q\select age, name, '1' from studenttab10k union all select age, name, '2' from all100krc;\
+                                ,'floatpostprocess' => 1
+                                ,'delimiter' => '	'
+                                }
+                        ],
+                }, # end g
 
          ]
 }

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/cli/TestSemanticAnalysis.java Sat Mar  3 20:52:19 2012
@@ -108,9 +108,6 @@ public class TestSemanticAnalysis extend
     Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(TextInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(HiveIgnoreKeyTextOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertNull(tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertNull(tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     List<String> partVals = new ArrayList<String>(1);
     partVals.add("2010-10-10");
@@ -119,10 +116,6 @@ public class TestSemanticAnalysis extend
     assertEquals(RCFileInputFormat.class.getName(),part.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),part.getSd().getOutputFormat());
 
-    Map<String,String> partParams = part.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), partParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), partParams.get(HCatConstants.HCAT_OSD_CLASS));
-
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -168,9 +161,6 @@ public class TestSemanticAnalysis extend
     assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     CommandProcessorResponse resp = hcatDriver.run("create table if not exists junit_sem_analysis (a int) stored as RCFILE");
     assertEquals(0, resp.getResponseCode());
@@ -182,9 +172,6 @@ public class TestSemanticAnalysis extend
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
 
-    tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -193,12 +180,10 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis touch");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
 
     hcatDriver.run("alter table junit_sem_analysis touch partition (b='12')");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
@@ -207,16 +192,13 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis change a a1 int");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
 
-    response = hcatDriver.run("alter table junit_sem_analysis change a a string");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    response = hcatDriver.run("alter table junit_sem_analysis change a1 a string");
+    assertEquals(0, response.getResponseCode());
 
     response = hcatDriver.run("alter table junit_sem_analysis change a a int after c");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -225,18 +207,19 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int, c string) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis replace columns (a1 tinyint)");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
 
     response = hcatDriver.run("alter table junit_sem_analysis add columns (d tinyint)");
     assertEquals(0, response.getResponseCode());
     assertNull(response.getErrorMessage());
+    
+    response = hcatDriver.run("describe extended junit_sem_analysis");
+    assertEquals(0, response.getResponseCode());
     Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     List<FieldSchema> cols = tbl.getSd().getCols();
-    assertEquals(3, cols.size());
-    assertTrue(cols.get(0).equals(new FieldSchema("a", "int", null)));
-    assertTrue(cols.get(1).equals(new FieldSchema("c", "string", null)));
-    assertTrue(cols.get(2).equals(new FieldSchema("d", "tinyint", null)));
+    assertEquals(2, cols.size());
+    assertTrue(cols.get(0).equals(new FieldSchema("a1", "tinyint", null)));
+    assertTrue(cols.get(1).equals(new FieldSchema("d", "tinyint", null)));
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -245,8 +228,7 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table junit_sem_analysis");
     hcatDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
     CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis clustered by (a) into 7 buckets");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("Operation not supported."));
+    assertEquals(0, response.getResponseCode());
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -259,10 +241,6 @@ public class TestSemanticAnalysis extend
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
 
-    Map<String,String> tblParams = tbl.getParameters();
-    assertEquals(RCFileInputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals(RCFileOutputDriver.class.getName(), tblParams.get(HCatConstants.HCAT_OSD_CLASS));
-
     hcatDriver.run("alter table junit_sem_analysis set fileformat INPUTFORMAT 'org.apache.hadoop.hive.ql.io.RCFileInputFormat' OUTPUTFORMAT " +
         "'org.apache.hadoop.hive.ql.io.RCFileOutputFormat' inputdriver 'mydriver' outputdriver 'yourdriver'");
     hcatDriver.run("desc extended junit_sem_analysis");
@@ -270,9 +248,6 @@ public class TestSemanticAnalysis extend
     tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    tblParams = tbl.getParameters();
-    assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
@@ -281,10 +256,8 @@ public class TestSemanticAnalysis extend
 
     hiveDriver.run("drop table junit_sem_analysis");
     hiveDriver.run("create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE");
-    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location '/some/path'");
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("FAILED: Error in semantic analysis: Operation not supported. Partitions can be added only in a table created through HCatalog. " +
-    		"It seems table junit_sem_analysis was not created through HCatalog."));
+    CommandProcessorResponse response = hcatDriver.run("alter table junit_sem_analysis add partition (b='2') location 'README.txt'");
+    assertEquals(0, response.getResponseCode());
     hiveDriver.run("drop table junit_sem_analysis");
   }
 
@@ -311,8 +284,7 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table junit_sem_analysis");
     query = "create table junit_sem_analysis (a int)";
     CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(10, response.getResponseCode());
-    assertTrue(response.getErrorMessage().contains("FAILED: Error in semantic analysis: STORED AS specification is either incomplete or incorrect."));
+    assertEquals(0, response.getResponseCode());
     hcatDriver.run("drop table junit_sem_analysis");
   }
 
@@ -327,9 +299,6 @@ public class TestSemanticAnalysis extend
     Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, tblName);
     assertEquals(RCFileInputFormat.class.getName(),tbl.getSd().getInputFormat());
     assertEquals(RCFileOutputFormat.class.getName(),tbl.getSd().getOutputFormat());
-    Map<String, String> tblParams = tbl.getParameters();
-    assertEquals("mydriver", tblParams.get(HCatConstants.HCAT_ISD_CLASS));
-    assertEquals("yourdriver", tblParams.get(HCatConstants.HCAT_OSD_CLASS));
 
     hcatDriver.run("drop table junit_sem_analysis");
   }
@@ -352,9 +321,7 @@ public class TestSemanticAnalysis extend
     query =  "create table junit_sem_analysis (a int) partitioned by (b string)  stored as SEQUENCEFILE";
 
     CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(10,response.getResponseCode());
-    assertEquals("FAILED: Error in semantic analysis: Operation not supported. HCatalog doesn't support Sequence File by default yet. You may specify it through INPUT/OUTPUT storage drivers.",
-        response.getErrorMessage());
+    assertEquals(0,response.getResponseCode());
 
   }
 
@@ -374,21 +341,19 @@ public class TestSemanticAnalysis extend
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) clustered by (a) into 10 buckets stored as TEXTFILE";
 
     CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(10,response.getResponseCode());
-    assertEquals("FAILED: Error in semantic analysis: Operation not supported. HCatalog doesn't allow Clustered By in create table.",
-        response.getErrorMessage());
+    assertEquals(0,response.getResponseCode());
   }
 
   public void testCTLFail() throws IOException, CommandNeedRetryException{
 
     hiveDriver.run("drop table junit_sem_analysis");
+    hiveDriver.run("drop table like_table");
     query =  "create table junit_sem_analysis (a int) partitioned by (b string) stored as RCFILE";
 
     hiveDriver.run(query);
     query = "create table like_table like junit_sem_analysis";
     CommandProcessorResponse response = hcatDriver.run(query);
-    assertEquals(10,response.getResponseCode());
-    assertEquals("FAILED: Error in semantic analysis: Operation not supported. CREATE TABLE LIKE is not supported.", response.getErrorMessage());
+    assertEquals(0,response.getResponseCode());
   }
 
   public void testCTLPass() throws IOException, MetaException, TException, NoSuchObjectException, CommandNeedRetryException{
@@ -406,8 +371,7 @@ public class TestSemanticAnalysis extend
     hcatDriver.run("drop table "+likeTbl);
     query = "create table like_table like junit_sem_analysis";
     CommandProcessorResponse resp = hcatDriver.run(query);
-    assertEquals(10, resp.getResponseCode());
-    assertEquals("FAILED: Error in semantic analysis: Operation not supported. CREATE TABLE LIKE is not supported.", resp.getErrorMessage());
+    assertEquals(0, resp.getResponseCode());
 //    Table tbl = msc.getTable(MetaStoreUtils.DEFAULT_DATABASE_NAME, likeTbl);
 //    assertEquals(likeTbl,tbl.getTableName());
 //    List<FieldSchema> cols = tbl.getSd().getCols();

Modified: incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java
URL: http://svn.apache.org/viewvc/incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java?rev=1296700&r1=1296699&r2=1296700&view=diff
==============================================================================
--- incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java (original)
+++ incubator/hcatalog/trunk/src/test/org/apache/hcatalog/security/TestHdfsAuthorizationProvider.java Sat Mar  3 20:52:19 2012
@@ -496,7 +496,7 @@ public class TestHdfsAuthorizationProvid
     String relPath = new Random().nextInt() + "/mypart";
     Path partPath = new Path(getTablePath("default", "foo1"), relPath);
     whFs.mkdirs(partPath, perm500);
-    execFail("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
+    exec("ALTER TABLE foo1 ADD PARTITION (b='2010-10-10') LOCATION '%s'", partPath);
   }
   
   @Test