You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by ke...@apache.org on 2012/11/07 03:48:49 UTC

svn commit: r1406427 - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ test/org/apache/hadoop/hive/metastore/

Author: kevinwilfong
Date: Wed Nov  7 02:48:49 2012
New Revision: 1406427

URL: http://svn.apache.org/viewvc?rev=1406427&view=rev
Log:
HIVE-3524. Storing certain Exception objects thrown in HiveMetaStore.java in MetaStoreEndFunctionContext. (Maheshwaran Srinivasan via kevinwilfong)

Added:
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEndFunctionContext.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1406427&r1=1406426&r2=1406427&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Wed Nov  7 02:48:49 2012
@@ -465,8 +465,8 @@ public class HiveMetaStore extends Thrif
       return startFunction(function, " : db=" + db + " tbl=" + tbl + "partition=" + partName);
     }
 
-    public void endFunction(String function, boolean successful) {
-      endFunction(function, new MetaStoreEndFunctionContext(successful));
+    public void endFunction(String function, boolean successful, Exception e) {
+      endFunction(function, new MetaStoreEndFunctionContext(successful, e));
     }
 
     public void endFunction(String function, MetaStoreEndFunctionContext context) {
@@ -566,6 +566,7 @@ public class HiveMetaStore extends Thrif
         throws AlreadyExistsException, InvalidObjectException, MetaException {
       startFunction("create_database", ": " + db.toString());
       boolean success = false;
+      Exception ex = null;
       try {
         try {
           if (null != get_database(db.getName())) {
@@ -577,8 +578,21 @@ public class HiveMetaStore extends Thrif
 
         create_database_core(getMS(), db);
         success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("create_database", success);
+        endFunction("create_database", success, ex);
       }
     }
 
@@ -586,17 +600,21 @@ public class HiveMetaStore extends Thrif
         MetaException {
       startFunction("get_database", ": " + name);
       Database db = null;
+      Exception ex = null;
       try {
         db = getMS().getDatabase(name);
       } catch (MetaException e) {
+        ex = e;
         throw e;
       } catch (NoSuchObjectException e) {
+        ex = e;
         throw e;
       } catch (Exception e) {
+        ex = e;
         assert (e instanceof RuntimeException);
         throw (RuntimeException) e;
       } finally {
-        endFunction("get_database", db != null);
+        endFunction("get_database", db != null, ex);
       }
       return db;
     }
@@ -605,11 +623,25 @@ public class HiveMetaStore extends Thrif
         throws NoSuchObjectException, TException, MetaException {
       startFunction("alter_database" + dbName);
       boolean success = false;
+      Exception ex = null;
       try {
         getMS().alterDatabase(dbName, db);
         success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("alter_database", success);
+        endFunction("alter_database", success, ex);
       }
     }
 
@@ -738,18 +770,33 @@ public class HiveMetaStore extends Thrif
 
       startFunction("drop_database", ": " + dbName);
       if (DEFAULT_DATABASE_NAME.equalsIgnoreCase(dbName)) {
-        endFunction("drop_database", false);
+        endFunction("drop_database", false, null);
         throw new MetaException("Can not drop default database");
       }
 
       boolean success = false;
+      Exception ex = null;
       try {
         drop_database_core(getMS(), dbName, deleteData, cascade);
         success = true;
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_database", success);
+        endFunction("drop_database", success, ex);
       }
     }
 
@@ -757,10 +804,20 @@ public class HiveMetaStore extends Thrif
       startFunction("get_databases", ": " + pattern);
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getDatabases(pattern);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_databases", ret != null);
+        endFunction("get_databases", ret != null, ex);
       }
       return ret;
     }
@@ -769,10 +826,20 @@ public class HiveMetaStore extends Thrif
       startFunction("get_all_databases");
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getAllDatabases();
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_all_databases", ret != null);
+        endFunction("get_all_databases", ret != null, ex);
       }
       return ret;
     }
@@ -802,11 +869,25 @@ public class HiveMetaStore extends Thrif
         MetaException, InvalidObjectException {
       startFunction("create_type", ": " + type.toString());
       boolean success = false;
+      Exception ex = null;
       try {
         create_type_core(getMS(), type);
         success = true;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("create_type", success);
+        endFunction("create_type", success, ex);
       }
 
       return success;
@@ -816,13 +897,25 @@ public class HiveMetaStore extends Thrif
       startFunction("get_type", ": " + name);
 
       Type ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getType(name);
         if (null == ret) {
           throw new NoSuchObjectException("Type \"" + name + "\" not found.");
         }
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_type", ret != null);
+        endFunction("get_type", ret != null, ex);
       }
       return ret;
     }
@@ -856,11 +949,23 @@ public class HiveMetaStore extends Thrif
       startFunction("drop_type", ": " + name);
 
       boolean success = false;
+      Exception ex = null;
       try {
         // TODO:pc validate that there are no types that refer to this
         success = getMS().dropType(name);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_type", success);
+        endFunction("drop_type", success, ex);
       }
       return success;
     }
@@ -868,7 +973,7 @@ public class HiveMetaStore extends Thrif
     public Map<String, Type> get_type_all(String name) throws MetaException {
       // TODO Auto-generated method stub
       startFunction("get_type_all", ": " + name);
-      endFunction("get_type_all", false);
+      endFunction("get_type_all", false, null);
       throw new MetaException("Not yet implemented");
     }
 
@@ -978,13 +1083,28 @@ public class HiveMetaStore extends Thrif
         MetaException, InvalidObjectException {
       startFunction("create_table", ": " + tbl.toString());
       boolean success = false;
+      Exception ex = null;
       try {
         create_table_core(getMS(), tbl, envContext);
         success = true;
       } catch (NoSuchObjectException e) {
+        ex = e;
         throw new InvalidObjectException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("create_table", success);
+        endFunction("create_table", success, ex);
       }
     }
 
@@ -1171,13 +1291,26 @@ public class HiveMetaStore extends Thrif
       startTableFunction("drop_table", dbname, name);
 
       boolean success = false;
+      Exception ex = null;
       try {
         drop_table_core(getMS(), dbname, name, deleteData);
         success = true;
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_table", success);
+        endFunction("drop_table", success, ex);
       }
 
     }
@@ -1201,14 +1334,26 @@ public class HiveMetaStore extends Thrif
         NoSuchObjectException {
       Table t = null;
       startTableFunction("get_table", dbname, name);
+      Exception ex = null;
       try {
         t = getMS().getTable(dbname, name);
         if (t == null) {
           throw new NoSuchObjectException(dbname + "." + name
               + " table not found");
         }
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_table", t != null);
+        endFunction("get_table", t != null, ex);
       }
       return t;
     }
@@ -1233,6 +1378,7 @@ public class HiveMetaStore extends Thrif
         throws MetaException, InvalidOperationException, UnknownDBException {
       List<Table> tables = null;
       startMultiTableFunction("get_multi_table", dbname, names);
+      Exception ex = null;
       try {
 
         if (dbname == null || dbname.isEmpty()) {
@@ -1243,8 +1389,21 @@ public class HiveMetaStore extends Thrif
           throw new InvalidOperationException(dbname + " cannot find null tables");
         }
         tables = getMS().getTableObjectsByName(dbname, names);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else if (e instanceof UnknownDBException) {
+          throw (UnknownDBException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_multi_table", tables != null);
+        endFunction("get_multi_table", tables != null, ex);
       }
       return tables;
     }
@@ -1255,6 +1414,7 @@ public class HiveMetaStore extends Thrif
         throws MetaException, InvalidOperationException, UnknownDBException {
       List<String> tables = null;
       startFunction("get_table_names_by_filter", ": db = " + dbName + ", filter = " + filter);
+      Exception ex = null;
       try {
         if (dbName == null || dbName.isEmpty()) {
           throw new UnknownDBException("DB name is null or empty");
@@ -1263,15 +1423,28 @@ public class HiveMetaStore extends Thrif
           throw new InvalidOperationException(filter + " cannot apply null filter");
         }
         tables = getMS().listTableNamesByFilter(dbName, filter, maxTables);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else if (e instanceof UnknownDBException) {
+          throw (UnknownDBException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_table_names_by_filter", tables != null);
+        endFunction("get_table_names_by_filter", tables != null, ex);
       }
       return tables;
     }
 
     public boolean set_table_parameters(String dbname, String name,
         Map<String, String> params) throws NoSuchObjectException, MetaException {
-      endFunction(startTableFunction("set_table_parameters", dbname, name), false);
+      endFunction(startTableFunction("set_table_parameters", dbname, name), false, null);
       // TODO Auto-generated method stub
       return false;
     }
@@ -1376,10 +1549,24 @@ public class HiveMetaStore extends Thrif
       }
 
       Partition ret = null;
+      Exception ex = null;
       try {
         ret = append_partition_common(getMS(), dbName, tableName, part_vals);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("append_partition", ret != null);
+        endFunction("append_partition", ret != null, ex);
       }
       return ret;
     }
@@ -1423,10 +1610,24 @@ public class HiveMetaStore extends Thrif
       }
 
       Integer ret = null;
+      Exception ex = null;
       try {
         ret = add_partitions_core(getMS(), parts);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("add_partition", ret != null);
+        endFunction("add_partition", ret != null, ex);
       }
       return ret;
     }
@@ -1592,10 +1793,24 @@ public class HiveMetaStore extends Thrif
       startTableFunction("add_partition",
           part.getDbName(), part.getTableName());
       Partition ret = null;
+      Exception ex = null;
       try {
         ret = add_partition_core(getMS(), part, envContext);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("add_partition", ret != null);
+        endFunction("add_partition", ret != null, ex);
       }
       return ret;
     }
@@ -1675,12 +1890,27 @@ public class HiveMetaStore extends Thrif
       LOG.info("Partition values:" + part_vals);
 
       boolean ret = false;
+      Exception ex = null;
       try {
         ret = drop_partition_common(getMS(), db_name, tbl_name, part_vals, deleteData);
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_partition", ret);
+        endFunction("drop_partition", ret, ex);
       }
       return ret;
 
@@ -1691,10 +1921,22 @@ public class HiveMetaStore extends Thrif
       startPartitionFunction("get_partition", db_name, tbl_name, part_vals);
 
       Partition ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartition(db_name, tbl_name, part_vals);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partition", ret != null);
+        endFunction("get_partition", ret != null, ex);
       }
       return ret;
     }
@@ -1708,13 +1950,28 @@ public class HiveMetaStore extends Thrif
           part_vals);
 
       Partition ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartitionWithAuth(db_name, tbl_name, part_vals,
             user_name, group_names);
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new NoSuchObjectException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partition_with_auth", ret != null);
+        endFunction("get_partition_with_auth", ret != null, ex);
       }
       return ret;
     }
@@ -1724,10 +1981,22 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_partitions", db_name, tbl_name);
 
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartitions(db_name, tbl_name, max_parts);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions", ret != null);
+        endFunction("get_partitions", ret != null, ex);
       }
       return ret;
 
@@ -1741,13 +2010,28 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_partitions_with_auth", dbName, tblName);
 
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartitionsWithAuth(dbName, tblName, maxParts,
             userName, groupNames);
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new NoSuchObjectException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_with_auth", ret != null);
+        endFunction("get_partitions_with_auth", ret != null, ex);
       }
       return ret;
 
@@ -1758,10 +2042,20 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_partition_names", db_name, tbl_name);
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().listPartitionNames(db_name, tbl_name, max_parts);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partition_names", ret != null);
+        endFunction("get_partition_names", ret != null, ex);
       }
       return ret;
     }
@@ -1806,6 +2100,7 @@ public class HiveMetaStore extends Thrif
       }
 
       Partition oldPart = null;
+      Exception ex = null;
       try {
         firePreEvent(new PreAlterPartitionEvent(db_name, tbl_name, part_vals, new_part, this));
 
@@ -1818,11 +2113,26 @@ public class HiveMetaStore extends Thrif
           listener.onAlterPartition(alterPartitionEvent);
         }
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
       } catch (AlreadyExistsException e) {
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("alter_partition", oldPart != null);
+        endFunction("alter_partition", oldPart != null, ex);
       }
       return;
     }
@@ -1843,6 +2153,7 @@ public class HiveMetaStore extends Thrif
       // all partitions are altered atomically
       // all prehooks are fired together followed by all post hooks
       List<Partition> oldParts = null;
+      Exception ex = null;
       try {
         for (Partition tmpPart : new_parts) {
           try {
@@ -1872,18 +2183,33 @@ public class HiveMetaStore extends Thrif
           }
         }
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
       } catch (AlreadyExistsException e) {
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("alter_partition", oldParts != null);
+        endFunction("alter_partition", oldParts != null, ex);
       }
       return;
     }
 
     public boolean create_index(Index index_def)
         throws IndexAlreadyExistsException, MetaException {
-      endFunction(startFunction("create_index"), false);
+      endFunction(startFunction("create_index"), false, null);
       // TODO Auto-generated method stub
       throw new MetaException("Not yet implemented");
     }
@@ -1897,19 +2223,32 @@ public class HiveMetaStore extends Thrif
           .currentTimeMillis() / 1000));
 
       boolean success = false;
+      Exception ex = null;
       try {
         getMS().alterIndex(dbname, base_table_name, index_name, newIndex);
         success = true;
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("alter_index", success);
+        endFunction("alter_index", success, ex);
       }
       return;
     }
 
     public String getVersion() throws TException {
-      endFunction(startFunction("getVersion"), true);
+      endFunction(startFunction("getVersion"), true, null);
       return "3.0";
     }
 
@@ -1942,11 +2281,10 @@ public class HiveMetaStore extends Thrif
             .currentTimeMillis() / 1000));
       }
       boolean success = false;
+      Exception ex = null;
       try {
         Table oldt = get_table(dbname, name);
-
         firePreEvent(new PreAlterTableEvent(oldt, newTable, this));
-
         alterHandler.alterTable(getMS(), wh, dbname, name, newTable);
         success = true;
 
@@ -1958,9 +2296,21 @@ public class HiveMetaStore extends Thrif
         }
       } catch (NoSuchObjectException e) {
         // thrown when the table to be altered does not exist
+        ex = e;
         throw new InvalidOperationException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof InvalidOperationException) {
+          throw (InvalidOperationException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("alter_table", success);
+        endFunction("alter_table", success, ex);
       }
     }
 
@@ -1969,10 +2319,20 @@ public class HiveMetaStore extends Thrif
       startFunction("get_tables", ": db=" + dbname + " pat=" + pattern);
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getTables(dbname, pattern);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_tables", ret != null);
+        endFunction("get_tables", ret != null, ex);
       }
       return ret;
     }
@@ -1981,10 +2341,20 @@ public class HiveMetaStore extends Thrif
       startFunction("get_all_tables", ": db=" + dbname);
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getAllTables(dbname);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_all_tables", ret != null);
+        endFunction("get_all_tables", ret != null, ex);
       }
       return ret;
     }
@@ -1997,6 +2367,7 @@ public class HiveMetaStore extends Thrif
 
       Table tbl;
       List<FieldSchema> ret = null;
+      Exception ex = null;
       try {
         try {
           tbl = get_table(db, base_table_name);
@@ -2016,8 +2387,21 @@ public class HiveMetaStore extends Thrif
             throw new MetaException(e.getMessage());
           }
         }
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof UnknownDBException) {
+          throw (UnknownDBException) e;
+        } else if (e instanceof UnknownTableException) {
+          throw (UnknownTableException) e;
+        } else if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_fields", ret != null);
+        endFunction("get_fields", ret != null, ex);
       }
 
       return ret;
@@ -2040,6 +2424,7 @@ public class HiveMetaStore extends Thrif
         throws MetaException, UnknownTableException, UnknownDBException {
       startFunction("get_schema", ": db=" + db + "tbl=" + tableName);
       boolean success = false;
+      Exception ex = null;
       try {
         String[] names = tableName.split("\\.");
         String base_table_name = names[0];
@@ -2063,8 +2448,21 @@ public class HiveMetaStore extends Thrif
         }
         success = true;
         return fieldSchemas;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof UnknownDBException) {
+          throw (UnknownDBException) e;
+        } else if (e instanceof UnknownTableException) {
+          throw (UnknownTableException) e;
+        } else if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_schema", success);
+        endFunction("get_schema", success, ex);
       }
     }
 
@@ -2083,6 +2481,7 @@ public class HiveMetaStore extends Thrif
       startFunction("get_config_value", ": name=" + name + " defaultValue="
           + defaultValue);
       boolean success = false;
+      Exception ex = null;
       try {
         if (name == null) {
           success = true;
@@ -2105,8 +2504,19 @@ public class HiveMetaStore extends Thrif
         }
         success = true;
         return toReturn;
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof ConfigValSecurityException) {
+          throw (ConfigValSecurityException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          TException te = new TException(e.toString());
+          te.initCause(e);
+          throw te;
+        }
       } finally {
-        endFunction("get_config_value", success);
+        endFunction("get_config_value", success, ex);
       }
     }
 
@@ -2160,11 +2570,24 @@ public class HiveMetaStore extends Thrif
           + tbl_name + " part=" + part_name);
 
       Partition ret = null;
-
+      Exception ex = null;
       try {
         ret = get_partition_by_name_core(getMS(), db_name, tbl_name, part_name);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partition_by_name", ret != null);
+        endFunction("get_partition_by_name", ret != null, ex);
       }
       return ret;
     }
@@ -2176,12 +2599,28 @@ public class HiveMetaStore extends Thrif
           + tbl_name + " part=" + part_name);
 
       Partition ret = null;
+      Exception ex = null;
       try {
         RawStore ms = getMS();
         List<String> partVals = getPartValsFromName(ms, db_name, tbl_name, part_name);
         ret = append_partition_common(ms, db_name, tbl_name, partVals);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("append_partition_by_name", ret != null);
+        endFunction("append_partition_by_name", ret != null, ex);
       }
       return ret;
     }
@@ -2209,13 +2648,28 @@ public class HiveMetaStore extends Thrif
           + tbl_name + " part=" + part_name);
 
       boolean ret = false;
+      Exception ex = null;
       try {
         ret = drop_partition_by_name_core(getMS(), db_name, tbl_name,
             part_name, deleteData);
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_partition_by_name", ret);
+        endFunction("drop_partition_by_name", ret, ex);
       }
 
       return ret;
@@ -2228,11 +2682,25 @@ public class HiveMetaStore extends Thrif
       startPartitionFunction("get_partitions_ps", db_name, tbl_name, part_vals);
 
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = get_partitions_ps_with_auth(db_name, tbl_name, part_vals,
             max_parts, null, null);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_ps", ret != null);
+        endFunction("get_partitions_ps", ret != null, ex);
       }
 
       return ret;
@@ -2246,13 +2714,28 @@ public class HiveMetaStore extends Thrif
       startPartitionFunction("get_partitions_ps_with_auth", db_name, tbl_name,
           part_vals);
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().listPartitionsPsWithAuth(db_name, tbl_name, part_vals, max_parts,
             userName, groupNames);
       } catch (InvalidObjectException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_ps_with_auth", ret != null);
+        endFunction("get_partitions_ps_with_auth", ret != null, ex);
       }
       return ret;
     }
@@ -2263,10 +2746,24 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException, NoSuchObjectException {
       startPartitionFunction("get_partitions_names_ps", db_name, tbl_name, part_vals);
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().listPartitionNamesPs(db_name, tbl_name, part_vals, max_parts);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_names_ps", ret != null);
+        endFunction("get_partitions_names_ps", ret != null, ex);
       }
       return ret;
     }
@@ -2297,10 +2794,26 @@ public class HiveMetaStore extends Thrif
         throws InvalidObjectException, AlreadyExistsException, MetaException, TException {
       startFunction("add_index", ": " + newIndex.toString() + " " + indexTable.toString());
       Index ret = null;
+      Exception ex = null;
       try {
         ret = add_index_core(getMS(), newIndex, indexTable);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof InvalidObjectException) {
+          throw (InvalidObjectException) e;
+        } else if (e instanceof AlreadyExistsException) {
+          throw (AlreadyExistsException) e;
+        } else if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("add_index", ret != null);
+        endFunction("add_index", ret != null, ex);
       }
       return ret;
     }
@@ -2370,13 +2883,28 @@ public class HiveMetaStore extends Thrif
           + tblName + " index=" + indexName);
 
       boolean ret = false;
+      Exception ex = null;
       try {
         ret = drop_index_by_name_core(getMS(), dbName, tblName,
             indexName, deleteData);
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_index_by_name", ret);
+        endFunction("drop_index_by_name", ret, ex);
       }
 
       return ret;
@@ -2449,11 +2977,24 @@ public class HiveMetaStore extends Thrif
           + tblName + " index=" + indexName);
 
       Index ret = null;
-
+      Exception ex = null;
       try {
         ret = get_index_by_name_core(getMS(), dbName, tblName, indexName);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("drop_index_by_name", ret != null);
+        endFunction("get_index_by_name", ret != null, ex);
       }
       return ret;
     }
@@ -2476,10 +3017,22 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_index_names", dbName, tblName);
 
       List<String> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().listIndexNames(dbName, tblName, maxIndexes);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_index_names", ret != null);
+        endFunction("get_index_names", ret != null, ex);
       }
       return ret;
     }
@@ -2491,10 +3044,24 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_indexes", dbName, tblName);
 
       List<Index> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getIndexes(dbName, tblName, maxIndexes);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_indexes", ret != null);
+        endFunction("get_indexes", ret != null, ex);
       }
       return ret;
     }
@@ -2506,10 +3073,24 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_partitions_by_filter", dbName, tblName);
 
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartitionsByFilter(dbName, tblName, filter, maxParts);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_by_filter", ret != null);
+        endFunction("get_partitions_by_filter", ret != null, ex);
       }
       return ret;
     }
@@ -2522,10 +3103,24 @@ public class HiveMetaStore extends Thrif
       startTableFunction("get_partitions_by_names", dbName, tblName);
 
       List<Partition> ret = null;
+      Exception ex = null;
       try {
         ret = getMS().getPartitionsByNames(dbName, tblName, partNames);
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_partitions_by_names", ret != null);
+        endFunction("get_partitions_by_names", ret != null, ex);
       }
       return ret;
     }
@@ -3034,13 +3629,26 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException {
       startFunction("cancel_delegation_token");
       boolean success = false;
+      Exception ex = null;
       try {
         HiveMetaStore.cancelDelegationToken(token_str_form);
         success = true;
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("cancel_delegation_token", success);
+        endFunction("cancel_delegation_token", success, ex);
       }
     }
 
@@ -3049,12 +3657,25 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException {
       startFunction("renew_delegation_token");
       Long ret = null;
+      Exception ex = null;
       try {
         ret = HiveMetaStore.renewDelegationToken(token_str_form);
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("renew_delegation_token", ret != null);
+        endFunction("renew_delegation_token", ret != null, ex);
       }
       return ret;
     }
@@ -3065,16 +3686,30 @@ public class HiveMetaStore extends Thrif
         throws MetaException, TException {
       startFunction("get_delegation_token");
       String ret = null;
+      Exception ex = null;
       try {
         ret =
             HiveMetaStore.getDelegationToken(token_owner,
                 renewer_kerberos_principal_name);
       } catch (IOException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
       } catch (InterruptedException e) {
+        ex = e;
         throw new MetaException(e.getMessage());
+      } catch (Exception e) {
+        ex = e;
+        if (e instanceof MetaException) {
+          throw (MetaException) e;
+        } else if (e instanceof TException) {
+          throw (TException) e;
+        } else {
+          MetaException me = new MetaException(e.toString());
+          me.initCause(e);
+          throw me;
+        }
       } finally {
-        endFunction("get_delegation_token", ret != null);
+        endFunction("get_delegation_token", ret != null, ex);
       }
       return ret;
     }
@@ -3087,31 +3722,11 @@ public class HiveMetaStore extends Thrif
         InvalidPartitionException, UnknownPartitionException {
 
       Table tbl = null;
+      Exception ex = null;
       try {
         startPartitionFunction("markPartitionForEvent", db_name, tbl_name, partName);
-        try {
-          firePreEvent(new PreLoadPartitionDoneEvent(db_name, tbl_name, partName, this));
-          tbl = getMS().markPartitionForEvent(db_name, tbl_name, partName, evtType);
-        } catch (Exception original) {
-          LOG.error(original);
-          if (original instanceof NoSuchObjectException) {
-            throw (NoSuchObjectException) original;
-          } else if (original instanceof UnknownTableException) {
-            throw (UnknownTableException) original;
-          } else if (original instanceof UnknownDBException) {
-            throw (UnknownDBException) original;
-          } else if (original instanceof UnknownPartitionException) {
-            throw (UnknownPartitionException) original;
-          } else if (original instanceof InvalidPartitionException) {
-            throw (InvalidPartitionException) original;
-          } else if (original instanceof MetaException) {
-            throw (MetaException) original;
-          } else {
-            MetaException me = new MetaException(original.toString());
-            me.initCause(original);
-            throw me;
-          }
-        }
+        firePreEvent(new PreLoadPartitionDoneEvent(db_name, tbl_name, partName, this));
+        tbl = getMS().markPartitionForEvent(db_name, tbl_name, partName, evtType);
         if (null == tbl) {
           throw new UnknownTableException("Table: " + tbl_name + " not found.");
         } else {
@@ -3119,8 +3734,28 @@ public class HiveMetaStore extends Thrif
             listener.onLoadPartitionDone(new LoadPartitionDoneEvent(true, tbl, partName, this));
           }
         }
+      } catch (Exception original) {
+        ex = original;
+        LOG.error(original);
+        if (original instanceof NoSuchObjectException) {
+          throw (NoSuchObjectException) original;
+        } else if (original instanceof UnknownTableException) {
+          throw (UnknownTableException) original;
+        } else if (original instanceof UnknownDBException) {
+          throw (UnknownDBException) original;
+        } else if (original instanceof UnknownPartitionException) {
+          throw (UnknownPartitionException) original;
+        } else if (original instanceof InvalidPartitionException) {
+          throw (InvalidPartitionException) original;
+        } else if (original instanceof MetaException) {
+          throw (MetaException) original;
+        } else {
+          MetaException me = new MetaException(original.toString());
+          me.initCause(original);
+          throw me;
+        }
       } finally {
-        endFunction("markPartitionForEvent", tbl != null);
+        endFunction("markPartitionForEvent", tbl != null, ex);
       }
     }
 
@@ -3132,10 +3767,12 @@ public class HiveMetaStore extends Thrif
 
       startPartitionFunction("isPartitionMarkedForEvent", db_name, tbl_name, partName);
       Boolean ret = null;
+      Exception ex = null;
       try {
         ret = getMS().isPartitionMarkedForEvent(db_name, tbl_name, partName, evtType);
       } catch (Exception original) {
         LOG.error(original);
+        ex = original;
         if (original instanceof NoSuchObjectException) {
           throw (NoSuchObjectException) original;
         } else if (original instanceof UnknownTableException) {
@@ -3154,7 +3791,7 @@ public class HiveMetaStore extends Thrif
           throw me;
         }
       } finally {
-        endFunction("isPartitionMarkedForEvent", ret != null);
+        endFunction("isPartitionMarkedForEvent", ret != null, ex);
       }
 
       return ret;

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEndFunctionContext.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEndFunctionContext.java?rev=1406427&r1=1406426&r2=1406427&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEndFunctionContext.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreEndFunctionContext.java Wed Nov  7 02:48:49 2012
@@ -28,9 +28,15 @@ public class MetaStoreEndFunctionContext
    * whether method was successful or not.
    */
   private final boolean success;
+  private final Exception e;
 
-  public MetaStoreEndFunctionContext(boolean success) {
+  public MetaStoreEndFunctionContext(boolean success, Exception e) {
     this.success = success;
+    this.e = e;
+  }
+
+  public MetaStoreEndFunctionContext(boolean success) {
+    this(success, null);
   }
 
   /**
@@ -40,4 +46,8 @@ public class MetaStoreEndFunctionContext
     return success;
   }
 
+  public Exception getException() {
+    return e;
+  }
+
 }

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java?rev=1406427&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/DummyEndFunctionListener.java Wed Nov  7 02:48:49 2012
@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.conf.Configuration;
+
+
+/** A dummy implementation for
+ * {@link org.apache.hadoop.hive.metastore.MetaStoreEndFunctionListener}
+ * for testing purposes.
+ */
+public class DummyEndFunctionListener extends MetaStoreEndFunctionListener {
+
+  public static final List<String> funcNameList = new ArrayList<String>();
+  public static final List<MetaStoreEndFunctionContext> contextList =
+    new ArrayList<MetaStoreEndFunctionContext>();
+
+  public DummyEndFunctionListener(Configuration config) {
+    super(config);
+  }
+
+  @Override
+  public void onEndFunction(String functionName, MetaStoreEndFunctionContext context) {
+    funcNameList.add(functionName);
+    contextList.add(context);
+  }
+
+}

Added: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java?rev=1406427&view=auto
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java (added)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestMetaStoreEndFunctionListener.java Wed Nov  7 02:48:49 2012
@@ -0,0 +1,154 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+
+
+
+import junit.framework.TestCase;
+
+import org.apache.hadoop.hive.cli.CliSessionState;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.Driver;
+import org.apache.hadoop.hive.ql.session.SessionState;
+/**
+ * TestMetaStoreEventListener. Test case for
+ * {@link org.apache.hadoop.hive.metastore.MetaStoreEndFunctionListener}
+ */
+public class TestMetaStoreEndFunctionListener extends TestCase {
+  private static final String msPort = "20002";
+  private HiveConf hiveConf;
+  private HiveMetaStoreClient msc;
+  private Driver driver;
+
+  private static class RunMS implements Runnable {
+
+    @Override
+    public void run() {
+      try {
+        HiveMetaStore.main(new String[]{msPort});
+      } catch (Throwable e) {
+        e.printStackTrace(System.err);
+        assert false;
+      }
+    }
+  }
+
+  @Override
+  protected void setUp() throws Exception {
+
+    super.setUp();
+    System.setProperty("hive.metastore.event.listeners",
+        DummyListener.class.getName());
+    System.setProperty("hive.metastore.pre.event.listeners",
+        DummyPreListener.class.getName());
+    System.setProperty("hive.metastore.end.function.listeners",
+        DummyEndFunctionListener.class.getName());
+    Thread t = new Thread(new RunMS());
+    t.start();
+    Thread.sleep(40000);
+    hiveConf = new HiveConf(this.getClass());
+    hiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, "thrift://localhost:" + msPort);
+    hiveConf.setIntVar(HiveConf.ConfVars.METASTORETHRIFTRETRIES, 3);
+    hiveConf.set(HiveConf.ConfVars.PREEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.POSTEXECHOOKS.varname, "");
+    hiveConf.set(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "false");
+    SessionState.start(new CliSessionState(hiveConf));
+    msc = new HiveMetaStoreClient(hiveConf, null);
+    driver = new Driver(hiveConf);
+  }
+
+  @Override
+  protected void tearDown() throws Exception {
+    super.tearDown();
+  }
+
+  public void testEndFunctionListener() throws Exception {
+    /* Objective here is to ensure that when exceptions are thrown in HiveMetaStore in API methods
+     * they bubble up and are stored in the MetaStoreEndFunctionContext objects
+     */
+    String dbName = "tmpdb";
+    String tblName = "tmptbl";
+    int listSize = 0;
+
+    driver.run("create database " + dbName);
+
+    try {
+      msc.getDatabase("UnknownDB");
+    }
+    catch (Exception e) {
+    }
+    listSize = DummyEndFunctionListener.funcNameList.size();
+    String func_name = DummyEndFunctionListener.funcNameList.get(listSize-1);
+    MetaStoreEndFunctionContext context = DummyEndFunctionListener.contextList.get(listSize-1);
+    assertEquals(func_name,"get_database");
+    assertFalse(context.isSuccess());
+    Exception e = context.getException();
+    assertTrue((e!=null));
+    assertTrue((e instanceof NoSuchObjectException));
+
+    driver.run("use " + dbName);
+    driver.run(String.format("create table %s (a string) partitioned by (b string)", tblName));
+
+    try {
+      msc.getTable(dbName, "Unknown");
+    }
+    catch (Exception e1) {
+    }
+    listSize = DummyEndFunctionListener.funcNameList.size();
+    func_name = DummyEndFunctionListener.funcNameList.get(listSize-1);
+    context = DummyEndFunctionListener.contextList.get(listSize-1);
+    assertEquals(func_name,"get_table");
+    assertFalse(context.isSuccess());
+    e = context.getException();
+    assertTrue((e!=null));
+    assertTrue((e instanceof NoSuchObjectException));
+
+    try {
+      msc.getPartition("tmpdb", "tmptbl", "b=2012");
+    }
+    catch (Exception e2) {
+    }
+    listSize = DummyEndFunctionListener.funcNameList.size();
+    func_name = DummyEndFunctionListener.funcNameList.get(listSize-1);
+    context = DummyEndFunctionListener.contextList.get(listSize-1);
+    assertEquals(func_name,"get_partition_by_name");
+    assertFalse(context.isSuccess());
+    e = context.getException();
+    assertTrue((e!=null));
+    assertTrue((e instanceof NoSuchObjectException));
+
+    try {
+      driver.run("drop table Unknown");
+    }
+    catch (Exception e4) {
+    }
+    listSize = DummyEndFunctionListener.funcNameList.size();
+    func_name = DummyEndFunctionListener.funcNameList.get(listSize-1);
+    context = DummyEndFunctionListener.contextList.get(listSize-1);
+    assertEquals(func_name,"get_table");
+    assertFalse(context.isSuccess());
+    e = context.getException();
+    assertTrue((e!=null));
+    assertTrue((e instanceof NoSuchObjectException));
+
+  }
+
+}