You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by am...@apache.org on 2011/07/11 09:35:20 UTC

svn commit: r1145053 - in /hive/trunk: metastore/src/java/org/apache/hadoop/hive/metastore/ metastore/src/test/org/apache/hadoop/hive/metastore/ ql/src/java/org/apache/hadoop/hive/ql/exec/ ql/src/java/org/apache/hadoop/hive/ql/parse/ ql/src/test/org/ap...

Author: amareshwari
Date: Mon Jul 11 07:35:19 2011
New Revision: 1145053

URL: http://svn.apache.org/viewvc?rev=1145053&view=rev
Log:
HIVE-1537. Allow users to specify LOCATION in CREATE DATABASE statement. Contributed by Thiruvel Thirumoolan

Added:
    hive/trunk/ql/src/test/queries/clientpositive/database_location.q
    hive/trunk/ql/src/test/results/clientpositive/database_location.q.out
Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
    hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
    hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveAlterHandler.java Mon Jul 11 07:35:19 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 
 /**
  * Hive specific implementation of alter
@@ -115,7 +116,7 @@ public class HiveAlterHandler implements
         // that means user is asking metastore to move data to new location
         // corresponding to the new name
         // get new location
-        newTblLoc = wh.getDefaultTablePath(newt.getDbName(), newt.getTableName()).toString();
+        newTblLoc = wh.getTablePath(msdb.getDatabase(newt.getDbName()), newt.getTableName()).toString();
         newt.getSd().setLocation(newTblLoc);
         oldTblLoc = oldt.getSd().getLocation();
         moveData = true;
@@ -176,6 +177,11 @@ public class HiveAlterHandler implements
       throw new InvalidOperationException(
           "Unable to change partition or table."
               + " Check metastore logs for detailed stack." + e.getMessage());
+    } catch (NoSuchObjectException e) {
+      LOG.debug(e);
+      throw new InvalidOperationException(
+          "Unable to change partition or table. Database " + dbname + " does not exist."
+              + " Check metastore logs for detailed stack." + e.getMessage());
     } finally {
       if (!success) {
         msdb.rollbackTransaction();

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java Mon Jul 11 07:35:19 2011
@@ -463,7 +463,7 @@ public class HiveMetaStore extends Thrif
       } catch (NoSuchObjectException e) {
         ms.createDatabase(
             new Database(DEFAULT_DATABASE_NAME, DEFAULT_DATABASE_COMMENT,
-                wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
+                getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString(), null));
       }
       HMSHandler.createDefaultDB = true;
     }
@@ -570,31 +570,53 @@ public class HiveMetaStore extends Thrif
       logInfo("Metastore shutdown complete.");
     }
 
+    private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
+
+    private Path getDefaultDatabasePath(String dbName) throws MetaException {
+      if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+        return wh.getWhRoot();
+      }
+      return new Path(wh.getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
+    }
+
     private void create_database_core(RawStore ms, final Database db)
         throws AlreadyExistsException, InvalidObjectException, MetaException,
         IOException {
       if (!validateName(db.getName())) {
         throw new InvalidObjectException(db.getName() + " is not a valid database name");
       }
+      if (null == db.getLocationUri()) {
+        db.setLocationUri(getDefaultDatabasePath(db.getName()).toString());
+      } else {
+        db.setLocationUri(wh.getDnsPath(new Path(db.getLocationUri())).toString());
+      }
+      Path dbPath = new Path(db.getLocationUri());
       boolean success = false;
+      boolean madeDir = false;
       try {
-        ms.openTransaction();
-        if (null == db.getLocationUri()) {
-          db.setLocationUri(wh.getDefaultDatabasePath(db.getName()).toString());
+        if (!wh.isDir(dbPath)) {
+          if (!wh.mkdirs(dbPath)) {
+            throw new MetaException("Unable to create database path " + dbPath +
+                ", failed to create database " + db.getName());
+          }
+          madeDir = true;
         }
+
+        ms.openTransaction();
         ms.createDatabase(db);
         success = ms.commitTransaction();
       } finally {
         if (!success) {
           ms.rollbackTransaction();
-        } else {
-          wh.mkdirs(new Path(db.getLocationUri()));
+          if (madeDir) {
+            wh.deleteDir(dbPath, true);
+          }
         }
         for (MetaStoreEventListener listener : listeners) {
           listener.onCreateDatabase(new CreateDatabaseEvent(db, success, this));
+        }
       }
     }
-    }
 
     public void create_database(final Database db)
         throws AlreadyExistsException, InvalidObjectException, MetaException {
@@ -923,7 +945,7 @@ public class HiveMetaStore extends Thrif
     }
 
     private void create_table_core(final RawStore ms, final Table tbl)
-        throws AlreadyExistsException, MetaException, InvalidObjectException {
+        throws AlreadyExistsException, MetaException, InvalidObjectException, NoSuchObjectException {
 
       if (!MetaStoreUtils.validateName(tbl.getTableName())
           || !MetaStoreUtils.validateColNames(tbl.getSd().getCols())
@@ -947,8 +969,8 @@ public class HiveMetaStore extends Thrif
         if (!TableType.VIRTUAL_VIEW.toString().equals(tbl.getTableType())) {
           if (tbl.getSd().getLocation() == null
             || tbl.getSd().getLocation().isEmpty()) {
-            tblPath = wh.getDefaultTablePath(
-              tbl.getDbName(), tbl.getTableName());
+            tblPath = wh.getTablePath(
+                ms.getDatabase(tbl.getDbName()), tbl.getTableName());
           } else {
             if (!isExternal(tbl) && !MetaStoreUtils.isNonNativeTable(tbl)) {
               LOG.warn("Location: " + tbl.getSd().getLocation()

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/Warehouse.java Mon Jul 11 07:35:19 2011
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.permission.F
 import org.apache.hadoop.hive.common.FileUtils;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.shims.ShimLoader;
@@ -59,8 +60,6 @@ public class Warehouse {
   private final Configuration conf;
   private final String whRootString;
 
-  private static final String DATABASE_WAREHOUSE_SUFFIX = ".db";
-
   public static final Log LOG = LogFactory.getLog("hive.metastore.warehouse");
 
   private MetaStoreFS fsHandler = null;
@@ -143,7 +142,7 @@ public class Warehouse {
    * dir (but that should be ok given that this is only called during DDL
    * statements for non-external tables).
    */
-  private Path getWhRoot() throws MetaException {
+  public Path getWhRoot() throws MetaException {
     if (whRoot != null) {
       return whRoot;
     }
@@ -156,16 +155,16 @@ public class Warehouse {
     return new Path(whRoot, tableName.toLowerCase());
   }
 
-  public Path getDefaultDatabasePath(String dbName) throws MetaException {
-    if (dbName.equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
+  public Path getDatabasePath(Database db) throws MetaException {
+    if (db.getName().equalsIgnoreCase(DEFAULT_DATABASE_NAME)) {
       return getWhRoot();
     }
-    return new Path(getWhRoot(), dbName.toLowerCase() + DATABASE_WAREHOUSE_SUFFIX);
+    return new Path(db.getLocationUri());
   }
 
-  public Path getDefaultTablePath(String dbName, String tableName)
+  public Path getTablePath(Database db, String tableName)
       throws MetaException {
-    return new Path(getDefaultDatabasePath(dbName), tableName.toLowerCase());
+    return getDnsPath(new Path(getDatabasePath(db), tableName.toLowerCase()));
   }
 
   public boolean mkdirs(Path f) throws MetaException {
@@ -393,9 +392,9 @@ public class Warehouse {
     }
   }
 
-  public Path getPartitionPath(String dbName, String tableName,
+  public Path getPartitionPath(Database db, String tableName,
       LinkedHashMap<String, String> pm) throws MetaException {
-    return new Path(getDefaultTablePath(dbName, tableName), makePartPath(pm));
+    return new Path(getTablePath(db, tableName), makePartPath(pm));
   }
 
   public Path getPartitionPath(Path tblPath, LinkedHashMap<String, String> pm)

Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaStore.java Mon Jul 11 07:35:19 2011
@@ -30,6 +30,7 @@ import junit.framework.TestCase;
 
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
 import org.apache.hadoop.hive.metastore.api.ConfigValSecurityException;
@@ -473,7 +474,7 @@ public abstract class TestHiveMetaStore 
       assertEquals("name of returned db is different from that of inserted db",
           TEST_DB1_NAME, db.getName());
       assertEquals("location of the returned db is different from that of inserted db",
-          warehouse.getDefaultDatabasePath(TEST_DB1_NAME).toString(), db.getLocationUri());
+          warehouse.getDatabasePath(db).toString(), db.getLocationUri());
 
       Database db2 = new Database();
       db2.setName(TEST_DB2_NAME);
@@ -484,7 +485,7 @@ public abstract class TestHiveMetaStore 
       assertEquals("name of returned db is different from that of inserted db",
           TEST_DB2_NAME, db2.getName());
       assertEquals("location of the returned db is different from that of inserted db",
-          warehouse.getDefaultDatabasePath(TEST_DB2_NAME).toString(), db2.getLocationUri());
+          warehouse.getDatabasePath(db2).toString(), db2.getLocationUri());
 
       List<String> dbs = client.getDatabases(".*");
 
@@ -502,6 +503,96 @@ public abstract class TestHiveMetaStore 
     }
   }
 
+  public void testDatabaseLocation() throws Throwable {
+    try {
+      // clear up any existing databases
+      silentDropDatabase(TEST_DB1_NAME);
+
+      Database db = new Database();
+      db.setName(TEST_DB1_NAME);
+      String dbLocation =
+          HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/_testDB_create_";
+      db.setLocationUri(dbLocation);
+      client.createDatabase(db);
+
+      db = client.getDatabase(TEST_DB1_NAME);
+
+      assertEquals("name of returned db is different from that of inserted db",
+          TEST_DB1_NAME, db.getName());
+      assertEquals("location of the returned db is different from that of inserted db",
+          warehouse.getDnsPath(new Path(dbLocation)).toString(), db.getLocationUri());
+
+      client.dropDatabase(TEST_DB1_NAME);
+      silentDropDatabase(TEST_DB1_NAME);
+
+      db = new Database();
+      db.setName(TEST_DB1_NAME);
+      dbLocation =
+          HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/test/_testDB_create_";
+      FileSystem fs = FileSystem.get(new Path(dbLocation).toUri(), hiveConf);
+      fs.mkdirs(
+          new Path(HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/test"),
+          new FsPermission((short) 0));
+      db.setLocationUri(dbLocation);
+
+      boolean createFailed = false;
+      try {
+        client.createDatabase(db);
+      } catch (MetaException cantCreateDB) {
+        createFailed = true;
+      }
+      assertTrue("Database creation succeeded even with permission problem", createFailed);
+
+      boolean objectNotExist = false;
+      try {
+        client.getDatabase(TEST_DB1_NAME);
+      } catch (NoSuchObjectException e) {
+        objectNotExist = true;
+      }
+      assertTrue("Database " + TEST_DB1_NAME + " exists ", objectNotExist);
+
+      // Cleanup
+      fs.setPermission(
+          new Path(HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/test"),
+          new FsPermission((short) 0755));
+      fs.delete(new Path(HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/test"), true);
+
+
+      db = new Database();
+      db.setName(TEST_DB1_NAME);
+      dbLocation =
+          HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "/_testDB_file_";
+      fs = FileSystem.get(new Path(dbLocation).toUri(), hiveConf);
+      fs.createNewFile(new Path(dbLocation));
+      fs.deleteOnExit(new Path(dbLocation));
+      db.setLocationUri(dbLocation);
+
+      createFailed = false;
+      try {
+        client.createDatabase(db);
+      } catch (MetaException cantCreateDB) {
+        System.err.println(cantCreateDB.getMessage());
+        createFailed = true;
+      }
+      assertTrue("Database creation succeeded even location exists and is a file", createFailed);
+
+      objectNotExist = false;
+      try {
+        client.getDatabase(TEST_DB1_NAME);
+      } catch (NoSuchObjectException e) {
+        objectNotExist = true;
+      }
+      assertTrue("Database " + TEST_DB1_NAME + " exists when location is specified and is a file",
+          objectNotExist);
+
+    } catch (Throwable e) {
+      System.err.println(StringUtils.stringifyException(e));
+      System.err.println("testDatabaseLocation() failed.");
+      throw e;
+    }
+  }
+
+
   public void testSimpleTypeApi() throws Exception {
     try {
       client.dropType(Constants.INT_TYPE_NAME);
@@ -996,6 +1087,60 @@ public abstract class TestHiveMetaStore 
     }
   }
 
+  public void testTableDatabase() throws Exception {
+    String dbName = "testDb";
+    String tblName_1 = "testTbl_1";
+    String tblName_2 = "testTbl_2";
+
+    try {
+      silentDropDatabase(dbName);
+
+      Database db = new Database();
+      db.setName(dbName);
+      String dbLocation =
+          HiveConf.getVar(hiveConf, HiveConf.ConfVars.METASTOREWAREHOUSE) + "_testDB_table_create_";
+      db.setLocationUri(dbLocation);
+      client.createDatabase(db);
+      db = client.getDatabase(dbName);
+
+      Table tbl = new Table();
+      tbl.setDbName(dbName);
+      tbl.setTableName(tblName_1);
+
+      ArrayList<FieldSchema> cols = new ArrayList<FieldSchema>(2);
+      cols.add(new FieldSchema("name", Constants.STRING_TYPE_NAME, ""));
+      cols.add(new FieldSchema("income", Constants.INT_TYPE_NAME, ""));
+
+      StorageDescriptor sd = new StorageDescriptor();
+      sd.setSerdeInfo(new SerDeInfo());
+      sd.getSerdeInfo().setName(tbl.getTableName());
+      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
+      sd.setParameters(new HashMap<String, String>());
+      sd.getSerdeInfo().getParameters().put(
+          org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "9");
+      sd.getSerdeInfo().setSerializationLib(
+          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
+
+      tbl.setSd(sd);
+      tbl.getSd().setCols(cols);
+      client.createTable(tbl);
+      tbl = client.getTable(dbName, tblName_1);
+
+      Path path = new Path(tbl.getSd().getLocation());
+      System.err.println("Table's location " + path + ", Database's location " + db.getLocationUri());
+      assertEquals("Table location is not a subset of the database location",
+          path.getParent().toString(), db.getLocationUri());
+
+    } catch (Exception e) {
+      System.err.println(StringUtils.stringifyException(e));
+      System.err.println("testTableDatabase() failed.");
+      throw e;
+    } finally {
+      silentDropDatabase(dbName);
+    }
+  }
+
+
   public void testGetConfigValue() {
 
     String val = "value";

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/StatsTask.java Mon Jul 11 07:35:19 2011
@@ -294,7 +294,7 @@ public class StatsTask extends Task<Stat
         if (!tableStatsExist && atomic) {
           return 0;
         }
-        Path tablePath = wh.getDefaultTablePath(table.getDbName(), table.getTableName());
+        Path tablePath = wh.getTablePath(db.getDatabase(table.getDbName()), table.getTableName());
         fileSys = tablePath.getFileSystem(conf);
         fileStatus = Utilities.getFileStatusRecurse(tablePath, 1, fileSys);
 

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java Mon Jul 11 07:35:19 2011
@@ -628,6 +628,7 @@ public class DDLSemanticAnalyzer extends
     String dbName = unescapeIdentifier(ast.getChild(0).getText());
     boolean ifNotExists = false;
     String dbComment = null;
+    String dbLocation = null;
     Map<String, String> dbProps = null;
 
     for (int i = 1; i < ast.getChildCount(); i++) {
@@ -642,16 +643,16 @@ public class DDLSemanticAnalyzer extends
       case HiveParser.TOK_DATABASEPROPERTIES:
         dbProps = DDLSemanticAnalyzer.getProps((ASTNode) childNode.getChild(0));
         break;
+      case HiveParser.TOK_DATABASELOCATION:
+        dbLocation = unescapeSQLString(childNode.getChild(0).getText());
+        break;
       default:
         throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
       }
     }
 
-    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc();
-    createDatabaseDesc.setName(dbName);
-    createDatabaseDesc.setComment(dbComment);
-    createDatabaseDesc.setIfNotExists(ifNotExists);
-    createDatabaseDesc.setLocationUri(null);
+    CreateDatabaseDesc createDatabaseDesc =
+        new CreateDatabaseDesc(dbName, dbComment, dbLocation, ifNotExists);
     if (dbProps != null) {
       createDatabaseDesc.setDatabaseProperties(dbProps);
     }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/Hive.g Mon Jul 11 07:35:19 2011
@@ -243,6 +243,7 @@ TOK_SHOWINDEXES;
 TOK_INDEXCOMMENT;
 TOK_DESCDATABASE;
 TOK_DATABASEPROPERTIES;
+TOK_DATABASELOCATION;
 TOK_DBPROPLIST;
 TOK_ALTERDATABASE_PROPERTIES;
 TOK_ALTERTABLE_ALTERPARTS_MERGEFILES;
@@ -375,9 +376,17 @@ createDatabaseStatement
     : KW_CREATE (KW_DATABASE|KW_SCHEMA)
         ifNotExists?
         name=Identifier
+        dbLocation?
         databaseComment?
         (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
-    -> ^(TOK_CREATEDATABASE $name ifNotExists? databaseComment? $dbprops?)
+    -> ^(TOK_CREATEDATABASE $name ifNotExists? dbLocation? databaseComment? $dbprops?)
+    ;
+
+dbLocation
+@init { msgs.push("database location specification"); }
+@after { msgs.pop(); }
+    :
+      KW_LOCATION locn=StringLiteral -> ^(TOK_DATABASELOCATION $locn)
     ;
 
 dbProperties
@@ -591,7 +600,7 @@ alterDatabaseStatementSuffix
 @after { msgs.pop(); }
     : alterDatabaseSuffixProperties
     ;
-    
+
 alterDatabaseSuffixProperties
 @init { msgs.push("alter database properties statement"); }
 @after { msgs.pop(); }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java Mon Jul 11 07:35:19 2011
@@ -46,6 +46,7 @@ import org.apache.hadoop.hive.ql.exec.Ta
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
@@ -234,8 +235,8 @@ public class ImportSemanticAnalyzer exte
             tblDesc), conf);
         Table table = new Table(dbname, tblDesc.getTableName());
         conf.set("import.destination.dir",
-            wh.getDnsPath(wh.getDefaultTablePath(
-                db.getCurrentDatabase(), tblDesc.getTableName())).toString());
+            wh.getTablePath(db.getDatabase(db.getCurrentDatabase()),
+                tblDesc.getTableName()).toString());
         if ((tblDesc.getPartCols() != null) && (tblDesc.getPartCols().size() != 0)) {
           for (AddPartitionDesc addPartitionDesc : partitionDescs) {
             t.addDependentTask(
@@ -252,8 +253,7 @@ public class ImportSemanticAnalyzer exte
             if (tblDesc.getLocation() != null) {
               tablePath = new Path(tblDesc.getLocation());
             } else {
-              tablePath = wh.getDnsPath(wh.getDefaultTablePath(
-                  db.getCurrentDatabase(), tblDesc.getTableName()));
+              tablePath = wh.getTablePath(db.getDatabase(db.getCurrentDatabase()), tblDesc.getTableName());
             }
             checkTargetLocationEmpty(fs, tablePath);
             t.addDependentTask(loadTable(fromURI, table));
@@ -288,7 +288,7 @@ public class ImportSemanticAnalyzer exte
 
   private Task<?> addSinglePartition(URI fromURI, FileSystem fs, CreateTableDesc tblDesc,
       Table table, Warehouse wh,
-      AddPartitionDesc addPartitionDesc) throws MetaException, IOException, SemanticException {
+      AddPartitionDesc addPartitionDesc) throws MetaException, IOException, HiveException {
     if (tblDesc.isExternal() && tblDesc.getLocation() == null) {
       LOG.debug("Importing in-place: adding AddPart for partition "
           + partSpecToString(addPartitionDesc.getPartSpec()));
@@ -304,8 +304,8 @@ public class ImportSemanticAnalyzer exte
           tgtPath = new Path(table.getDataLocation().toString(),
               Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
         } else {
-          tgtPath = new Path(wh.getDnsPath(wh.getDefaultTablePath(
-              db.getCurrentDatabase(), tblDesc.getTableName())),
+          tgtPath = new Path(wh.getTablePath(
+              db.getDatabase(db.getCurrentDatabase()), tblDesc.getTableName()),
               Warehouse.makePartPath(addPartitionDesc.getPartSpec()));
         }
       } else {

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java (original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java Mon Jul 11 07:35:19 2011
@@ -6810,7 +6810,7 @@ public class SemanticAnalyzer extends Ba
             try {
               dumpTable = db.newTable(qb.getTableDesc().getTableName());
               Warehouse wh = new Warehouse(conf);
-              targetPath = wh.getDefaultTablePath(dumpTable.getDbName(), dumpTable
+              targetPath = wh.getTablePath(db.getDatabase(dumpTable.getDbName()), dumpTable
                   .getTableName());
             } catch (HiveException e) {
               throw new SemanticException(e);

Modified: hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java?rev=1145053&r1=1145052&r2=1145053&view=diff
==============================================================================
--- hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java (original)
+++ hive/trunk/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java Mon Jul 11 07:35:19 2011
@@ -155,9 +155,8 @@ public class TestHive extends TestCase {
             .getOwner(), ft.getOwner());
         assertEquals("Table retention didn't match for table: " + tableName,
             tbl.getRetention(), ft.getRetention());
-        String dbPath = wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString();
         assertEquals("Data location is not set correctly",
-            wh.getDefaultTablePath(DEFAULT_DATABASE_NAME, tableName).toString(),
+            wh.getTablePath(hm.getDatabase(DEFAULT_DATABASE_NAME), tableName).toString(),
             ft.getDataLocation().toString());
         // now that URI is set correctly, set the original table's uri and then
         // compare the two tables
@@ -229,9 +228,8 @@ public class TestHive extends TestCase {
             .getOwner(), ft.getOwner());
         assertEquals("Table retention didn't match for table: " + tableName,
             tbl.getRetention(), ft.getRetention());
-        String dbPath = wh.getDefaultDatabasePath(DEFAULT_DATABASE_NAME).toString();
         assertEquals("Data location is not set correctly",
-            wh.getDefaultTablePath(DEFAULT_DATABASE_NAME, tableName).toString(),
+            wh.getTablePath(hm.getDatabase(DEFAULT_DATABASE_NAME), tableName).toString(),
             ft.getDataLocation().toString());
         // now that URI is set correctly, set the original table's uri and then
         // compare the two tables

Added: hive/trunk/ql/src/test/queries/clientpositive/database_location.q
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/database_location.q?rev=1145053&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/database_location.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/database_location.q Mon Jul 11 07:35:19 2011
@@ -0,0 +1,16 @@
+create database db1;
+describe database extended db1;
+use db1;
+create table table_db1 (name string, value int);
+describe formatted table_db1;
+show tables;
+
+create database db2 location '${hiveconf:hive.metastore.warehouse.dir}/db2';
+describe database extended db2;
+use db2;
+create table table_db2 (name string, value int);
+describe formatted table_db2;
+show tables;
+
+drop database db2 cascade;
+drop database db1 cascade;
\ No newline at end of file

Added: hive/trunk/ql/src/test/results/clientpositive/database_location.q.out
URL: http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/database_location.q.out?rev=1145053&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/database_location.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/database_location.q.out Mon Jul 11 07:35:19 2011
@@ -0,0 +1,116 @@
+PREHOOK: query: create database db1
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database db1
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: describe database extended db1
+PREHOOK: type: DESCDATABASE
+POSTHOOK: query: describe database extended db1
+POSTHOOK: type: DESCDATABASE
+db1		pfile:/home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db1.db	
+PREHOOK: query: use db1
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use db1
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table table_db1 (name string, value int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table table_db1 (name string, value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: db1@table_db1
+PREHOOK: query: describe formatted table_db1
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted table_db1
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
+name                	string              	None                
+value               	int                 	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	db1                 	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Jun 21 06:57:44 PDT 2011	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db1.db/table_db1	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	transient_lastDdlTime	1308664664          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+table_db1
+PREHOOK: query: create database db2 location 'pfile:///home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db2'
+PREHOOK: type: CREATEDATABASE
+POSTHOOK: query: create database db2 location 'pfile:///home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db2'
+POSTHOOK: type: CREATEDATABASE
+PREHOOK: query: describe database extended db2
+PREHOOK: type: DESCDATABASE
+POSTHOOK: query: describe database extended db2
+POSTHOOK: type: DESCDATABASE
+db2		pfile:/home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db2	
+PREHOOK: query: use db2
+PREHOOK: type: SWITCHDATABASE
+POSTHOOK: query: use db2
+POSTHOOK: type: SWITCHDATABASE
+PREHOOK: query: create table table_db2 (name string, value int)
+PREHOOK: type: CREATETABLE
+POSTHOOK: query: create table table_db2 (name string, value int)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: db2@table_db2
+PREHOOK: query: describe formatted table_db2
+PREHOOK: type: DESCTABLE
+POSTHOOK: query: describe formatted table_db2
+POSTHOOK: type: DESCTABLE
+# col_name            	data_type           	comment             
+	 	 
+name                	string              	None                
+value               	int                 	None                
+	 	 
+# Detailed Table Information	 	 
+Database:           	db2                 	 
+Owner:              	thiruvel            	 
+CreateTime:         	Tue Jun 21 06:57:45 PDT 2011	 
+LastAccessTime:     	UNKNOWN             	 
+Protect Mode:       	None                	 
+Retention:          	0                   	 
+Location:           	pfile:/home/thiruvel/projects/hive/secure.trunk/build/ql/test/data/warehouse/db2/table_db2	 
+Table Type:         	MANAGED_TABLE       	 
+Table Parameters:	 	 
+	transient_lastDdlTime	1308664665          
+	 	 
+# Storage Information	 	 
+SerDe Library:      	org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe	 
+InputFormat:        	org.apache.hadoop.mapred.TextInputFormat	 
+OutputFormat:       	org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat	 
+Compressed:         	No                  	 
+Num Buckets:        	-1                  	 
+Bucket Columns:     	[]                  	 
+Sort Columns:       	[]                  	 
+Storage Desc Params:	 	 
+	serialization.format	1                   
+PREHOOK: query: show tables
+PREHOOK: type: SHOWTABLES
+POSTHOOK: query: show tables
+POSTHOOK: type: SHOWTABLES
+table_db2
+PREHOOK: query: drop database db2 cascade
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database db2 cascade
+POSTHOOK: type: DROPDATABASE
+PREHOOK: query: drop database db1 cascade
+PREHOOK: type: DROPDATABASE
+POSTHOOK: query: drop database db1 cascade
+POSTHOOK: type: DROPDATABASE