Posted to commits@hive.apache.org by cw...@apache.org on 2012/09/17 23:40:10 UTC

svn commit: r1386857 - in /hive/trunk/metastore/src: java/org/apache/hadoop/hive/metastore/ObjectStore.java java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java

Author: cws
Date: Mon Sep 17 21:40:10 2012
New Revision: 1386857

URL: http://svn.apache.org/viewvc?rev=1386857&view=rev
Log:
Backout HIVE-3443 (cws)

Modified:
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
    hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
    hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java?rev=1386857&r1=1386856&r2=1386857&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java Mon Sep 17 21:40:10 2012
@@ -4009,23 +4009,24 @@ public class ObjectStore implements RawS
    * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
    *
    */
+  @SuppressWarnings("finally")
   public Collection<?> executeJDOQLSelect(String query) {
     boolean committed = false;
     Collection<?> result = null;
 
+    LOG.info("Executing query: " + query);
+
     try {
       openTransaction();
       Query q = pm.newQuery(query);
       result = (Collection<?>) q.execute();
       committed = commitTransaction();
-      if (committed) {
-        return result;
-      } else {
-        return null;
-      }
     } finally {
       if (!committed) {
         rollbackTransaction();
+        return null;
+      } else {
+        return result;
       }
     }
   }
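
A note on the @SuppressWarnings("finally") annotations this backout reintroduces: each of these methods now returns from inside its finally block, which compilers flag because a return there completes the block abruptly and silently discards any exception raised in the try body. A standalone sketch of the behavior (illustration only, not code from this commit):

    public class FinallyReturnDemo {
      @SuppressWarnings("finally")
      static int query() {
        try {
          throw new IllegalStateException("lost");  // never reaches the caller
        } finally {
          return -1;  // abrupt completion swallows the pending exception
        }
      }

      public static void main(String[] args) {
        System.out.println(query());  // prints -1; no exception surfaces
      }
    }
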
@@ -4037,23 +4038,24 @@ public class ObjectStore implements RawS
   * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
   *
   */
+  @SuppressWarnings("finally")
   public long executeJDOQLUpdate(String query) {
     boolean committed = false;
     long numUpdated = 0;
 
+    LOG.info("Executing query: " + query);
+
     try {
       openTransaction();
       Query q = pm.newQuery(query);
       numUpdated = (Long) q.execute();
       committed = commitTransaction();
-      if (committed) {
-        return numUpdated;
-      } else {
-        return -1;
-      }
     } finally {
       if (!committed) {
         rollbackTransaction();
+        return -1;
+      } else {
+        return numUpdated;
       }
     }
   }
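
The contract of executeJDOQLUpdate mirrors executeJDOQLSelect: a non-negative count on a successful commit, -1 after a rollback. A hedged usage fragment (the objStore handle and the query text are placeholders, not taken from this patch):

    // Illustration only; updateQuery stands in for a real JDOQL UPDATE statement.
    long numUpdated = objStore.executeJDOQLUpdate(updateQuery);
    if (numUpdated >= 0) {
      System.out.println("Rows updated: " + numUpdated);  // commit succeeded
    } else {
      System.err.println("Commit failed; transaction was rolled back");  // the -1 path
    }
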
@@ -4065,6 +4067,7 @@ public class ObjectStore implements RawS
   * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
   *
   */
+  @SuppressWarnings("finally")
   public Set<String> listFSRoots() {
     boolean committed = false;
     Set<String> fsRoots = new HashSet<String>();
@@ -4079,14 +4082,12 @@ public class ObjectStore implements RawS
         fsRoots.add(mDB.getLocationUri());
       }
       committed = commitTransaction();
-      if (committed) {
-        return fsRoots;
-      } else {
-        return null;
-      }
     } finally {
       if (!committed) {
         rollbackTransaction();
+        return null;
+      } else {
+        return fsRoots;
       }
     }
   }
@@ -4127,322 +4128,155 @@ public class ObjectStore implements RawS
     return true;
   }
 
-  public class UpdateMDatabaseURIRetVal {
-    private List<String> badRecords;
-    private Map<String, String> updateLocations;
-
-    UpdateMDatabaseURIRetVal(List<String> badRecords, Map<String, String> updateLocations) {
-      this.badRecords = badRecords;
-      this.updateLocations = updateLocations;
-    }
-
-    public List<String> getBadRecords() {
-      return badRecords;
-    }
-
-    public void setBadRecords(List<String> badRecords) {
-      this.badRecords = badRecords;
-    }
-
-    public Map<String, String> getUpdateLocations() {
-      return updateLocations;
-    }
-
-    public void setUpdateLocations(Map<String, String> updateLocations) {
-      this.updateLocations = updateLocations;
-    }
-  }
-
-  /** The following APIs
-  *
-  *  - updateMDatabaseURI
-  *
-  * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
-  *
-  */
-  public UpdateMDatabaseURIRetVal updateMDatabaseURI(URI oldLoc, URI newLoc, boolean dryRun) {
-    boolean committed = false;
-    Map<String, String> updateLocations = new HashMap<String, String>();
-    List<String> badRecords = new ArrayList<String>();
-    UpdateMDatabaseURIRetVal retVal = null;
-
-    try {
-      openTransaction();
-      Query query = pm.newQuery(MDatabase.class);
-      List<MDatabase> mDBs = (List<MDatabase>) query.execute();
-      pm.retrieveAll(mDBs);
-
-      for(MDatabase mDB:mDBs) {
-        URI locationURI = null;
-        String location = mDB.getLocationUri();
-        try {
-          locationURI = new URI(location);
-        } catch(URISyntaxException e) {
-          badRecords.add(location);
-        }
-        if (locationURI == null) {
-          badRecords.add(location);
-        } else {
-          if (shouldUpdateURI(locationURI, oldLoc)) {
-            String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString());
+  private int updateMDatabaseURI(URI oldLoc, URI newLoc,
+      HashMap<String, String> updateLocations, boolean dryRun) {
+    int count = 0;
+    Query query = pm.newQuery(MDatabase.class);
+    List<MDatabase> mDBs = (List<MDatabase>) query.execute();
+    pm.retrieveAll(mDBs);
+
+    LOG.info("Looking for location in DB_LOCATION_URI field in DBS table...");
+
+    for(MDatabase mDB:mDBs) {
+      URI locationURI = null;
+      try {
+        locationURI = new URI(mDB.getLocationUri());
+      } catch(URISyntaxException e) {
+        LOG.error("Encountered error while validating location URI: "
+            + e.getLocalizedMessage());
+      }
+      // locationURI is a valid URI
+      if (locationURI != null) {
+        if (shouldUpdateURI(locationURI, oldLoc)) {
+          String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString());
+          if (dryRun) {
             updateLocations.put(locationURI.toString(), dbLoc);
-            if (!dryRun) {
-              mDB.setLocationUri(dbLoc);
-            }
+          } else {
+            mDB.setLocationUri(dbLoc);
           }
+          count++;
         }
       }
-      committed = commitTransaction();
-      if (committed) {
-        retVal = new UpdateMDatabaseURIRetVal(badRecords, updateLocations);
-      }
-      return retVal;
-    } finally {
-      if (!committed) {
-        rollbackTransaction();
-      }
-    }
-  }
-
-  public class UpdateMStorageDescriptorTblPropURIRetVal {
-    private List<String> badRecords;
-    private Map<String, String> updateLocations;
-
-    UpdateMStorageDescriptorTblPropURIRetVal(List<String> badRecords,
-      Map<String, String> updateLocations) {
-      this.badRecords = badRecords;
-      this.updateLocations = updateLocations;
-    }
-
-    public List<String> getBadRecords() {
-      return badRecords;
-    }
-
-    public void setBadRecords(List<String> badRecords) {
-      this.badRecords = badRecords;
     }
 
-    public Map<String, String> getUpdateLocations() {
-      return updateLocations;
-    }
-
-    public void setUpdateLocations(Map<String, String> updateLocations) {
-      this.updateLocations = updateLocations;
-    }
+    LOG.info("Found  " + count + " records to update");
+    return count;
   }
 
-  /** The following APIs
-  *
-  *  - updateMStorageDescriptorTblPropURI
-  *
-  * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
-  *
-  */
-  public UpdateMStorageDescriptorTblPropURIRetVal updateMStorageDescriptorTblPropURI(URI oldLoc,
-      URI newLoc, String tblPropKey, boolean isDryRun) {
-    boolean committed = false;
-    Map<String, String> updateLocations = new HashMap<String, String>();
-    List<String> badRecords = new ArrayList<String>();
-    UpdateMStorageDescriptorTblPropURIRetVal retVal = null;
-
-    try {
-      openTransaction();
-      Query query = pm.newQuery(MStorageDescriptor.class);
-      List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
-      pm.retrieveAll(mSDSs);
-
-      for(MStorageDescriptor mSDS:mSDSs) {
-        URI tablePropLocationURI = null;
-        String tablePropLocation = mSDS.getParameters().get(tblPropKey);
-          if (tablePropLocation != null) {
-            try {
-              tablePropLocationURI = new URI(tablePropLocation);
-            } catch (URISyntaxException e) {
-              badRecords.add(tablePropLocation);
-            }
-          }
-          // if tablePropKey that was passed in lead to a valid URI resolution, update it if
-          //parts of it match the old-NN-loc, else add to badRecords
-          if (tablePropLocationURI == null) {
-            badRecords.add(tablePropLocation);
+  private int updateMStorageDescriptorURI(URI oldLoc, URI newLoc,
+      HashMap<String, String> updateLocations, boolean dryRun) {
+    int count = 0;
+    Query query = pm.newQuery(MStorageDescriptor.class);
+    List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
+    pm.retrieveAll(mSDSs);
+
+    LOG.info("Looking for location in LOCATION field in SDS table...");
+
+    for(MStorageDescriptor mSDS:mSDSs) {
+      URI locationURI = null;
+      try {
+        locationURI = new URI(mSDS.getLocation());
+      } catch (URISyntaxException e) {
+        LOG.error("Encountered error while validating location URI: "
+            + e.getLocalizedMessage());
+      }
+      // locationURI is a valid URI
+      if (locationURI != null) {
+        if (shouldUpdateURI(locationURI, oldLoc)) {
+          String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString());
+          if (dryRun) {
+            updateLocations.put(locationURI.toString(), tblLoc);
           } else {
-            if (shouldUpdateURI(tablePropLocationURI, oldLoc)) {
-              String tblPropLoc = mSDS.getParameters().get(tblPropKey).replaceAll(oldLoc.toString(),
-                  newLoc.toString());
-              updateLocations.put(tablePropLocationURI.toString(), tblPropLoc);
-              if (!isDryRun) {
-                mSDS.getParameters().put(tblPropKey, tblPropLoc);
-              }
-           }
-         }
-       }
-       committed = commitTransaction();
-       if (committed) {
-         retVal = new UpdateMStorageDescriptorTblPropURIRetVal(badRecords, updateLocations);
-       }
-       return retVal;
-     } finally {
-        if (!committed) {
-          rollbackTransaction();
+            mSDS.setLocation(tblLoc);
+          }
+          count++;
         }
-     }
-  }
-
-  public class UpdateMStorageDescriptorTblURIRetVal {
-    private List<String> badRecords;
-    private Map<String, String> updateLocations;
-
-    UpdateMStorageDescriptorTblURIRetVal(List<String> badRecords,
-      Map<String, String> updateLocations) {
-      this.badRecords = badRecords;
-      this.updateLocations = updateLocations;
-    }
-
-    public List<String> getBadRecords() {
-      return badRecords;
-    }
-
-    public void setBadRecords(List<String> badRecords) {
-      this.badRecords = badRecords;
-    }
-
-    public Map<String, String> getUpdateLocations() {
-      return updateLocations;
+      }
     }
 
-    public void setUpdateLocations(Map<String, String> updateLocations) {
-      this.updateLocations = updateLocations;
-    }
+    LOG.info("Found " + count + " records to update");
+    return count;
   }
 
-  /** The following APIs
-  *
-  *  - updateMStorageDescriptorTblURI
-  *
-  * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
-  *
-  */
-  public UpdateMStorageDescriptorTblURIRetVal updateMStorageDescriptorTblURI(URI oldLoc, URI newLoc,
-    boolean isDryRun) {
-    boolean committed = false;
-    Map<String, String> updateLocations = new HashMap<String, String>();
-    List<String> badRecords = new ArrayList<String>();
-    UpdateMStorageDescriptorTblURIRetVal retVal = null;
-
-    try {
-      openTransaction();
-      Query query = pm.newQuery(MStorageDescriptor.class);
-      List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
-      pm.retrieveAll(mSDSs);
-
-      for(MStorageDescriptor mSDS:mSDSs) {
-        URI locationURI = null;
-        String location = mSDS.getLocation();
+  private int updateAvroSerdeURI(URI oldLoc, URI newLoc,
+      HashMap<String, String> updateLocations, boolean dryRun) {
+    int count = 0;
+    Query query = pm.newQuery(MSerDeInfo.class);
+    List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute();
+    pm.retrieveAll(mSerdes);
+
+    LOG.info("Looking for location in the value field of schema.url key in SERDES table...");
+
+    for(MSerDeInfo mSerde:mSerdes) {
+      String key = "schema.url";
+      String schemaLoc = mSerde.getParameters().get(key);
+      if (schemaLoc != null) {
+        URI schemaLocURI = null;
         try {
-          locationURI = new URI(location);
+          schemaLocURI = new URI(schemaLoc);
         } catch (URISyntaxException e) {
-          badRecords.add(location);
+          LOG.error("Encountered error while validating location URI: "
+              + e.getLocalizedMessage());
         }
-        if (locationURI == null) {
-          badRecords.add(location);
-        } else {
-          if (shouldUpdateURI(locationURI, oldLoc)) {
-            String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString());
-            updateLocations.put(locationURI.toString(), tblLoc);
-            if (!isDryRun) {
-              mSDS.setLocation(tblLoc);
+        // schemaLocURI is a valid URI
+        if (schemaLocURI != null) {
+          if (shouldUpdateURI(schemaLocURI, oldLoc)) {
+            String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString());
+            if (dryRun) {
+              updateLocations.put(schemaLocURI.toString(), newSchemaLoc);
+            } else {
+              mSerde.getParameters().put(key, newSchemaLoc);
             }
+            count++;
           }
         }
       }
-      committed = commitTransaction();
-      if (committed) {
-        retVal = new UpdateMStorageDescriptorTblURIRetVal(badRecords, updateLocations);
-      }
-      return retVal;
-    } finally {
-        if (!committed) {
-          rollbackTransaction();
-        }
-     }
-  }
-
-  public class UpdateSerdeURIRetVal {
-    private List<String> badRecords;
-    private Map<String, String> updateLocations;
-
-    UpdateSerdeURIRetVal(List<String> badRecords, Map<String, String> updateLocations) {
-      this.badRecords = badRecords;
-      this.updateLocations = updateLocations;
-    }
-
-    public List<String> getBadRecords() {
-      return badRecords;
-    }
-
-    public void setBadRecords(List<String> badRecords) {
-      this.badRecords = badRecords;
     }
 
-    public Map<String, String> getUpdateLocations() {
-      return updateLocations;
-    }
-
-    public void setUpdateLocations(Map<String, String> updateLocations) {
-      this.updateLocations = updateLocations;
-    }
+    LOG.info("Found " + count + " records to update");
+    return count;
   }
 
   /** The following APIs
   *
-  *  - updateSerdeURI
+  *  - updateFSRootLocation
   *
   * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
   *
   */
-  public UpdateSerdeURIRetVal updateSerdeURI(URI oldLoc, URI newLoc, String serdeProp,
-    boolean isDryRun) {
+  @SuppressWarnings("finally")
+  public int updateFSRootLocation(URI oldLoc, URI newLoc,
+      HashMap<String, String> updateLocations, boolean dryRun) {
     boolean committed = false;
-    Map<String, String> updateLocations = new HashMap<String, String>();
-    List<String> badRecords = new ArrayList<String>();
-    UpdateSerdeURIRetVal retVal = null;
+    int count = 0;
+    int totalCount = 0;
+
+    LOG.info("Old FS root location: " + oldLoc.toString() +
+                    " New FS root location: " + newLoc.toString());
+    LOG.info("Updating FS root location...");
 
     try {
       openTransaction();
-      Query query = pm.newQuery(MSerDeInfo.class);
-      List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute();
-      pm.retrieveAll(mSerdes);
-
-      for(MSerDeInfo mSerde:mSerdes) {
-        String schemaLoc = mSerde.getParameters().get(serdeProp);
-        if (schemaLoc != null) {
-          URI schemaLocURI = null;
-          try {
-            schemaLocURI = new URI(schemaLoc);
-          } catch (URISyntaxException e) {
-            badRecords.add(schemaLoc);
-          }
-          if (schemaLocURI == null) {
-            badRecords.add(schemaLoc);
-          } else {
-            if (shouldUpdateURI(schemaLocURI, oldLoc)) {
-              String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString());
-              updateLocations.put(schemaLocURI.toString(), newSchemaLoc);
-              if (!isDryRun) {
-                mSerde.getParameters().put(serdeProp, newSchemaLoc);
-              }
-            }
-          }
-        }
-      }
+
+      // update locationURI in mDatabase
+      count = updateMDatabaseURI(oldLoc, newLoc, updateLocations, dryRun);
+      totalCount += count;
+
+      // update location in mStorageDescriptor
+      count = updateMStorageDescriptorURI(oldLoc, newLoc, updateLocations, dryRun);
+      totalCount += count;
+
+      // update schema.url for the Avro serde
+      count = updateAvroSerdeURI(oldLoc, newLoc, updateLocations, dryRun);
+      totalCount += count;
+
       committed = commitTransaction();
-      if (committed) {
-        retVal = new UpdateSerdeURIRetVal(badRecords, updateLocations);
-      }
-      return retVal;
     } finally {
       if (!committed) {
         rollbackTransaction();
+        return -1;
+      } else {
+        return totalCount;
       }
     }
   }
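
Taken together, the three private helpers above run under the single transaction opened by updateFSRootLocation, and in dry-run mode the old-to-new mappings are only collected into the caller-supplied map. A hedged sketch of a caller (the URIs are placeholders and the objStore handle is assumed, as in HiveMetaTool below; URISyntaxException handling omitted):

    // Illustration only; mirrors the wiring HiveMetaTool uses further down.
    HashMap<String, String> updateLocations = new HashMap<String, String>();
    URI oldLoc = new URI("hdfs://old-nn:8020/user/hive/warehouse");  // placeholder
    URI newLoc = new URI("hdfs://nn-ha-uri/user/hive/warehouse");    // placeholder
    int count = objStore.updateFSRootLocation(oldLoc, newLoc, updateLocations, true /* dryRun */);
    if (count == -1) {
      // commit failed and the transaction was rolled back
    } else {
      // dry run: updateLocations now maps each current URI to its proposed replacement
    }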

Modified: hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java?rev=1386857&r1=1386856&r2=1386857&view=diff
==============================================================================
--- hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java (original)
+++ hive/trunk/metastore/src/java/org/apache/hadoop/hive/metastore/tools/HiveMetaTool.java Mon Sep 17 21:40:10 2012
@@ -20,9 +20,8 @@ package org.apache.hadoop.hive.metastore
 
 import java.net.URI;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
 import java.util.Set;
 
 import org.apache.commons.cli.CommandLine;
@@ -50,10 +49,9 @@ public class HiveMetaTool {
   private static final Log LOG = LogFactory.getLog(HiveMetaTool.class.getName());
   private final Options cmdLineOptions = new Options();
   private ObjectStore objStore;
-  private boolean isObjStoreInitialized;
+
 
   public HiveMetaTool() {
-    this.isObjStoreInitialized = false;
   }
 
   @SuppressWarnings("static-access")
@@ -77,78 +75,48 @@ public class HiveMetaTool {
     Option updateFSRootLoc =
         OptionBuilder
             .withArgName("new-loc> " + "<old-loc")
-            .hasArgs(2)
+            .hasArgs(3)
             .withDescription(
-                "Update FS root location in the metastore to new location.Both new-loc and " +
-                    "old-loc should be valid URIs with valid host names and schemes." +
-                    "When run with the dryRun option changes are displayed but are not " +
-                    "persisted. When run with the serdepropKey/tablePropKey option " +
-                    "updateLocation looks for the serde-prop-key/table-prop-key that is " +
-                    "specified and updates its value if found.")
-                    .create("updateLocation");
-    Option dryRun = new Option("dryRun" , "Perform a dry run of updateLocation changes.When " +
-      "run with the dryRun option updateLocation changes are displayed but not persisted. " +
-      "dryRun is valid only with the updateLocation option.");
-    Option serdePropKey =
-        OptionBuilder.withArgName("serde-prop-key")
-        .hasArgs()
-        .withValueSeparator()
-        .withDescription("Specify the key for serde property to be updated. serdePropKey option " +
-           "is valid only with updateLocation option.")
-        .create("serdePropKey");
-    Option tablePropKey =
-        OptionBuilder.withArgName("table-prop-key")
-        .hasArg()
-        .withValueSeparator()
-        .withDescription("Specify the key for table property to be updated. tablePropKey option " +
-          "is valid only with updateLocation option.")
-        .create("tablePropKey");
+                "update FS root location in the metastore to new location. Both new-loc and" +
+                    " old-loc should be valid URIs with valid host names and schemes. " +
+                    "when run with the dryRun option changes are displayed but are not persisted.")
+            .create("updateLocation");
+    Option dryRun = new Option("dryRun", "dryRun is valid only with the updateLocation option. When " +
+      "run with the dryRun option, updateLocation changes are displayed but are not persisted.");
 
     cmdLineOptions.addOption(help);
     cmdLineOptions.addOption(listFSRoot);
     cmdLineOptions.addOption(executeJDOQL);
     cmdLineOptions.addOption(updateFSRootLoc);
     cmdLineOptions.addOption(dryRun);
-    cmdLineOptions.addOption(serdePropKey);
-    cmdLineOptions.addOption(tablePropKey);
+
   }
 
   private void initObjectStore(HiveConf hiveConf) {
-    if (!isObjStoreInitialized) {
-      objStore = new ObjectStore();
-      objStore.setConf(hiveConf);
-      isObjStoreInitialized = true;
-    }
+    objStore = new ObjectStore();
+    objStore.setConf(hiveConf);
   }
 
   private void shutdownObjectStore() {
-    if (isObjStoreInitialized) {
+    if (objStore != null) {
       objStore.shutdown();
-      isObjStoreInitialized = false;
     }
   }
 
   private void listFSRoot() {
-    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
-    initObjectStore(hiveConf);
-
     Set<String> hdfsRoots = objStore.listFSRoots();
     if (hdfsRoots != null) {
-      System.out.println("Listing FS Roots..");
+      System.out.println("HiveMetaTool:Listing FS Roots..");
       for (String s : hdfsRoots) {
         System.out.println(s);
       }
     } else {
-      System.err.println("Encountered error during listFSRoot - " +
+      System.err.println("HiveMetaTool:Encountered error during listFSRoot - " +
         "commit of JDO transaction failed");
     }
   }
 
   private void executeJDOQLSelect(String query) {
-    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
-    initObjectStore(hiveConf);
-
-    System.out.println("Executing query: " + query);
     Collection<?> result = objStore.executeJDOQLSelect(query);
     if (result != null) {
       Iterator<?> iter = result.iterator();
@@ -157,205 +125,46 @@ public class HiveMetaTool {
         System.out.println(o.toString());
       }
     } else {
-      System.err.println("Encountered error during executeJDOQLSelect -" +
+      System.err.println("HiveMetaTool:Encountered error during executeJDOQLSelect -" +
         "commit of JDO transaction failed.");
     }
   }
 
-  private long executeJDOQLUpdate(String query) {
-    HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
-    initObjectStore(hiveConf);
-
-    System.out.println("Executing query: " + query);
+  private void executeJDOQLUpdate(String query) {
     long numUpdated = objStore.executeJDOQLUpdate(query);
     if (numUpdated >= 0) {
-      System.out.println("Number of records updated: " + numUpdated);
+      System.out.println("HiveMetaTool:Number of records updated: " + numUpdated);
     } else {
-      System.err.println("Encountered error during executeJDOQL -" +
+      System.err.println("HiveMetaTool:Encountered error during executeJDOQL -" +
         "commit of JDO transaction failed.");
     }
-    return numUpdated;
   }
 
-  private int printUpdateLocations(Map<String, String> updateLocations) {
-    int count = 0;
+  private void printUpdateLocations(HashMap<String, String> updateLocations) {
     for (String key: updateLocations.keySet()) {
       String value = updateLocations.get(key);
-      System.out.println("old location: " + key + " new location: " + value);
-      count++;
-    }
-    return count;
-  }
-
-  private void printTblURIUpdateSummary(ObjectStore.UpdateMStorageDescriptorTblURIRetVal retVal,
-    boolean isDryRun) {
-    String tblName = new String("SDS");
-    String fieldName = new String("LOCATION");
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateMStorageDescriptorTblURI - " +
-          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
-          fieldName + "field in " + tblName + " table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + "..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Found records with bad " + fieldName +  " in " + tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
+      System.out.println("current location: " + key + " new location: " + value);
     }
   }
 
-  private void printDatabaseURIUpdateSummary(ObjectStore.UpdateMDatabaseURIRetVal retVal,
-    boolean isDryRun) {
-    String tblName = new String("DBS");
-    String fieldName = new String("LOCATION_URI");
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateMDatabaseURI - " +
-          "commit of JDO transaction failed. Failed to update FSRoot locations in " +
-          fieldName + "field in " + tblName + " table.");
+  private void updateFSRootLocation(URI oldURI, URI newURI, boolean dryRun) {
+    HashMap<String, String> updateLocations = new HashMap<String, String>();
+    int count = objStore.updateFSRootLocation(oldURI, newURI, updateLocations, dryRun);
+    if (count == -1) {
+      System.err.println("HiveMetaTool:Encountered error while executing updateFSRootLocation - " +
+        "commit of JDO transaction failed, failed to update FS Root locations.");
     } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + "..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " table to update");
+      if (!dryRun) {
+        System.out.println("HiveMetaTool: Successfully updated " + count + "FS Root locations");
       } else {
-        System.out.println("Updated " + count + " records in " + tblName + " table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Found records with bad " + fieldName +  " in " + tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
+        printUpdateLocations(updateLocations);
       }
     }
   }
 
-  private void printTblPropURIUpdateSummary(
-    ObjectStore.UpdateMStorageDescriptorTblPropURIRetVal retVal, String tablePropKey,
-    boolean isDryRun) {
-    String tblName = new String("SD_PARAMS");
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateMStorageDescriptorTblPropURI - " +
-        "commit of JDO transaction failed. Failed to update FSRoot locations in " +
-        "value field corresponding to" + tablePropKey + " in " + tblName + " table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + "..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Found records with bad " + tablePropKey +  " key in " +
-        tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-    }
-  }
-
-  private void printSerdePropURIUpdateSummary(ObjectStore.UpdateSerdeURIRetVal retVal,
-    String serdePropKey, boolean isDryRun) {
-    String tblName = new String("SERDE_PARAMS");
-
-    if (retVal == null) {
-      System.err.println("Encountered error while executing updateSerdeURI - " +
-        "commit of JDO transaction failed. Failed to update FSRoot locations in " +
-        "value field corresponding to " + serdePropKey + " in " + tblName + " table.");
-    } else {
-      Map<String, String> updateLocations = retVal.getUpdateLocations();
-      if (isDryRun) {
-        System.out.println("Dry Run of updateLocation on table " + tblName + "..");
-      } else {
-        System.out.println("Successfully updated the following locations..");
-      }
-      int count = printUpdateLocations(updateLocations);
-      if (isDryRun) {
-        System.out.println("Found " + count + " records in " + tblName + " table to update");
-      } else {
-        System.out.println("Updated " + count + " records in " + tblName + " table");
-      }
-      List<String> badRecords = retVal.getBadRecords();
-      if (badRecords.size() > 0) {
-        System.err.println("Found records with bad " + serdePropKey +  " key in " +
-        tblName + " table.. ");
-        for (String badRecord:badRecords) {
-          System.err.println("bad location URI: " + badRecord);
-        }
-      }
-    }
-  }
+  public static void main(String[] args) {
 
-  public void updateFSRootLocation(URI oldURI, URI newURI, String serdePropKey,
-      String tablePropKey, boolean isDryRun) {
     HiveConf hiveConf = new HiveConf(HiveMetaTool.class);
-    initObjectStore(hiveConf);
-
-    System.out.println("Looking for LOCATION_URI field in DBS table to update..");
-    ObjectStore.UpdateMDatabaseURIRetVal updateMDBURIRetVal = objStore.updateMDatabaseURI(oldURI,
-                                                                 newURI, isDryRun);
-    printDatabaseURIUpdateSummary(updateMDBURIRetVal, isDryRun);
-
-    System.out.println("Looking for LOCATION field in SDS table to update..");
-    ObjectStore.UpdateMStorageDescriptorTblURIRetVal updateTblURIRetVal =
-                         objStore.updateMStorageDescriptorTblURI(oldURI, newURI, isDryRun);
-    printTblURIUpdateSummary(updateTblURIRetVal, isDryRun);
-
-    if (tablePropKey != null) {
-      System.out.println("Looking for value of " + tablePropKey + " key in SD_PARAMS table " +
-        "to update..");
-      ObjectStore.UpdateMStorageDescriptorTblPropURIRetVal updateTblPropURIRetVal =
-                           objStore.updateMStorageDescriptorTblPropURI(oldURI, newURI,
-                               tablePropKey, isDryRun);
-      printTblPropURIUpdateSummary(updateTblPropURIRetVal, tablePropKey, isDryRun);
-    }
-
-    if (serdePropKey != null) {
-      System.out.println("Looking for value of " + serdePropKey + " key in SERDE_PARAMS table " +
-        "to update..");
-      ObjectStore.UpdateSerdeURIRetVal updateSerdeURIretVal = objStore.updateSerdeURI(oldURI,
-                                                                newURI, serdePropKey, isDryRun);
-      printSerdePropURIUpdateSummary(updateSerdeURIretVal, serdePropKey, isDryRun);
-    }
-  }
-
-  private static void printAndExit(HiveMetaTool metaTool) {
-    HelpFormatter formatter = new HelpFormatter();
-    formatter.printHelp("metatool", metaTool.cmdLineOptions);
-    System.exit(1);
-  }
-
-  public static void main(String[] args) {
     HiveMetaTool metaTool = new HiveMetaTool();
     metaTool.init();
     CommandLineParser parser = new GnuParser();
@@ -366,55 +175,35 @@ public class HiveMetaTool {
         line = parser.parse(metaTool.cmdLineOptions, args);
       } catch (ParseException e) {
         System.err.println("HiveMetaTool:Parsing failed.  Reason: " + e.getLocalizedMessage());
-        printAndExit(metaTool);
+        HelpFormatter formatter = new HelpFormatter();
+        formatter.printHelp("metatool", metaTool.cmdLineOptions);
+        System.exit(1);
       }
 
       if (line.hasOption("help")) {
         HelpFormatter formatter = new HelpFormatter();
         formatter.printHelp("metatool", metaTool.cmdLineOptions);
       } else if (line.hasOption("listFSRoot")) {
-        if (line.hasOption("dryRun")) {
-          System.err.println("HiveMetaTool: dryRun is not valid with listFSRoot");
-          printAndExit(metaTool);
-        } else if (line.hasOption("serdePropKey")) {
-          System.err.println("HiveMetaTool: serdePropKey is not valid with listFSRoot");
-          printAndExit(metaTool);
-        } else if (line.hasOption("tablePropKey")) {
-          System.err.println("HiveMetaTool: tablePropKey is not valid with listFSRoot");
-          printAndExit(metaTool);
-        }
+        metaTool.initObjectStore(hiveConf);
         metaTool.listFSRoot();
       } else if (line.hasOption("executeJDOQL")) {
         String query = line.getOptionValue("executeJDOQL");
-        if (line.hasOption("dryRun")) {
-          System.err.println("HiveMetaTool: dryRun is not valid with executeJDOQL");
-          printAndExit(metaTool);
-        } else if (line.hasOption("serdePropKey")) {
-          System.err.println("HiveMetaTool: serdePropKey is not valid with executeJDOQL");
-          printAndExit(metaTool);
-        } else if (line.hasOption("tablePropKey")) {
-          System.err.println("HiveMetaTool: tablePropKey is not valid with executeJDOQL");
-          printAndExit(metaTool);
-        }
+        metaTool.initObjectStore(hiveConf);
         if (query.toLowerCase().trim().startsWith("select")) {
           metaTool.executeJDOQLSelect(query);
         } else if (query.toLowerCase().trim().startsWith("update")) {
           metaTool.executeJDOQLUpdate(query);
         } else {
           System.err.println("HiveMetaTool:Unsupported statement type");
-          printAndExit(metaTool);
         }
       } else if (line.hasOption("updateLocation")) {
         String[] loc = line.getOptionValues("updateLocation");
-        boolean isDryRun = false;
-        String serdepropKey = null;
-        String tablePropKey = null;
+        boolean dryRun = false;
 
         if (loc.length != 2 && loc.length != 3) {
           System.err.println("HiveMetaTool:updateLocation takes in 2 required and 1 " +
-              "optional arguments but " +
-              "was passed " + loc.length + " arguments");
-          printAndExit(metaTool);
+              "optional arguements but " +
+              "was passed " + loc.length + " arguements");
         }
 
         Path newPath = new Path(loc[0]);
@@ -424,15 +213,7 @@ public class HiveMetaTool {
         URI newURI = newPath.toUri();
 
         if (line.hasOption("dryRun")) {
-          isDryRun = true;
-        }
-
-        if (line.hasOption("serdePropKey")) {
-          serdepropKey = line.getOptionValue("serdePropKey");
-        }
-
-        if (line.hasOption("tablePropKey")) {
-          tablePropKey = line.getOptionValue("tablePropKey");
+          dryRun = true;
         }
 
         /*
@@ -448,27 +229,20 @@ public class HiveMetaTool {
           } else if (oldURI.getScheme() == null || newURI.getScheme() == null) {
             System.err.println("HiveMetaTool:A valid scheme is required in both old-loc and new-loc");
           } else {
-            metaTool.updateFSRootLocation(oldURI, newURI, serdepropKey, tablePropKey, isDryRun);
+            metaTool.initObjectStore(hiveConf);
+            metaTool.updateFSRootLocation(oldURI, newURI, dryRun);
           }
         } else {
-          if (line.hasOption("dryRun")) {
-            System.err.println("HiveMetaTool: dryRun is not a valid standalone option");
-          } else if (line.hasOption("serdePropKey")) {
-            System.err.println("HiveMetaTool: serdePropKey is not a valid standalone option");
-          } else if (line.hasOption("tablePropKey")) {
-            System.err.println("HiveMetaTool: tablePropKey is not a valid standalone option");
-            printAndExit(metaTool);
-          } else {
-            System.err.print("HiveMetaTool:Parsing failed.  Reason: Invalid arguments: " );
-            for (String s : line.getArgs()) {
-              System.err.print(s + " ");
-            }
-            System.err.println();
+          System.err.print("HiveMetaTool:Invalid option:" + line.getOptions());
+          for (String s : line.getArgs()) {
+            System.err.print(s + " ");
           }
-          printAndExit(metaTool);
+          System.err.println();
+          HelpFormatter formatter = new HelpFormatter();
+          formatter.printHelp("metatool", metaTool.cmdLineOptions);
         }
       } finally {
         metaTool.shutdownObjectStore();
       }
    }
-}
\ No newline at end of file
+}
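
With the serdePropKey and tablePropKey options backed out, the tool is left with the three commands wired up above. A hedged sketch of driving them in-process, the way TestHiveMetaTool below does (the JDOQL text and URIs are placeholders, not taken from this patch):

    // Illustration only; each call parses its own argument array.
    HiveMetaTool.main(new String[] {"-listFSRoot"});
    HiveMetaTool.main(new String[] {"-executeJDOQL",
        "select locationUri from org.apache.hadoop.hive.metastore.model.MDatabase"});
    HiveMetaTool.main(new String[] {"-updateLocation",
        "hdfs://nn-ha-uri", "hdfs://old-nn:8020", "-dryRun"});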

Modified: hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java
URL: http://svn.apache.org/viewvc/hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java?rev=1386857&r1=1386856&r2=1386857&view=diff
==============================================================================
--- hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java (original)
+++ hive/trunk/metastore/src/test/org/apache/hadoop/hive/metastore/TestHiveMetaTool.java Mon Sep 17 21:40:10 2012
@@ -37,7 +37,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.metastore.api.Type;
 import org.apache.hadoop.hive.metastore.tools.HiveMetaTool;
 import org.apache.hadoop.hive.serde.Constants;
-import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
 import org.apache.hadoop.util.StringUtils;
 
 public class TestHiveMetaTool extends TestCase {
@@ -73,20 +72,16 @@ public class TestHiveMetaTool extends Te
       ps = new PrintStream(os);
 
       // create a dummy database and a couple of dummy tables
-      String dbName = "TestHiveMetaToolDB";
+      String dbName = "testDB";
       String typeName = "Person";
       String tblName = "simpleTbl";
-      String badTblName = "badSimpleTbl";
 
       Database db = new Database();
       db.setName(dbName);
       client.dropTable(dbName, tblName);
-      client.dropTable(dbName, badTblName);
       dropDatabase(dbName);
       client.createDatabase(db);
       locationUri = db.getLocationUri();
-      String avroUri = "hdfs://nn.example.com/warehouse/hive/ab.avsc";
-      String badAvroUri = new String("hdfs:/hive");
 
       client.dropType(typeName);
       Type typ1 = new Type();
@@ -115,34 +110,10 @@ public class TestHiveMetaTool extends Te
       sd.getSerdeInfo().setParameters(new HashMap<String, String>());
       sd.getSerdeInfo().getParameters().put(
           org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
-      sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, avroUri);
       sd.getSerdeInfo().setSerializationLib(
-          org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName());
+          org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
       tbl.setPartitionKeys(new ArrayList<FieldSchema>());
-      client.createTable(tbl);
 
-      //create a table with bad avro uri
-      tbl = new Table();
-      tbl.setDbName(dbName);
-      tbl.setTableName(badTblName);
-      sd = new StorageDescriptor();
-      tbl.setSd(sd);
-      sd.setCols(typ1.getFields());
-      sd.setCompressed(false);
-      sd.setNumBuckets(1);
-      sd.setParameters(new HashMap<String, String>());
-      sd.getParameters().put("test_param_1", "Use this for comments etc");
-      sd.setBucketCols(new ArrayList<String>(2));
-      sd.getBucketCols().add("name");
-      sd.setSerdeInfo(new SerDeInfo());
-      sd.getSerdeInfo().setName(tbl.getTableName());
-      sd.getSerdeInfo().setParameters(new HashMap<String, String>());
-      sd.getSerdeInfo().getParameters().put(
-          org.apache.hadoop.hive.serde.Constants.SERIALIZATION_FORMAT, "1");
-      sd.getParameters().put(AvroSerdeUtils.SCHEMA_URL, badAvroUri);
-      sd.getSerdeInfo().setSerializationLib(
-          org.apache.hadoop.hive.serde2.avro.AvroSerDe.class.getName());
-      tbl.setPartitionKeys(new ArrayList<FieldSchema>());
       client.createTable(tbl);
       client.close();
     } catch (Exception e) {
@@ -202,29 +173,37 @@ public class TestHiveMetaTool extends Te
   public void testUpdateFSRootLocation() throws Exception {
 
     redirectOutputStream();
-    String oldLocationUri = "hdfs://nn.example.com/";
-    String newLocationUri = "hdfs://nn-ha-uri/";
-    String[] args = new String[5];
+    String newLocationUri = "hdfs://nn-ha-uri/user/hive/warehouse";
+    String[] args = new String[3];
     args[0] = new String("-updateLocation");
     args[1] = new String(newLocationUri);
-    args[2] = new String(oldLocationUri);
-    args[3] = new String("-tablePropKey");
-    args[4] = new String("avro.schema.url");
+    args[2] = new String(locationUri);
+
+    String[] args2 = new String[1];
+    args2[0] = new String("-listFSRoot");
 
     try {
+
       // perform HA upgrade
       HiveMetaTool.main(args);
+
+      // obtain new HDFS root
+      HiveMetaTool.main(args2);
+
       String out = os.toString();
       boolean b = out.contains(newLocationUri);
-      restoreOutputStream();
-      assertTrue(b);
 
-      //restore the original HDFS root
-      args[1] = new String(oldLocationUri);
-      args[2] = new String(newLocationUri);
-      redirectOutputStream();
-      HiveMetaTool.main(args);
-      restoreOutputStream();
+      if (b) {
+        System.out.println("updateFSRootLocation successful");
+      } else {
+        System.out.println("updateFSRootLocation failed");
+      }
+      // restore the original HDFS root if needed
+      if (b) {
+        args[1] = new String(locationUri);
+        args[2] = new String(newLocationUri);
+        HiveMetaTool.main(args);
+      }
     } finally {
       restoreOutputStream();
       System.out.println("Completed testUpdateFSRootLocation..");
@@ -235,6 +214,7 @@ public class TestHiveMetaTool extends Te
   protected void tearDown() throws Exception {
     try {
       super.tearDown();
+
     } catch (Throwable e) {
       System.err.println("Unable to close metastore");
       System.err.println(StringUtils.stringifyException(e));
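
The test above leans on the redirectOutputStream()/restoreOutputStream() helpers, which are defined outside this hunk. A minimal sketch of that capture pattern, assuming the os and ps fields seen in setUp (an assumption, not shown in this diff):

    // Assumed shape of the helpers TestHiveMetaTool uses; illustration only.
    private final PrintStream originalOut = System.out;

    private void redirectOutputStream() {
      os = new ByteArrayOutputStream();
      ps = new PrintStream(os);
      System.setOut(ps);           // route System.out into the in-memory buffer
    }

    private void restoreOutputStream() {
      System.setOut(originalOut);  // hand stdout back for subsequent tests
    }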