You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2008/11/12 21:44:07 UTC

svn commit: r713502 - in /hadoop/hbase/trunk: CHANGES.txt conf/hbase-env.sh src/java/org/apache/hadoop/hbase/HConstants.java src/java/org/apache/hadoop/hbase/util/Migrate.java

Author: stack
Date: Wed Nov 12 12:44:06 2008
New Revision: 713502

URL: http://svn.apache.org/viewvc?rev=713502&view=rev
Log:
HBASE-996 Migration script to up the versions in catalog tables

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/conf/hbase-env.sh
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=713502&r1=713501&r2=713502&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Wed Nov 12 12:44:06 2008
@@ -71,6 +71,7 @@
                for root region"
    HBASE-990   NoSuchElementException in flushSomeRegions
    HBASE-602   HBase Crash when network card has a IPv6 address
+   HBASE-996   Migration script to up the versions in catalog tables
       
   IMPROVEMENTS
    HBASE-901   Add a limit to key length, check key and value length on client side

Modified: hadoop/hbase/trunk/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-env.sh?rev=713502&r1=713501&r2=713502&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-env.sh (original)
+++ hadoop/hbase/trunk/conf/hbase-env.sh Wed Nov 12 12:44:06 2008
@@ -23,7 +23,7 @@
 
 # The java implementation to use.  Required.
 # export JAVA_HOME=/usr/lib/j2sdk1.5-sun
-export JAVA_HOME=/usr
+export JAVA_HOME=/Users/stack/bin/jdk
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java?rev=713502&r1=713501&r2=713502&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HConstants.java Wed Nov 12 12:44:06 2008
@@ -44,9 +44,10 @@
   
   /**
    * Current version of file system
-   * Version 4 supports only one kind of bloom filter
+   * Version 4 supports only one kind of bloom filter.
+   * Version 5 changes versions in catalog table regions.
    */
-  public static final String FILE_SYSTEM_VERSION = "4";
+  public static final String FILE_SYSTEM_VERSION = "5";
   
   // Configuration parameters
   

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java?rev=713502&r1=713501&r2=713502&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/Migrate.java Wed Nov 12 12:44:06 2008
@@ -21,40 +21,35 @@
 package org.apache.hadoop.hbase.util;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
 
 import org.apache.commons.cli.Options;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
-import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.HStoreFile;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
- * Perform a file system upgrade to convert older file layouts.
+ * Perform a migration.
  * HBase keeps a file in hdfs named hbase.version just under the hbase.rootdir.
  * This file holds the version of the hbase data in the Filesystem.  When the
  * software changes in a manner incompatible with the data in the Filesystem,
  * it updates its internal version number,
  * {@link HConstants#FILE_SYSTEM_VERSION}.  This wrapper script manages moving
  * the filesystem across versions until there's a match with current software's
- * version number.
 * version number.  This script will only cross a particular version divide.  You may
 * need to install an earlier or later hbase to migrate earlier (or later) versions.
  * 
  * <p>This wrapper script comprises a set of migration steps.  Which steps
  * are run depends on the span between the version of the hbase data in the
@@ -72,13 +67,12 @@
  * edits made by previous migration steps not being apparent in later migration
  * steps).  The upshot is always verify presumptions migrating.
  * 
- * <p>This script will migrate an hbase 0.1 install to a 0.2 install only.
 * <p>This script will migrate an hbase 0.18.x install only.
  * 
  * @see <a href="http://wiki.apache.org/hadoop/Hbase/HowToMigrate">How To Migration</a>
  */
 public class Migrate extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(Migrate.class);
-
   private final HBaseConfiguration conf;
   private FileSystem fs;
   
@@ -136,7 +130,7 @@
     try {
       // Verify file system is up.
       fs = FileSystem.get(conf);                        // get DFS handle
-      LOG.info("Verifying that file system is available...");
+      LOG.info("Verifying that file system is available..");
       FSUtils.checkFileSystemAvailable(fs);
       return true;
     } catch (IOException e) {
@@ -147,7 +141,8 @@
   
   private boolean notRunning() {
     // Verify HBase is down
-    LOG.info("Verifying that HBase is not running...");
+    LOG.info("Verifying that HBase is not running...." +
          "Tries ten times to connect to running master");
     try {
       HBaseAdmin.checkHBaseAvailable(conf);
       LOG.fatal("HBase cluster must be off-line.");
@@ -187,7 +182,7 @@
       }
       float version = Float.parseFloat(versionStr);
       if (version == HBASE_0_1_VERSION ||
-          Integer.valueOf(versionStr) < PREVIOUS_VERSION) {
+          Integer.valueOf(versionStr).intValue() < PREVIOUS_VERSION) {
         String msg = "Cannot upgrade from " + versionStr + " to " +
         HConstants.FILE_SYSTEM_VERSION + " you must install hbase-0.2.x, run " +
         "the upgrade tool, reinstall this version and run this utility again." +
@@ -196,7 +191,7 @@
         throw new IOException(msg);
       }
 
-      // insert call to new migration method here.
+      migrateTo5();
       
       if (!readOnly) {
         // Set file system version
@@ -213,37 +208,65 @@
     }
   }
   
-  private FileStatus[] getRootDirFiles() throws IOException {
-    FileStatus[] stats = fs.listStatus(FSUtils.getRootDir(this.conf));
-    if (stats == null || stats.length == 0) {
-      throw new IOException("No files found under root directory " +
-        FSUtils.getRootDir(this.conf).toString());
+  // Move the filesystem version from 4 to 5.
+  // In here we rewrite the catalog table regions so they keep 10 versions
+  // instead of 1.
+  private void migrateTo5() throws IOException {
+    if (this.readOnly && this.migrationNeeded) {
+      return;
+    }
+    final MetaUtils utils = new MetaUtils(this.conf);
+    try {
+      updateVersions(utils.getRootRegion().getRegionInfo());
+      // Scan the root region
+      utils.scanRootRegion(new MetaUtils.ScannerListener() {
+        public boolean processRow(HRegionInfo info)
+        throws IOException {
+          if (readOnly && !migrationNeeded) {
+            migrationNeeded = true;
+            return false;
+          }
+          updateVersions(utils.getRootRegion(), info);
+          return true;
+        }
+      });
+    } finally {
+      utils.shutdown();
     }
-    return stats;
   }
 
-  private void checkForUnrecoveredLogFiles(FileStatus[] rootFiles)
+  /*
+   * Move from old pre-v5 hregioninfo to current HRegionInfo
+   * Persist back into <code>mr</code>
+   * @param mr
+   * @param oldHri
+   */
+  void updateVersions(HRegion mr, HRegionInfo oldHri)
   throws IOException {
-    List<String> unrecoveredLogs = new ArrayList<String>();
-    for (int i = 0; i < rootFiles.length; i++) {
-      String name = rootFiles[i].getPath().getName();
-      if (name.startsWith("log_")) {
-        unrecoveredLogs.add(name);
-      }
+    if (!updateVersions(oldHri)) {
+      return;
     }
-    if (unrecoveredLogs.size() != 0) {
-      throw new IOException("There are " + unrecoveredLogs.size() +
-          " unrecovered region server logs. Please uninstall this version of " +
-          "HBase, re-install the previous version, start your cluster and " +
-          "shut it down cleanly, so that all region server logs are recovered" +
-          " and deleted.  Or, if you are sure logs are vestige of old " +
-          "failures in hbase, remove them and then rerun the migration.  " +
-          "See 'Redo Logs' in http://wiki.apache.org/hadoop/Hbase/HowToMigrate. " + 
-          "Here are the problem log files: " + unrecoveredLogs);
+    BatchUpdate b = new BatchUpdate(oldHri.getRegionName());
+    b.put(HConstants.COL_REGIONINFO, Writables.getBytes(oldHri));
+    mr.batchUpdate(b);
+    LOG.info("Upped versions on " + oldHri.getRegionNameAsString());
+  }
+  
+  /*
+   * @param hri region info whose catalog-family version count to update
+   * @return true if we changed the value
+   */
+  private boolean updateVersions(final HRegionInfo hri) {
+    HColumnDescriptor hcd =
+      hri.getTableDesc().getFamily(HConstants.COLUMN_FAMILY);
+    if (hcd.getMaxVersions() == 1) {
+      // Set it to 10, an arbitrary high number
+      hcd.setMaxVersions(10);
+      return true;
     }
+    return false;
   }
 
-  @SuppressWarnings("static-access")
   private int parseArgs(String[] args) {
     Options opts = new Options();
     GenericOptionsParser parser =
@@ -263,9 +286,11 @@
   }
   
   private void usage() {
-    System.err.println("Usage: bin/hbase migrate { check | upgrade } [options]\n");
+    System.err.println("Usage: bin/hbase migrate {check | upgrade} [options]");
+    System.err.println();
     System.err.println("  check                            perform upgrade checks only.");
-    System.err.println("  upgrade                          perform upgrade checks and modify hbase.\n");
+    System.err.println("  upgrade                          perform upgrade checks and modify hbase.");
+    System.err.println();
     System.err.println("  Options are:");
     System.err.println("    -conf <configuration file>     specify an application configuration file");
     System.err.println("    -D <property=value>            use value for given property");
@@ -282,9 +307,9 @@
     try {
       status = ToolRunner.run(new Migrate(), args);
     } catch (Exception e) {
-      LOG.error("exiting due to error", e);
+      LOG.error(e);
       status = -1;
     }
     System.exit(status);
   }
-}
+}
\ No newline at end of file