Posted to commits@hbase.apache.org by ap...@apache.org on 2009/07/10 09:14:12 UTC

svn commit: r792820 - in /hadoop/hbase/trunk_on_hadoop-0.18.3: ./ bin/ src/java/org/apache/hadoop/hbase/client/ src/java/org/apache/hadoop/hbase/regionserver/

Author: apurtell
Date: Fri Jul 10 07:14:12 2009
New Revision: 792820

URL: http://svn.apache.org/viewvc?rev=792820&view=rev
Log:
HBASE-1629,HBASE-1633,HBASE-1637,HBASE-1640

Modified:
    hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
    hadoop/hbase/trunk_on_hadoop-0.18.3/bin/HBase.rb
    hadoop/hbase/trunk_on_hadoop-0.18.3/bin/hirb.rb
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
    hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/CHANGES.txt Fri Jul 10 07:14:12 2009
@@ -249,6 +249,8 @@
                (Jon Gray via Stack)
    HBASE-1625  Adding check to Put.add(KeyValue kv), to see that it has the same
                row as when instantiated (Erik Holstad via Stack)
+   HBASE-1629  HRS unable to contact master
+   HBASE-1633  Can't delete in TRUNK shell; makes it hard doing admin repairs
 
   IMPROVEMENTS
    HBASE-1089  Add count of regions on filesystem to master UI; add percentage
@@ -453,6 +455,9 @@
    HBASE-1626  Allow emitting Deletes out of new TableReducer
                (Lars George via Stack)
    HBASE-1551  HBase should manage multiple node ZooKeeper quorum
+   HBASE-1637  Delete client class methods should return itself like Put, Get,
+               Scan (Jon Gray via Nitay)
+   HBASE-1640  Allow passing arguments to jruby script run when run by bin/hbase shell
 
   OPTIMIZATIONS
    HBASE-1412  Change values for delete column and column family in KeyValue

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/bin/HBase.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/bin/HBase.rb?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/bin/HBase.rb (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/bin/HBase.rb Fri Jul 10 07:14:12 2009
@@ -13,6 +13,7 @@
 
 import org.apache.hadoop.hbase.client.HBaseAdmin
 import org.apache.hadoop.hbase.client.HTable
+import org.apache.hadoop.hbase.client.Delete
 import org.apache.hadoop.hbase.HConstants
 import org.apache.hadoop.hbase.io.BatchUpdate
 import org.apache.hadoop.hbase.io.RowResult
@@ -344,16 +345,17 @@
     # Delete a cell
     def delete(row, column, timestamp = HConstants::LATEST_TIMESTAMP)
       now = Time.now 
-      bu = BatchUpdate.new(row, timestamp)
-      bu.delete(column)
-      @table.commit(bu)
+      d = Delete.new(row.to_java_bytes, timestamp, nil)
+      d.deleteColumn(Bytes.toBytes(column))
+      @table.delete(d)
       @formatter.header()
       @formatter.footer(now)
     end
 
     def deleteall(row, column = nil, timestamp = HConstants::LATEST_TIMESTAMP)
       now = Time.now 
-      @table.deleteAll(row, column, timestamp)
+      d = Delete.new(row.to_java_bytes, timestamp, nil)
+      @table.delete(d)
       @formatter.header()
       @formatter.footer(now)
     end

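HBASE-1633: the shell's delete and deleteall commands above now build a
client-side Delete instead of a BatchUpdate. As a rough illustration only,
the Java equivalent of the two code paths might look like the sketch below;
the table, row, and column names are made up, not taken from this commit.

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.client.HTable;
    import org.apache.hadoop.hbase.util.Bytes;

    public class ShellDeleteSketch {
      public static void main(String[] args) throws Exception {
        HTable table = new HTable(new HBaseConfiguration(), "t1");

        // Rough equivalent of the shell's delete 't1', 'r1', 'f1:q1':
        // remove a single cell at the latest timestamp.
        Delete cell = new Delete(Bytes.toBytes("r1"));
        cell.deleteColumn(Bytes.toBytes("f1:q1"));
        table.delete(cell);

        // Rough equivalent of the shell's deleteall 't1', 'r1':
        // a Delete carrying only the row removes the whole row.
        Delete wholeRow = new Delete(Bytes.toBytes("r1"));
        table.delete(wholeRow);
      }
    }

Note that in the reworked deleteall above only the row and timestamp go into
the Delete, so the optional column argument no longer narrows the operation.
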
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/bin/hirb.rb
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/bin/hirb.rb?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/bin/hirb.rb (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/bin/hirb.rb Fri Jul 10 07:14:12 2009
@@ -63,6 +63,8 @@
     # after we've set up some environment.
     script2run = arg
     found.push(arg)
+    # Presume that any other args are meant for the script.
+    break
   end
 end
 for arg in found

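HBASE-1640: with the break added above, hirb.rb stops consuming command-line
arguments at the first script file it finds, so anything after the script
name is presumed to be meant for the script itself. A hypothetical
invocation (the script name and its arguments are made up):

    $ bin/hbase shell myscript.rb arg1 arg2

Here myscript.rb would be run by the shell with arg1 and arg2 left for the
script to pick up, rather than the shell trying to interpret them as its own
options.
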
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/Delete.java Fri Jul 10 07:14:12 2009
@@ -132,8 +132,9 @@
    * specified family.
    * @param family family name
    */
-  public void deleteFamily(byte [] family) {
-	this.deleteFamily(family, HConstants.LATEST_TIMESTAMP);
+  public Delete deleteFamily(byte [] family) {
+    this.deleteFamily(family, HConstants.LATEST_TIMESTAMP);
+    return this;
   }
 
   /**
@@ -145,7 +146,7 @@
    * @param family family name
    * @param timestamp maximum version timestamp
    */
-  public void deleteFamily(byte [] family, long timestamp) {
+  public Delete deleteFamily(byte [] family, long timestamp) {
     List<KeyValue> list = familyMap.get(family);
     if(list == null) {
       list = new ArrayList<KeyValue>();
@@ -154,6 +155,7 @@
     }
     list.add(new KeyValue(row, family, null, timestamp, KeyValue.Type.DeleteFamily));
     familyMap.put(family, list);
+    return this;
   }
   
   /**
@@ -161,8 +163,9 @@
    * @param family family name
    * @param qualifier column qualifier
    */
-  public void deleteColumns(byte [] family, byte [] qualifier) {
+  public Delete deleteColumns(byte [] family, byte [] qualifier) {
     this.deleteColumns(family, qualifier, HConstants.LATEST_TIMESTAMP);
+    return this;
   }
   
   /**
@@ -172,7 +175,7 @@
    * @param qualifier column qualifier
    * @param timestamp maximum version timestamp
    */
-  public void deleteColumns(byte [] family, byte [] qualifier, long timestamp) {
+  public Delete deleteColumns(byte [] family, byte [] qualifier, long timestamp) {
     List<KeyValue> list = familyMap.get(family);
     if (list == null) {
       list = new ArrayList<KeyValue>();
@@ -180,6 +183,7 @@
     list.add(new KeyValue(this.row, family, qualifier, timestamp,
       KeyValue.Type.DeleteColumn));
     familyMap.put(family, list);
+    return this;
   }
   
   /**
@@ -189,9 +193,10 @@
    * @param column colon-delimited family and qualifier
    * @param timestamp maximum version timestamp 
    */
-  public void deleteColumns(byte [] column, long timestamp) {
+  public Delete deleteColumns(byte [] column, long timestamp) {
     byte [][] parts = KeyValue.parseColumn(column);
     this.deleteColumns(parts[0], parts[1], timestamp);
+    return this;
   }
   
   /**
@@ -202,8 +207,9 @@
    * @param family family name
    * @param qualifier column qualifier
    */
-  public void deleteColumn(byte [] family, byte [] qualifier) {
+  public Delete deleteColumn(byte [] family, byte [] qualifier) {
     this.deleteColumn(family, qualifier, HConstants.LATEST_TIMESTAMP);
+    return this;
   }
   
   /**
@@ -212,7 +218,7 @@
    * @param qualifier column qualifier
    * @param timestamp version timestamp
    */
-  public void deleteColumn(byte [] family, byte [] qualifier, long timestamp) {
+  public Delete deleteColumn(byte [] family, byte [] qualifier, long timestamp) {
     List<KeyValue> list = familyMap.get(family);
     if(list == null) {
       list = new ArrayList<KeyValue>();
@@ -220,6 +226,7 @@
     list.add(new KeyValue(
         this.row, family, qualifier, timestamp, KeyValue.Type.Delete));
     familyMap.put(family, list);
+    return this;
   }
   
   /**
@@ -227,9 +234,10 @@
    * <code>family:qualifier</code> notation.
    * @param column colon-delimited family and qualifier 
    */
-  public void deleteColumn(byte [] column) {
+  public Delete deleteColumn(byte [] column) {
     byte [][] parts = KeyValue.parseColumn(column);
     this.deleteColumn(parts[0], parts[1], HConstants.LATEST_TIMESTAMP);
+    return this;
   }
   
   /**

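HBASE-1637: the Delete mutators above now return this, so a Delete can be
assembled with chained calls in the same style as Put, Get, and Scan. A
minimal sketch of the chaining, with made-up row, family, and qualifier
names:

    import org.apache.hadoop.hbase.client.Delete;
    import org.apache.hadoop.hbase.util.Bytes;

    public class DeleteChainingSketch {
      public static void main(String[] args) {
        // Row, family, and qualifier names below are for illustration only.
        Delete d = new Delete(Bytes.toBytes("row1"))
            .deleteFamily(Bytes.toBytes("f1"))                        // whole family
            .deleteColumns(Bytes.toBytes("f2"), Bytes.toBytes("q1"))  // all versions of one cell
            .deleteColumn(Bytes.toBytes("f3"), Bytes.toBytes("q2"));  // latest version of one cell
        System.out.println(d);
      }
    }

Callers that ignore the return value still compile, so the widened return
type stays source-compatible with existing client code.
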
Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java Fri Jul 10 07:14:12 2009
@@ -348,20 +348,21 @@
       getMaster();
       final TreeSet<HTableDescriptor> uniqueTables =
         new TreeSet<HTableDescriptor>();
-
       MetaScannerVisitor visitor = new MetaScannerVisitor() {
-
         public boolean processRow(Result result) throws IOException {
-          HRegionInfo info = Writables.getHRegionInfo(
+          try {
+            HRegionInfo info = Writables.getHRegionInfo(
               result.getValue(CATALOG_FAMILY, REGIONINFO_QUALIFIER));
-
-          // Only examine the rows where the startKey is zero length
-          if (info != null && info.getStartKey().length == 0) {
-            uniqueTables.add(info.getTableDesc());
+            // Only examine the rows where the startKey is zero length
+            if (info != null && info.getStartKey().length == 0) {
+              uniqueTables.add(info.getTableDesc());
+            }
+            return true;
+          } catch (RuntimeException e) {
+            LOG.error("Result=" + result);
+            throw e;
           }
-          return true;
         }
-
       };
       MetaScanner.metaScan(conf, visitor);
 

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/client/HTable.java Fri Jul 10 07:14:12 2009
@@ -427,7 +427,6 @@
     connection.getRegionServerWithRetries(
         new ServerCallable<Boolean>(connection, tableName, delete.getRow()) {
           public Boolean call() throws IOException {
-            System.out.println("IN HT.get.ServerCallable,");
             server.delete(location.getRegionInfo().getRegionName(), delete);
             return null;
           }

Modified: hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java?rev=792820&r1=792819&r2=792820&view=diff
==============================================================================
--- hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java (original)
+++ hadoop/hbase/trunk_on_hadoop-0.18.3/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java Fri Jul 10 07:14:12 2009
@@ -1323,7 +1323,7 @@
    * Run initialization using parameters passed us by the master.
    */
   private MapWritable reportForDuty() {
-    while (!getMaster()) {
+    while (!stopRequested.get() && !getMaster()) {
       sleeper.sleep();
       LOG.warn("Unable to get master for initialization");
     }
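
HBASE-1629: the extra stopRequested check above lets a region server that is
being shut down break out of the report-for-duty retry loop instead of
sleeping and retrying an unreachable master forever. The general shape of
the fix, sketched outside of HRegionServer with illustrative names:

    import java.util.concurrent.atomic.AtomicBoolean;

    // Sketch of the retry-until-stopped pattern; not the actual
    // HRegionServer code, and getMaster() here is a stand-in.
    public class ReportForDutySketch {
      private final AtomicBoolean stopRequested = new AtomicBoolean(false);

      private boolean getMaster() {
        return false; // pretend the master is unreachable
      }

      void reportForDuty() throws InterruptedException {
        // Checking the stop flag first means a shutdown request ends the
        // loop even while the master cannot be contacted.
        while (!stopRequested.get() && !getMaster()) {
          Thread.sleep(1000);
          System.out.println("Unable to get master for initialization");
        }
      }

      void stop() {
        stopRequested.set(true);
      }
    }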