Posted to commits@hbase.apache.org by st...@apache.org on 2008/06/26 19:06:32 UTC

svn commit: r671951 - in /hadoop/hbase/trunk: ./ conf/ src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/io/ src/java/org/apache/hadoop/hbase/master/ src/java/org/apache/hadoop/hbase/regionserver/ src/java/org/apache/hadoop/hbase/util/

Author: stack
Date: Thu Jun 26 10:06:31 2008
New Revision: 671951

URL: http://svn.apache.org/viewvc?rev=671951&view=rev
Log:
HBASE-714 Showing bytes in log when should be string (2)
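The gist of the fix: row keys, column names and region names are byte [] throughout this codebase, and Java arrays inherit Object.toString(), so concatenating one into a log message prints an opaque reference instead of the text. Wrapping the array in Bytes.toString() (org.apache.hadoop.hbase.util.Bytes) is the pattern applied in the hunks below. A minimal sketch, with an illustrative LOG and region name:

    byte [] regionName = Bytes.toBytes("mytable,aaa,1214500000000");
    LOG.info("Compacting " + regionName);                  // logs e.g. "Compacting [B@1a2b3c"
    LOG.info("Compacting " + Bytes.toString(regionName));  // logs "Compacting mytable,aaa,1214500000000"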

Modified:
    hadoop/hbase/trunk/CHANGES.txt
    hadoop/hbase/trunk/conf/hbase-env.sh
    hadoop/hbase/trunk/conf/hbase-site.xml
    hadoop/hbase/trunk/conf/log4j.properties
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
    hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java

Modified: hadoop/hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/CHANGES.txt?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/CHANGES.txt (original)
+++ hadoop/hbase/trunk/CHANGES.txt Thu Jun 26 10:06:31 2008
@@ -68,6 +68,9 @@
    HBASE-702   deleteall doesn't
    HBASE-704   update new shell docs and commands on help menu
    HBASE-709   Deadlock while rolling WAL-log while finishing flush
+   HBASE-710   If clocks are way off, then we can have daughter split come
+               before rather than after its parent in .META.
+   HBASE-714   Showing bytes in log when should be string (2)
    
    
   IMPROVEMENTS

Modified: hadoop/hbase/trunk/conf/hbase-env.sh
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-env.sh?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-env.sh (original)
+++ hadoop/hbase/trunk/conf/hbase-env.sh Thu Jun 26 10:06:31 2008
@@ -23,6 +23,7 @@
 
 # The java implementation to use.  Required.
 # export JAVA_HOME=/usr/lib/j2sdk1.5-sun
+export JAVA_HOME=/usr
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HBASE_CLASSPATH=

Modified: hadoop/hbase/trunk/conf/hbase-site.xml
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/hbase-site.xml?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/hbase-site.xml (original)
+++ hadoop/hbase/trunk/conf/hbase-site.xml Thu Jun 26 10:06:31 2008
@@ -22,4 +22,30 @@
  */
 -->
 <configuration>
+<!--
+  <property>
+    <name>hbase.master</name>
+    <value>durruti.local:60000</value>
+    <description>The host and port that the HBase master runs at.
+    A value of 'local' runs the master and a regionserver in
+    a single process.
+    </description>
+  </property>
+  <property>
+    <name>hbase.rootdir</name>
+    <value>hdfs://durruti.local:10000/hbase</value>
+    <description>The directory shared by region servers.
+    Should be fully-qualified to include the filesystem to use.
+    E.g: hdfs://NAMENODE_SERVER:PORT/HBASE_ROOTDIR
+    </description>
+  </property>
+<property>
+  <name>dfs.replication</name>
+  <value>1</value>
+  <description>Default block replication. 
+  The actual number of replications can be specified when the file is created.
+  The default is used if replication is not specified in create time.
+  </description>
+</property>
+-->
 </configuration>

Modified: hadoop/hbase/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/conf/log4j.properties?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/conf/log4j.properties (original)
+++ hadoop/hbase/trunk/conf/log4j.properties Thu Jun 26 10:06:31 2008
@@ -41,3 +41,5 @@
 # Custom Logging levels
 
 #log4j.logger.org.apache.hadoop.fs.FSNamesystem=DEBUG
+#log4j.logger.org.apache.hadoop.hbase=DEBUG
+log4j.logger.org.apache.hadoop.dfs=DEBUG

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/HRegionInfo.java Thu Jun 26 10:06:31 2008
@@ -176,11 +176,28 @@
   public HRegionInfo(HTableDescriptor tableDesc, final byte [] startKey,
       final byte [] endKey, final boolean split)
   throws IllegalArgumentException {
+    this(tableDesc, startKey, endKey, split, System.currentTimeMillis());
+  }
+
+  /**
+   * Construct HRegionInfo with explicit parameters
+   * 
+   * @param tableDesc the table descriptor
+   * @param startKey first key in region
+   * @param endKey end of key range
+   * @param split true if this region has split and we have daughter regions
+   * that may or may not hold references to this region.
+   * @param regionid Region id to use.
+   * @throws IllegalArgumentException
+   */
+  public HRegionInfo(HTableDescriptor tableDesc, final byte [] startKey,
+    final byte [] endKey, final boolean split, final long regionid)
+  throws IllegalArgumentException {
     if (tableDesc == null) {
       throw new IllegalArgumentException("tableDesc cannot be null");
     }
     this.offLine = false;
-    this.regionId = System.currentTimeMillis();
+    this.regionId = regionid;
     this.regionName = createRegionName(tableDesc.getName(), startKey, regionId);
     this.regionNameStr = Bytes.toString(this.regionName);
     this.split = split;

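The change above adds an HRegionInfo constructor that takes the region id explicitly; the existing four-argument form now delegates to it with System.currentTimeMillis(). The split path in HRegion.java further down uses the new overload so a daughter region never gets an id smaller than its parent's. A minimal sketch of how it might be called, assuming tableDesc, startKey, midKey and endKey are already in scope (the names are illustrative):

    // Old behaviour: the region id defaults to the current time.
    HRegionInfo parent = new HRegionInfo(tableDesc, startKey, endKey, false);
    // New overload: pass the id explicitly, e.g. clamped to stay above the parent's.
    long rid = Math.max(System.currentTimeMillis(), parent.getRegionId() + 1);
    HRegionInfo daughterA = new HRegionInfo(tableDesc, startKey, midKey, false, rid);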
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchOperation.java Thu Jun 26 10:06:31 2008
@@ -103,6 +103,11 @@
   public boolean isPut() {
     return this.value != null;
   }
+
+  @Override
+  public String toString() {
+    return "column => " + Bytes.toString(this.column) + ", value => '...'";
+  }
   
   // Writable methods
 

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/io/BatchUpdate.java Thu Jun 26 10:06:31 2008
@@ -208,6 +208,24 @@
     return operations.iterator();
   }
   
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("row => ");
+    sb.append(row == null? "": Bytes.toString(row));
+    sb.append(", {");
+    boolean morethanone = false;
+    for (BatchOperation bo: this.operations) {
+      if (morethanone) {
+        sb.append(", ");
+      }
+      morethanone = true;
+      sb.append(bo.toString());
+    }
+    sb.append("}");
+    return sb.toString();
+  }
+
   //
   // Writable
   //

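Together with the new BatchOperation.toString() above (which prints the column as text and elides the value, presumably because values are arbitrary byte [] and may not be printable), a BatchUpdate now renders as a readable row plus its operations. A minimal usage sketch, assuming the byte []-based constructor and put() overload on BatchUpdate (row and column values are made up):

    BatchUpdate bu = new BatchUpdate(Bytes.toBytes("row1"));
    bu.put(Bytes.toBytes("info:server"), Bytes.toBytes("host1:60020"));
    LOG.debug(bu.toString());
    // logs something like: row => row1, {column => info:server, value => '...'}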
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/master/BaseScanner.java Thu Jun 26 10:06:31 2008
@@ -335,7 +335,7 @@
     
     if (LOG.isDebugEnabled()) {
       LOG.debug(split.getRegionNameAsString() +
-        " no longer has references to " + parent.toString());
+        " no longer has references to " + Bytes.toString(parent));
     }
     
     BatchUpdate b = new BatchUpdate(parent);

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/CompactSplitThread.java Thu Jun 26 10:06:31 2008
@@ -93,15 +93,14 @@
         continue;
       } catch (IOException ex) {
         LOG.error("Compaction failed" +
-            (r != null ? (" for region " + r.getRegionName()) : ""),
+            (r != null ? (" for region " + Bytes.toString(r.getRegionName())) : ""),
             RemoteExceptionHandler.checkIOException(ex));
         if (!server.checkFileSystem()) {
           break;
         }
-
       } catch (Exception ex) {
         LOG.error("Compaction failed" +
-            (r != null ? (" for region " + r.getRegionName()) : ""),
+            (r != null ? (" for region " + Bytes.toString(r.getRegionName())) : ""),
             ex);
         if (!server.checkFileSystem()) {
           break;
@@ -154,7 +153,7 @@
       }
       t = meta;
     }
-    LOG.info("Updating " + t.getTableName() + " with region split info");
+    LOG.info("Updating " + Bytes.toString(t.getTableName()) + " with region split info");
 
     // Mark old region as offline and split in META.
     // NOTE: there is no need for retry logic here. HTable does it for us.
@@ -197,4 +196,4 @@
       this.interrupt();
     }
   }
-}
\ No newline at end of file
+}

Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java Thu Jun 26 10:06:31 2008
@@ -723,15 +723,23 @@
       if(!this.fs.exists(splits)) {
         this.fs.mkdirs(splits);
       }
+      // Calculate regionid to use.  Can't be less than that of parent else
+      // it'll insert into wrong location over in .META. table: HBASE-710.
+      long rid = System.currentTimeMillis();
+      if (rid < this.regionInfo.getRegionId()) {
+        LOG.warn("Clock skew; parent regions id is " +
+          this.regionInfo.getRegionId() + " but current time here is " + rid);
+        rid = this.regionInfo.getRegionId() + 1;
+      }
       HRegionInfo regionAInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
-        startKey, midKey);
+        startKey, midKey, false, rid);
       Path dirA =
         new Path(splits, Integer.toString(regionAInfo.getEncodedName()));
       if(fs.exists(dirA)) {
         throw new IOException("Cannot split; target file collision at " + dirA);
       }
       HRegionInfo regionBInfo = new HRegionInfo(this.regionInfo.getTableDesc(),
-        midKey, endKey);
+        midKey, endKey, false, rid);
       Path dirB =
         new Path(splits, Integer.toString(regionBInfo.getEncodedName()));
       if(this.fs.exists(dirB)) {

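Why the region id matters for HBASE-710: a region name is built from the table name, start key and region id (see createRegionName in the HRegionInfo.java hunk above), and the daughters are inserted into .META. under those names. Daughter A keeps the parent's start key, so if clock skew hands it a smaller id its row can sort ahead of the parent's. A rough illustration with made-up names:

    String parent    = "mytable,aaa,1214500000000";
    String daughterA = "mytable,aaa,1214499999999";  // skewed clock produced a smaller id
    // daughterA.compareTo(parent) < 0, so the daughter row would land before its
    // parent in .META.; the patch instead bumps the daughter's id to parent id + 1.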
Modified: hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java
URL: http://svn.apache.org/viewvc/hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java?rev=671951&r1=671950&r2=671951&view=diff
==============================================================================
--- hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java (original)
+++ hadoop/hbase/trunk/src/java/org/apache/hadoop/hbase/util/SoftSortedMap.java Thu Jun 26 10:06:31 2008
@@ -29,6 +29,7 @@
 import java.util.Comparator;
 import java.util.Collection;
 import java.util.ArrayList;
+import org.apache.hadoop.hbase.util.Bytes;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -178,8 +179,9 @@
     Object obj;
     while((obj = referenceQueue.poll()) != null) {
       if (LOG.isDebugEnabled()) {
-        LOG.debug("Reference for key " + ((SoftValue<K,V>)obj).key.toString() +
-          " has been cleared.");
+        Object k = ((SoftValue<K,V>)obj).key;
+        String name = (k instanceof byte [])? Bytes.toString((byte [])k): k.toString();
+        LOG.debug("Reference for key " + name + " has been cleared.");
       }
       internalMap.remove(((SoftValue<K,V>)obj).key);
     }
@@ -203,4 +205,4 @@
       throw new RuntimeException("Not implemented");
     }
   }
-}
\ No newline at end of file
+}