You are viewing a plain text version of this content; the canonical link to the original (HTML) version was lost when this page was converted to text.
Posted to commits@hbase.apache.org by te...@apache.org on 2011/07/31 03:06:05 UTC
svn commit: r1152532 - in /hbase/trunk: CHANGES.txt
src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
Author: tedyu
Date: Sun Jul 31 01:06:02 2011
New Revision: 1152532
URL: http://svn.apache.org/viewvc?rev=1152532&view=rev
Log:
HBASE-4032 HBASE-451 improperly breaks public API HRegionInfo#getTableDesc
Modified:
hbase/trunk/CHANGES.txt
hbase/trunk/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
Modified: hbase/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hbase/trunk/CHANGES.txt?rev=1152532&r1=1152531&r2=1152532&view=diff
==============================================================================
--- hbase/trunk/CHANGES.txt (original)
+++ hbase/trunk/CHANGES.txt Sun Jul 31 01:06:02 2011
@@ -181,6 +181,7 @@ Release 0.91.0 - Unreleased
HBASE-4138 If zookeeper.znode.parent is not specifed explicitly in Client
code then HTable object loops continuously waiting for the root region
by using /hbase as the base node.(ramkrishna.s.vasudevan)
+ HBASE-4032 HBASE-451 improperly breaks public API HRegionInfo#getTableDesc
IMPROVEMENTS
HBASE-3290 Max Compaction Size (Nicolas Spiegelberg via Stack)
Modified: hbase/trunk/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java?rev=1152532&r1=1152531&r2=1152532&view=diff
==============================================================================
--- hbase/trunk/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java (original)
+++ hbase/trunk/src/main/java/org/apache/hadoop/hbase/HRegionInfo.java Sun Jul 31 01:06:02 2011
@@ -26,9 +26,13 @@ import java.util.Arrays;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.migration.HRegionInfo090x;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.JenkinsHash;
import org.apache.hadoop.hbase.util.MD5Hash;
import org.apache.hadoop.io.VersionedWritable;
@@ -535,18 +539,50 @@ public class HRegionInfo extends Version
Bytes.equals(endKey, HConstants.EMPTY_BYTE_ARRAY));
}
- /** @return the tableDesc */
+ /**
+ * @return the tableDesc
+ * @deprecated Do not use; expensive call
+ * use HRegionInfo.getTableNameAsString() in place of
+ * HRegionInfo.getTableDesc().getNameAsString()
+ */
@Deprecated
- public HTableDescriptor getTableDesc(){
- return null;
+ public HTableDescriptor getTableDesc() {
+ Configuration c = HBaseConfiguration.create();
+ FileSystem fs;
+ try {
+ fs = FileSystem.get(c);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ FSTableDescriptors fstd =
+ new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
+ try {
+ return fstd.get(this.tableName);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
}
/**
* @param newDesc new table descriptor to use
+ * @deprecated Do not use; expensive call
*/
@Deprecated
public void setTableDesc(HTableDescriptor newDesc) {
- // do nothing.
+ Configuration c = HBaseConfiguration.create();
+ FileSystem fs;
+ try {
+ fs = FileSystem.get(c);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ FSTableDescriptors fstd =
+ new FSTableDescriptors(fs, new Path(c.get(HConstants.HBASE_DIR)));
+ try {
+ fstd.add(newDesc);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
}
/** @return true if this is the root region */
Modified: hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java?rev=1152532&r1=1152531&r2=1152532&view=diff
==============================================================================
--- hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java (original)
+++ hbase/trunk/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionInfo.java Sun Jul 31 01:06:02 2011
@@ -19,13 +19,20 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.MD5Hash;
-
import org.junit.Test;
-import static org.junit.Assert.*;
public class TestHRegionInfo {
@Test
@@ -53,6 +60,25 @@ public class TestHRegionInfo {
}
@Test
+ public void testGetSetOfHTD() {
+ HBaseTestingUtility HTU = new HBaseTestingUtility();
+ final String tablename = "testGetSetOfHTD";
+ HTableDescriptor htd = new HTableDescriptor(tablename);
+ FSUtils.createTableDescriptor(htd, HTU.getConfiguration());
+ HRegionInfo hri = new HRegionInfo(Bytes.toBytes("testGetSetOfHTD"),
+ HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
+ HTableDescriptor htd2 = hri.getTableDesc();
+ assertTrue(htd.equals(htd2));
+ final String key = "SOME_KEY";
+ assertNull(htd.getValue(key));
+ final String value = "VALUE";
+ htd.setValue(key, value);
+ hri.setTableDesc(htd);
+ HTableDescriptor htd3 = hri.getTableDesc();
+ assertTrue(htd.equals(htd3));
+ }
+
+ @Test
public void testContainsRange() {
HTableDescriptor tableDesc = new HTableDescriptor("testtable");
HRegionInfo hri = new HRegionInfo(