Posted to commits@hbase.apache.org by st...@apache.org on 2014/10/30 17:16:48 UTC

git commit: HBASE-12375 LoadIncrementalHFiles fails to load data in table when CF name starts with '_'

Repository: hbase
Updated Branches:
  refs/heads/master 8b84840d5 -> 87939889b


HBASE-12375 LoadIncrementalHFiles fails to load data in table when CF name starts with '_'

Signed-off-by: stack <st...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/87939889
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/87939889
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/87939889

Branch: refs/heads/master
Commit: 87939889bb19817493027fb84ca2c2b76a4e384e
Parents: 8b84840
Author: Ashish Singhi <as...@huawei.com>
Authored: Thu Oct 30 12:33:07 2014 +0530
Committer: stack <st...@apache.org>
Committed: Thu Oct 30 09:16:37 2014 -0700

----------------------------------------------------------------------
 .../hbase/mapreduce/LoadIncrementalHFiles.java  |  4 ---
 .../mapreduce/TestLoadIncrementalHFiles.java    | 28 ++++++++++++++++++++
 2 files changed, 28 insertions(+), 4 deletions(-)
----------------------------------------------------------------------
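The two deleted checks (one per hunk below) were meant to skip MapReduce job artifacts such as _logs or _SUCCESS inside the HFileOutputFormat output directory, but they also skipped any legitimate column family directory whose name begins with '_'. A minimal, self-contained sketch of that filtering effect, using made-up directory names (this is illustrative only, not HBase source):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class UnderscoreFilterSketch {
  public static void main(String[] args) {
    // Directory names as they might appear under a bulk-load output directory.
    List<String> dirNames = Arrays.asList("_logs", "_SUCCESS", "cf1", "_cf");
    List<String> treatedAsFamilies = new ArrayList<String>();
    for (String name : dirNames) {
      if (name.startsWith("_")) {
        continue; // the removed check: it also drops the legitimate family "_cf"
      }
      treatedAsFamilies.add(name);
    }
    System.out.println(treatedAsFamilies); // prints [cf1] -- "_cf" is silently skipped
  }
}

With the check removed, a directory named "_cf" is treated as a column family like any other, which is the behavior the new test verifies.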


http://git-wip-us.apache.org/repos/asf/hbase/blob/87939889/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 855417d..8376e85 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -203,8 +203,6 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
         continue;
       }
       Path familyDir = stat.getPath();
-      // Skip _logs, etc
-      if (familyDir.getName().startsWith("_")) continue;
       byte[] family = familyDir.getName().getBytes();
       Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(familyDir));
       for (Path hfile : hfiles) {
@@ -850,8 +848,6 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
         continue;
       }
       Path familyDir = stat.getPath();
-      // Skip _logs, etc
-      if (familyDir.getName().startsWith("_")) continue;
       byte[] family = familyDir.getName().getBytes();
 
       hcd = new HColumnDescriptor(family);

http://git-wip-us.apache.org/repos/asf/hbase/blob/87939889/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
index d3019ce..a9a75c8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
@@ -432,5 +432,33 @@ public class TestLoadIncrementalHFiles {
     String[] args = { "directory", "nonExistingTable" };
     loader.run(args);
   }
+
+  @Test
+  public void testTableWithCFNameStartWithUnderScore() throws Exception {
+    Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore");
+    FileSystem fs = util.getTestFileSystem();
+    dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
+    String family = "_cf";
+    Path familyDir = new Path(dir, family);
+
+    byte[] from = Bytes.toBytes("begin");
+    byte[] to = Bytes.toBytes("end");
+    Configuration conf = util.getConfiguration();
+    String tableName = "mytable_cfNameStartWithUnderScore";
+    Table table = util.createTable(TableName.valueOf(tableName), family);
+    HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family),
+      QUALIFIER, from, to, 1000);
+
+    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
+    String[] args = { dir.toString(), tableName };
+    try {
+      loader.run(args);
+      assertEquals(1000, util.countRows(table));
+    } finally {
+      if (null != table) {
+        table.close();
+      }
+    }
+  }
 }
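For reference, the same Tool entry point the test drives via run(args) can be invoked programmatically; a minimal sketch, assuming an HBase client configuration is on the classpath (the directory path and table name below are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.util.ToolRunner;

public class BulkLoadUnderscoreFamily {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // "/bulk/out" is a hypothetical HFileOutputFormat output directory that
    // contains a family subdirectory named "_cf"; "mytable" is hypothetical too.
    int rc = ToolRunner.run(conf, new LoadIncrementalHFiles(conf),
        new String[] { "/bulk/out", "mytable" });
    System.exit(rc);
  }
}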