You are viewing a plain text version of this content. The canonical link for it is the commit URL given below (http://svn.apache.org/r1574736).
Posted to commits@hbase.apache.org by te...@apache.org on 2014/03/06 02:09:34 UTC
svn commit: r1574736 -
/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
Author: tedyu
Date: Thu Mar 6 01:09:34 2014
New Revision: 1574736
URL: http://svn.apache.org/r1574736
Log:
HBASE-10615 Make LoadIncrementalHFiles skip reference files (Jerry He)
Modified:
hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java?rev=1574736&r1=1574735&r2=1574736&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java Thu Mar 6 01:09:34 2014
@@ -67,6 +67,7 @@ import org.apache.hadoop.hbase.client.HT
import org.apache.hadoop.hbase.client.RegionServerCallable;
import org.apache.hadoop.hbase.client.RpcRetryingCallerFactory;
import org.apache.hadoop.hbase.client.coprocessor.SecureBulkLoadClient;
+import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.io.HalfStoreFileReader;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
@@ -79,6 +80,7 @@ import org.apache.hadoop.hbase.protobuf.
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.util.Bytes;
@@ -184,7 +186,17 @@ public class LoadIncrementalHFiles exten
byte[] family = familyDir.getName().getBytes();
Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(familyDir));
for (Path hfile : hfiles) {
- if (hfile.getName().startsWith("_")) continue;
+ // Skip "_", reference, HFileLink
+ String fileName = hfile.getName();
+ if (fileName.startsWith("_")) continue;
+ if (StoreFileInfo.isReference(fileName)) {
+ LOG.warn("Skipping reference " + fileName);
+ continue;
+ }
+ if (HFileLink.isHFileLink(fileName)) {
+ LOG.warn("Skipping HFileLink " + fileName);
+ continue;
+ }
ret.add(new LoadQueueItem(family, hfile));
}
}
@@ -791,7 +803,9 @@ public class LoadIncrementalHFiles exten
Path[] hfiles = FileUtil.stat2Paths(fs.listStatus(familyDir));
for (Path hfile : hfiles) {
- if (hfile.getName().startsWith("_")) continue;
+ String fileName = hfile.getName();
+ if (fileName.startsWith("_") || StoreFileInfo.isReference(fileName)
+ || HFileLink.isHFileLink(fileName)) continue;
HFile.Reader reader = HFile.createReader(fs, hfile,
new CacheConfig(getConf()), getConf());
final byte[] first, last;