Posted to commits@hbase.apache.org by mb...@apache.org on 2013/03/05 19:25:45 UTC
svn commit: r1452936 [2/2] - in /hbase/trunk/hbase-server/src:
main/java/org/apache/hadoop/hbase/backup/
main/java/org/apache/hadoop/hbase/io/
main/java/org/apache/hadoop/hbase/master/
main/java/org/apache/hadoop/hbase/master/balancer/ main/java/org/ap...
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java Tue Mar 5 18:25:44 2013
@@ -30,8 +30,6 @@ import java.util.Map;
import java.util.SortedSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -46,9 +44,6 @@ import org.apache.hadoop.hbase.HDFSBlock
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValue.KVComparator;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.fs.HFileSystem;
-import org.apache.hadoop.hbase.io.HFileLink;
-import org.apache.hadoop.hbase.io.HalfStoreFileReader;
import org.apache.hadoop.hbase.io.Reference;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
@@ -64,7 +59,6 @@ import org.apache.hadoop.hbase.util.Bloo
import org.apache.hadoop.hbase.util.BloomFilterWriter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
-import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableUtils;
@@ -125,29 +119,15 @@ public class StoreFile {
// Need to make it 8k for testing.
public static final int DEFAULT_BLOCKSIZE_SMALL = 8 * 1024;
+ private final StoreFileInfo fileInfo;
private final FileSystem fs;
- // This file's path.
- private final Path path;
-
- // If this storefile references another, this is the reference instance.
- private Reference reference;
-
- // If this StoreFile references another, this is the other files path.
- private Path referencePath;
-
- // If this storefile is a link to another, this is the link instance.
- private HFileLink link;
-
// Block cache configuration and reference.
private final CacheConfig cacheConf;
// What kind of data block encoding will be used
private final HFileDataBlockEncoder dataBlockEncoder;
- // HDFS blocks distribution information
- private HDFSBlocksDistribution hdfsBlocksDistribution;
-
// Keys for metadata stored in backing HFile.
// Set when we obtain a Reader.
private long sequenceid = -1;
@@ -183,27 +163,6 @@ public class StoreFile {
*/
private Map<byte[], byte[]> metadataMap;
- /**
- * A non-capture group, for hfiles, so that this can be embedded.
- * HFiles are uuid ([0-9a-z]+). Bulk loaded hfiles has (_SeqId_[0-9]+_) has suffix.
- */
- public static final String HFILE_NAME_REGEX = "[0-9a-f]+(?:_SeqId_[0-9]+_)?";
-
- /** Regex that will work for hfiles */
- private static final Pattern HFILE_NAME_PATTERN =
- Pattern.compile("^(" + HFILE_NAME_REGEX + ")");
-
- /**
- * Regex that will work for straight reference names (<hfile>.<parentEncRegion>)
- * and hfilelink reference names (<table>=<region>-<hfile>.<parentEncRegion>)
- * If reference, then the regex has more than just one group.
- * Group 1, hfile/hfilelink pattern, is this file's id.
- * Group 2 '(.+)' is the reference's parent region name.
- */
- private static final Pattern REF_NAME_PATTERN =
- Pattern.compile(String.format("^(%s|%s)\\.(.+)$",
- HFILE_NAME_REGEX, HFileLink.LINK_NAME_REGEX));
-
// StoreFile.Reader
private volatile Reader reader;
@@ -232,64 +191,63 @@ public class StoreFile {
* @param dataBlockEncoder data block encoding algorithm.
* @throws IOException When opening the reader fails.
*/
- public StoreFile(final FileSystem fs,
- final Path p,
- final Configuration conf,
- final CacheConfig cacheConf,
- final BloomType cfBloomType,
- final HFileDataBlockEncoder dataBlockEncoder)
- throws IOException {
+ public StoreFile(final FileSystem fs, final Path p, final Configuration conf,
+ final CacheConfig cacheConf, final BloomType cfBloomType,
+ final HFileDataBlockEncoder dataBlockEncoder) throws IOException {
+ this(fs, new StoreFileInfo(conf, fs, p), conf, cacheConf, cfBloomType, dataBlockEncoder);
+ }
+
+
+ /**
+ * Constructor, loads a reader and its indices, etc. May allocate a
+ * substantial amount of ram depending on the underlying files (10-20MB?).
+ *
+ * @param fs The current file system to use.
+ * @param fileInfo The store file information.
+ * @param conf The current configuration.
+ * @param cacheConf The cache configuration and block cache reference.
+ * @param cfBloomType The bloom type to use for this store file as specified
+ * by column family configuration. This may or may not be the same
+ * as the Bloom filter type actually present in the HFile, because
+ * column family configuration might change. If this is
+ * {@link BloomType#NONE}, the existing Bloom filter is ignored.
+ * @param dataBlockEncoder data block encoding algorithm.
+ * @throws IOException When opening the reader fails.
+ */
+ public StoreFile(final FileSystem fs, final StoreFileInfo fileInfo, final Configuration conf,
+ final CacheConfig cacheConf, final BloomType cfBloomType,
+ final HFileDataBlockEncoder dataBlockEncoder) throws IOException {
this.fs = fs;
- this.path = p;
+ this.fileInfo = fileInfo;
this.cacheConf = cacheConf;
this.dataBlockEncoder =
dataBlockEncoder == null ? NoOpDataBlockEncoder.INSTANCE
: dataBlockEncoder;
- if (HFileLink.isHFileLink(p)) {
- this.link = new HFileLink(conf, p);
- LOG.debug("Store file " + p + " is a link");
- } else if (isReference(p)) {
- this.reference = Reference.read(fs, p);
- this.referencePath = getReferredToFile(this.path);
- if (HFileLink.isHFileLink(this.referencePath)) {
- this.link = new HFileLink(conf, this.referencePath);
- }
- LOG.debug("Store file " + p + " is a " + reference.getFileRegion() +
- " reference to " + this.referencePath);
- } else if (!isHFile(p)) {
- throw new IOException("path=" + path + " doesn't look like a valid StoreFile");
- }
-
if (BloomFilterFactory.isGeneralBloomEnabled(conf)) {
this.cfBloomType = cfBloomType;
} else {
- LOG.info("Ignoring bloom filter check for file " + path + ": " +
+ LOG.info("Ignoring bloom filter check for file " + this.getPath() + ": " +
"cfBloomType=" + cfBloomType + " (disabled in config)");
this.cfBloomType = BloomType.NONE;
}
// cache the modification time stamp of this store file
- FileStatus[] stats = FSUtils.listStatus(fs, p, null);
- if (stats != null && stats.length == 1) {
- this.modificationTimeStamp = stats[0].getModificationTime();
- } else {
- this.modificationTimeStamp = 0;
- }
+ this.modificationTimeStamp = fileInfo.getModificationTime();
}
/**
* @return Path or null if this StoreFile was made with a Stream.
*/
public Path getPath() {
- return this.path;
+ return this.fileInfo.getPath();
}
/**
* @return The Store/ColumnFamily this file belongs to.
*/
byte [] getFamily() {
- return Bytes.toBytes(this.path.getParent().getName());
+ return Bytes.toBytes(this.getPath().getParent().getName());
}
/**
@@ -297,64 +255,7 @@ public class StoreFile {
* else may get wrong answer.
*/
public boolean isReference() {
- return this.reference != null;
- }
-
- /**
- * @return <tt>true</tt> if this StoreFile is an HFileLink
- */
- boolean isLink() {
- return this.link != null && this.reference == null;
- }
-
- private static boolean isHFile(final Path path) {
- Matcher m = HFILE_NAME_PATTERN.matcher(path.getName());
- return m.matches() && m.groupCount() > 0;
- }
-
- /**
- * @param p Path to check.
- * @return True if the path has format of a HStoreFile reference.
- */
- public static boolean isReference(final Path p) {
- return isReference(p.getName());
- }
-
- /**
- * @param name file name to check.
- * @return True if the path has format of a HStoreFile reference.
- */
- public static boolean isReference(final String name) {
- Matcher m = REF_NAME_PATTERN.matcher(name);
- return m.matches() && m.groupCount() > 1;
- }
-
- /*
- * Return path to the file referred to by a Reference. Presumes a directory
- * hierarchy of <code>${hbase.rootdir}/tablename/regionname/familyname</code>.
- * @param p Path to a Reference file.
- * @return Calculated path to parent region file.
- * @throws IllegalArgumentException when path regex fails to match.
- */
- public static Path getReferredToFile(final Path p) {
- Matcher m = REF_NAME_PATTERN.matcher(p.getName());
- if (m == null || !m.matches()) {
- LOG.warn("Failed match of store file name " + p.toString());
- throw new IllegalArgumentException("Failed match of store file name " +
- p.toString());
- }
-
- // Other region name is suffix on the passed Reference file name
- String otherRegion = m.group(2);
- // Tabledir is up two directories from where Reference was written.
- Path tableDir = p.getParent().getParent().getParent();
- String nameStrippedOfSuffix = m.group(1);
- LOG.debug("reference '" + p + "' to region=" + otherRegion + " hfile=" + nameStrippedOfSuffix);
-
- // Build up new path with the referenced region in place of our current
- // region in the reference path. Also strip regionname suffix from name.
- return new Path(new Path(new Path(tableDir, otherRegion),
- p.getParent().getName()), nameStrippedOfSuffix);
+ return this.fileInfo.isReference();
}
/**
@@ -444,65 +345,7 @@ public class StoreFile {
* calculated when store file is opened.
*/
public HDFSBlocksDistribution getHDFSBlockDistribution() {
- return this.hdfsBlocksDistribution;
- }
-
- /**
- * helper function to compute HDFS blocks distribution of a given reference
- * file.For reference file, we don't compute the exact value. We use some
- * estimate instead given it might be good enough. we assume bottom part
- * takes the first half of reference file, top part takes the second half
- * of the reference file. This is just estimate, given
- * midkey ofregion != midkey of HFile, also the number and size of keys vary.
- * If this estimate isn't good enough, we can improve it later.
- * @param fs The FileSystem
- * @param reference The reference
- * @param status The reference FileStatus
- * @return HDFS blocks distribution
- */
- static private HDFSBlocksDistribution computeRefFileHDFSBlockDistribution(
- FileSystem fs, Reference reference, FileStatus status) throws IOException {
- if (status == null) {
- return null;
- }
-
- long start = 0;
- long length = 0;
-
- if (Reference.isTopFileRegion(reference.getFileRegion())) {
- start = status.getLen()/2;
- length = status.getLen() - status.getLen()/2;
- } else {
- start = 0;
- length = status.getLen()/2;
- }
- return FSUtils.computeHDFSBlocksDistribution(fs, status, start, length);
- }
-
- /**
- * compute HDFS block distribution, for reference file, it is an estimate
- */
- private void computeHDFSBlockDistribution() throws IOException {
- if (isReference()) {
- FileStatus status;
- if (this.link != null) {
- status = this.link.getFileStatus(fs);
- } else {
- status = fs.getFileStatus(this.referencePath);
- }
- this.hdfsBlocksDistribution = computeRefFileHDFSBlockDistribution(
- this.fs, this.reference, status);
- } else {
- FileStatus status;
- if (isLink()) {
- status = link.getFileStatus(fs);
- } else {
- status = this.fs.getFileStatus(path);
- }
- long length = status.getLen();
- this.hdfsBlocksDistribution = FSUtils.computeHDFSBlocksDistribution(
- this.fs, status, 0, length);
- }
+ return this.fileInfo.getHDFSBlockDistribution();
}
/**
@@ -515,24 +358,9 @@ public class StoreFile {
if (this.reader != null) {
throw new IllegalAccessError("Already open");
}
- if (isReference()) {
- if (this.link != null) {
- this.reader = new HalfStoreFileReader(this.fs, this.referencePath, this.link,
- this.cacheConf, this.reference, dataBlockEncoder.getEncodingInCache());
- } else {
- this.reader = new HalfStoreFileReader(this.fs, this.referencePath,
- this.cacheConf, this.reference, dataBlockEncoder.getEncodingInCache());
- }
- } else if (isLink()) {
- long size = link.getFileStatus(fs).getLen();
- this.reader = new Reader(this.fs, this.path, link, size, this.cacheConf,
- dataBlockEncoder.getEncodingInCache(), true);
- } else {
- this.reader = new Reader(this.fs, this.path, this.cacheConf,
- dataBlockEncoder.getEncodingInCache());
- }
- computeHDFSBlockDistribution();
+ // Open the StoreFile.Reader
+ this.reader = fileInfo.open(this.fs, this.cacheConf, dataBlockEncoder.getEncodingInCache());
// Load up indices and fileinfo. This also loads Bloom filter type.
metadataMap = Collections.unmodifiableMap(this.reader.loadFileInfo());
@@ -546,26 +374,22 @@ public class StoreFile {
// since store files are distinguished by sequence id, the one half would
// subsume the other.
this.sequenceid = Bytes.toLong(b);
- if (isReference()) {
- if (Reference.isTopFileRegion(this.reference.getFileRegion())) {
- this.sequenceid += 1;
- }
+ if (fileInfo.isTopReference()) {
+ this.sequenceid += 1;
}
}
if (isBulkLoadResult()){
// generate the sequenceId from the fileName
// fileName is of the form <randomName>_SeqId_<id-when-loaded>_
- String fileName = this.path.getName();
+ String fileName = this.getPath().getName();
int startPos = fileName.indexOf("SeqId_");
if (startPos != -1) {
this.sequenceid = Long.parseLong(fileName.substring(startPos + 6,
fileName.indexOf('_', startPos + 6)));
// Handle reference files as done above.
- if (isReference()) {
- if (Reference.isTopFileRegion(this.reference.getFileRegion())) {
- this.sequenceid += 1;
- }
+ if (fileInfo.isTopReference()) {
+ this.sequenceid += 1;
}
}
}
@@ -635,7 +459,7 @@ public class StoreFile {
} catch (IOException e) {
try {
this.closeReader(true);
- } catch (IOException ee) {
+ } catch (IOException ee) {
}
throw e;
}
@@ -675,8 +499,7 @@ public class StoreFile {
@Override
public String toString() {
- return this.path.toString() +
- (isReference()? "-" + this.referencePath + "-" + reference.toString(): "");
+ return this.fileInfo.toString();
}
/**
@@ -684,7 +507,7 @@ public class StoreFile {
*/
public String toStringDetailed() {
StringBuilder sb = new StringBuilder();
- sb.append(this.path.toString());
+ sb.append(this.getPath().toString());
sb.append(", isReference=").append(isReference());
sb.append(", isBulkLoadResult=").append(isBulkLoadResult());
if (isBulkLoadResult()) {
@@ -869,48 +692,7 @@ public class StoreFile {
throw new IOException("Expecting " + dir.toString() +
" to be a directory");
}
- return getRandomFilename(fs, dir);
- }
-
- /**
- *
- * @param fs
- * @param dir
- * @return Path to a file that doesn't exist at time of this invocation.
- * @throws IOException
- */
- static Path getRandomFilename(final FileSystem fs, final Path dir)
- throws IOException {
- return getRandomFilename(fs, dir, null);
- }
-
- /**
- *
- * @param fs
- * @param dir
- * @param suffix
- * @return Path to a file that doesn't exist at time of this invocation.
- * @throws IOException
- */
- static Path getRandomFilename(final FileSystem fs,
- final Path dir,
- final String suffix)
- throws IOException {
- return new Path(dir, UUID.randomUUID().toString().replaceAll("-", "")
- + (suffix == null ? "" : suffix));
- }
-
- /**
- * Validate the store file name.
- * @param fileName name of the file to validate
- * @return <tt>true</tt> if the file could be a valid store file, <tt>false</tt> otherwise
- */
- public static boolean validateStoreFileName(String fileName) {
- if (HFileLink.isHFileLink(fileName))
- return true;
- if (isReference(fileName))
- return true;
- return !fileName.contains("-");
+ return new Path(dir, UUID.randomUUID().toString().replaceAll("-", ""));
}
/**
@@ -1336,17 +1118,10 @@ public class StoreFile {
bloomFilterType = BloomType.NONE;
}
- public Reader(FileSystem fs, Path path, HFileLink hfileLink, long size,
+ public Reader(FileSystem fs, Path path, FSDataInputStream in,
+ final FSDataInputStream inNoChecksum, long size,
CacheConfig cacheConf, DataBlockEncoding preferredEncodingInCache,
boolean closeIStream) throws IOException {
-
- FSDataInputStream in = hfileLink.open(fs);
- FSDataInputStream inNoChecksum = in;
- if (fs instanceof HFileSystem) {
- FileSystem noChecksumFs = ((HFileSystem)fs).getNoChecksumFs();
- inNoChecksum = hfileLink.open(noChecksumFs);
- }
-
reader = HFile.createReaderWithEncoding(fs, path, in, inNoChecksum,
size, cacheConf, preferredEncodingInCache, closeIStream);
bloomFilterType = BloomType.NONE;
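For readers tracking the API change above: the old Path-based constructor now just wraps the new one, so opening a store file becomes a two-step affair -- resolve the file through StoreFileInfo, then hand the info to StoreFile. A minimal sketch under assumed setup (the configuration, cache config and file path below are illustrative, not taken from the patch; the call shape mirrors the TestStoreFile change later in this commit):

  Configuration conf = HBaseConfiguration.create();
  FileSystem fs = FileSystem.get(conf);
  CacheConfig cacheConf = new CacheConfig(conf);
  // Hypothetical store file path: ${hbase.rootdir}/table/region/family/hfile
  Path p = new Path("/hbase/MyTable/1234abcd5678ef90/cf/0123456789abcdef");

  // Resolve the file once: StoreFileInfo works out whether p is a plain hfile,
  // a Reference or an HFileLink, and keeps that state instead of StoreFile.
  StoreFileInfo info = new StoreFileInfo(conf, fs, p);
  StoreFile sf = new StoreFile(fs, info, conf, cacheConf,
      BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
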
Added: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java?rev=1452936&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java (added)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileInfo.java Tue Mar 5 18:25:44 2013
@@ -0,0 +1,378 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HDFSBlocksDistribution;
+import org.apache.hadoop.hbase.fs.HFileSystem;
+import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.io.Reference;
+import org.apache.hadoop.hbase.io.HalfStoreFileReader;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.util.FSUtils;
+
+/**
+ * Describe a StoreFile (hfile, reference, link)
+ */
+@InterfaceAudience.Private
+public class StoreFileInfo {
+ public static final Log LOG = LogFactory.getLog(StoreFileInfo.class);
+
+ /**
+ * A non-capture group, for hfiles, so that this can be embedded.
+ * HFiles are uuid ([0-9a-z]+). Bulk loaded hfiles have a (_SeqId_[0-9]+_) suffix.
+ */
+ public static final String HFILE_NAME_REGEX = "[0-9a-f]+(?:_SeqId_[0-9]+_)?";
+
+ /** Regex that will work for hfiles */
+ private static final Pattern HFILE_NAME_PATTERN =
+ Pattern.compile("^(" + HFILE_NAME_REGEX + ")");
+
+ /**
+ * Regex that will work for straight reference names (<hfile>.<parentEncRegion>)
+ * and hfilelink reference names (<table>=<region>-<hfile>.<parentEncRegion>).
+ * If the name is a reference, the regex matches more than one group.
+ * Group 1, hfile/hfilelink pattern, is this file's id.
+ * Group 2 '(.+)' is the reference's parent region name.
+ */
+ private static final Pattern REF_NAME_PATTERN =
+ Pattern.compile(String.format("^(%s|%s)\\.(.+)$",
+ HFILE_NAME_REGEX, HFileLink.LINK_NAME_REGEX));
+
+ // HDFS blocks distribution information
+ private HDFSBlocksDistribution hdfsBlocksDistribution = null;
+
+ // If this storefile references another, this is the reference instance.
+ private final Reference reference;
+
+ // If this storefile is a link to another, this is the link instance.
+ private final HFileLink link;
+
+ // FileSystem information for the file.
+ private final FileStatus fileStatus;
+
+ /**
+ * Create a Store File Info
+ * @param conf the {@link Configuration} to use
+ * @param fs The current file system to use.
+ * @param path The {@link Path} of the file
+ */
+ public StoreFileInfo(final Configuration conf, final FileSystem fs, final Path path)
+ throws IOException {
+ this(conf, fs, fs.getFileStatus(path));
+ }
+
+ /**
+ * Create a Store File Info
+ * @param conf the {@link Configuration} to use
+ * @param fs The current file system to use.
+ * @param fileStatus The {@link FileStatus} of the file
+ */
+ public StoreFileInfo(final Configuration conf, final FileSystem fs, final FileStatus fileStatus)
+ throws IOException {
+ this.fileStatus = fileStatus;
+
+ Path p = fileStatus.getPath();
+ if (HFileLink.isHFileLink(p)) {
+ // HFileLink
+ this.reference = null;
+ this.link = new HFileLink(conf, p);
+ LOG.debug("Store file " + p + " is a link");
+ } else if (isReference(p)) {
+ this.reference = Reference.read(fs, p);
+ Path referencePath = getReferredToFile(p);
+ if (HFileLink.isHFileLink(referencePath)) {
+ // HFileLink Reference
+ this.link = new HFileLink(conf, referencePath);
+ } else {
+ // Reference
+ this.link = null;
+ }
+ LOG.debug("Store file " + p + " is a " + reference.getFileRegion() +
+ " reference to " + referencePath);
+ } else if (isHFile(p)) {
+ // HFile
+ this.reference = null;
+ this.link = null;
+ } else {
+ throw new IOException("path=" + p + " doesn't look like a valid StoreFile");
+ }
+ }
+
+ /** @return True if the store file is a Reference */
+ public boolean isReference() {
+ return this.reference != null;
+ }
+
+ /** @return True if the store file is a top Reference */
+ public boolean isTopReference() {
+ return this.reference != null && Reference.isTopFileRegion(this.reference.getFileRegion());
+ }
+
+ /** @return True if the store file is a link */
+ public boolean isLink() {
+ return this.link != null && this.reference == null;
+ }
+
+ /** @return the HDFS block distribution */
+ public HDFSBlocksDistribution getHDFSBlockDistribution() {
+ return this.hdfsBlocksDistribution;
+ }
+
+ /**
+ * Open a Reader for the StoreFile
+ * @param fs The current file system to use.
+ * @param cacheConf The cache configuration and block cache reference.
+ * @param dataBlockEncoding data block encoding algorithm.
+ * @return The StoreFile.Reader for the file
+ */
+ public StoreFile.Reader open(final FileSystem fs, final CacheConfig cacheConf,
+ final DataBlockEncoding dataBlockEncoding) throws IOException {
+ FSDataInputStream inNoChecksum = null;
+ FileSystem noChecksumFs = null;
+ FSDataInputStream in;
+ FileStatus status;
+
+ if (fs instanceof HFileSystem) {
+ noChecksumFs = ((HFileSystem)fs).getNoChecksumFs();
+ }
+
+ if (this.reference != null) {
+ if (this.link != null) {
+ // HFileLink Reference
+ in = this.link.open(fs);
+ inNoChecksum = (noChecksumFs != null) ? this.link.open(noChecksumFs) : in;
+ status = this.link.getFileStatus(fs);
+ } else {
+ // HFile Reference
+ Path referencePath = getReferredToFile(this.getPath());
+ in = fs.open(referencePath);
+ inNoChecksum = (noChecksumFs != null) ? noChecksumFs.open(referencePath) : in;
+ status = fs.getFileStatus(referencePath);
+ }
+
+ hdfsBlocksDistribution = computeRefFileHDFSBlockDistribution(fs, reference, status);
+ return new HalfStoreFileReader(fs, this.getPath(), in, inNoChecksum, status.getLen(),
+ cacheConf, reference, dataBlockEncoding);
+ } else {
+ if (this.link != null) {
+ // HFileLink
+ in = this.link.open(fs);
+ inNoChecksum = (noChecksumFs != null) ? link.open(noChecksumFs) : in;
+ status = this.link.getFileStatus(fs);
+ } else {
+ // HFile
+ status = fileStatus;
+ in = fs.open(this.getPath());
+ inNoChecksum = (noChecksumFs != null) ? noChecksumFs.open(this.getPath()) : in;
+ }
+
+ long length = status.getLen();
+ hdfsBlocksDistribution = FSUtils.computeHDFSBlocksDistribution(fs, status, 0, length);
+ return new StoreFile.Reader(fs, this.getPath(), in, inNoChecksum, length,
+ cacheConf, dataBlockEncoding, true);
+ }
+ }
+
+ /**
+ * Compute the HDFS Block Distribution for this StoreFile
+ */
+ public HDFSBlocksDistribution computeHDFSBlocksDistribution(final FileSystem fs)
+ throws IOException {
+ FileStatus status;
+ if (this.reference != null) {
+ if (this.link != null) {
+ // HFileLink Reference
+ status = link.getFileStatus(fs);
+ } else {
+ // HFile Reference
+ Path referencePath = getReferredToFile(this.getPath());
+ status = fs.getFileStatus(referencePath);
+ }
+ return computeRefFileHDFSBlockDistribution(fs, reference, status);
+ } else {
+ if (this.link != null) {
+ // HFileLink
+ status = link.getFileStatus(fs);
+ } else {
+ status = this.fileStatus;
+ }
+ return FSUtils.computeHDFSBlocksDistribution(fs, status, 0, status.getLen());
+ }
+ }
+
+ /** @return The {@link Path} of the file */
+ public Path getPath() {
+ return this.fileStatus.getPath();
+ }
+
+ /** @return The {@link FileStatus} of the file */
+ public FileStatus getFileStatus() {
+ return this.fileStatus;
+ }
+
+ /** @return Get the modification time of the file. */
+ public long getModificationTime() {
+ return this.fileStatus.getModificationTime();
+ }
+
+ @Override
+ public String toString() {
+ return this.getPath() +
+ (isReference() ? "-" + getReferredToFile(this.getPath()) + "-" + reference : "");
+ }
+
+ /**
+ * @param path Path to check.
+ * @return True if the path has format of a HFile.
+ */
+ public static boolean isHFile(final Path path) {
+ return isHFile(path.getName());
+ }
+
+ public static boolean isHFile(final String fileName) {
+ Matcher m = HFILE_NAME_PATTERN.matcher(fileName);
+ return m.matches() && m.groupCount() > 0;
+ }
+
+ /**
+ * @param path Path to check.
+ * @return True if the path has format of a HStoreFile reference.
+ */
+ public static boolean isReference(final Path path) {
+ return isReference(path.getName());
+ }
+
+ /**
+ * @param name file name to check.
+ * @return True if the path has format of a HStoreFile reference.
+ */
+ public static boolean isReference(final String name) {
+ Matcher m = REF_NAME_PATTERN.matcher(name);
+ return m.matches() && m.groupCount() > 1;
+ }
+
+ /*
+ * Return path to the file referred to by a Reference. Presumes a directory
+ * hierarchy of <code>${hbase.rootdir}/tablename/regionname/familyname</code>.
+ * @param p Path to a Reference file.
+ * @return Calculated path to parent region file.
+ * @throws IllegalArgumentException when path regex fails to match.
+ */
+ public static Path getReferredToFile(final Path p) {
+ Matcher m = REF_NAME_PATTERN.matcher(p.getName());
+ if (m == null || !m.matches()) {
+ LOG.warn("Failed match of store file name " + p.toString());
+ throw new IllegalArgumentException("Failed match of store file name " +
+ p.toString());
+ }
+
+ // Other region name is suffix on the passed Reference file name
+ String otherRegion = m.group(2);
+ // Tabledir is up two directories from where Reference was written.
+ Path tableDir = p.getParent().getParent().getParent();
+ String nameStrippedOfSuffix = m.group(1);
+ LOG.debug("reference '" + p + "' to region=" + otherRegion + " hfile=" + nameStrippedOfSuffix);
+
+ // Build up new path with the referenced region in place of our current
+ // region in the reference path. Also strip regionname suffix from name.
+ return new Path(new Path(new Path(tableDir, otherRegion),
+ p.getParent().getName()), nameStrippedOfSuffix);
+ }
+
+ /**
+ * Validate the store file name.
+ * @param fileName name of the file to validate
+ * @return <tt>true</tt> if the file could be a valid store file, <tt>false</tt> otherwise
+ */
+ public static boolean validateStoreFileName(final String fileName) {
+ if (HFileLink.isHFileLink(fileName) || isReference(fileName))
+ return(true);
+ return !fileName.contains("-");
+ }
+
+ /**
+ * Return if the specified file is a valid store file or not.
+ * @param fileStatus The {@link FileStatus} of the file
+ * @return <tt>true</tt> if the file is valid
+ */
+ public static boolean isValid(final FileStatus fileStatus)
+ throws IOException {
+ final Path p = fileStatus.getPath();
+
+ if (fileStatus.isDir())
+ return false;
+
+ // Check for empty hfile. Should never be the case but can happen
+ // after data loss in hdfs for whatever reason (upgrade, etc.): HBASE-646
+ // NOTE: that the HFileLink is just a name, so it's an empty file.
+ if (!HFileLink.isHFileLink(p) && fileStatus.getLen() <= 0) {
+ LOG.warn("Skipping " + p + " beccreateStoreDirause its empty. HBASE-646 DATA LOSS?");
+ return false;
+ }
+
+ return validateStoreFileName(p.getName());
+ }
+
+ /**
+ * Helper function to compute the HDFS blocks distribution of a given reference
+ * file. For a reference file we don't compute the exact value; we use an
+ * estimate instead, which should be good enough. We assume the bottom part
+ * takes the first half of the reference file and the top part takes the second
+ * half. This is just an estimate, given that the midkey of the region != the
+ * midkey of the HFile, and the number and size of keys vary.
+ * If this estimate isn't good enough, we can improve it later.
+ * @param fs The FileSystem
+ * @param reference The reference
+ * @param status The reference FileStatus
+ * @return HDFS blocks distribution
+ */
+ private static HDFSBlocksDistribution computeRefFileHDFSBlockDistribution(
+ final FileSystem fs, final Reference reference, final FileStatus status)
+ throws IOException {
+ if (status == null) {
+ return null;
+ }
+
+ long start = 0;
+ long length = 0;
+
+ if (Reference.isTopFileRegion(reference.getFileRegion())) {
+ start = status.getLen()/2;
+ length = status.getLen() - status.getLen()/2;
+ } else {
+ start = 0;
+ length = status.getLen()/2;
+ }
+ return FSUtils.computeHDFSBlocksDistribution(fs, status, start, length);
+ }
+}
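A quick illustration of the static name helpers that moved into StoreFileInfo (the names and paths below are made up to match the formats the regexes describe; they are not taken from a real cluster):

  // Plain hfile names are hex uuids, optionally with a bulk-load _SeqId_ suffix.
  StoreFileInfo.isHFile("0123456789abcdef");                // true
  StoreFileInfo.isHFile("0123456789abcdef_SeqId_20_");      // true

  // Reference names have the form <hfile>.<parentEncodedRegion>.
  StoreFileInfo.isReference("0123456789abcdef.fedcba98");   // true
  StoreFileInfo.validateStoreFileName("0123456789abcdef.fedcba98");  // true

  // getReferredToFile() rebuilds the referred-to file's path from the reference
  // name, assuming the ${hbase.rootdir}/table/region/family layout noted above.
  Path ref = new Path("/hbase/MyTable/fedcba00/cf/0123456789abcdef.fedcba98");
  Path parent = StoreFileInfo.getReferredToFile(ref);
  // parent is /hbase/MyTable/fedcba98/cf/0123456789abcdef
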
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/RestoreSnapshotHelper.java Tue Mar 5 18:25:44 2013
@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.io.HFileL
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.FSVisitor;
@@ -453,7 +453,7 @@ public class RestoreSnapshotHelper {
final String hfileName) throws IOException {
if (HFileLink.isHFileLink(hfileName)) {
HFileLink.createFromHFileLink(conf, fs, familyDir, hfileName);
- } else if (StoreFile.isReference(hfileName)) {
+ } else if (StoreFileInfo.isReference(hfileName)) {
restoreReferenceFile(familyDir, regionInfo, hfileName);
} else {
HFileLink.create(conf, fs, familyDir, regionInfo, hfileName);
@@ -482,7 +482,7 @@ public class RestoreSnapshotHelper {
final String hfileName) throws IOException {
// Extract the referred information (hfile name and parent region)
String tableName = snapshotDesc.getTable();
- Path refPath = StoreFile.getReferredToFile(new Path(new Path(new Path(tableName,
+ Path refPath = StoreFileInfo.getReferredToFile(new Path(new Path(new Path(tableName,
regionInfo.getEncodedName()), familyDir.getName()), hfileName));
String snapshotRegionName = refPath.getParent().getParent().getName();
String fileName = refPath.getName();
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/SnapshotDescriptionUtils.java Tue Mar 5 18:25:44 2013
@@ -206,7 +206,7 @@ public class SnapshotDescriptionUtils {
* Get the directory to store the snapshot instance
* @param snapshotsDir hbase-global directory for storing all snapshots
* @param snapshotName name of the snapshot to take
- * @return
+ * @return the final directory for the completed snapshot
*/
private static final Path getCompletedSnapshotDir(final Path snapshotsDir, String snapshotName) {
return new Path(snapshotsDir, snapshotName);
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/snapshot/TakeSnapshotUtils.java Tue Mar 5 18:25:44 2013
@@ -74,19 +74,6 @@ public class TakeSnapshotUtils {
}
/**
- * Get the home directory for store-level snapshot files.
- * <p>
- * Specific files per store are kept in a similar layout as per the current directory layout.
- * @param regionDir snapshot directory for the parent region, <b>not</b> the standard region
- * directory. See {@link #getRegionSnapshotDirectory}
- * @param family name of the store to snapshot
- * @return path to the snapshot home directory for the store/family
- */
- public static Path getStoreSnapshotDirectory(Path regionDir, String family) {
- return HStore.getStoreHomedir(regionDir, Bytes.toBytes(family));
- }
-
- /**
* Get the snapshot directory for each family to be added to the snapshot
* @param snapshot description of the snapshot being taken
* @param snapshotRegionDir directory in the snapshot where the region directory information
@@ -102,7 +89,7 @@ public class TakeSnapshotUtils {
List<Path> familyDirs = new ArrayList<Path>(families.length);
for (FileStatus family : families) {
// build the reference directory name
- familyDirs.add(getStoreSnapshotDirectory(snapshotRegionDir, family.getPath().getName()));
+ familyDirs.add(new Path(snapshotRegionDir, family.getPath().getName()));
}
return familyDirs;
}
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HBaseFsck.java Tue Mar 5 18:25:44 2013
@@ -87,7 +87,7 @@ import org.apache.hadoop.hbase.master.Ma
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.regionserver.wal.HLogUtil;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.HBaseFsck.ErrorReporter.ERROR_CODE;
@@ -619,7 +619,7 @@ public class HBaseFsck extends Configure
for (Path path: allFiles.values()) {
boolean isReference = false;
try {
- isReference = StoreFile.isReference(path);
+ isReference = StoreFileInfo.isReference(path);
} catch (Throwable t) {
// Ignore. Some files may not be store files at all.
// For example, files under .oldlogs folder in .META.
@@ -628,7 +628,7 @@ public class HBaseFsck extends Configure
}
if (!isReference) continue;
- Path referredToFile = StoreFile.getReferredToFile(path);
+ Path referredToFile = StoreFileInfo.getReferredToFile(path);
if (fs.exists(referredToFile)) continue; // good, expected
// Found a lingering reference file
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/HFileArchiveUtil.java Tue Mar 5 18:25:44 2013
@@ -49,20 +49,22 @@ public class HFileArchiveUtil {
public static Path getStoreArchivePath(final Configuration conf, final String tableName,
final String regionName, final String familyName) throws IOException {
Path tableArchiveDir = getTableArchivePath(conf, tableName);
- return HStore.getStoreHomedir(tableArchiveDir, regionName, familyName);
+ return HStore.getStoreHomedir(tableArchiveDir, regionName, Bytes.toBytes(familyName));
}
/**
* Get the directory to archive a store directory
* @param conf {@link Configuration} to read for the archive directory name
- * @param region parent region information under which the store currently
- * lives
- * @param family name of the family in the store
+ * @param tableName table name under which the store currently lives
+ * @param region parent region information under which the store currently lives
+ * @param familyName name of the family in the store
* @return {@link Path} to the directory to archive the given store or
* <tt>null</tt> if it should not be archived
*/
- public static Path getStoreArchivePath(Configuration conf, HRegion region, byte [] family){
- return getStoreArchivePath(conf, region.getRegionInfo(), region.getTableDir(), family);
+ public static Path getStoreArchivePath(final Configuration conf, final String tableName,
+ final HRegionInfo region, final String familyName) throws IOException {
+ Path tableArchiveDir = getTableArchivePath(conf, tableName);
+ return HStore.getStoreHomedir(tableArchiveDir, region, Bytes.toBytes(familyName));
}
/**
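For illustration only (the table, region and family names below are hypothetical, and the IOException the call may throw is ignored): the new overload resolves the archive location from the table name plus the region's HRegionInfo rather than its encoded-name string:

  Configuration conf = HBaseConfiguration.create();
  HRegionInfo hri = new HRegionInfo(Bytes.toBytes("MyTable"));
  // Family directory under the table's archive path,
  // i.e. <table archive dir>/<encoded region name>/cf
  Path storeArchive = HFileArchiveUtil.getStoreArchivePath(conf, "MyTable", hri, "cf");
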
Modified: hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java (original)
+++ hbase/trunk/hbase-server/src/main/java/org/apache/hadoop/hbase/util/RegionSplitter.java Tue Mar 5 18:25:44 2013
@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.client.HB
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.NoServerForRegionException;
import org.apache.hadoop.hbase.regionserver.HStore;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
@@ -669,11 +669,10 @@ public class RegionSplitter {
HTableDescriptor htd = table.getTableDescriptor();
// check every Column Family for that region
for (HColumnDescriptor c : htd.getFamilies()) {
- Path cfDir = HStore.getStoreHomedir(tableDir, hri.getEncodedName(),
- c.getName());
+ Path cfDir = HStore.getStoreHomedir(tableDir, hri, c.getName());
if (fs.exists(cfDir)) {
for (FileStatus file : fs.listStatus(cfDir)) {
- refFound |= StoreFile.isReference(file.getPath());
+ refFound |= StoreFileInfo.isReference(file.getPath());
if (refFound)
break;
}
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java Tue Mar 5 18:25:44 2013
@@ -699,7 +699,7 @@ public class TestHFileOutputFormat {
// deep inspection: get the StoreFile dir
final Path storePath = HStore.getStoreHomedir(
HTableDescriptor.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
- admin.getTableRegions(TABLE_NAME).get(0).getEncodedName(),
+ admin.getTableRegions(TABLE_NAME).get(0),
FAMILIES[0]);
assertEquals(0, fs.listStatus(storePath).length);
@@ -767,7 +767,7 @@ public class TestHFileOutputFormat {
// deep inspection: get the StoreFile dir
final Path storePath = HStore.getStoreHomedir(
HTableDescriptor.getTableDir(FSUtils.getRootDir(conf), TABLE_NAME),
- admin.getTableRegions(TABLE_NAME).get(0).getEncodedName(),
+ admin.getTableRegions(TABLE_NAME).get(0),
FAMILIES[0]);
assertEquals(0, fs.listStatus(storePath).length);
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestCatalogJanitor.java Tue Mar 5 18:25:44 2013
@@ -354,7 +354,7 @@ public class TestCatalogJanitor {
Path rootdir = services.getMasterFileSystem().getRootDir();
Path tabledir =
HTableDescriptor.getTableDir(rootdir, htd.getName());
- Path storedir = HStore.getStoreHomedir(tabledir, splita.getEncodedName(),
+ Path storedir = HStore.getStoreHomedir(tabledir, splita,
htd.getColumnFamilies()[0].getName());
Reference ref = Reference.createTopReference(Bytes.toBytes("ccc"));
long now = System.currentTimeMillis();
@@ -600,8 +600,7 @@ public class TestCatalogJanitor {
// the single test passes, but when the full suite is run, things get borked).
FSUtils.setRootDir(fs.getConf(), rootdir);
Path tabledir = HTableDescriptor.getTableDir(rootdir, htd.getName());
- Path storedir = HStore.getStoreHomedir(tabledir, parent.getEncodedName(),
- htd.getColumnFamilies()[0].getName());
+ Path storedir = HStore.getStoreHomedir(tabledir, parent, htd.getColumnFamilies()[0].getName());
Path storeArchive = HFileArchiveUtil.getStoreArchivePath(services.getConfiguration(), parent,
tabledir, htd.getColumnFamilies()[0].getName());
LOG.debug("Table dir:" + tabledir);
@@ -682,8 +681,7 @@ public class TestCatalogJanitor {
// the single test passes, but when the full suite is run, things get borked).
FSUtils.setRootDir(fs.getConf(), rootdir);
Path tabledir = HTableDescriptor.getTableDir(rootdir, parent.getTableName());
- Path storedir = HStore.getStoreHomedir(tabledir, parent.getEncodedName(),
- htd.getColumnFamilies()[0].getName());
+ Path storedir = HStore.getStoreHomedir(tabledir, parent, htd.getColumnFamilies()[0].getName());
System.out.println("Old root:" + rootdir);
System.out.println("Old table:" + tabledir);
System.out.println("Old store:" + storedir);
@@ -766,7 +764,7 @@ public class TestCatalogJanitor {
throws IOException {
Path rootdir = services.getMasterFileSystem().getRootDir();
Path tabledir = HTableDescriptor.getTableDir(rootdir, parent.getTableName());
- Path storedir = HStore.getStoreHomedir(tabledir, daughter.getEncodedName(),
+ Path storedir = HStore.getStoreHomedir(tabledir, daughter,
htd.getColumnFamilies()[0].getName());
Reference ref =
top? Reference.createTopReference(midkey): Reference.createBottomReference(midkey);
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/master/cleaner/TestHFileLinkCleaner.java Tue Mar 5 18:25:44 2013
@@ -70,9 +70,9 @@ public class TestHFileLinkCleaner {
Path archiveDir = HFileArchiveUtil.getArchivePath(conf);
Path archiveStoreDir = HFileArchiveUtil.getStoreArchivePath(conf,
- tableName, hri.getEncodedName(), familyName);
+ tableName, hri, familyName);
Path archiveLinkStoreDir = HFileArchiveUtil.getStoreArchivePath(conf,
- tableLinkName, hriLink.getEncodedName(), familyName);
+ tableLinkName, hriLink, familyName);
// Create hfile /hbase/table-link/region/cf/getEncodedName.HFILE(conf);
Path familyPath = getFamilyDirPath(archiveDir, tableName, hri.getEncodedName(), familyName);
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/HFileReadWriteTest.java Tue Mar 5 18:25:44 2013
@@ -344,20 +344,10 @@ public class HFileReadWriteTest {
columnDescriptor.setDataBlockEncoding(dataBlockEncoding);
HRegionInfo regionInfo = new HRegionInfo();
HTableDescriptor htd = new HTableDescriptor(TABLE_NAME);
- HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd,
- null);
- HStore store = new HStore(outputDir, region, columnDescriptor, fs, conf);
-
- StoreFile.Writer writer = new StoreFile.WriterBuilder(conf,
- new CacheConfig(conf), fs, blockSize)
- .withOutputDir(outputDir)
- .withCompression(compression)
- .withDataBlockEncoder(dataBlockEncoder)
- .withBloomType(bloomType)
- .withMaxKeyCount(maxKeyCount)
- .withChecksumType(HFile.DEFAULT_CHECKSUM_TYPE)
- .withBytesPerChecksum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
- .build();
+ HRegion region = new HRegion(outputDir, null, fs, conf, regionInfo, htd, null);
+ HStore store = new HStore(region, columnDescriptor, conf);
+
+ StoreFile.Writer writer = store.createWriterInTmp(maxKeyCount, compression, false);
StatisticsPrinter statsPrinter = new StatisticsPrinter();
statsPrinter.startThread();
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCacheOnWriteInSchema.java Tue Mar 5 18:25:44 2013
@@ -165,7 +165,7 @@ public class TestCacheOnWriteInSchema {
hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
region = new HRegion(basedir, hlog, fs, conf, info, htd, null);
- store = new HStore(basedir, region, hcd, fs, conf);
+ store = new HStore(region, hcd, conf);
}
@After
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompaction.java Tue Mar 5 18:25:44 2013
@@ -525,7 +525,7 @@ public class TestCompaction extends HBas
assertEquals(compactionThreshold, s.getStorefilesCount());
assertTrue(s.getStorefilesSize() > 15*1000);
// and no new store files persisted past compactStores()
- FileStatus[] ls = FileSystem.get(conf).listStatus(r.getTmpDir());
+ FileStatus[] ls = r.getFilesystem().listStatus(r.getRegionFileSystem().getTempDir());
assertEquals(0, ls.length);
} finally {
@@ -605,16 +605,14 @@ public class TestCompaction extends HBas
List<Path> newFiles = tool.compactForTesting(storeFiles, false);
// Now lets corrupt the compacted file.
- FileSystem fs = FileSystem.get(conf);
+ FileSystem fs = store.getFileSystem();
// default compaction policy created one and only one new compacted file
- Path origPath = newFiles.get(0);
- Path homedir = store.getHomedir();
- Path dstPath = new Path(homedir, origPath.getName());
- FSDataOutputStream stream = fs.create(origPath, null, true, 512, (short) 3,
- (long) 1024,
- null);
+ Path dstPath = store.getRegionFileSystem().createTempName();
+ FSDataOutputStream stream = fs.create(dstPath, null, true, 512, (short)3, (long)1024, null);
stream.writeChars("CORRUPT FILE!!!!");
stream.close();
+ Path origPath = store.getRegionFileSystem().commitStoreFile(
+ Bytes.toString(COLUMN_FAMILY), dstPath);
try {
((HStore)store).moveFileIntoPlace(origPath);
@@ -629,7 +627,7 @@ public class TestCompaction extends HBas
fail("testCompactionWithCorruptResult failed since no exception was" +
"thrown while completing a corrupt file");
}
-
+
/**
* Test for HBASE-5920 - Test user requested major compactions always occurring
*/
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestDefaultCompactSelection.java Tue Mar 5 18:25:44 2013
@@ -93,17 +93,16 @@ public class TestDefaultCompactSelection
htd.addFamily(hcd);
HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
- hlog = HLogFactory.createHLog(fs, basedir,
- logName, conf);
+ hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
region = HRegion.createHRegion(info, basedir, conf, htd);
HRegion.closeHRegion(region);
Path tableDir = new Path(basedir, Bytes.toString(htd.getName()));
region = new HRegion(tableDir, hlog, fs, conf, info, htd, null);
- store = new HStore(basedir, region, hcd, fs, conf);
+ store = new HStore(region, hcd, conf);
- TEST_FILE = StoreFile.getRandomFilename(fs, store.getHomedir());
- fs.create(TEST_FILE);
+ TEST_FILE = region.getRegionFileSystem().createTempName();
+ fs.createNewFile(TEST_FILE);
}
@After
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java Tue Mar 5 18:25:44 2013
@@ -3426,8 +3426,7 @@ public class TestHRegion extends HBaseTe
// static method is used by load balancer or other components
HDFSBlocksDistribution blocksDistribution2 =
HRegion.computeHDFSBlocksDistribution(htu.getConfiguration(),
- firstRegion.getTableDesc(),
- firstRegion.getRegionInfo().getEncodedName());
+ firstRegion.getTableDesc(), firstRegion.getRegionInfo());
long uniqueBlocksWeight2 =
blocksDistribution2.getUniqueBlocksTotalWeight();
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegionFileSystem.java Tue Mar 5 18:25:44 2013
@@ -24,6 +24,7 @@ import static org.junit.Assert.assertFal
import static org.junit.Assert.assertTrue;
import java.io.IOException;
+import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -71,4 +72,34 @@ public class TestHRegionFileSystem {
fs.delete(rootDir, true);
}
+
+ @Test
+ public void testTempAndCommit() throws IOException {
+ Path rootDir = TEST_UTIL.getDataTestDirOnTestFS("testTempAndCommit");
+ FileSystem fs = TEST_UTIL.getTestFileSystem();
+ Configuration conf = TEST_UTIL.getConfiguration();
+
+ // Create a Region
+ String familyName = "cf";
+ HRegionInfo hri = new HRegionInfo(Bytes.toBytes("TestTable"));
+ HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs, rootDir, hri);
+
+ // New region, no store files
+ Collection<StoreFileInfo> storeFiles = regionFs.getStoreFiles(familyName);
+ assertEquals(0, storeFiles != null ? storeFiles.size() : 0);
+
+ // Create a new file in temp (no files in the family)
+ Path buildPath = regionFs.createTempName();
+ fs.createNewFile(buildPath);
+ storeFiles = regionFs.getStoreFiles(familyName);
+ assertEquals(0, storeFiles != null ? storeFiles.size() : 0);
+
+ // commit the file
+ Path dstPath = regionFs.commitStoreFile(familyName, buildPath);
+ storeFiles = regionFs.getStoreFiles(familyName);
+ assertEquals(0, storeFiles != null ? storeFiles.size() : 0);
+ assertFalse(fs.exists(buildPath));
+
+ fs.delete(rootDir, true);
+ }
}
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Tue Mar 5 18:25:44 2013
@@ -56,6 +56,7 @@ import org.apache.hadoop.hbase.io.encodi
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.monitoring.MonitoredTask;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionContext;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
@@ -158,7 +159,7 @@ public class TestStore extends TestCase
HLog hlog = HLogFactory.createHLog(fs, basedir, logName, conf);
HRegion region = new HRegion(basedir, hlog, fs, conf, info, htd, null);
- store = new HStore(basedir, region, hcd, fs, conf);
+ store = new HStore(region, hcd, conf);
}
/**
@@ -323,10 +324,7 @@ public class TestStore extends TestCase
w.close();
this.store.close();
// Reopen it... should pick up two files
- this.store = new HStore(storedir.getParent().getParent(),
- this.store.getHRegion(),
- this.store.getFamily(), fs, c);
- System.out.println(this.store.getRegionInfo().getEncodedName());
+ this.store = new HStore(this.store.getHRegion(), this.store.getFamily(), c);
assertEquals(2, this.store.getStorefilesCount());
result = HBaseTestingUtility.getFromStoreFile(store,
@@ -651,10 +649,10 @@ public class TestStore extends TestCase
store.add(new KeyValue(row, family, qf3, 1, (byte[])null));
LOG.info("Before flush, we should have no files");
- FileStatus[] files = fs.listStatus(store.getHomedir());
- Path[] paths = FileUtil.stat2Paths(files);
- System.err.println("Got paths: " + Joiner.on(",").join(paths));
- assertEquals(0, paths.length);
+
+ Collection<StoreFileInfo> files =
+ store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
+ assertEquals(0, files != null ? files.size() : 0);
//flush
try {
@@ -666,10 +664,8 @@ public class TestStore extends TestCase
}
LOG.info("After failed flush, we should still have no files!");
- files = fs.listStatus(store.getHomedir());
- paths = FileUtil.stat2Paths(files);
- System.err.println("Got paths: " + Joiner.on(",").join(paths));
- assertEquals(0, paths.length);
+ files = store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
+ assertEquals(0, files != null ? files.size() : 0);
return null;
}
});
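
The pattern TestStore is moved to can be summarized in a small sketch: the store is constructed from just (region, family, conf), and on-disk files are listed through the region's file-system abstraction instead of raw FileSystem/Path calls. The helper class below is illustrative and assumes an already-initialized HRegion, e.g. one built as in the test's init code.

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;
    import java.util.Collection;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HColumnDescriptor;

    public class StoreFileCountSketch {
      /** Counts the store files of one family, using only the accessors from the diff. */
      static int countStoreFiles(HRegion region, HColumnDescriptor family, Configuration conf)
          throws IOException {
        HStore store = new HStore(region, family, conf);   // new three-argument constructor
        Collection<StoreFileInfo> files =
            store.getRegionFileSystem().getStoreFiles(store.getColumnFamilyName());
        return files != null ? files.size() : 0;           // null means no family dir yet
      }
    }
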
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFile.java Tue Mar 5 18:25:44 2013
@@ -198,9 +198,10 @@ public class TestStoreFile extends HBase
HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
// Try to open store file from link
- StoreFile hsf = new StoreFile(this.fs, linkFilePath, testConf, cacheConf,
+ StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath);
+ StoreFile hsf = new StoreFile(this.fs, storeFileInfo, testConf, cacheConf,
BloomType.NONE, NoOpDataBlockEncoder.INSTANCE);
- assertTrue(hsf.isLink());
+ assertTrue(storeFileInfo.isLink());
// Now confirm that I can read from the link
int count = 1;
@@ -213,30 +214,6 @@ public class TestStoreFile extends HBase
}
/**
- * Validate that we can handle valid tables with '.', '_', and '-' chars.
- */
- public void testStoreFileNames() {
- String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
- "MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",
- "MyTable_02=abc012-def345_SeqId_1_", "MyTable_02=abc012-def345_SeqId_20_" };
- for (String name: legalHFileLink) {
- assertTrue("should be a valid link: " + name, HFileLink.isHFileLink(name));
- assertTrue("should be a valid StoreFile" + name, StoreFile.validateStoreFileName(name));
- assertFalse("should not be a valid reference: " + name, StoreFile.isReference(name));
-
- String refName = name + ".6789";
- assertTrue("should be a valid link reference: " + refName, StoreFile.isReference(refName));
- assertTrue("should be a valid StoreFile" + refName, StoreFile.validateStoreFileName(refName));
- }
-
- String[] illegalHFileLink = { ".MyTable_02=abc012-def345", "-MyTable_02.300=abc012-def345",
- "MyTable_02-400=abc0_12-def345", "MyTable_02-400.200=abc012-def345...." };
- for (String name: illegalHFileLink) {
- assertFalse("should not be a valid link: " + name, HFileLink.isHFileLink(name));
- }
- }
-
- /**
* This test creates an hfile and then the dir structures and files to verify that references
* to hfilelinks (created by snapshot clones) can be properly interpreted.
*/
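
A sketch of the StoreFileInfo-based construction used above. It is assumed to live in the org.apache.hadoop.hbase.regionserver package so that StoreFile, StoreFileInfo and BloomType resolve without extra imports; the bloom type and block encoder are simply the values the test passes.

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;

    public class OpenStoreFileSketch {
      static StoreFile open(FileSystem fs, Path path, Configuration conf, CacheConfig cacheConf)
          throws IOException {
        // Interpreting the path (plain hfile, reference, or HFileLink) is now StoreFileInfo's job.
        StoreFileInfo info = new StoreFileInfo(conf, fs, path);
        boolean isLink = info.isLink();   // true for names built by HFileLink.createHFileLinkName()
        // The StoreFile itself is built from the descriptor rather than from a raw Path.
        return new StoreFile(fs, info, conf, cacheConf, BloomType.NONE,
            NoOpDataBlockEncoder.INSTANCE);
      }
    }
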
Added: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java?rev=1452936&view=auto
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java (added)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStoreFileInfo.java Tue Mar 5 18:25:44 2013
@@ -0,0 +1,96 @@
+/**
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeSet;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.SmallTests;
+import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.io.HFileLink;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
+import org.apache.hadoop.hbase.io.hfile.BlockCache;
+import org.apache.hadoop.hbase.io.hfile.CacheConfig;
+import org.apache.hadoop.hbase.io.hfile.CacheStats;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
+import org.apache.hadoop.hbase.io.hfile.HFileScanner;
+import org.apache.hadoop.hbase.io.hfile.NoOpDataBlockEncoder;
+import org.apache.hadoop.hbase.util.BloomFilterFactory;
+import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ChecksumType;
+import org.apache.hadoop.hbase.util.FSUtils;
+import org.junit.experimental.categories.Category;
+import org.mockito.Mockito;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+
+/**
+ * Test StoreFileInfo
+ */
+@Category(SmallTests.class)
+public class TestStoreFileInfo extends HBaseTestCase {
+ private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
+
+ /**
+ * Validate that we can handle valid tables with '.', '_', and '-' chars.
+ */
+ public void testStoreFileNames() {
+ String[] legalHFileLink = { "MyTable_02=abc012-def345", "MyTable_02.300=abc012-def345",
+ "MyTable_02-400=abc012-def345", "MyTable_02-400.200=abc012-def345",
+ "MyTable_02=abc012-def345_SeqId_1_", "MyTable_02=abc012-def345_SeqId_20_" };
+ for (String name: legalHFileLink) {
+ assertTrue("should be a valid link: " + name, HFileLink.isHFileLink(name));
+ assertTrue("should be a valid StoreFile" + name, StoreFileInfo.validateStoreFileName(name));
+ assertFalse("should not be a valid reference: " + name, StoreFileInfo.isReference(name));
+
+ String refName = name + ".6789";
+ assertTrue("should be a valid link reference: " + refName,
+ StoreFileInfo.isReference(refName));
+ assertTrue("should be a valid StoreFile" + refName,
+ StoreFileInfo.validateStoreFileName(refName));
+ }
+
+ String[] illegalHFileLink = { ".MyTable_02=abc012-def345", "-MyTable_02.300=abc012-def345",
+ "MyTable_02-400=abc0_12-def345", "MyTable_02-400.200=abc012-def345...." };
+ for (String name: illegalHFileLink) {
+ assertFalse("should not be a valid link: " + name, HFileLink.isHFileLink(name));
+ }
+ }
+}
+
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestWALReplay.java Tue Mar 5 18:25:44 2013
@@ -574,9 +574,8 @@ public class TestWALReplay {
HRegion region = new HRegion(basedir, wal, this.fs, this.conf, hri, htd,
rsServices) {
@Override
- protected HStore instantiateHStore(Path tableDir, HColumnDescriptor c)
- throws IOException {
- return new HStore(tableDir, this, c, fs, conf) {
+ protected HStore instantiateHStore(final HColumnDescriptor family) throws IOException {
+ return new HStore(this, family, conf) {
@Override
protected Path flushCache(final long logCacheFlushId,
SortedSet<KeyValue> snapshot,
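
The reworked instantiateHStore hook, shown outside the anonymous-class form used by the test: the override now receives only the column family and passes the region itself to the new HStore constructor. The subclass and field names below are illustrative; the HRegion constructor arguments are the ones visible in this and the TestStore hunks.

    package org.apache.hadoop.hbase.regionserver;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.regionserver.wal.HLog;

    public class CustomStoreRegion extends HRegion {
      private final Configuration storeConf;

      public CustomStoreRegion(Path basedir, HLog wal, FileSystem fs, Configuration conf,
          HRegionInfo info, HTableDescriptor htd, RegionServerServices rsServices) {
        super(basedir, wal, fs, conf, info, htd, rsServices);
        this.storeConf = conf;
      }

      @Override
      protected HStore instantiateHStore(final HColumnDescriptor family) throws IOException {
        // No table dir or FileSystem parameter any more: the region supplies both.
        return new HStore(this, family, storeConf);
      }
    }
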
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/TestRestoreSnapshotHelper.java Tue Mar 5 18:25:44 2013
@@ -47,7 +47,7 @@ import org.apache.hadoop.hbase.errorhand
import org.apache.hadoop.hbase.io.HFileLink;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescription;
import org.apache.hadoop.hbase.regionserver.HRegion;
-import org.apache.hadoop.hbase.regionserver.StoreFile;
+import org.apache.hadoop.hbase.regionserver.StoreFileInfo;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hbase.util.FSUtils;
@@ -110,7 +110,7 @@ public class TestRestoreSnapshotHelper {
String[] files = getHFiles(HTableDescriptor.getTableDir(rootDir, htdClone.getName()));
assertEquals(2, files.length);
assertTrue(files[0] + " should be a HFileLink", HFileLink.isHFileLink(files[0]));
- assertTrue(files[1] + " should be a Referene", StoreFile.isReference(files[1]));
+ assertTrue(files[1] + " should be a Reference", StoreFileInfo.isReference(files[1]));
assertEquals(sourceHtd.getNameAsString(), HFileLink.getReferencedTableName(files[0]));
assertEquals(TEST_HFILE, HFileLink.getReferencedHFileName(files[0]));
Path refPath = getReferredToFile(files[1]);
@@ -182,7 +182,7 @@ public class TestRestoreSnapshotHelper {
private Path getReferredToFile(final String referenceName) {
Path fakeBasePath = new Path(new Path("table", "region"), "cf");
- return StoreFile.getReferredToFile(new Path(fakeBasePath, referenceName));
+ return StoreFileInfo.getReferredToFile(new Path(fakeBasePath, referenceName));
}
private String[] getHFiles(final Path tableDir) throws IOException {
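
A sketch of the renamed helpers: reference detection and reference-target resolution now live on StoreFileInfo. The file name and the table/region/cf base path below are illustrative, mirroring the fakeBasePath trick in the hunk above.

    package org.apache.hadoop.hbase.regionserver;

    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.io.HFileLink;

    public class ReferenceNameSketch {
      public static void main(String[] args) {
        // A split reference is named "<hfile name>.<parent region encoded name>";
        // the name below is purely illustrative.
        String name = "abc012def345.6789";
        System.out.println("link?      " + HFileLink.isHFileLink(name));
        System.out.println("reference? " + StoreFileInfo.isReference(name));

        // Resolving which file a reference points to also moved to StoreFileInfo.
        Path refPath = new Path(new Path(new Path("table", "region"), "cf"), name);
        System.out.println("refers to: " + StoreFileInfo.getReferredToFile(refPath));
      }
    }
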
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java Tue Mar 5 18:25:44 2013
@@ -32,6 +32,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Store;
@@ -222,8 +223,11 @@ public class HFileArchiveTestingUtil {
* @param store store that is archiving files
* @return {@link Path} to the store archive directory for the given region
*/
- public static Path getStoreArchivePath(Configuration conf, HRegion region, Store store) {
- return HFileArchiveUtil.getStoreArchivePath(conf, region, store.getFamily().getName());
+ public static Path getStoreArchivePath(Configuration conf, HRegion region, Store store)
+ throws IOException {
+ HRegionInfo hri = region.getRegionInfo();
+ return HFileArchiveUtil.getStoreArchivePath(conf, hri.getTableNameAsString(), hri,
+ store.getFamily().getNameAsString());
}
public static Path getStoreArchivePath(HBaseTestingUtility util, String tableName,
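
The signature change exercised above, pulled out as a sketch: the archive path is now derived from the table name, HRegionInfo and family name rather than from a live HRegion plus family bytes. The wrapper class and method names are illustrative; it is placed in org.apache.hadoop.hbase.util so HFileArchiveUtil resolves without an import.

    package org.apache.hadoop.hbase.util;

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HRegionInfo;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.Store;

    public class ArchivePathSketch {
      static Path storeArchiveDir(Configuration conf, HRegion region, Store store)
          throws IOException {
        // Everything the util needs can be derived from the region info and the store.
        HRegionInfo hri = region.getRegionInfo();
        return HFileArchiveUtil.getStoreArchivePath(conf, hri.getTableNameAsString(), hri,
            store.getFamily().getNameAsString());
      }
    }
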
Modified: hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java
URL: http://svn.apache.org/viewvc/hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java?rev=1452936&r1=1452935&r2=1452936&view=diff
==============================================================================
--- hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java (original)
+++ hbase/trunk/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestHFileArchiveUtil.java Tue Mar 5 18:25:44 2013
@@ -65,14 +65,5 @@ public class TestHFileArchiveUtil {
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family));
conf = new Configuration();
assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, region, tabledir, family));
-
- // do a little mocking of a region to get the same results
- HRegion mockRegion = Mockito.mock(HRegion.class);
- Mockito.when(mockRegion.getRegionInfo()).thenReturn(region);
- Mockito.when(mockRegion.getTableDir()).thenReturn(tabledir);
-
- assertNotNull(HFileArchiveUtil.getStoreArchivePath(null, mockRegion, family));
- conf = new Configuration();
- assertNotNull(HFileArchiveUtil.getStoreArchivePath(conf, mockRegion, family));
}
}