You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by ns...@apache.org on 2011/10/11 04:03:23 UTC
svn commit: r1181376 - in /hbase/branches/0.89/src:
main/java/org/apache/hadoop/hbase/io/hfile/
main/java/org/apache/hadoop/hbase/mapreduce/
main/java/org/apache/hadoop/hbase/regionserver/
main/java/org/apache/hadoop/hbase/util/ test/java/org/apache/ha...
Author: nspiegelberg
Date: Tue Oct 11 02:03:20 2011
New Revision: 1181376
URL: http://svn.apache.org/viewvc?rev=1181376&view=rev
Log:
Allow HFile's bytes per checksum to be configured
Summary:
Make the bytes per checksum configurable for HFile.
Test Plan:
Manual test
DiffCamp Revision: 156412
Reviewed By: kannan
Commenters: jgray
CC: jgray, hkuang, kannan, hbase-hdfs@lists
Tasks:
#246581: Experiment with alternate settings for io.bytes.per.checksum for
HFiles (HBASE-2478)
Revert Plan:
OK
Modified:
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
Modified: hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java (original)
+++ hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/io/hfile/HFile.java Tue Oct 11 02:03:20 2011
@@ -45,6 +45,7 @@ import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.KeyValue.KeyComparator;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo;
@@ -153,6 +154,11 @@ public class HFile {
public final static int DEFAULT_BLOCKSIZE = 64 * 1024;
/**
+ * Default bytes per checksum for hfile
+ */
+ public final static int DEFAULT_BYTES_PER_CHECKSUM = 512;
+
+ /**
* Default compression: none.
*/
public final static Compression.Algorithm DEFAULT_COMPRESSION_ALGORITHM =
@@ -192,6 +198,23 @@ public class HFile {
}
/**
+ * Get the configured bytes per checksum for HFile;
+ * if not configured, return the default value.
+ * @param hconf hbase configuration
+ * @param fsconf dfs configuration
+ * @return bytes per checksum for HFile
+ */
+ public static int getBytesPerChecksum(Configuration hconf,
+ Configuration fsconf) {
+ int bytesPerChecksum = HFile.DEFAULT_BYTES_PER_CHECKSUM;
+ if (hconf != null) {
+ bytesPerChecksum = hconf.getInt("hfile.io.bytes.per.checksum",
+ fsconf.getInt("io.bytes.per.checksum",
+ HFile.DEFAULT_BYTES_PER_CHECKSUM));
+ }
+ return bytesPerChecksum;
+ }
+ /**
* HFile Writer.
*/
public static class Writer implements Closeable {
@@ -265,7 +288,8 @@ public class HFile {
*/
public Writer(FileSystem fs, Path path)
throws IOException {
- this(fs, path, DEFAULT_BLOCKSIZE, (Compression.Algorithm) null, null);
+ this(fs, path, DEFAULT_BLOCKSIZE, DEFAULT_BYTES_PER_CHECKSUM,
+ (Compression.Algorithm) null, null);
}
/**
@@ -273,15 +297,16 @@ public class HFile {
* @param fs
* @param path
* @param blocksize
+ * @param bytesPerChecksum
* @param compress
* @param comparator
* @throws IOException
* @throws IOException
*/
- public Writer(FileSystem fs, Path path, int blocksize,
+ public Writer(FileSystem fs, Path path, int blocksize, int bytesPerChecksum,
String compress, final KeyComparator comparator)
throws IOException {
- this(fs, path, blocksize,
+ this(fs, path, blocksize, bytesPerChecksum,
compress == null? DEFAULT_COMPRESSION_ALGORITHM:
Compression.getCompressionAlgorithmByName(compress),
comparator);
@@ -292,15 +317,24 @@ public class HFile {
* @param fs
* @param path
* @param blocksize
+ * @param bytesPerChecksum
* @param compress
* @param comparator
* @throws IOException
*/
- public Writer(FileSystem fs, Path path, int blocksize,
+ public Writer(FileSystem fs, Path path, int blocksize, int bytesPerChecksum,
Compression.Algorithm compress,
final KeyComparator comparator)
throws IOException {
- this(fs.create(path), blocksize, compress, comparator);
+ this(fs.create(path,
+ FsPermission.getDefault(),
+ true,
+ fs.getConf().getInt("io.file.buffer.size", 4096),
+ fs.getDefaultReplication(),
+ fs.getDefaultBlockSize(),
+ bytesPerChecksum,
+ null),
+ blocksize, compress, comparator);
this.closeOutputStream = true;
this.name = path.toString();
this.path = path;
Modified: hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java (original)
+++ hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat.java Tue Oct 11 02:03:20 2011
@@ -79,6 +79,7 @@ public class HFileOutputFormat extends F
// Invented config. Add to hbase-*.xml if other than default compression.
final String compression = conf.get("hfile.compression",
Compression.Algorithm.NONE.getName());
+ final int bytesPerChecksum = HFile.getBytesPerChecksum(conf, conf);
return new RecordWriter<ImmutableBytesWritable, KeyValue>() {
// Map of families to writers and how much has been output on the writer.
@@ -127,7 +128,7 @@ public class HFileOutputFormat extends F
throws IOException {
close(writer);
return new HFile.Writer(fs, StoreFile.getUniqueFile(fs, familydir),
- blocksize, compression, KeyValue.KEY_COMPARATOR);
+ blocksize, bytesPerChecksum, compression, KeyValue.KEY_COMPARATOR);
}
private void close(final HFile.Writer w) throws IOException {
Modified: hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java (original)
+++ hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java Tue Oct 11 02:03:20 2011
@@ -686,7 +686,10 @@ public class StoreFile {
Compression.Algorithm compress, final Configuration conf,
final KVComparator comparator, BloomType bloomType, int maxKeys)
throws IOException {
- writer = new HFile.Writer(fs, path, blocksize, compress, comparator.getRawComparator());
+
+ writer = new HFile.Writer(
+ fs, path, blocksize, HFile.getBytesPerChecksum(conf, fs.getConf()),
+ compress, comparator.getRawComparator());
this.kvComparator = comparator;
Modified: hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java (original)
+++ hbase/branches/0.89/src/main/java/org/apache/hadoop/hbase/util/CompressionTest.java Tue Oct 11 02:03:20 2011
@@ -65,7 +65,7 @@ public class CompressionTest {
DistributedFileSystem dfs = openConnection(args[0]);
dfs.delete(path, false);
HFile.Writer writer = new HFile.Writer(dfs, path,
- HFile.DEFAULT_BLOCKSIZE, args[1], null);
+ HFile.DEFAULT_BLOCKSIZE, HFile.DEFAULT_BYTES_PER_CHECKSUM, args[1], null);
writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
writer.close();
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/HFilePerformanceEvaluation.java Tue Oct 11 02:03:20 2011
@@ -188,7 +188,8 @@ public class HFilePerformanceEvaluation
@Override
void setUp() throws Exception {
- writer = new HFile.Writer(this.fs, this.mf, RFILE_BLOCKSIZE, (Compression.Algorithm) null, null);
+ writer = new HFile.Writer(this.fs, this.mf, RFILE_BLOCKSIZE,
+ HFile.DEFAULT_BYTES_PER_CHECKSUM, (Compression.Algorithm) null, null);
}
@Override
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/mapreduce/TestLoadIncrementalHFiles.java Tue Oct 11 02:03:20 2011
@@ -173,7 +173,8 @@ public class TestLoadIncrementalHFiles {
byte[] family, byte[] qualifier,
byte[] startKey, byte[] endKey, int numRows) throws IOException
{
- HFile.Writer writer = new HFile.Writer(fs, path, BLOCKSIZE, COMPRESSION,
+ HFile.Writer writer = new HFile.Writer(fs, path, BLOCKSIZE,
+ HFile.DEFAULT_BYTES_PER_CHECKSUM, COMPRESSION,
KeyValue.KEY_COMPARATOR);
long now = System.currentTimeMillis();
try {
Modified: hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java?rev=1181376&r1=1181375&r2=1181376&view=diff
==============================================================================
--- hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java (original)
+++ hbase/branches/0.89/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java Tue Oct 11 02:03:20 2011
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -56,6 +57,7 @@ import org.apache.hadoop.hbase.client.Sc
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.security.UnixUserGroupInformation;
+import org.apache.hadoop.util.Progressable;
import com.google.common.base.Joiner;
@@ -396,8 +398,17 @@ public class TestStore extends TestCase
}
@Override
- public FSDataOutputStream create(Path p) throws IOException {
- return new FaultyOutputStream(super.create(p), faultPos);
+ public FSDataOutputStream create(Path p,
+ FsPermission permission,
+ boolean overwrite,
+ int bufferSize,
+ short replication,
+ long blockSize,
+ int bytesPerChecksum,
+ Progressable progress) throws IOException {
+ return new FaultyOutputStream(super.create(p,
+ permission, overwrite, bufferSize, replication,
+ blockSize, bytesPerChecksum, progress), faultPos);
}
}