You are viewing a plain text version of this content. (The canonical hyperlink was lost in the plain-text conversion of this archive.)
Posted to commits@hbase.apache.org by st...@apache.org on 2015/05/11 07:49:00 UTC
hbase git commit: Revert "Deprecate old methods" — reverts the commit for
HBASE-13655 (Deprecate duplicate getCompression methods in HColumnDescriptor),
which I committed with a bad commit message.
Repository: hbase
Updated Branches:
refs/heads/master 2ad411414 -> 17b6f59a9
Revert "Deprecate old methods"
Revert commit of HBASE-13655 Deprecate duplicate getCompression methods in HColumnDescriptor
I committed with bad commit message.
This reverts commit 5732bdb483d6c6882f26a33a247f0544329745eb.
Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/17b6f59a
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/17b6f59a
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/17b6f59a
Branch: refs/heads/master
Commit: 17b6f59a98b3851be64bee07b5fb3624ef2eb210
Parents: 2ad4114
Author: stack <st...@apache.org>
Authored: Sun May 10 22:48:09 2015 -0700
Committer: stack <st...@apache.org>
Committed: Sun May 10 22:48:09 2015 -0700
----------------------------------------------------------------------
.../apache/hadoop/hbase/HColumnDescriptor.java | 41 +++++++-------------
.../hbase/mapreduce/HFileOutputFormat2.java | 2 +-
.../hbase/mapreduce/LoadIncrementalHFiles.java | 2 +-
.../org/apache/hadoop/hbase/master/HMaster.java | 4 +-
.../hbase/regionserver/DefaultStoreFlusher.java | 2 +-
.../hadoop/hbase/regionserver/HRegion.java | 4 +-
.../hbase/regionserver/StripeStoreFlusher.java | 2 +-
.../regionserver/compactions/Compactor.java | 2 +-
.../compactions/StripeCompactor.java | 2 +-
.../hbase/mapreduce/TestHFileOutputFormat.java | 2 +-
.../hbase/mapreduce/TestHFileOutputFormat2.java | 2 +-
.../hadoop/hbase/regionserver/TestStore.java | 2 +-
.../hadoop/hbase/thrift/ThriftUtilities.java | 2 +-
13 files changed, 29 insertions(+), 40 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
----------------------------------------------------------------------
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
index 4091c11..6cbe804 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/HColumnDescriptor.java
@@ -453,26 +453,23 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
return this;
}
- /**
- * @return compression type being used for the column family
- * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
- * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
- * Use {@link #getCompressionType()}.
- */
- @Deprecated
+ /** @return compression type being used for the column family */
public Compression.Algorithm getCompression() {
- return getCompressionType();
+ String n = getValue(COMPRESSION);
+ if (n == null) {
+ return Compression.Algorithm.NONE;
+ }
+ return Compression.Algorithm.valueOf(n.toUpperCase());
}
- /**
- * @return compression type being used for the column family for major compaction
- * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
- * (<a href="https://issues.apache.org/jira/browse/HBASE-13655">HBASE-13655</a>).
- * Use {@link #getCompactionCompressionType()}.
- */
- @Deprecated
+ /** @return compression type being used for the column family for major
+ compression */
public Compression.Algorithm getCompactionCompression() {
- return getCompactionCompressionType();
+ String n = getValue(COMPRESSION_COMPACT);
+ if (n == null) {
+ return getCompression();
+ }
+ return Compression.Algorithm.valueOf(n.toUpperCase());
}
/** @return maximum number of versions */
@@ -532,11 +529,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* @return Compression type setting.
*/
public Compression.Algorithm getCompressionType() {
- String n = getValue(COMPRESSION);
- if (n == null) {
- return Compression.Algorithm.NONE;
- }
- return Compression.Algorithm.valueOf(n.toUpperCase());
+ return getCompression();
}
/**
@@ -606,11 +599,7 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
* @return Compression type setting.
*/
public Compression.Algorithm getCompactionCompressionType() {
- String n = getValue(COMPRESSION_COMPACT);
- if (n == null) {
- return getCompressionType();
- }
- return Compression.Algorithm.valueOf(n.toUpperCase());
+ return getCompactionCompression();
}
/**
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 015cca0..4e94308 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -627,7 +627,7 @@ public class HFileOutputFormat2
familyDescriptor.getNameAsString(), "UTF-8"));
compressionConfigValue.append('=');
compressionConfigValue.append(URLEncoder.encode(
- familyDescriptor.getCompressionType().getName(), "UTF-8"));
+ familyDescriptor.getCompression().getName(), "UTF-8"));
}
// Get rid of the last ampersand
conf.set(COMPRESSION_FAMILIES_CONF_KEY, compressionConfigValue.toString());
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
index 40601d6..d9829a7 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/LoadIncrementalHFiles.java
@@ -805,7 +805,7 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
Map<byte[], byte[]> fileInfo = halfReader.loadFileInfo();
int blocksize = familyDescriptor.getBlocksize();
- Algorithm compression = familyDescriptor.getCompressionType();
+ Algorithm compression = familyDescriptor.getCompression();
BloomType bloomFilterType = familyDescriptor.getBloomFilterType();
HFileContext hFileContext = new HFileContextBuilder()
.withCompression(compression)
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
index a39b3ea..995b979 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/master/HMaster.java
@@ -1561,8 +1561,8 @@ public class HMaster extends HRegionServer implements MasterServices, Server {
private void checkCompression(final HColumnDescriptor hcd)
throws IOException {
if (!this.masterCheckCompression) return;
- CompressionTest.testCompression(hcd.getCompressionType());
- CompressionTest.testCompression(hcd.getCompactionCompressionType());
+ CompressionTest.testCompression(hcd.getCompression());
+ CompressionTest.testCompression(hcd.getCompactionCompression());
}
private void checkEncryption(final Configuration conf, final HTableDescriptor htd)
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
index e68d267..73b8cb9 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/DefaultStoreFlusher.java
@@ -64,7 +64,7 @@ public class DefaultStoreFlusher extends StoreFlusher {
status.setStatus("Flushing " + store + ": creating writer");
// Write the map out to the disk
writer = store.createWriterInTmp(
- cellsCount, store.getFamily().getCompressionType(), false, true, true);
+ cellsCount, store.getFamily().getCompression(), false, true, true);
writer.setTimeRangeTracker(snapshot.getTimeRangeTracker());
IOException e = null;
try {
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cf20e62..cf48619 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -6083,8 +6083,8 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
private void checkCompressionCodecs() throws IOException {
for (HColumnDescriptor fam: this.htableDescriptor.getColumnFamilies()) {
- CompressionTest.testCompression(fam.getCompressionType());
- CompressionTest.testCompression(fam.getCompactionCompressionType());
+ CompressionTest.testCompression(fam.getCompression());
+ CompressionTest.testCompression(fam.getCompactionCompression());
}
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
index c87b246..136934c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StripeStoreFlusher.java
@@ -109,7 +109,7 @@ public class StripeStoreFlusher extends StoreFlusher {
@Override
public Writer createWriter() throws IOException {
StoreFile.Writer writer = store.createWriterInTmp(
- kvCount, store.getFamily().getCompressionType(), false, true, true);
+ kvCount, store.getFamily().getCompression(), false, true, true);
writer.setTimeRangeTracker(tracker);
return writer;
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index 15ead14..d1bb657 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -72,7 +72,7 @@ public abstract class Compactor {
this.compactionKVMax =
this.conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
this.compactionCompression = (this.store.getFamily() == null) ?
- Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompressionType();
+ Compression.Algorithm.NONE : this.store.getFamily().getCompactionCompression();
this.keepSeqIdPeriod = Math.max(this.conf.getInt(HConstants.KEEP_SEQID_PERIOD,
HConstants.MIN_KEEP_SEQID_PERIOD), HConstants.MIN_KEEP_SEQID_PERIOD);
}
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
index 1f6290e..10e3cf0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/StripeCompactor.java
@@ -119,7 +119,7 @@ public class StripeCompactor extends Compactor {
final boolean needMvcc = fd.maxMVCCReadpoint > 0;
- final Compression.Algorithm compression = store.getFamily().getCompactionCompressionType();
+ final Compression.Algorithm compression = store.getFamily().getCompactionCompression();
StripeMultiFileWriter.WriterFactory factory = new StripeMultiFileWriter.WriterFactory() {
@Override
public Writer createWriter() throws IOException {
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
index b3c29b7..314b7b2 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat.java
@@ -859,7 +859,7 @@ public class TestHFileOutputFormat {
"(reader: " + reader + ")",
hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
assertEquals("Incorrect compression used for column family " + familyStr +
- "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
+ "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
}
} finally {
dir.getFileSystem(conf).delete(dir, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index 3b066f2..7aa5dc4 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -862,7 +862,7 @@ public class TestHFileOutputFormat2 {
"(reader: " + reader + ")",
hcd.getBloomFilterType(), BloomType.valueOf(Bytes.toString(bloomFilter)));
assertEquals("Incorrect compression used for column family " + familyStr +
- "(reader: " + reader + ")", hcd.getCompressionType(), reader.getFileContext().getCompression());
+ "(reader: " + reader + ")", hcd.getCompression(), reader.getFileContext().getCompression());
}
} finally {
dir.getFileSystem(conf).delete(dir, true);
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
----------------------------------------------------------------------
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
index 3adef9d..0517db7 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestStore.java
@@ -268,7 +268,7 @@ public class TestStore {
init(name.getMethodName(), conf, hcd);
// Test createWriterInTmp()
- StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompressionType(), false, true, false);
+ StoreFile.Writer writer = store.createWriterInTmp(4, hcd.getCompression(), false, true, false);
Path path = writer.getPath();
writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
http://git-wip-us.apache.org/repos/asf/hbase/blob/17b6f59a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
----------------------------------------------------------------------
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
index 29db5be..57c237e 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftUtilities.java
@@ -89,7 +89,7 @@ public class ThriftUtilities {
ColumnDescriptor col = new ColumnDescriptor();
col.name = ByteBuffer.wrap(Bytes.add(in.getName(), KeyValue.COLUMN_FAMILY_DELIM_ARRAY));
col.maxVersions = in.getMaxVersions();
- col.compression = in.getCompressionType().toString();
+ col.compression = in.getCompression().toString();
col.inMemory = in.isInMemory();
col.blockCacheEnabled = in.isBlockCacheEnabled();
col.bloomFilterType = in.getBloomFilterType().toString();