You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2013/07/10 19:54:37 UTC
svn commit: r1501881 [2/4] - in /hbase/branches/0.95:
hbase-client/src/main/java/org/apache/hadoop/hbase/
hbase-client/src/main/java/org/apache/hadoop/hbase/catalog/
hbase-client/src/main/java/org/apache/hadoop/hbase/client/
hbase-client/src/main/java/...
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerSource.java Wed Jul 10 17:54:35 2013
@@ -28,22 +28,22 @@ public interface MetricsRegionServerSour
/**
* The name of the metrics
*/
- static final String METRICS_NAME = "Server";
+ String METRICS_NAME = "Server";
/**
* The name of the metrics context that metrics will be under.
*/
- static final String METRICS_CONTEXT = "regionserver";
+ String METRICS_CONTEXT = "regionserver";
/**
* Description
*/
- static final String METRICS_DESCRIPTION = "Metrics about HBase RegionServer";
+ String METRICS_DESCRIPTION = "Metrics about HBase RegionServer";
/**
* The name of the metrics context that metrics will be under in jmx
*/
- static final String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
+ String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
/**
* Update the Put time histogram
@@ -113,103 +113,103 @@ public interface MetricsRegionServerSour
void incrSlowAppend();
// Strings used for exporting to metrics system.
- static final String REGION_COUNT = "regionCount";
- static final String REGION_COUNT_DESC = "Number of regions";
- static final String STORE_COUNT = "storeCount";
- static final String STORE_COUNT_DESC = "Number of Stores";
- static final String STOREFILE_COUNT = "storeFileCount";
- static final String STOREFILE_COUNT_DESC = "Number of Store Files";
- static final String MEMSTORE_SIZE = "memStoreSize";
- static final String MEMSTORE_SIZE_DESC = "Size of the memstore";
- static final String STOREFILE_SIZE = "storeFileSize";
- static final String STOREFILE_SIZE_DESC = "Size of storefiles being served.";
- static final String TOTAL_REQUEST_COUNT = "totalRequestCount";
- static final String TOTAL_REQUEST_COUNT_DESC =
+ String REGION_COUNT = "regionCount";
+ String REGION_COUNT_DESC = "Number of regions";
+ String STORE_COUNT = "storeCount";
+ String STORE_COUNT_DESC = "Number of Stores";
+ String STOREFILE_COUNT = "storeFileCount";
+ String STOREFILE_COUNT_DESC = "Number of Store Files";
+ String MEMSTORE_SIZE = "memStoreSize";
+ String MEMSTORE_SIZE_DESC = "Size of the memstore";
+ String STOREFILE_SIZE = "storeFileSize";
+ String STOREFILE_SIZE_DESC = "Size of storefiles being served.";
+ String TOTAL_REQUEST_COUNT = "totalRequestCount";
+ String TOTAL_REQUEST_COUNT_DESC =
"Total number of requests this RegionServer has answered.";
- static final String READ_REQUEST_COUNT = "readRequestCount";
- static final String READ_REQUEST_COUNT_DESC =
+ String READ_REQUEST_COUNT = "readRequestCount";
+ String READ_REQUEST_COUNT_DESC =
"Number of read requests this region server has answered.";
- static final String WRITE_REQUEST_COUNT = "writeRequestCount";
- static final String WRITE_REQUEST_COUNT_DESC =
+ String WRITE_REQUEST_COUNT = "writeRequestCount";
+ String WRITE_REQUEST_COUNT_DESC =
"Number of mutation requests this region server has answered.";
- static final String CHECK_MUTATE_FAILED_COUNT = "checkMutateFailedCount";
- static final String CHECK_MUTATE_FAILED_COUNT_DESC =
+ String CHECK_MUTATE_FAILED_COUNT = "checkMutateFailedCount";
+ String CHECK_MUTATE_FAILED_COUNT_DESC =
"Number of Check and Mutate calls that failed the checks.";
- static final String CHECK_MUTATE_PASSED_COUNT = "checkMutatePassedCount";
- static final String CHECK_MUTATE_PASSED_COUNT_DESC =
+ String CHECK_MUTATE_PASSED_COUNT = "checkMutatePassedCount";
+ String CHECK_MUTATE_PASSED_COUNT_DESC =
"Number of Check and Mutate calls that passed the checks.";
- static final String STOREFILE_INDEX_SIZE = "storeFileIndexSize";
- static final String STOREFILE_INDEX_SIZE_DESC = "Size of indexes in storefiles on disk.";
- static final String STATIC_INDEX_SIZE = "staticIndexSize";
- static final String STATIC_INDEX_SIZE_DESC = "Uncompressed size of the static indexes.";
- static final String STATIC_BLOOM_SIZE = "staticBloomSize";
- static final String STATIC_BLOOM_SIZE_DESC =
+ String STOREFILE_INDEX_SIZE = "storeFileIndexSize";
+ String STOREFILE_INDEX_SIZE_DESC = "Size of indexes in storefiles on disk.";
+ String STATIC_INDEX_SIZE = "staticIndexSize";
+ String STATIC_INDEX_SIZE_DESC = "Uncompressed size of the static indexes.";
+ String STATIC_BLOOM_SIZE = "staticBloomSize";
+ String STATIC_BLOOM_SIZE_DESC =
"Uncompressed size of the static bloom filters.";
- static final String NUMBER_OF_MUTATIONS_WITHOUT_WAL = "mutationsWithoutWALCount";
- static final String NUMBER_OF_MUTATIONS_WITHOUT_WAL_DESC =
+ String NUMBER_OF_MUTATIONS_WITHOUT_WAL = "mutationsWithoutWALCount";
+ String NUMBER_OF_MUTATIONS_WITHOUT_WAL_DESC =
"Number of mutations that have been sent by clients with the write ahead logging turned off.";
- static final String DATA_SIZE_WITHOUT_WAL = "mutationsWithoutWALSize";
- static final String DATA_SIZE_WITHOUT_WAL_DESC =
+ String DATA_SIZE_WITHOUT_WAL = "mutationsWithoutWALSize";
+ String DATA_SIZE_WITHOUT_WAL_DESC =
"Size of data that has been sent by clients with the write ahead logging turned off.";
- static final String PERCENT_FILES_LOCAL = "percentFilesLocal";
- static final String PERCENT_FILES_LOCAL_DESC =
+ String PERCENT_FILES_LOCAL = "percentFilesLocal";
+ String PERCENT_FILES_LOCAL_DESC =
"The percent of HFiles that are stored on the local hdfs data node.";
- static final String COMPACTION_QUEUE_LENGTH = "compactionQueueLength";
- static final String COMPACTION_QUEUE_LENGTH_DESC = "Length of the queue for compactions.";
- static final String FLUSH_QUEUE_LENGTH = "flushQueueLength";
- static final String FLUSH_QUEUE_LENGTH_DESC = "Length of the queue for region flushes";
- static final String BLOCK_CACHE_FREE_SIZE = "blockCacheFreeSize";
- static final String BLOCK_CACHE_FREE_DESC =
+ String COMPACTION_QUEUE_LENGTH = "compactionQueueLength";
+ String COMPACTION_QUEUE_LENGTH_DESC = "Length of the queue for compactions.";
+ String FLUSH_QUEUE_LENGTH = "flushQueueLength";
+ String FLUSH_QUEUE_LENGTH_DESC = "Length of the queue for region flushes";
+ String BLOCK_CACHE_FREE_SIZE = "blockCacheFreeSize";
+ String BLOCK_CACHE_FREE_DESC =
"Size of the block cache that is not occupied.";
- static final String BLOCK_CACHE_COUNT = "blockCacheCount";
- static final String BLOCK_CACHE_COUNT_DESC = "Number of block in the block cache.";
- static final String BLOCK_CACHE_SIZE = "blockCacheSize";
- static final String BLOCK_CACHE_SIZE_DESC = "Size of the block cache.";
- static final String BLOCK_CACHE_HIT_COUNT = "blockCacheHitCount";
- static final String BLOCK_CACHE_HIT_COUNT_DESC = "Count of the hit on the block cache.";
- static final String BLOCK_CACHE_MISS_COUNT = "blockCacheMissCount";
- static final String BLOCK_COUNT_MISS_COUNT_DESC =
+ String BLOCK_CACHE_COUNT = "blockCacheCount";
+ String BLOCK_CACHE_COUNT_DESC = "Number of blocks in the block cache.";
+ String BLOCK_CACHE_SIZE = "blockCacheSize";
+ String BLOCK_CACHE_SIZE_DESC = "Size of the block cache.";
+ String BLOCK_CACHE_HIT_COUNT = "blockCacheHitCount";
+ String BLOCK_CACHE_HIT_COUNT_DESC = "Count of hits on the block cache.";
+ String BLOCK_CACHE_MISS_COUNT = "blockCacheMissCount";
+ String BLOCK_COUNT_MISS_COUNT_DESC =
"Number of requests for a block that missed the block cache.";
- static final String BLOCK_CACHE_EVICTION_COUNT = "blockCacheEvictionCount";
- static final String BLOCK_CACHE_EVICTION_COUNT_DESC =
+ String BLOCK_CACHE_EVICTION_COUNT = "blockCacheEvictionCount";
+ String BLOCK_CACHE_EVICTION_COUNT_DESC =
"Count of the number of blocks evicted from the block cache.";
- static final String BLOCK_CACHE_HIT_PERCENT = "blockCountHitPercent";
- static final String BLOCK_CACHE_HIT_PERCENT_DESC =
+ String BLOCK_CACHE_HIT_PERCENT = "blockCountHitPercent";
+ String BLOCK_CACHE_HIT_PERCENT_DESC =
"Percent of block cache requests that are hits";
- static final String BLOCK_CACHE_EXPRESS_HIT_PERCENT = "blockCacheExpressHitPercent";
- static final String BLOCK_CACHE_EXPRESS_HIT_PERCENT_DESC =
+ String BLOCK_CACHE_EXPRESS_HIT_PERCENT = "blockCacheExpressHitPercent";
+ String BLOCK_CACHE_EXPRESS_HIT_PERCENT_DESC =
"The percent of the time that requests with the cache turned on hit the cache.";
- static final String RS_START_TIME_NAME = "regionServerStartTime";
- static final String ZOOKEEPER_QUORUM_NAME = "zookeeperQuorum";
- static final String SERVER_NAME_NAME = "serverName";
- static final String CLUSTER_ID_NAME = "clusterId";
- static final String RS_START_TIME_DESC = "RegionServer Start Time";
- static final String ZOOKEEPER_QUORUM_DESC = "Zookeeper Quorum";
- static final String SERVER_NAME_DESC = "Server Name";
- static final String CLUSTER_ID_DESC = "Cluster Id";
- static final String UPDATES_BLOCKED_TIME = "updatesBlockedTime";
- static final String UPDATES_BLOCKED_DESC =
+ String RS_START_TIME_NAME = "regionServerStartTime";
+ String ZOOKEEPER_QUORUM_NAME = "zookeeperQuorum";
+ String SERVER_NAME_NAME = "serverName";
+ String CLUSTER_ID_NAME = "clusterId";
+ String RS_START_TIME_DESC = "RegionServer Start Time";
+ String ZOOKEEPER_QUORUM_DESC = "Zookeeper Quorum";
+ String SERVER_NAME_DESC = "Server Name";
+ String CLUSTER_ID_DESC = "Cluster Id";
+ String UPDATES_BLOCKED_TIME = "updatesBlockedTime";
+ String UPDATES_BLOCKED_DESC =
"Number of MS updates have been blocked so that the memstore can be flushed.";
- static final String DELETE_KEY = "delete";
- static final String GET_KEY = "get";
- static final String INCREMENT_KEY = "increment";
- static final String MUTATE_KEY = "mutate";
- static final String APPEND_KEY = "append";
- static final String REPLAY_KEY = "replay";
- static final String SCAN_NEXT_KEY = "scanNext";
- static final String SLOW_MUTATE_KEY = "slowPutCount";
- static final String SLOW_GET_KEY = "slowGetCount";
- static final String SLOW_DELETE_KEY = "slowDeleteCount";
- static final String SLOW_INCREMENT_KEY = "slowIncrementCount";
- static final String SLOW_APPEND_KEY = "slowAppendCount";
- static final String SLOW_MUTATE_DESC =
+ String DELETE_KEY = "delete";
+ String GET_KEY = "get";
+ String INCREMENT_KEY = "increment";
+ String MUTATE_KEY = "mutate";
+ String APPEND_KEY = "append";
+ String REPLAY_KEY = "replay";
+ String SCAN_NEXT_KEY = "scanNext";
+ String SLOW_MUTATE_KEY = "slowPutCount";
+ String SLOW_GET_KEY = "slowGetCount";
+ String SLOW_DELETE_KEY = "slowDeleteCount";
+ String SLOW_INCREMENT_KEY = "slowIncrementCount";
+ String SLOW_APPEND_KEY = "slowAppendCount";
+ String SLOW_MUTATE_DESC =
"The number of Multis that took over 1000ms to complete";
- static final String SLOW_DELETE_DESC =
+ String SLOW_DELETE_DESC =
"The number of Deletes that took over 1000ms to complete";
- static final String SLOW_GET_DESC = "The number of Gets that took over 1000ms to complete";
- static final String SLOW_INCREMENT_DESC =
+ String SLOW_GET_DESC = "The number of Gets that took over 1000ms to complete";
+ String SLOW_INCREMENT_DESC =
"The number of Increments that took over 1000ms to complete";
- static final String SLOW_APPEND_DESC =
+ String SLOW_APPEND_DESC =
"The number of Appends that took over 1000ms to complete";
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionServerWrapper.java Wed Jul 10 17:54:35 2013
@@ -27,35 +27,35 @@ public interface MetricsRegionServerWrap
/**
* Get ServerName
*/
- public String getServerName();
+ String getServerName();
/**
* Get the Cluster ID
*
* @return Cluster ID
*/
- public String getClusterId();
+ String getClusterId();
/**
* Get the Zookeeper Quorum Info
*
* @return Zookeeper Quorum Info
*/
- public String getZookeeperQuorum();
+ String getZookeeperQuorum();
/**
* Get the co-processors
*
* @return Co-processors
*/
- public String getCoprocessors();
+ String getCoprocessors();
/**
* Get HRegionServer start time
*
* @return Start time of RegionServer in milliseconds
*/
- public long getStartCode();
+ long getStartCode();
/**
* The number of online regions
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/MetricsRegionSource.java Wed Jul 10 17:54:35 2013
@@ -25,8 +25,8 @@ package org.apache.hadoop.hbase.regionse
*/
public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
- public static final String OPS_SAMPLE_NAME = "ops";
- public static final String SIZE_VALUE_NAME = "size";
+ String OPS_SAMPLE_NAME = "ops";
+ String SIZE_VALUE_NAME = "size";
/**
* Close the region's metrics as this region is closing.
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsEditsReplaySource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsEditsReplaySource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsEditsReplaySource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsEditsReplaySource.java Wed Jul 10 17:54:35 2013
@@ -29,30 +29,30 @@ public interface MetricsEditsReplaySourc
/**
* The name of the metrics
*/
- static final String METRICS_NAME = "replay";
+ String METRICS_NAME = "replay";
/**
* The name of the metrics context that metrics will be under.
*/
- static final String METRICS_CONTEXT = "regionserver";
+ String METRICS_CONTEXT = "regionserver";
/**
* Description
*/
- static final String METRICS_DESCRIPTION = "Metrics about HBase RegionServer HLog Edits Replay";
+ String METRICS_DESCRIPTION = "Metrics about HBase RegionServer HLog Edits Replay";
/**
* The name of the metrics context that metrics will be under in jmx
*/
- static final String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
+ String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
- static final String REPLAY_TIME_NAME = "replayTime";
- static final String REPLAY_TIME_DESC = "Time an replay operation took.";
- static final String REPLAY_BATCH_SIZE_NAME = "replayBatchSize";
- static final String REPLAY_BATCH_SIZE_DESC = "Number of changes in each replay batch.";
- static final String REPLAY_DATA_SIZE_NAME = "replayDataSize";
- static final String REPLAY_DATA_SIZE_DESC = "Size (in bytes) of the data of each replay.";
+ String REPLAY_TIME_NAME = "replayTime";
+ String REPLAY_TIME_DESC = "Time a replay operation took.";
+ String REPLAY_BATCH_SIZE_NAME = "replayBatchSize";
+ String REPLAY_BATCH_SIZE_DESC = "Number of changes in each replay batch.";
+ String REPLAY_DATA_SIZE_NAME = "replayDataSize";
+ String REPLAY_DATA_SIZE_DESC = "Size (in bytes) of the data of each replay.";
/**
* Add the time a replay command took
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWALSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWALSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWALSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/regionserver/wal/MetricsWALSource.java Wed Jul 10 17:54:35 2013
@@ -29,34 +29,34 @@ public interface MetricsWALSource extend
/**
* The name of the metrics
*/
- static final String METRICS_NAME = "WAL";
+ String METRICS_NAME = "WAL";
/**
* The name of the metrics context that metrics will be under.
*/
- static final String METRICS_CONTEXT = "regionserver";
+ String METRICS_CONTEXT = "regionserver";
/**
* Description
*/
- static final String METRICS_DESCRIPTION = "Metrics about HBase RegionServer HLog";
+ String METRICS_DESCRIPTION = "Metrics about HBase RegionServer HLog";
/**
* The name of the metrics context that metrics will be under in jmx
*/
- static final String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
+ String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
- static final String APPEND_TIME = "appendTime";
- static final String APPEND_TIME_DESC = "Time an append to the log took.";
- static final String APPEND_COUNT = "appendCount";
- static final String APPEND_COUNT_DESC = "Number of appends to the write ahead log.";
- static final String APPEND_SIZE = "appendSize";
- static final String APPEND_SIZE_DESC = "Size (in bytes) of the data appended to the HLog.";
- static final String SLOW_APPEND_COUNT = "slowAppendCount";
- static final String SLOW_APPEND_COUNT_DESC = "Number of appends that were slow.";
- static final String SYNC_TIME = "syncTime";
- static final String SYNC_TIME_DESC = "The time it took to sync the HLog to HDFS.";
+ String APPEND_TIME = "appendTime";
+ String APPEND_TIME_DESC = "Time an append to the log took.";
+ String APPEND_COUNT = "appendCount";
+ String APPEND_COUNT_DESC = "Number of appends to the write ahead log.";
+ String APPEND_SIZE = "appendSize";
+ String APPEND_SIZE_DESC = "Size (in bytes) of the data appended to the HLog.";
+ String SLOW_APPEND_COUNT = "slowAppendCount";
+ String SLOW_APPEND_COUNT_DESC = "Number of appends that were slow.";
+ String SYNC_TIME = "syncTime";
+ String SYNC_TIME_DESC = "The time it took to sync the HLog to HDFS.";
/**
* Add the append size.
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/replication/regionserver/MetricsReplicationSource.java Wed Jul 10 17:54:35 2013
@@ -28,21 +28,21 @@ public interface MetricsReplicationSourc
/**
* The name of the metrics
*/
- static final String METRICS_NAME = "Replication";
+ String METRICS_NAME = "Replication";
/**
* The name of the metrics context that metrics will be under.
*/
- static final String METRICS_CONTEXT = "regionserver";
+ String METRICS_CONTEXT = "regionserver";
/**
* The name of the metrics context that metrics will be under.
*/
- static final String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
+ String METRICS_JMX_CONTEXT = "RegionServer,sub=" + METRICS_NAME;
/**
* A description.
*/
- static final String METRICS_DESCRIPTION = "Metrics about HBase replication";
+ String METRICS_DESCRIPTION = "Metrics about HBase replication";
}
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/rest/MetricsRESTSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/rest/MetricsRESTSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/rest/MetricsRESTSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/rest/MetricsRESTSource.java Wed Jul 10 17:54:35 2013
@@ -25,27 +25,27 @@ import org.apache.hadoop.hbase.metrics.B
*/
public interface MetricsRESTSource extends BaseSource {
- public static String METRICS_NAME = "REST";
+ String METRICS_NAME = "REST";
- public static String CONTEXT = "rest";
+ String CONTEXT = "rest";
- public static String JMX_CONTEXT = "REST";
+ String JMX_CONTEXT = "REST";
- public static String METRICS_DESCRIPTION = "Metrics about the HBase REST server";
+ String METRICS_DESCRIPTION = "Metrics about the HBase REST server";
- static String REQUEST_KEY = "requests";
+ String REQUEST_KEY = "requests";
- static String SUCCESSFUL_GET_KEY = "successfulGet";
+ String SUCCESSFUL_GET_KEY = "successfulGet";
- static String SUCCESSFUL_PUT_KEY = "successfulPut";
+ String SUCCESSFUL_PUT_KEY = "successfulPut";
- static String SUCCESSFUL_DELETE_KEY = "successfulDelete";
+ String SUCCESSFUL_DELETE_KEY = "successfulDelete";
- static String FAILED_GET_KEY = "failedGet";
+ String FAILED_GET_KEY = "failedGet";
- static String FAILED_PUT_KEY = "failedPut";
+ String FAILED_PUT_KEY = "failedPut";
- static String FAILED_DELETE_KEY = "failedDelete";
+ String FAILED_DELETE_KEY = "failedDelete";
/**
* Increment the number of requests
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSource.java Wed Jul 10 17:54:35 2013
@@ -25,12 +25,12 @@ import org.apache.hadoop.hbase.metrics.B
*/
public interface MetricsThriftServerSource extends BaseSource {
- static final String BATCH_GET_KEY = "batchGet";
- static final String BATCH_MUTATE_KEY = "batchMutate";
- static final String TIME_IN_QUEUE_KEY = "timeInQueue";
- static final String THRIFT_CALL_KEY = "thriftCall";
- static final String SLOW_THRIFT_CALL_KEY = "slowThriftCall";
- static final String CALL_QUEUE_LEN_KEY = "callQueueLen";
+ String BATCH_GET_KEY = "batchGet";
+ String BATCH_MUTATE_KEY = "batchMutate";
+ String TIME_IN_QUEUE_KEY = "timeInQueue";
+ String THRIFT_CALL_KEY = "thriftCall";
+ String SLOW_THRIFT_CALL_KEY = "slowThriftCall";
+ String CALL_QUEUE_LEN_KEY = "callQueueLen";
/**
* Add how long an operation was in the queue.
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactory.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactory.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactory.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/hbase/thrift/MetricsThriftServerSourceFactory.java Wed Jul 10 17:54:35 2013
@@ -21,12 +21,12 @@ package org.apache.hadoop.hbase.thrift;
/** Factory that will be used to create metrics sources for the two different types of thrift servers. */
public interface MetricsThriftServerSourceFactory {
- static final String METRICS_NAME = "Thrift";
- static final String METRICS_DESCRIPTION = "Thrift Server Metrics";
- static final String THRIFT_ONE_METRICS_CONTEXT = "thrift-one";
- static final String THRIFT_ONE_JMX_CONTEXT = "Thrift,sub=ThriftOne";
- static final String THRIFT_TWO_METRICS_CONTEXT = "thrift-two";
- static final String THRIFT_TWO_JMX_CONTEXT = "Thrift,sub=ThriftTwo";
+ String METRICS_NAME = "Thrift";
+ String METRICS_DESCRIPTION = "Thrift Server Metrics";
+ String THRIFT_ONE_METRICS_CONTEXT = "thrift-one";
+ String THRIFT_ONE_JMX_CONTEXT = "Thrift,sub=ThriftOne";
+ String THRIFT_TWO_METRICS_CONTEXT = "thrift-two";
+ String THRIFT_TWO_JMX_CONTEXT = "Thrift,sub=ThriftTwo";
/** Create a Source for a thrift one server */
MetricsThriftServerSource createThriftOneSource();
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/MetricHistogram.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/MetricHistogram.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/MetricHistogram.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/main/java/org/apache/hadoop/metrics2/MetricHistogram.java Wed Jul 10 17:54:35 2013
@@ -25,14 +25,14 @@ package org.apache.hadoop.metrics2;
public interface MetricHistogram {
//Strings used to create metrics names.
- static final String NUM_OPS_METRIC_NAME = "_num_ops";
- static final String MIN_METRIC_NAME = "_min";
- static final String MAX_METRIC_NAME = "_max";
- static final String MEAN_METRIC_NAME = "_mean";
- static final String MEDIAN_METRIC_NAME = "_median";
- static final String SEVENTY_FIFTH_PERCENTILE_METRIC_NAME = "_75th_percentile";
- static final String NINETY_FIFTH_PERCENTILE_METRIC_NAME = "_95th_percentile";
- static final String NINETY_NINETH_PERCENTILE_METRIC_NAME = "_99th_percentile";
+ String NUM_OPS_METRIC_NAME = "_num_ops";
+ String MIN_METRIC_NAME = "_min";
+ String MAX_METRIC_NAME = "_max";
+ String MEAN_METRIC_NAME = "_mean";
+ String MEDIAN_METRIC_NAME = "_median";
+ String SEVENTY_FIFTH_PERCENTILE_METRIC_NAME = "_75th_percentile";
+ String NINETY_FIFTH_PERCENTILE_METRIC_NAME = "_95th_percentile";
+ String NINETY_NINETH_PERCENTILE_METRIC_NAME = "_99th_percentile";
/**
* Add a single value to a histogram's stream of values.
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/HadoopShims.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/HadoopShims.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/HadoopShims.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/HadoopShims.java Wed Jul 10 17:54:35 2013
@@ -32,6 +32,6 @@ public interface HadoopShims {
* TaskAttemptId.forName()
* @return a concrete TaskAttemptContext instance of o.a.h.mapreduce.TaskAttemptContext
*/
- public <T,J> T createTestTaskAttemptContext(final J job, final String taskId);
+ <T,J> T createTestTaskAttemptContext(final J job, final String taskId);
}
Modified: hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelper.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelper.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelper.java (original)
+++ hbase/branches/0.95/hbase-hadoop-compat/src/test/java/org/apache/hadoop/hbase/test/MetricsAssertHelper.java Wed Jul 10 17:54:35 2013
@@ -27,7 +27,7 @@ public interface MetricsAssertHelper {
* Init helper. This method will make sure that the metrics system is set
* up for tests.
*/
- public void init();
+ void init();
/**
* Assert that a tag exists and has a given value.
@@ -37,7 +37,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertTag(String name, String expected, BaseSource source);
+ void assertTag(String name, String expected, BaseSource source);
/**
* Assert that a gauge exists and that it's value is equal to the expected value.
@@ -47,7 +47,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGauge(String name, long expected, BaseSource source);
+ void assertGauge(String name, long expected, BaseSource source);
/**
* Assert that a gauge exists and it's value is greater than a given value
@@ -57,7 +57,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGaugeGt(String name, long expected, BaseSource source);
+ void assertGaugeGt(String name, long expected, BaseSource source);
/**
* Assert that a gauge exists and it's value is less than a given value
@@ -67,7 +67,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGaugeLt(String name, long expected, BaseSource source);
+ void assertGaugeLt(String name, long expected, BaseSource source);
/**
* Assert that a gauge exists and that it's value is equal to the expected value.
@@ -77,7 +77,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGauge(String name, double expected, BaseSource source);
+ void assertGauge(String name, double expected, BaseSource source);
/**
* Assert that a gauge exists and it's value is greater than a given value
@@ -87,7 +87,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGaugeGt(String name, double expected, BaseSource source);
+ void assertGaugeGt(String name, double expected, BaseSource source);
/**
* Assert that a gauge exists and it's value is less than a given value
@@ -97,7 +97,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertGaugeLt(String name, double expected, BaseSource source);
+ void assertGaugeLt(String name, double expected, BaseSource source);
/**
* Assert that a counter exists and that it's value is equal to the expected value.
@@ -107,7 +107,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertCounter(String name, long expected, BaseSource source);
+ void assertCounter(String name, long expected, BaseSource source);
/**
* Assert that a counter exists and that its value is greater than the given value.
@@ -117,7 +117,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertCounterGt(String name, long expected, BaseSource source);
+ void assertCounterGt(String name, long expected, BaseSource source);
/**
* Assert that a counter exists and that its value is less than the given value.
@@ -127,7 +127,7 @@ public interface MetricsAssertHelper {
* @param source The BaseSource{@link BaseSource} that will provide the tags,
* gauges, and counters.
*/
- public void assertCounterLt(String name, long expected, BaseSource source);
+ void assertCounterLt(String name, long expected, BaseSource source);
/**
* Get the value of a counter.
@@ -137,7 +137,7 @@ public interface MetricsAssertHelper {
* gauges, and counters.
* @return long value of the counter.
*/
- public long getCounter(String name, BaseSource source);
+ long getCounter(String name, BaseSource source);
/**
* Get the value of a gauge as a double.
@@ -147,7 +147,7 @@ public interface MetricsAssertHelper {
* gauges, and counters.
* @return double value of the gauge.
*/
- public double getGaugeDouble(String name, BaseSource source);
+ double getGaugeDouble(String name, BaseSource source);
/**
* Get the value of a gauge as a long.
@@ -157,5 +157,5 @@ public interface MetricsAssertHelper {
* gauges, and counters.
* @return long value of the gauge.
*/
- public long getGaugeLong(String name, BaseSource source);
+ long getGaugeLong(String name, BaseSource source);
}
Modified: hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java (original)
+++ hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/builder/TestTokenizerData.java Wed Jul 10 17:54:35 2013
@@ -31,7 +31,7 @@ public interface TestTokenizerData {
List<byte[]> getInputs();
List<byte[]> getOutputs();
- public static class InMemory {
+ class InMemory {
public Collection<Object[]> getAllAsObjectArray() {
List<Object[]> all = Lists.newArrayList();
all.add(new Object[] { new TestTokenizerDataBasic() });
Modified: hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java (original)
+++ hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/column/TestColumnData.java Wed Jul 10 17:54:35 2013
@@ -32,7 +32,7 @@ public interface TestColumnData {
List<ByteRange> getInputs();
List<ByteRange> getOutputs();
- public static class InMemory {
+ class InMemory {
public Collection<Object[]> getAllAsObjectArray() {
List<Object[]> all = Lists.newArrayList();
all.add(new Object[] { new TestColumnDataSimple() });
Modified: hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java (original)
+++ hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/row/TestRowData.java Wed Jul 10 17:54:35 2013
@@ -54,7 +54,7 @@ public interface TestRowData {
void individualSearcherAssertions(CellSearcher searcher);
- public static class InMemory {
+ class InMemory {
/*
* The following are different styles of data that the codec may encounter. Having these small
Modified: hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java (original)
+++ hbase/branches/0.95/hbase-prefix-tree/src/test/java/org/apache/hadoop/hbase/codec/prefixtree/timestamp/TestTimestampData.java Wed Jul 10 17:54:35 2013
@@ -33,7 +33,7 @@ public interface TestTimestampData {
long getMinimum();
List<Long> getOutputs();
- public static class InMemory {
+ class InMemory {
public Collection<Object[]> getAllAsObjectArray() {
List<Object[]> all = Lists.newArrayList();
all.add(new Object[] { new TestTimestampDataBasic() });
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessLock.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessLock.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessLock.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessLock.java Wed Jul 10 17:54:35 2013
@@ -36,7 +36,7 @@ public interface InterProcessLock {
* @throws InterruptedException If current thread is interrupted while
* waiting for the lock
*/
- public void acquire() throws IOException, InterruptedException;
+ void acquire() throws IOException, InterruptedException;
/**
* Acquire the lock within a wait time.
@@ -50,7 +50,7 @@ public interface InterProcessLock {
* @throws InterruptedException If the thread is interrupted while waiting to
* acquire the lock
*/
- public boolean tryAcquire(long timeoutMs)
+ boolean tryAcquire(long timeoutMs)
throws IOException, InterruptedException;
/**
@@ -59,7 +59,7 @@ public interface InterProcessLock {
* @throws InterruptedException If the thread is interrupted while releasing
* the lock
*/
- public void release() throws IOException, InterruptedException;
+ void release() throws IOException, InterruptedException;
/**
* If supported, attempts to reap all the locks of this type by forcefully
@@ -69,7 +69,7 @@ public interface InterProcessLock {
* lock holder is still alive.
* @throws IOException If there is an unrecoverable error reaping the locks
*/
- public void reapExpiredLocks(long expireTimeoutMs) throws IOException;
+ void reapExpiredLocks(long expireTimeoutMs) throws IOException;
/**
* If supported, attempts to reap all the locks of this type by forcefully
@@ -80,12 +80,12 @@ public interface InterProcessLock {
* with timeout=0.
* @throws IOException If there is an unrecoverable error reaping the locks
*/
- public void reapAllLocks() throws IOException;
+ void reapAllLocks() throws IOException;
/**
* An interface for objects that process lock metadata.
*/
- public static interface MetadataHandler {
+ interface MetadataHandler {
/**
* Called after lock metadata is successfully read from a distributed
@@ -93,7 +93,7 @@ public interface InterProcessLock {
* printing the metadata in a humanly-readable format.
* @param metadata The metadata
*/
- public void handleMetadata(byte[] metadata);
+ void handleMetadata(byte[] metadata);
}
/**
@@ -101,5 +101,5 @@ public interface InterProcessLock {
* {@link MetadataHandler}.
* @throws InterruptedException If there is an unrecoverable error
*/
- public void visitLocks(MetadataHandler handler) throws IOException;
+ void visitLocks(MetadataHandler handler) throws IOException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessReadWriteLock.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessReadWriteLock.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessReadWriteLock.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/InterProcessReadWriteLock.java Wed Jul 10 17:54:35 2013
@@ -34,7 +34,7 @@ public interface InterProcessReadWriteLo
* which the lock was acquired).
* @return An instantiated InterProcessLock instance
*/
- public InterProcessLock readLock(byte[] metadata);
+ InterProcessLock readLock(byte[] metadata);
/**
* Obtain a write lock containing given metadata.
@@ -43,5 +43,5 @@ public interface InterProcessReadWriteLo
* which the lock was acquired).
* @return An instantiated InterProcessLock instance
*/
- public InterProcessLock writeLock(byte[] metadata);
+ InterProcessLock writeLock(byte[] metadata);
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/TableDescriptors.java Wed Jul 10 17:54:35 2013
@@ -35,7 +35,7 @@ public interface TableDescriptors {
* @return HTableDescriptor for tablename
* @throws IOException
*/
- public HTableDescriptor get(final String tablename)
+ HTableDescriptor get(final String tablename)
throws IOException;
/**
@@ -43,7 +43,7 @@ public interface TableDescriptors {
* @return HTableDescriptor for tablename
* @throws IOException
*/
- public HTableDescriptor get(final byte[] tablename)
+ HTableDescriptor get(final byte[] tablename)
throws IOException;
/**
@@ -52,7 +52,7 @@ public interface TableDescriptors {
* @return Map of all descriptors.
* @throws IOException
*/
- public Map<String, HTableDescriptor> getAll()
+ Map<String, HTableDescriptor> getAll()
throws IOException;
/**
@@ -60,7 +60,7 @@ public interface TableDescriptors {
* @param htd Descriptor to set into TableDescriptors
* @throws IOException
*/
- public void add(final HTableDescriptor htd)
+ void add(final HTableDescriptor htd)
throws IOException;
/**
@@ -68,6 +68,6 @@ public interface TableDescriptors {
* @return Instance of table descriptor or null if none found.
* @throws IOException
*/
- public HTableDescriptor remove(final String tablename)
+ HTableDescriptor remove(final String tablename)
throws IOException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/constraint/Constraint.java Wed Jul 10 17:54:35 2013
@@ -76,6 +76,6 @@ public interface Constraint extends Conf
* @throws org.apache.hadoop.hbase.exceptions.ConstraintException when the {@link Put} does not match the
* constraint.
*/
- public void check(Put p) throws ConstraintException;
+ void check(Put p) throws ConstraintException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorService.java Wed Jul 10 17:54:35 2013
@@ -29,5 +29,5 @@ import org.apache.hadoop.classification.
@InterfaceAudience.Public
@InterfaceStability.Evolving
public interface CoprocessorService {
- public Service getService();
+ Service getService();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionCoprocessorEnvironment.java Wed Jul 10 17:54:35 2013
@@ -31,12 +31,12 @@ import org.apache.hadoop.hbase.regionser
@InterfaceStability.Evolving
public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment {
/** @return the region associated with this coprocessor */
- public HRegion getRegion();
+ HRegion getRegion();
/** @return reference to the region server services */
- public RegionServerServices getRegionServerServices();
+ RegionServerServices getRegionServerServices();
/** @return shared data between all instances of this coprocessor */
- public ConcurrentMap<String, Object> getSharedData();
+ ConcurrentMap<String, Object> getSharedData();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/WALCoprocessorEnvironment.java Wed Jul 10 17:54:35 2013
@@ -28,5 +28,5 @@ import org.apache.hadoop.hbase.regionser
@InterfaceStability.Evolving
public interface WALCoprocessorEnvironment extends CoprocessorEnvironment {
/** @return reference to the write-ahead log (WAL) of this region server */
- public HLog getWAL();
+ HLog getWAL();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionListener.java Wed Jul 10 17:54:35 2013
@@ -36,5 +36,5 @@ public interface ForeignExceptionListene
* Implementers must ensure that this method is thread-safe.
* @param e exception causing the error. Implementations must accept and handle null here.
*/
- public void receive(ForeignException e);
-}
\ No newline at end of file
+ void receive(ForeignException e);
+}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/errorhandling/ForeignExceptionSnare.java Wed Jul 10 17:54:35 2013
@@ -47,7 +47,7 @@ public interface ForeignExceptionSnare {
* @throws ForeignException
* all exceptions from remote sources are procedure exceptions
*/
- public void rethrowException() throws ForeignException;
+ void rethrowException() throws ForeignException;
/**
* Non-exceptional form of {@link #rethrowException()}. Checks to see if any
@@ -56,12 +56,12 @@ public interface ForeignExceptionSnare {
*
* @return <tt>true</tt> if there has been an error,<tt>false</tt> otherwise
*/
- public boolean hasException();
+ boolean hasException();
/**
* Get the value of the captured exception.
*
* @return the captured foreign exception or null if no exception captured.
*/
- public ForeignException getException();
+ ForeignException getException();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/executor/EventHandler.java Wed Jul 10 17:54:35 2013
@@ -87,12 +87,12 @@ public abstract class EventHandler imple
* Called before any event is processed
* @param event The event handler whose process method is about to be called.
*/
- public void beforeProcess(EventHandler event);
+ void beforeProcess(EventHandler event);
/**
* Called after any event is processed
* @param event The event handler whose process method is about to be called.
*/
- public void afterProcess(EventHandler event);
+ void afterProcess(EventHandler event);
}
/**
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java Wed Jul 10 17:54:35 2013
@@ -296,7 +296,7 @@ public class HFileSystem extends FilterF
/**
* Interface to implement to add a specific reordering logic in hdfs.
*/
- static interface ReorderBlocks {
+ interface ReorderBlocks {
/**
*
* @param conf - the conf to use
@@ -304,7 +304,7 @@ public class HFileSystem extends FilterF
* @param src - the file name currently read
* @throws IOException - if something went wrong
*/
- public void reorderBlocks(Configuration conf, LocatedBlocks lbs, String src) throws IOException;
+ void reorderBlocks(Configuration conf, LocatedBlocks lbs, String src) throws IOException;
}
/**
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/WritableWithSize.java Wed Jul 10 17:54:35 2013
@@ -34,5 +34,5 @@ public interface WritableWithSize {
*
* @return the size of the writable
*/
- public long getWritableSize();
+ long getWritableSize();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java Wed Jul 10 17:54:35 2013
@@ -36,14 +36,14 @@ public interface BlockCache {
* @param buf The block contents wrapped in a ByteBuffer.
* @param inMemory Whether block should be treated as in-memory
*/
- public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory);
+ void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory);
/**
* Add block to cache (defaults to not in-memory).
* @param cacheKey The block's cache key.
* @param buf The object to cache.
*/
- public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf);
+ void cacheBlock(BlockCacheKey cacheKey, Cacheable buf);
/**
* Fetch block from cache.
@@ -54,62 +54,62 @@ public interface BlockCache {
* @return Block or null if block is not in the cache.
* @see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType)
*/
- public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat);
+ Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat);
/**
* Evict block from cache.
* @param cacheKey Block to evict
* @return true if block existed and was evicted, false if not
*/
- public boolean evictBlock(BlockCacheKey cacheKey);
+ boolean evictBlock(BlockCacheKey cacheKey);
/**
* Evicts all blocks for the given HFile.
*
* @return the number of blocks evicted
*/
- public int evictBlocksByHfileName(String hfileName);
+ int evictBlocksByHfileName(String hfileName);
/**
* Get the statistics for this block cache.
* @return Stats
*/
- public CacheStats getStats();
+ CacheStats getStats();
/**
* Shutdown the cache.
*/
- public void shutdown();
+ void shutdown();
/**
* Returns the total size of the block cache, in bytes.
* @return size of cache, in bytes
*/
- public long size();
+ long size();
/**
* Returns the free size of the block cache, in bytes.
* @return free space in cache, in bytes
*/
- public long getFreeSize();
+ long getFreeSize();
/**
* Returns the occupied size of the block cache, in bytes.
* @return occupied space in cache, in bytes
*/
- public long getCurrentSize();
+ long getCurrentSize();
/**
* Returns the number of evictions that have occurred.
* @return number of evictions
*/
- public long getEvictedCount();
+ long getEvictedCount();
/**
* Returns the number of blocks currently cached in the block cache.
* @return number of blocks in the cache
*/
- public long getBlockCount();
+ long getBlockCount();
/**
* Performs a BlockCache summary and returns a List of BlockCacheColumnFamilySummary objects.
@@ -123,5 +123,5 @@ public interface BlockCache {
* @return List of BlockCacheColumnFamilySummary
* @throws IOException exception
*/
- public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException;
+ List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf) throws IOException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/Cacheable.java Wed Jul 10 17:54:35 2013
@@ -42,23 +42,23 @@ public interface Cacheable extends HeapS
* @return int length in bytes of the serialized form.
*/
- public int getSerializedLength();
+ int getSerializedLength();
/**
* Serializes its data into destination.
*/
- public void serialize(ByteBuffer destination);
+ void serialize(ByteBuffer destination);
/**
* Returns CacheableDeserializer instance which reconstructs original object from ByteBuffer.
*
* @return CacheableDeserializer instance.
*/
- public CacheableDeserializer<Cacheable> getDeserializer();
+ CacheableDeserializer<Cacheable> getDeserializer();
/**
* @return the block type of this cached HFile block
*/
- public BlockType getBlockType();
+ BlockType getBlockType();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/CacheableDeserializer.java Wed Jul 10 17:54:35 2013
@@ -33,7 +33,7 @@ public interface CacheableDeserializer<T
*
* @return T the deserialized object.
*/
- public T deserialize(ByteBuffer b) throws IOException;
+ T deserialize(ByteBuffer b) throws IOException;
/**
*
@@ -43,12 +43,12 @@ public interface CacheableDeserializer<T
* @return T the deserialized object.
* @throws IOException
*/
- public T deserialize(ByteBuffer b, boolean reuse) throws IOException;
+ T deserialize(ByteBuffer b, boolean reuse) throws IOException;
/**
* Get the identifier of this deserialiser. Identifier is unique for each
* deserializer and generated by {@link CacheableDeserializerIdManager}
* @return identifier number of this cacheable deserializer
*/
- public int getDeserialiserIdentifier();
+ int getDeserialiserIdentifier();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileDataBlockEncoder.java Wed Jul 10 17:54:35 2013
@@ -49,8 +49,9 @@ public interface HFileDataBlockEncoder {
* generated).
* @return non null block which is coded according to the settings.
*/
- public HFileBlock diskToCacheFormat(HFileBlock block,
- boolean isCompaction);
+ HFileBlock diskToCacheFormat(
+ HFileBlock block, boolean isCompaction
+ );
/**
* Should be called before an encoded or unencoded data block is written to
@@ -60,37 +61,39 @@ public interface HFileDataBlockEncoder {
* @param blockType block type
* @throws IOException
*/
- public void beforeWriteToDisk(
- ByteBuffer in, boolean includesMemstoreTS,
- HFileBlockEncodingContext encodingResult,
- BlockType blockType) throws IOException;
+ void beforeWriteToDisk(
+ ByteBuffer in,
+ boolean includesMemstoreTS,
+ HFileBlockEncodingContext encodingResult,
+ BlockType blockType
+ ) throws IOException;
/**
* Decides whether we should use a scanner over encoded blocks.
* @param isCompaction whether we are in a compaction.
* @return Whether to use encoded scanner.
*/
- public boolean useEncodedScanner(boolean isCompaction);
+ boolean useEncodedScanner(boolean isCompaction);
/**
* Save metadata in HFile which will be written to disk
* @param writer writer for a given HFile
* @exception IOException on disk problems
*/
- public void saveMetadata(HFile.Writer writer)
+ void saveMetadata(HFile.Writer writer)
throws IOException;
/** @return the on-disk data block encoding */
- public DataBlockEncoding getEncodingOnDisk();
+ DataBlockEncoding getEncodingOnDisk();
/** @return the preferred in-cache data block encoding for normal reads */
- public DataBlockEncoding getEncodingInCache();
+ DataBlockEncoding getEncodingInCache();
/**
* @return the effective in-cache data block encoding, taking into account
* whether we are doing a compaction.
*/
- public DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);
+ DataBlockEncoding getEffectiveEncodingInCache(boolean isCompaction);
/**
* Create an encoder specific encoding context object for writing. And the
@@ -101,8 +104,9 @@ public interface HFileDataBlockEncoder {
* @param headerBytes header bytes
* @return a new {@link HFileBlockEncodingContext} object
*/
- public HFileBlockEncodingContext newOnDiskDataBlockEncodingContext(
- Algorithm compressionAlgorithm, byte[] headerBytes);
+ HFileBlockEncodingContext newOnDiskDataBlockEncodingContext(
+ Algorithm compressionAlgorithm, byte[] headerBytes
+ );
/**
* Create an encoder-specific decoding context for reading. And the
@@ -112,7 +116,8 @@ public interface HFileDataBlockEncoder {
* @param compressionAlgorithm
* @return a new {@link HFileBlockDecodingContext} object
*/
- public HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(
- Algorithm compressionAlgorithm);
+ HFileBlockDecodingContext newOnDiskDataBlockDecodingContext(
+ Algorithm compressionAlgorithm
+ );
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileScanner.java Wed Jul 10 17:54:35 2013
@@ -54,8 +54,8 @@ public interface HFileScanner {
* false when it is called.
* @throws IOException
*/
- public int seekTo(byte[] key) throws IOException;
- public int seekTo(byte[] key, int offset, int length) throws IOException;
+ int seekTo(byte[] key) throws IOException;
+ int seekTo(byte[] key, int offset, int length) throws IOException;
/**
* Reseek to or just before the passed <code>key</code>. Similar to seekTo
* except that this can be called even if the scanner is not at the beginning
@@ -76,8 +76,8 @@ public interface HFileScanner {
* 1, such that k[i] < key, and scanner is left in position i.
* @throws IOException
*/
- public int reseekTo(byte[] key) throws IOException;
- public int reseekTo(byte[] key, int offset, int length) throws IOException;
+ int reseekTo(byte[] key) throws IOException;
+ int reseekTo(byte[] key, int offset, int length) throws IOException;
/**
* Consider the key stream of all the keys in the file,
* <code>k[0] .. k[n]</code>, where there are n keys in the file.
@@ -88,28 +88,28 @@ public interface HFileScanner {
* return false (EOF).
* @throws IOException
*/
- public boolean seekBefore(byte [] key) throws IOException;
- public boolean seekBefore(byte []key, int offset, int length) throws IOException;
+ boolean seekBefore(byte[] key) throws IOException;
+ boolean seekBefore(byte[] key, int offset, int length) throws IOException;
/**
* Positions this scanner at the start of the file.
* @return False if empty file; i.e. a call to next would return false and
* the current key and value are undefined.
* @throws IOException
*/
- public boolean seekTo() throws IOException;
+ boolean seekTo() throws IOException;
/**
* Scans to the next entry in the file.
* @return Returns false if you are at the end otherwise true if more in file.
* @throws IOException
*/
- public boolean next() throws IOException;
+ boolean next() throws IOException;
/**
* Gets a buffer view to the current key. You must call
* {@link #seekTo(byte[])} before this method.
* @return byte buffer for the key. The limit is set to the key size, and the
* position is 0, the start of the buffer view.
*/
- public ByteBuffer getKey();
+ ByteBuffer getKey();
/**
* Gets a buffer view to the current value. You must call
* {@link #seekTo(byte[])} before this method.
@@ -117,31 +117,31 @@ public interface HFileScanner {
* @return byte buffer for the value. The limit is set to the value size, and
* the position is 0, the start of the buffer view.
*/
- public ByteBuffer getValue();
+ ByteBuffer getValue();
/**
* @return Instance of {@link KeyValue}.
*/
- public KeyValue getKeyValue();
+ KeyValue getKeyValue();
/**
* Convenience method to get a copy of the key as a string - interpreting the
* bytes as UTF8. You must call {@link #seekTo(byte[])} before this method.
* @return key as a string
*/
- public String getKeyString();
+ String getKeyString();
/**
* Convenience method to get a copy of the value as a string - interpreting
* the bytes as UTF8. You must call {@link #seekTo(byte[])} before this method.
* @return value as a string
*/
- public String getValueString();
+ String getValueString();
/**
* @return Reader that underlies this Scanner instance.
*/
- public HFile.Reader getReader();
+ HFile.Reader getReader();
/**
* @return True if scanner has had one of the seek calls invoked; i.e.
* {@link #seekBefore(byte[])} or {@link #seekTo()} or {@link #seekTo(byte[])}.
* Otherwise returns false.
*/
- public boolean isSeeked();
+ boolean isSeeked();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/Delayable.java Wed Jul 10 17:54:35 2013
@@ -35,17 +35,17 @@ public interface Delayable {
* should be set when ending the delay or right away. There are cases when
* the return value can be set right away, even if the call is delayed.
*/
- public void startDelay(boolean delayReturnValue);
+ void startDelay(boolean delayReturnValue);
/**
* @return is the call delayed?
*/
- public boolean isDelayed();
+ boolean isDelayed();
/**
* @return is the return value delayed?
*/
- public boolean isReturnValueDelayed();
+ boolean isReturnValueDelayed();
/**
* Signal that the RPC server is now allowed to send the response.
@@ -54,14 +54,14 @@ public interface Delayable {
* not be delayed, this parameter must be null.
* @throws IOException
*/
- public void endDelay(Object result) throws IOException;
+ void endDelay(Object result) throws IOException;
/**
* Signal the end of a delayed RPC, without specifying the return value. Use
* this only if the return value was not delayed
* @throws IOException
*/
- public void endDelay() throws IOException;
+ void endDelay() throws IOException;
/**
* End the call, throwing an exception to the caller. This works regardless
@@ -69,5 +69,5 @@ public interface Delayable {
* @param t Object to throw to the client.
* @throws IOException
*/
- public void endDelayThrowing(Throwable t) throws IOException;
-}
\ No newline at end of file
+ void endDelayThrowing(Throwable t) throws IOException;
+}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRPCErrorHandler.java Wed Jul 10 17:54:35 2013
@@ -31,5 +31,5 @@ public interface HBaseRPCErrorHandler {
* @param e the throwable
* @return if the server should be shut down
*/
- public boolean checkOOME(final Throwable e) ;
+ boolean checkOOME(final Throwable e) ;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServerInterface.java Wed Jul 10 17:54:35 2013
@@ -65,7 +65,7 @@ public interface RpcServerInterface {
*/
MetricsHBaseServer getMetrics();
- public void setQosFunction(Function<Pair<RequestHeader, Message>, Integer> newFunc);
+ void setQosFunction(Function<Pair<RequestHeader, Message>, Integer> newFunc);
/**
* Refresh authentication manager policy.
@@ -73,4 +73,4 @@ public interface RpcServerInterface {
*/
@VisibleForTesting
void refreshAuthManager(PolicyProvider pp);
-}
\ No newline at end of file
+}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java Wed Jul 10 17:54:35 2013
@@ -1145,11 +1145,11 @@ public class AssignmentManager extends Z
/**
* A specific runnable that works only on a region.
*/
- private static interface RegionRunnable extends Runnable{
+ private interface RegionRunnable extends Runnable{
/**
* @return - the name of the region it works on.
*/
- public String getRegionName();
+ String getRegionName();
}
/**
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/ClusterStatusPublisher.java Wed Jul 10 17:54:35 2013
@@ -220,14 +220,14 @@ public class ClusterStatusPublisher exte
}
- public static interface Publisher extends Closeable {
+ public interface Publisher extends Closeable {
- public void connect(Configuration conf) throws IOException;
+ void connect(Configuration conf) throws IOException;
- public void publish(ClusterStatus cs);
+ void publish(ClusterStatus cs);
@Override
- public void close();
+ void close();
}
public static class MulticastPublisher implements Publisher {
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/LoadBalancer.java Wed Jul 10 17:54:35 2013
@@ -50,21 +50,21 @@ public interface LoadBalancer extends Co
* Set the current cluster status. This allows a LoadBalancer to map host name to a server
* @param st
*/
- public void setClusterStatus(ClusterStatus st);
+ void setClusterStatus(ClusterStatus st);
/**
* Set the master service.
* @param masterServices
*/
- public void setMasterServices(MasterServices masterServices);
+ void setMasterServices(MasterServices masterServices);
/**
* Perform the major balance operation
* @param clusterState
* @return List of plans
*/
- public List<RegionPlan> balanceCluster(Map<ServerName, List<HRegionInfo>> clusterState);
+ List<RegionPlan> balanceCluster(Map<ServerName, List<HRegionInfo>> clusterState);
/**
* Perform a Round Robin assignment of regions.
@@ -72,7 +72,10 @@ public interface LoadBalancer extends Co
* @param servers
* @return Map of servername to regioninfos
*/
- public Map<ServerName, List<HRegionInfo>> roundRobinAssignment(List<HRegionInfo> regions, List<ServerName> servers);
+ Map<ServerName, List<HRegionInfo>> roundRobinAssignment(
+ List<HRegionInfo> regions,
+ List<ServerName> servers
+ );
/**
* Assign regions to the previously hosting region server
@@ -80,7 +83,10 @@ public interface LoadBalancer extends Co
* @param servers
* @return List of plans
*/
- public Map<ServerName, List<HRegionInfo>> retainAssignment(Map<HRegionInfo, ServerName> regions, List<ServerName> servers);
+ Map<ServerName, List<HRegionInfo>> retainAssignment(
+ Map<HRegionInfo, ServerName> regions,
+ List<ServerName> servers
+ );
/**
* Sync assign a region
@@ -88,7 +94,10 @@ public interface LoadBalancer extends Co
* @param servers
* @return Map regioninfos to servernames
*/
- public Map<HRegionInfo, ServerName> immediateAssignment(List<HRegionInfo> regions, List<ServerName> servers);
+ Map<HRegionInfo, ServerName> immediateAssignment(
+ List<HRegionInfo> regions,
+ List<ServerName> servers
+ );
/**
* Get a random region server from the list
@@ -96,6 +105,7 @@ public interface LoadBalancer extends Co
* @param servers
* @return Servername
*/
- public ServerName randomAssignment(HRegionInfo regionInfo,
- List<ServerName> servers);
+ ServerName randomAssignment(
+ HRegionInfo regionInfo, List<ServerName> servers
+ );
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/MasterServices.java Wed Jul 10 17:54:35 2013
@@ -40,32 +40,32 @@ public interface MasterServices extends
/**
* @return Master's instance of the {@link AssignmentManager}
*/
- public AssignmentManager getAssignmentManager();
+ AssignmentManager getAssignmentManager();
/**
* @return Master's filesystem {@link MasterFileSystem} utility class.
*/
- public MasterFileSystem getMasterFileSystem();
+ MasterFileSystem getMasterFileSystem();
/**
* @return Master's {@link ServerManager} instance.
*/
- public ServerManager getServerManager();
+ ServerManager getServerManager();
/**
* @return Master's instance of {@link ExecutorService}
*/
- public ExecutorService getExecutorService();
+ ExecutorService getExecutorService();
/**
* @return Master's instance of {@link TableLockManager}
*/
- public TableLockManager getTableLockManager();
+ TableLockManager getTableLockManager();
/**
* @return Master's instance of {@link MasterCoprocessorHost}
*/
- public MasterCoprocessorHost getCoprocessorHost();
+ MasterCoprocessorHost getCoprocessorHost();
/**
* Check table is modifiable; i.e. exists and is offline.
@@ -75,7 +75,7 @@ public interface MasterServices extends
* @throws IOException
*/
// We actually throw the exceptions mentioned in the
- public void checkTableModifiable(final byte [] tableName)
+ void checkTableModifiable(final byte[] tableName)
throws IOException, TableNotFoundException, TableNotDisabledException;
/**
@@ -84,7 +84,7 @@ public interface MasterServices extends
* @param splitKeys Starting row keys for the initial table regions. If null
* a single region is created.
*/
- public void createTable(HTableDescriptor desc, byte [][] splitKeys)
+ void createTable(HTableDescriptor desc, byte[][] splitKeys)
throws IOException;
/**
@@ -92,7 +92,7 @@ public interface MasterServices extends
* @param tableName The table name
* @throws IOException
*/
- public void deleteTable(final byte[] tableName) throws IOException;
+ void deleteTable(final byte[] tableName) throws IOException;
/**
* Modify the descriptor of an existing table
@@ -100,7 +100,7 @@ public interface MasterServices extends
* @param descriptor The updated table descriptor
* @throws IOException
*/
- public void modifyTable(final byte[] tableName, final HTableDescriptor descriptor)
+ void modifyTable(final byte[] tableName, final HTableDescriptor descriptor)
throws IOException;
/**
@@ -108,14 +108,14 @@ public interface MasterServices extends
* @param tableName The table name
* @throws IOException
*/
- public void enableTable(final byte[] tableName) throws IOException;
+ void enableTable(final byte[] tableName) throws IOException;
/**
* Disable an existing table
* @param tableName The table name
* @throws IOException
*/
- public void disableTable(final byte[] tableName) throws IOException;
+ void disableTable(final byte[] tableName) throws IOException;
/**
* Add a new column to an existing table
@@ -123,7 +123,7 @@ public interface MasterServices extends
* @param column The column definition
* @throws IOException
*/
- public void addColumn(final byte[] tableName, final HColumnDescriptor column)
+ void addColumn(final byte[] tableName, final HColumnDescriptor column)
throws IOException;
/**
@@ -132,7 +132,7 @@ public interface MasterServices extends
* @param descriptor The updated column definition
* @throws IOException
*/
- public void modifyColumn(byte[] tableName, HColumnDescriptor descriptor)
+ void modifyColumn(byte[] tableName, HColumnDescriptor descriptor)
throws IOException;
/**
@@ -141,18 +141,18 @@ public interface MasterServices extends
* @param columnName The column name
* @throws IOException
*/
- public void deleteColumn(final byte[] tableName, final byte[] columnName)
+ void deleteColumn(final byte[] tableName, final byte[] columnName)
throws IOException;
/**
* @return Return table descriptors implementation.
*/
- public TableDescriptors getTableDescriptors();
+ TableDescriptors getTableDescriptors();
/**
* @return true if master enables ServerShutdownHandler;
*/
- public boolean isServerShutdownHandlerEnabled();
+ boolean isServerShutdownHandlerEnabled();
/**
* Registers a new protocol buffer {@link Service} subclass as a master coprocessor endpoint.
@@ -167,7 +167,7 @@ public interface MasterServices extends
* @return {@code true} if the registration was successful, {@code false}
* otherwise
*/
- public boolean registerService(Service instance);
+ boolean registerService(Service instance);
/**
* Merge two regions. The real implementation is on the regionserver, master
@@ -178,12 +178,13 @@ public interface MasterServices extends
* two adjacent regions
* @throws IOException
*/
- public void dispatchMergingRegions(final HRegionInfo region_a,
- final HRegionInfo region_b, final boolean forcible) throws IOException;
+ void dispatchMergingRegions(
+ final HRegionInfo region_a, final HRegionInfo region_b, final boolean forcible
+ ) throws IOException;
/**
* @return true if master is initialized
*/
- public boolean isInitialized();
+ boolean isInitialized();
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SnapshotSentinel.java Wed Jul 10 17:54:35 2013
@@ -34,30 +34,30 @@ public interface SnapshotSentinel {
* @return <tt>false</tt> if the snapshot is still in progress, <tt>true</tt> if the snapshot has
* finished
*/
- public boolean isFinished();
+ boolean isFinished();
/**
* @return -1 if the snapshot is in progress, otherwise the completion timestamp.
*/
- public long getCompletionTimestamp();
+ long getCompletionTimestamp();
/**
* Actively cancel a running snapshot.
* @param why Reason for cancellation.
*/
- public void cancel(String why);
+ void cancel(String why);
/**
* @return the description of the snapshot being run
*/
- public SnapshotDescription getSnapshot();
+ SnapshotDescription getSnapshot();
/**
* Get the exception that caused the snapshot to fail, if the snapshot has failed.
* @return {@link ForeignException} that caused the snapshot to fail, or <tt>null</tt> if the
* snapshot is still in progress or has succeeded
*/
- public ForeignException getExceptionIfFailed();
+ ForeignException getExceptionIfFailed();
/**
* Rethrow the exception returned by {@link SnapshotSentinel#getExceptionIfFailed}.
@@ -65,5 +65,5 @@ public interface SnapshotSentinel {
*
* @throws ForeignException all exceptions from remote sources are procedure exceptions
*/
- public void rethrowExceptionIfFailed() throws ForeignException;
+ void rethrowExceptionIfFailed() throws ForeignException;
}
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/SplitLogManager.java Wed Jul 10 17:54:35 2013
@@ -1593,11 +1593,11 @@ public class SplitLogManager extends Zoo
* a serialization point at the end of the task processing. Must be
* restartable and idempotent.
*/
- static public interface TaskFinisher {
+ public interface TaskFinisher {
/**
* status that can be returned finish()
*/
- static public enum Status {
+ enum Status {
/**
* task completed successfully
*/
@@ -1616,7 +1616,7 @@ public class SplitLogManager extends Zoo
* @param taskname
* @return DONE if task completed successfully, ERR otherwise
*/
- public Status finish(ServerName workerName, String taskname);
+ Status finish(ServerName workerName, String taskname);
}
enum ResubmitDirective {
Modified: hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java
URL: http://svn.apache.org/viewvc/hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java?rev=1501881&r1=1501880&r2=1501881&view=diff
==============================================================================
--- hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java (original)
+++ hbase/branches/0.95/hbase-server/src/main/java/org/apache/hadoop/hbase/master/TableLockManager.java Wed Jul 10 17:54:35 2013
@@ -82,20 +82,20 @@ public abstract class TableLockManager {
* A distributed lock for a table.
*/
@InterfaceAudience.Private
- public static interface TableLock {
+ public interface TableLock {
/**
* Acquire the lock, with the configured lock timeout.
* @throws LockTimeoutException If unable to acquire a lock within a specified
* time period (if any)
* @throws IOException If unrecoverable error occurs
*/
- public void acquire() throws IOException;
+ void acquire() throws IOException;
/**
* Release the lock already held.
* @throws IOException If there is an unrecoverable error releasing the lock
*/
- public void release() throws IOException;
+ void release() throws IOException;
}
/**