Posted to commits@hbase.apache.org by mi...@apache.org on 2015/12/16 18:00:16 UTC

[30/30] hbase-site git commit: Updated 0.94 docs to 0f35a32ab123ee299f4aaaea02b4ba2d2b43cff2

Updated 0.94 docs to 0f35a32ab123ee299f4aaaea02b4ba2d2b43cff2


Project: http://git-wip-us.apache.org/repos/asf/hbase-site/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase-site/commit/ecb8d8ba
Tree: http://git-wip-us.apache.org/repos/asf/hbase-site/tree/ecb8d8ba
Diff: http://git-wip-us.apache.org/repos/asf/hbase-site/diff/ecb8d8ba

Branch: refs/heads/asf-site
Commit: ecb8d8ba0173303118524a580b35d9f9ca9bb012
Parents: 539ad17
Author: Misty Stanley-Jones <ms...@cloudera.com>
Authored: Wed Dec 16 08:59:16 2015 -0800
Committer: Misty Stanley-Jones <ms...@cloudera.com>
Committed: Wed Dec 16 08:59:37 2015 -0800

----------------------------------------------------------------------
 0.94/acid-semantics.html                        |   206 +-
 0.94/apidocs/deprecated-list.html               |     2 +-
 0.94/apidocs/index-all.html                     |    98 +-
 .../apache/hadoop/hbase/Coprocessor.State.html  |     2 +-
 .../apache/hadoop/hbase/HBaseFileSystem.html    |     4 +-
 .../apache/hadoop/hbase/HColumnDescriptor.html  |     2 +-
 .../apache/hadoop/hbase/HConstants.Modify.html  |     2 +-
 .../hbase/HConstants.OperationStatusCode.html   |     2 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |   142 +-
 .../org/apache/hadoop/hbase/KeyValue.Type.html  |     2 +-
 .../org/apache/hadoop/hbase/ServerName.html     |     4 +-
 .../hadoop/hbase/avro/package-summary.html      |     8 +-
 .../hbase/class-use/HColumnDescriptor.html      |     8 +-
 .../hadoop/hbase/client/ClientSmallScanner.html |     2 +-
 .../apache/hadoop/hbase/client/Durability.html  |     2 +-
 .../apache/hadoop/hbase/client/HBaseAdmin.html  |    12 +-
 .../hadoop/hbase/client/HConnectionManager.html |     2 +-
 .../org/apache/hadoop/hbase/client/HTable.html  |    14 +-
 .../hadoop/hbase/client/HTableInterface.html    |     8 +-
 .../apache/hadoop/hbase/client/HTablePool.html  |     8 +-
 .../apache/hadoop/hbase/client/HTableUtil.html  |    14 +-
 .../hadoop/hbase/client/IsolationLevel.html     |     2 +-
 .../apache/hadoop/hbase/client/MultiPut.html    |    10 +-
 .../apache/hadoop/hbase/client/Operation.html   |    10 +-
 .../org/apache/hadoop/hbase/client/Scan.html    |    12 +-
 .../client/UnmodifyableHTableDescriptor.html    |    20 +-
 .../hadoop/hbase/client/class-use/Result.html   |     2 +-
 .../hbase/client/coprocessor/Batch.Call.html    |     4 +-
 .../hadoop/hbase/client/coprocessor/Batch.html  |     2 +-
 .../BigDecimalColumnInterpreter.html            |     2 +-
 .../hadoop/hbase/client/coprocessor/Exec.html   |     4 +-
 .../hbase/client/coprocessor/ExecResult.html    |     4 +-
 .../hadoop/hbase/constraint/Constraint.html     |     2 +-
 .../hbase/constraint/package-summary.html       |   106 +-
 .../coprocessor/AggregateImplementation.html    |     2 +-
 .../hbase/coprocessor/AggregateProtocol.html    |     2 +-
 .../coprocessor/example/BulkDeleteProtocol.html |     2 +-
 .../example/ZooKeeperScanPolicyObserver.html    |     2 +-
 .../errorhandling/ForeignExceptionSnare.html    |     2 +-
 .../hbase/errorhandling/TimeoutException.html   |     2 +-
 .../hbase/executor/EventHandler.EventType.html  |     2 +-
 .../executor/ExecutorService.ExecutorType.html  |     2 +-
 .../hbase/filter/BitComparator.BitwiseOp.html   |     2 +-
 .../hbase/filter/CompareFilter.CompareOp.html   |     2 +-
 .../hbase/filter/DependentColumnFilter.html     |     6 +-
 .../hadoop/hbase/filter/Filter.ReturnCode.html  |     2 +-
 .../org/apache/hadoop/hbase/filter/Filter.html  |     2 +-
 .../apache/hadoop/hbase/filter/FilterBase.html  |     2 +-
 .../hbase/filter/FilterList.Operator.html       |     2 +-
 .../apache/hadoop/hbase/filter/FilterList.html  |     2 +-
 .../hadoop/hbase/filter/KeyOnlyFilter.html      |     2 +-
 .../apache/hadoop/hbase/filter/SkipFilter.html  |     2 +-
 .../hadoop/hbase/filter/WhileMatchFilter.html   |     2 +-
 .../hbase/filter/class-use/CompareFilter.html   |     2 +-
 .../hadoop/hbase/filter/class-use/Filter.html   |     2 +-
 .../hbase/filter/class-use/FilterBase.html      |     2 +-
 .../hadoop/hbase/filter/package-summary.html    |     2 +-
 .../org/apache/hadoop/hbase/fs/HFileSystem.html |    14 +-
 .../apache/hadoop/hbase/io/Reference.Range.html |     2 +-
 .../io/encoding/CopyKeyDataBlockEncoder.html    |    15 +-
 .../DataBlockEncoder.EncodedSeeker.html         |     2 +-
 .../hbase/io/encoding/DataBlockEncoding.html    |     2 +-
 ...iffKeyDeltaEncoder.DiffCompressionState.html |     6 +-
 .../DiffKeyDeltaEncoder.DiffSeekerState.html    |     6 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.html  |    15 +-
 ...astDiffDeltaEncoder.FastDiffSeekerState.html |     6 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.html |    15 +-
 .../io/encoding/PrefixKeyDeltaEncoder.html      |    15 +-
 .../io/hfile/BlockCacheColumnFamilySummary.html |     2 +-
 .../hbase/io/hfile/BlockType.BlockCategory.html |     2 +-
 .../apache/hadoop/hbase/io/hfile/BlockType.html |     2 +-
 .../hbase/io/hfile/Compression.Algorithm.html   |     2 +-
 .../hadoop/hbase/io/hfile/FixedFileTrailer.html |     4 +-
 .../hbase/io/hfile/HFileBlock.Writer.html       |     2 +-
 .../hadoop/hbase/io/hfile/HFileBlock.html       |     2 +-
 .../hadoop/hbase/io/hfile/package-summary.html  |     2 +-
 .../hadoop/hbase/io/hfile/package-use.html      |     8 +-
 .../hadoop/hbase/ipc/HRegionInterface.html      |     8 +-
 .../apache/hadoop/hbase/mapreduce/Import.html   |     6 +-
 .../hbase/mapreduce/LoadIncrementalHFiles.html  |     4 +-
 .../hbase/mapreduce/MultiTableInputFormat.html  |     4 +-
 .../hadoop/hbase/mapreduce/PutSortReducer.html  |     2 +-
 .../mapreduce/SimpleTotalOrderPartitioner.html  |     2 +-
 .../hbase/mapreduce/TableMapReduceUtil.html     |     2 +-
 .../hbase/mapreduce/TableOutputFormat.html      |     2 +-
 .../mapreduce/TableSnapshotInputFormat.html     |    14 +-
 .../class-use/MultiTableInputFormatBase.html    |     2 +-
 .../mapreduce/hadoopbackport/JarFinder.html     |     2 +-
 .../hadoopbackport/TotalOrderPartitioner.html   |     2 +-
 .../hadoop/hbase/mapreduce/package-summary.html |     2 +-
 .../VerifyReplication.Verifier.Counters.html    |     2 +-
 .../hadoop/hbase/master/AssignCallable.html     |     2 +-
 .../AssignmentManager.RegionState.State.html    |     2 +-
 .../hbase/master/DefaultLoadBalancer.html       |    10 +-
 .../org/apache/hadoop/hbase/master/HMaster.html |     4 +-
 .../SplitLogManager.TaskFinisher.Status.html    |     2 +-
 .../cleaner/BaseHFileCleanerDelegate.html       |     2 +-
 .../PersistentMetricsTimeVaryingRate.html       |     2 +-
 .../metrics/histogram/MetricsHistogram.html     |     4 +-
 .../hbase/monitoring/MonitoredRPCHandler.html   |     4 +-
 .../monitoring/MonitoredRPCHandlerImpl.html     |    44 +-
 .../hbase/monitoring/MonitoredTask.State.html   |     2 +-
 .../hadoop/hbase/monitoring/MonitoredTask.html  |     8 +-
 .../hadoop/hbase/monitoring/TaskMonitor.html    |     4 +-
 .../class-use/MonitoredRPCHandler.html          |     2 +-
 .../monitoring/class-use/MonitoredTask.html     |     8 +-
 .../hbase/monitoring/package-summary.html       |     4 +-
 .../hadoop/hbase/monitoring/package-use.html    |     6 +-
 .../apache/hadoop/hbase/package-summary.html    |     2 +-
 .../org/apache/hadoop/hbase/package-use.html    |    38 +-
 .../hbase/regionserver/ColumnTracker.html       |     2 +-
 .../DeleteTracker.DeleteCompare.html            |     2 +-
 .../DeleteTracker.DeleteResult.html             |     2 +-
 .../regionserver/ExplicitColumnTracker.html     |     2 +-
 .../hbase/regionserver/HRegionServer.html       |     8 +-
 .../MiniBatchOperationInProgress.html           |     2 +-
 .../hbase/regionserver/RegionOpeningState.html  |     2 +-
 .../regionserver/RegionServerAccounting.html    |     2 +-
 .../hbase/regionserver/RegionSplitPolicy.html   |     2 +-
 .../hbase/regionserver/ReplicationService.html  |     2 +-
 .../regionserver/ReplicationSourceService.html  |     2 +-
 .../ScanQueryMatcher.MatchCode.html             |     2 +-
 .../hadoop/hbase/regionserver/ScanType.html     |     2 +-
 .../SplitLogWorker.TaskExecutor.Status.html     |     2 +-
 .../hbase/regionserver/StoreFile.BloomType.html |     2 +-
 .../compactions/CompactSelection.html           |    10 +-
 .../CompactionRequest.CompactionState.html      |     2 +-
 .../metrics/RegionServerDynamicMetrics.html     |     2 +-
 .../metrics/SchemaMetrics.BlockMetricType.html  |     2 +-
 .../metrics/SchemaMetrics.StoreMetricType.html  |     2 +-
 .../hadoop/hbase/regionserver/wal/HLog.html     |     2 +-
 .../hadoop/hbase/regionserver/wal/HLogKey.html  |     2 +-
 .../regionserver/wal/HLogPrettyPrinter.html     |     8 +-
 .../ReplicationZookeeper.PeerState.html         |     2 +-
 .../hbase/replication/package-summary.html      |     2 +-
 .../replication/regionserver/Replication.html   |     2 +-
 .../regionserver/ReplicationSource.html         |     2 +-
 .../apache/hadoop/hbase/rest/client/Client.html |     2 +-
 .../hadoop/hbase/rest/client/RemoteHTable.html  |     4 +-
 .../hadoop/hbase/rest/model/CellSetModel.html   |     8 +-
 .../hbase/rest/model/ColumnSchemaModel.html     |     2 +-
 .../hadoop/hbase/rest/model/RowModel.html       |     4 +-
 .../hadoop/hbase/rest/model/ScannerModel.html   |     2 +-
 .../rest/model/StorageClusterStatusModel.html   |     4 +-
 .../rest/model/StorageClusterVersionModel.html  |     2 +-
 .../hadoop/hbase/rest/model/TableInfoModel.html |     6 +-
 .../hadoop/hbase/rest/model/TableModel.html     |     2 +-
 .../hbase/rest/model/TableRegionModel.html      |     2 +-
 .../hbase/rest/model/TableSchemaModel.html      |     4 +-
 .../hadoop/hbase/rest/package-summary.html      |    62 +-
 .../rest/provider/JAXBContextResolver.html      |     2 +-
 .../producer/PlainTextMessageBodyProducer.html  |     2 +-
 .../hbase/snapshot/ExportSnapshot.Counter.html  |     2 +-
 .../hadoop/hbase/snapshot/SnapshotInfo.html     |     2 +-
 .../hadoop/hbase/snapshot/package-summary.html  |     2 +-
 .../hadoop/hbase/snapshot/package-use.html      |     2 +-
 .../thrift/generated/AlreadyExists._Fields.html |     2 +-
 .../thrift/generated/BatchMutation._Fields.html |     2 +-
 .../generated/ColumnDescriptor._Fields.html     |     2 +-
 .../Hbase.atomicIncrement_args._Fields.html     |     2 +-
 .../Hbase.atomicIncrement_result._Fields.html   |     2 +-
 .../generated/Hbase.compact_args._Fields.html   |     2 +-
 .../generated/Hbase.compact_result._Fields.html |     2 +-
 .../Hbase.createTable_args._Fields.html         |     2 +-
 .../Hbase.createTable_result._Fields.html       |     2 +-
 .../Hbase.deleteAllRowTs_args._Fields.html      |     2 +-
 .../Hbase.deleteAllRowTs_result._Fields.html    |     2 +-
 .../Hbase.deleteAllRow_args._Fields.html        |     2 +-
 .../Hbase.deleteAllRow_result._Fields.html      |     2 +-
 .../Hbase.deleteAllTs_args._Fields.html         |     2 +-
 .../Hbase.deleteAllTs_result._Fields.html       |     2 +-
 .../generated/Hbase.deleteAll_args._Fields.html |     2 +-
 .../Hbase.deleteAll_result._Fields.html         |     2 +-
 .../Hbase.deleteTable_args._Fields.html         |     2 +-
 .../Hbase.deleteTable_result._Fields.html       |     2 +-
 .../Hbase.disableTable_args._Fields.html        |     2 +-
 .../Hbase.disableTable_result._Fields.html      |     2 +-
 .../Hbase.enableTable_args._Fields.html         |     2 +-
 .../Hbase.enableTable_result._Fields.html       |     2 +-
 ...Hbase.getColumnDescriptors_args._Fields.html |     2 +-
 ...ase.getColumnDescriptors_result._Fields.html |     2 +-
 .../Hbase.getRegionInfo_args._Fields.html       |     2 +-
 .../Hbase.getRegionInfo_result._Fields.html     |     2 +-
 .../Hbase.getRowOrBefore_args._Fields.html      |     2 +-
 .../Hbase.getRowOrBefore_result._Fields.html    |     2 +-
 .../generated/Hbase.getRowTs_args._Fields.html  |     2 +-
 .../Hbase.getRowTs_result._Fields.html          |     2 +-
 .../Hbase.getRowWithColumnsTs_args._Fields.html |     2 +-
 ...base.getRowWithColumnsTs_result._Fields.html |     2 +-
 .../Hbase.getRowWithColumns_args._Fields.html   |     2 +-
 .../Hbase.getRowWithColumns_result._Fields.html |     2 +-
 .../generated/Hbase.getRow_args._Fields.html    |     2 +-
 .../generated/Hbase.getRow_result._Fields.html  |     2 +-
 .../generated/Hbase.getRowsTs_args._Fields.html |     2 +-
 .../Hbase.getRowsTs_result._Fields.html         |     2 +-
 ...Hbase.getRowsWithColumnsTs_args._Fields.html |     2 +-
 ...ase.getRowsWithColumnsTs_result._Fields.html |     2 +-
 .../Hbase.getRowsWithColumns_args._Fields.html  |     2 +-
 ...Hbase.getRowsWithColumns_result._Fields.html |     2 +-
 .../generated/Hbase.getRows_args._Fields.html   |     2 +-
 .../generated/Hbase.getRows_result._Fields.html |     2 +-
 .../Hbase.getTableNames_args._Fields.html       |     2 +-
 .../Hbase.getTableNames_result._Fields.html     |     2 +-
 .../Hbase.getTableRegions_args._Fields.html     |     2 +-
 .../Hbase.getTableRegions_result._Fields.html   |     2 +-
 .../generated/Hbase.getVerTs_args._Fields.html  |     2 +-
 .../Hbase.getVerTs_result._Fields.html          |     2 +-
 .../generated/Hbase.getVer_args._Fields.html    |     2 +-
 .../generated/Hbase.getVer_result._Fields.html  |     2 +-
 .../generated/Hbase.get_args._Fields.html       |     2 +-
 .../generated/Hbase.get_result._Fields.html     |     2 +-
 .../Hbase.incrementRows_args._Fields.html       |     2 +-
 .../Hbase.incrementRows_result._Fields.html     |     2 +-
 .../generated/Hbase.increment_args._Fields.html |     2 +-
 .../Hbase.increment_result._Fields.html         |     2 +-
 .../Hbase.isTableEnabled_args._Fields.html      |     2 +-
 .../Hbase.isTableEnabled_result._Fields.html    |     2 +-
 .../Hbase.majorCompact_args._Fields.html        |     2 +-
 .../Hbase.majorCompact_result._Fields.html      |     2 +-
 .../Hbase.mutateRowTs_args._Fields.html         |     2 +-
 .../Hbase.mutateRowTs_result._Fields.html       |     2 +-
 .../generated/Hbase.mutateRow_args._Fields.html |     2 +-
 .../Hbase.mutateRow_result._Fields.html         |     2 +-
 .../Hbase.mutateRowsTs_args._Fields.html        |     2 +-
 .../Hbase.mutateRowsTs_result._Fields.html      |     2 +-
 .../Hbase.mutateRows_args._Fields.html          |     2 +-
 .../Hbase.mutateRows_result._Fields.html        |     2 +-
 .../Hbase.scannerClose_args._Fields.html        |     2 +-
 .../Hbase.scannerClose_result._Fields.html      |     2 +-
 .../Hbase.scannerGetList_args._Fields.html      |     2 +-
 .../Hbase.scannerGetList_result._Fields.html    |     2 +-
 .../Hbase.scannerGet_args._Fields.html          |     2 +-
 .../Hbase.scannerGet_result._Fields.html        |     2 +-
 .../Hbase.scannerOpenTs_args._Fields.html       |     2 +-
 .../Hbase.scannerOpenTs_result._Fields.html     |     2 +-
 ...base.scannerOpenWithPrefix_args._Fields.html |     2 +-
 ...se.scannerOpenWithPrefix_result._Fields.html |     2 +-
 .../Hbase.scannerOpenWithScan_args._Fields.html |     2 +-
 ...base.scannerOpenWithScan_result._Fields.html |     2 +-
 ...base.scannerOpenWithStopTs_args._Fields.html |     2 +-
 ...se.scannerOpenWithStopTs_result._Fields.html |     2 +-
 .../Hbase.scannerOpenWithStop_args._Fields.html |     2 +-
 ...base.scannerOpenWithStop_result._Fields.html |     2 +-
 .../Hbase.scannerOpen_args._Fields.html         |     2 +-
 .../Hbase.scannerOpen_result._Fields.html       |     2 +-
 .../hbase/thrift/generated/IOError._Fields.html |     2 +-
 .../generated/IllegalArgument._Fields.html      |     2 +-
 .../thrift/generated/Mutation._Fields.html      |     2 +-
 .../hbase/thrift/generated/TCell._Fields.html   |     2 +-
 .../hbase/thrift/generated/TColumn._Fields.html |     2 +-
 .../thrift/generated/TIncrement._Fields.html    |     2 +-
 .../thrift/generated/TRegionInfo._Fields.html   |     2 +-
 .../thrift/generated/TRowResult._Fields.html    |     2 +-
 .../hbase/thrift/generated/TScan._Fields.html   |     2 +-
 .../hadoop/hbase/thrift/package-summary.html    |     4 +-
 .../thrift2/ThriftHBaseServiceHandler.html      |    10 +-
 .../hadoop/hbase/thrift2/ThriftUtilities.html   |     2 +-
 .../thrift2/generated/TColumn._Fields.html      |     2 +-
 .../generated/TColumnIncrement._Fields.html     |     2 +-
 .../thrift2/generated/TColumnValue._Fields.html |     2 +-
 .../thrift2/generated/TDelete._Fields.html      |     2 +-
 .../hadoop/hbase/thrift2/generated/TDelete.html |     8 +-
 .../hbase/thrift2/generated/TDeleteType.html    |     2 +-
 .../hbase/thrift2/generated/TDurability.html    |     2 +-
 .../hbase/thrift2/generated/TGet._Fields.html   |     2 +-
 .../hadoop/hbase/thrift2/generated/TGet.html    |     6 +-
 .../thrift2/generated/THBaseService.Client.html |    10 +-
 .../thrift2/generated/THBaseService.Iface.html  |    10 +-
 ...BaseService.checkAndDelete_args._Fields.html |     2 +-
 ...seService.checkAndDelete_result._Fields.html |     2 +-
 .../THBaseService.checkAndPut_args._Fields.html |     2 +-
 ...HBaseService.checkAndPut_result._Fields.html |     2 +-
 ...THBaseService.closeScanner_args._Fields.html |     2 +-
 ...BaseService.closeScanner_result._Fields.html |     2 +-
 ...BaseService.deleteMultiple_args._Fields.html |     2 +-
 ...seService.deleteMultiple_result._Fields.html |     2 +-
 ...THBaseService.deleteSingle_args._Fields.html |     2 +-
 ...BaseService.deleteSingle_result._Fields.html |     2 +-
 .../THBaseService.exists_args._Fields.html      |     2 +-
 .../THBaseService.exists_result._Fields.html    |     2 +-
 .../THBaseService.getMultiple_args._Fields.html |     2 +-
 ...HBaseService.getMultiple_result._Fields.html |     2 +-
 ...eService.getScannerResults_args._Fields.html |     2 +-
 ...ervice.getScannerResults_result._Fields.html |     2 +-
 ...BaseService.getScannerRows_args._Fields.html |     2 +-
 ...seService.getScannerRows_result._Fields.html |     2 +-
 .../THBaseService.get_args._Fields.html         |     2 +-
 .../THBaseService.get_result._Fields.html       |     2 +-
 .../THBaseService.increment_args._Fields.html   |     2 +-
 .../THBaseService.increment_result._Fields.html |     2 +-
 .../THBaseService.mutateRow_args._Fields.html   |     2 +-
 .../THBaseService.mutateRow_result._Fields.html |     2 +-
 .../THBaseService.openScanner_args._Fields.html |     2 +-
 ...HBaseService.openScanner_result._Fields.html |     2 +-
 .../THBaseService.putMultiple_args._Fields.html |     2 +-
 ...HBaseService.putMultiple_result._Fields.html |     2 +-
 .../THBaseService.put_args._Fields.html         |     2 +-
 .../THBaseService.put_result._Fields.html       |     2 +-
 .../thrift2/generated/TIOError._Fields.html     |     2 +-
 .../generated/TIllegalArgument._Fields.html     |     2 +-
 .../thrift2/generated/TIncrement._Fields.html   |     2 +-
 .../hbase/thrift2/generated/TIncrement.html     |     2 +-
 .../thrift2/generated/TMutation._Fields.html    |     2 +-
 .../hbase/thrift2/generated/TPut._Fields.html   |     2 +-
 .../hadoop/hbase/thrift2/generated/TPut.html    |     4 +-
 .../thrift2/generated/TResult._Fields.html      |     2 +-
 .../generated/TRowMutations._Fields.html        |     2 +-
 .../hbase/thrift2/generated/TScan._Fields.html  |     2 +-
 .../thrift2/generated/TTimeRange._Fields.html   |     2 +-
 .../hadoop/hbase/thrift2/package-summary.html   |     8 +-
 .../hbase/util/Base64.Base64InputStream.html    |     4 +-
 .../hbase/util/Base64.Base64OutputStream.html   |     4 +-
 .../org/apache/hadoop/hbase/util/Base64.html    |    20 +-
 .../hadoop/hbase/util/ByteBloomFilter.html      |     2 +-
 .../hadoop/hbase/util/ChecksumFactory.html      |     2 +-
 .../apache/hadoop/hbase/util/ChecksumType.html  |     4 +-
 .../hadoop/hbase/util/FSTableDescriptors.html   |     2 +-
 .../HBaseFsck.ErrorReporter.ERROR_CODE.html     |     2 +-
 .../org/apache/hadoop/hbase/util/HasThread.html |     2 +-
 .../org/apache/hadoop/hbase/util/Hash.html      |     4 +-
 .../apache/hadoop/hbase/util/JenkinsHash.html   |     4 +-
 .../org/apache/hadoop/hbase/util/Keying.html    |     2 +-
 .../apache/hadoop/hbase/util/MurmurHash.html    |     4 +-
 .../hadoop/hbase/util/PoolMap.PoolType.html     |     2 +-
 .../hbase/util/RegionSplitCalculator.html       |     4 +-
 .../hadoop/hbase/util/ShutdownHookManager.html  |     2 +-
 .../hbase/util/hbck/OfflineMetaRepair.html      |     4 +-
 .../hbase/zookeeper/RecoverableZooKeeper.html   |    42 +-
 .../apache/hadoop/hbase/zookeeper/ZKAssign.html |     2 +-
 .../hbase/zookeeper/ZKSplitLog.TaskState.html   |     2 +-
 .../hbase/zookeeper/ZKTable.TableState.html     |     2 +-
 .../io/encoding/CopyKeyDataBlockEncoder.html    |   137 +-
 ...iffKeyDeltaEncoder.DiffCompressionState.html |  1019 +-
 .../DiffKeyDeltaEncoder.DiffSeekerState.html    |  1019 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.html  |  1019 +-
 ...astDiffDeltaEncoder.FastDiffSeekerState.html |  1019 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.html |  1019 +-
 .../io/encoding/PrefixKeyDeltaEncoder.html      |   331 +-
 0.94/book.html                                  |   164 +-
 0.94/book/book.html                             |     2 +-
 0.94/bulk-loads.html                            |     6 +-
 0.94/css/master_site.css                        |   119 +
 0.94/cygwin.html                                |     6 +-
 .../apache/hadoop/hbase/HTableDescriptor.html   |     4 +-
 .../hadoop/hbase/avro/package-summary.html      |     2 +-
 .../hadoop/hbase/filter/package-tree.html       |     4 +-
 .../io/encoding/CopyKeyDataBlockEncoder.html    |    21 +-
 ...iffKeyDeltaEncoder.DiffCompressionState.html |     6 +-
 .../DiffKeyDeltaEncoder.DiffSeekerState.html    |     6 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.html  |    21 +-
 ...astDiffDeltaEncoder.FastDiffSeekerState.html |     6 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.html |    21 +-
 .../io/encoding/PrefixKeyDeltaEncoder.html      |    21 +-
 .../hadoop/hbase/io/hfile/package-tree.html     |     4 +-
 .../hadoop/hbase/master/package-tree.html       |     2 +-
 .../org/apache/hadoop/hbase/package-tree.html   |     2 +-
 .../hadoop/hbase/regionserver/package-tree.html |     8 +-
 .../hadoop/hbase/security/package-tree.html     |     2 +-
 .../apache/hadoop/hbase/util/package-tree.html  |     2 +-
 .../io/encoding/CopyKeyDataBlockEncoder.html    |   137 +-
 ...iffKeyDeltaEncoder.DiffCompressionState.html |  1019 +-
 .../DiffKeyDeltaEncoder.DiffSeekerState.html    |  1019 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.html  |  1019 +-
 ...astDiffDeltaEncoder.FastDiffSeekerState.html |  1019 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.html |  1019 +-
 .../io/encoding/PrefixKeyDeltaEncoder.html      |   331 +-
 0.94/failsafe-report.html                       |     6 +-
 0.94/index.html                                 |     6 +-
 0.94/integration.html                           |     6 +-
 0.94/issue-tracking.html                        |     6 +-
 0.94/license.html                               |     6 +-
 0.94/mail-lists.html                            |     6 +-
 0.94/metrics.html                               |     6 +-
 0.94/old_news.html                              |     6 +-
 0.94/project-info.html                          |     6 +-
 0.94/project-reports.html                       |     6 +-
 0.94/pseudo-distributed.html                    |     6 +-
 0.94/rat-report.html                            | 23348 +++++++++++------
 0.94/replication.html                           |     6 +-
 0.94/resources.html                             |     6 +-
 0.94/source-repository.html                     |     6 +-
 0.94/sponsors.html                              |     6 +-
 0.94/team-list.html                             |     6 +-
 .../org/apache/hadoop/hbase/package-tree.html   |     2 +-
 .../io/encoding/CopyKeyDataBlockEncoder.html    |   137 +-
 .../hbase/io/encoding/DiffKeyDeltaEncoder.html  |  1019 +-
 .../hbase/io/encoding/FastDiffDeltaEncoder.html |  1019 +-
 .../io/encoding/PrefixKeyDeltaEncoder.html      |   331 +-
 388 files changed, 23438 insertions(+), 15829 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/acid-semantics.html
----------------------------------------------------------------------
diff --git a/0.94/acid-semantics.html b/0.94/acid-semantics.html
index ff22672..0244403 100644
--- a/0.94/acid-semantics.html
+++ b/0.94/acid-semantics.html
@@ -1,11 +1,11 @@
 <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
 
-<!-- Generated by Apache Maven Doxia at 2015-11-03 -->
+<!-- Generated by Apache Maven Doxia at 2015-12-16 -->
 <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
   <head>
     <meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
-    <title>HBase -
-
+    <title>HBase - 
+     
       Apache HBase (TM) ACID Properties
     </title>
     <style type="text/css" media="all">
@@ -15,7 +15,7 @@
     </style>
     <link rel="stylesheet" href="./css/print.css" type="text/css" media="print" />
     <link rel="shortcut icon" href="/images/favicon.ico" />
-    <meta name="Date-Revision-yyyymmdd" content="20151103" />
+    <meta name="Date-Revision-yyyymmdd" content="20151216" />
     <meta http-equiv="Content-Language" content="en" />
         <!--Google Analytics-->
 <script type="text/javascript">
@@ -57,8 +57,8 @@
     </div>
     <div id="leftColumn">
       <div id="navcolumn">
-
-
+             
+                
                                 <h5>Apache HBase Project</h5>
                   <ul>
                   <li class="none">
@@ -158,8 +158,8 @@
                              <a href="http://maven.apache.org/" title="Built by Maven" class="poweredBy">
         <img class="poweredBy" alt="Built by Maven" src="./images/logos/maven-feather.png" />
       </a>
-
-
+                   
+                
             </div>
     </div>
     <div id="bodyColumn">
@@ -175,53 +175,53 @@
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License. -->
-
+  
     <div class="section">
 <h2>About this Document<a name="About_this_Document"></a></h2>
-
+      
 <p>Apache HBase (TM) is not an ACID compliant database. However, it does guarantee certain specific
       properties.</p>
-
+      
 <p>This specification enumerates the ACID properties of HBase.</p>
     </div>
-
+    
 <div class="section">
 <h2>Definitions<a name="Definitions"></a></h2>
-
+      
 <p>For the sake of common vocabulary, we define the following terms:</p>
-
+      
 <dl>
-
+        
 <dt>Atomicity</dt>
-
+        
 <dd>an operation is atomic if it either completes entirely or not at all</dd>
 
-
+        
 <dt>Consistency</dt>
-
+        
 <dd>
           all actions cause the table to transition from one valid state directly to another
           (eg a row will not disappear during an update, etc)
         </dd>
 
-
+        
 <dt>Isolation</dt>
-
+        
 <dd>
           an operation is isolated if it appears to complete independently of any other concurrent transaction
         </dd>
 
-
+        
 <dt>Durability</dt>
-
+        
 <dd>any update that reports &quot;successful&quot; to the client will not be lost</dd>
 
-
+        
 <dt>Visibility</dt>
-
+        
 <dd>an update is considered visible if any subsequent read will see the update as having been committed</dd>
       </dl>
-
+      
 <p>
         The terms <i>must</i> and <i>may</i> are used as specified by RFC 2119.
         In short, the word &quot;must&quot; implies that, if some case exists where the statement
@@ -229,151 +229,151 @@
         is provided in a current release, users should not rely on it.
       </p>
     </div>
-
+    
 <div class="section">
 <h2>APIs to consider<a name="APIs_to_consider"></a></h2>
-
+      
 <ul>
-
+        
 <li>Read APIs
-
+        
 <ul>
-
+          
 <li>get</li>
-
+          
 <li>scan</li>
         </ul>
         </li>
-
+        
 <li>Write APIs</li>
-
+        
 <ul>
-
+          
 <li>put</li>
-
+          
 <li>batch put</li>
-
+          
 <li>delete</li>
         </ul>
-
+        
 <li>Combination (read-modify-write) APIs</li>
-
+        
 <ul>
-
+          
 <li>incrementColumnValue</li>
-
+          
 <li>checkAndPut</li>
         </ul>
       </ul>
     </div>
 
-
+    
 <div class="section">
 <h2>Guarantees Provided<a name="Guarantees_Provided"></a></h2>
 
-
+      
 <div class="section">
 <h2>Atomicity<a name="Atomicity"></a></h2>
 
-
+        
 <ol style="list-style-type: decimal">
-
+          
 <li>All mutations are atomic within a row. Any put will either wholely succeed or wholely fail.[3]</li>
-
+          
 <ol style="list-style-type: decimal">
-
+            
 <li>An operation that returns a &quot;success&quot; code has completely succeeded.</li>
-
+            
 <li>An operation that returns a &quot;failure&quot; code has completely failed.</li>
-
+            
 <li>An operation that times out may have succeeded and may have failed. However,
             it will not have partially succeeded or failed.</li>
           </ol>
-
+          
 <li> This is true even if the mutation crosses multiple column families within a row.</li>
-
+          
 <li> APIs that mutate several rows will _not_ be atomic across the multiple rows.
           For example, a multiput that operates on rows 'a','b', and 'c' may return having
           mutated some but not all of the rows. In such cases, these APIs will return a list
           of success codes, each of which may be succeeded, failed, or timed out as described above.</li>
-
+          
 <li> The checkAndPut API happens atomically like the typical compareAndSet (CAS) operation
           found in many hardware architectures.</li>
-
+          
 <li> The order of mutations is seen to happen in a well-defined order for each row, with no
           interleaving. For example, if one writer issues the mutation &quot;a=1,b=1,c=1&quot; and
           another writer issues the mutation &quot;a=2,b=2,c=2&quot;, the row must either
           be &quot;a=1,b=1,c=1&quot; or &quot;a=2,b=2,c=2&quot; and must <i>not</i> be something
           like &quot;a=1,b=2,c=1&quot;.</li>
-
+          
 <ol style="list-style-type: decimal">
-
+            
 <li>Please note that this is not true _across rows_ for multirow batch mutations.</li>
           </ol>
         </ol>
       </div>
-
+      
 <div class="section">
 <h2>Consistency and Isolation<a name="Consistency_and_Isolation"></a></h2>
-
+        
 <ol style="list-style-type: decimal">
-
+          
 <li>All rows returned via any access API will consist of a complete row that existed at
           some point in the table's history.</li>
-
+          
 <li>This is true across column families - i.e a get of a full row that occurs concurrent
           with some mutations 1,2,3,4,5 will return a complete row that existed at some point in time
           between mutation i and i+1 for some i between 1 and 5.</li>
-
+          
 <li>The state of a row will only move forward through the history of edits to it.</li>
         </ol>
 
-
+        
 <div class="section">
 <h2>Consistency of Scans<a name="Consistency_of_Scans"></a></h2>
-
+        
 <p>
           A scan is <b>not</b> a consistent view of a table. Scans do
           <b>not</b> exhibit <i>snapshot isolation</i>.
         </p>
-
+        
 <p>
           Rather, scans have the following properties:
         </p>
 
-
+        
 <ol style="list-style-type: decimal">
-
+          
 <li>
             Any row returned by the scan will be a consistent view (i.e. that version
             of the complete row existed at some point in time) [1]
           </li>
-
+          
 <li>
             A scan will always reflect a view of the data <i>at least as new as</i>
             the beginning of the scan. This satisfies the visibility guarantees
           enumerated below.</li>
-
+          
 <ol style="list-style-type: decimal">
-
+            
 <li>For example, if client A writes data X and then communicates via a side
             channel to client B, any scans started by client B will contain data at least
             as new as X.</li>
-
+            
 <li>A scan _must_ reflect all mutations committed prior to the construction
             of the scanner, and _may_ reflect some mutations committed subsequent to the
             construction of the scanner.</li>
-
+            
 <li>Scans must include <i>all</i> data written prior to the scan (except in
             the case where data is subsequently mutated, in which case it _may_ reflect
             the mutation)</li>
           </ol>
         </ol>
-
+        
 <p>
           Those familiar with relational databases will recognize this isolation level as &quot;read committed&quot;.
         </p>
-
+        
 <p>
           Please note that the guarantees listed above regarding scanner consistency
           are referring to &quot;transaction commit time&quot;, not the &quot;timestamp&quot;
@@ -383,99 +383,99 @@
         </p>
         </div>
       </div>
-
+      
 <div class="section">
 <h2>Visibility<a name="Visibility"></a></h2>
-
+        
 <ol style="list-style-type: decimal">
-
+          
 <li> When a client receives a &quot;success&quot; response for any mutation, that
           mutation is immediately visible to both that client and any client with whom it
           later communicates through side channels. [3]</li>
-
+          
 <li> A row must never exhibit so-called &quot;time-travel&quot; properties. That
           is to say, if a series of mutations moves a row sequentially through a series of
           states, any sequence of concurrent reads will return a subsequence of those states.</li>
-
+          
 <ol style="list-style-type: decimal">
-
+            
 <li>For example, if a row's cells are mutated using the &quot;incrementColumnValue&quot;
             API, a client must never see the value of any cell decrease.</li>
-
+            
 <li>This is true regardless of which read API is used to read back the mutation.</li>
           </ol>
-
+          
 <li> Any version of a cell that has been returned to a read operation is guaranteed to
           be durably stored.</li>
         </ol>
 
       </div>
-
+      
 <div class="section">
 <h2>Durability<a name="Durability"></a></h2>
-
+        
 <ol style="list-style-type: decimal">
-
+          
 <li> All visible data is also durable data. That is to say, a read will never return
           data that has not been made durable on disk[2]</li>
-
+          
 <li> Any operation that returns a &quot;success&quot; code (eg does not throw an exception)
           will be made durable.[3]</li>
-
+          
 <li> Any operation that returns a &quot;failure&quot; code will not be made durable
           (subject to the Atomicity guarantees above)</li>
-
+          
 <li> All reasonable failure scenarios will not affect any of the guarantees of this document.</li>
 
         </ol>
       </div>
-
+      
 <div class="section">
 <h2>Tunability<a name="Tunability"></a></h2>
-
+        
 <p>All of the above guarantees must be possible within Apache HBase. For users who would like to trade
         off some guarantees for performance, HBase may offer several tuning options. For example:</p>
-
+        
 <ul>
-
+          
 <li>Visibility may be tuned on a per-read basis to allow stale reads or time travel.</li>
-
+          
 <li>Durability may be tuned to only flush data to disk on a periodic basis</li>
         </ul>
       </div>
     </div>
-
+    
 <div class="section">
 <h2>More Information<a name="More_Information"></a></h2>
-
+      
 <p>
-      For more information, see the <a href="book.html#client">client architecture</a> or <a href="book.html#datamodel">data model</a> sections in the Apache HBase Reference Guide.
+      For more information, see the <a href="book.html#client">client architecture</a> or <a href="book.html#datamodel">data model</a> sections in the Apache HBase Reference Guide. 
       </p>
     </div>
-
-
+    
+    
 <div class="section">
 <h2>Footnotes<a name="Footnotes"></a></h2>
-
+      
 <p>[1] A consistent view is not guaranteed intra-row scanning -- i.e. fetching a portion of
           a row in one RPC then going back to fetch another portion of the row in a subsequent RPC.
           Intra-row scanning happens when you set a limit on how many values to return per Scan#next
           (See <a class="externalLink" href="http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/client/Scan.html#setBatch(int)">Scan#setBatch(int)</a>).
       </p>
 
-
+      
 <p>[2] In the context of Apache HBase, &quot;durably on disk&quot; implies an hflush() call on the transaction
       log. This does not actually imply an fsync() to magnetic media, but rather just that the data has been
       written to the OS cache on all replicas of the log. In the case of a full datacenter power loss, it is
       possible that the edits are not truly durable.</p>
-
+      
 <p>[3] Puts will either wholely succeed or wholely fail, provided that they are actually sent
       to the RegionServer.  If the writebuffer is used, Puts will not be sent until the writebuffer is filled
       or it is explicitly flushed.</p>
-
+      
     </div>
 
-
+  
 
       </div>
     </div>
@@ -483,9 +483,9 @@
       <hr/>
     </div>
     <div id="footer">
-       <div class="xright">
-
-                 <span id="publishDate">Last Published: 2015-11-03</span>
+       <div class="xright">      
+                
+                 <span id="publishDate">Last Published: 2015-12-16</span>
               &nbsp;| <span id="projectVersion">Version: 0.94.27</span>
             &nbsp;
         </div>

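For reference, the guarantees enumerated in the acid-semantics.html page above (per-row checkAndPut behaving like a compare-and-set, immediate visibility of a successful mutation to the issuing client, and atomic read-modify-write via incrementColumnValue) can be sketched against the 0.94 client API roughly as follows. This is a minimal illustration only, not part of the commit; the table name "t1" and the column names are assumptions made for the example.

// Sketch only: illustrates the per-row CAS and read-your-writes guarantees
// described in acid-semantics.html, using the HBase 0.94 client API.
// Table and column names ("t1", "cf", "col", "counter") are illustrative assumptions.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class AcidSemanticsSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "t1");
    try {
      byte[] row = Bytes.toBytes("row1");
      byte[] cf  = Bytes.toBytes("cf");
      byte[] col = Bytes.toBytes("col");

      // checkAndPut is atomic within the row, like a compareAndSet:
      // the Put is applied only if the current value of cf:col equals "old".
      Put put = new Put(row);
      put.add(cf, col, Bytes.toBytes("new"));
      boolean applied = table.checkAndPut(row, cf, col, Bytes.toBytes("old"), put);

      // A mutation that returned "success" is immediately visible to this
      // client: a subsequent Get must reflect it (read committed, no time travel).
      if (applied) {
        Result r = table.get(new Get(row));
        System.out.println(Bytes.toString(r.getValue(cf, col))); // prints "new"
      }

      // incrementColumnValue is an atomic read-modify-write on a single cell;
      // per the visibility guarantees, readers must never see the counter decrease.
      table.incrementColumnValue(row, cf, Bytes.toBytes("counter"), 1L);
    } finally {
      table.close();
    }
  }
}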
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/deprecated-list.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/deprecated-list.html b/0.94/apidocs/deprecated-list.html
index 623c0aa..6ea87c7 100644
--- a/0.94/apidocs/deprecated-list.html
+++ b/0.94/apidocs/deprecated-list.html
@@ -441,7 +441,7 @@
 <tr class="altColor">
 <td class="colOne"><a href="org/apache/hadoop/hbase/client/HTableInterface.html#getRowOrBefore(byte[],%20byte[])">org.apache.hadoop.hbase.client.HTableInterface.getRowOrBefore(byte[], byte[])</a>
 <div class="block"><i>As of version 0.92 this method is deprecated without
- replacement.
+ replacement.   
  getRowOrBefore is used internally to find entries in .META. and makes
  various assumptions about the table (which are true for .META. but not
  in general) to be efficient.</i></div>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/index-all.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/index-all.html b/0.94/apidocs/index-all.html
index b4bc3f2..77ae598 100644
--- a/0.94/apidocs/index-all.html
+++ b/0.94/apidocs/index-all.html
@@ -1916,7 +1916,7 @@
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.html" title="class in org.apache.hadoop.hbase.io.hfile"><span class="strong">BlockCacheColumnFamilySummary</span></a> - Class in <a href="./org/apache/hadoop/hbase/io/hfile/package-summary.html">org.apache.hadoop.hbase.io.hfile</a></dt>
 <dd>
-<div class="block">BlockCacheColumnFamilySummary represents a summary of the blockCache usage
+<div class="block">BlockCacheColumnFamilySummary represents a summary of the blockCache usage 
  at Table/ColumnFamily granularity.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.html#BlockCacheColumnFamilySummary()">BlockCacheColumnFamilySummary()</a></span> - Constructor for class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/BlockCacheColumnFamilySummary.html" title="class in org.apache.hadoop.hbase.io.hfile">BlockCacheColumnFamilySummary</a></dt>
@@ -4085,7 +4085,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html#clone()">clone()</a></span> - Method in interface org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a></dt>
 <dd>
-<div class="block">Public exposure of Object.clone() in order to allow clients to easily
+<div class="block">Public exposure of Object.clone() in order to allow clients to easily 
  capture current state.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.Cell.Builder.html#clone()">clone()</a></span> - Method in class org.apache.hadoop.hbase.rest.protobuf.generated.<a href="./org/apache/hadoop/hbase/rest/protobuf/generated/CellMessage.Cell.Builder.html" title="class in org.apache.hadoop.hbase.rest.protobuf.generated">CellMessage.Cell.Builder</a></dt>
@@ -6552,7 +6552,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/fs/HFileSystem.html#createNonRecursive(org.apache.hadoop.fs.Path,%20boolean,%20int,%20short,%20long,%20org.apache.hadoop.util.Progressable)">createNonRecursive(Path, boolean, int, short, long, Progressable)</a></span> - Method in class org.apache.hadoop.hbase.fs.<a href="./org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a></dt>
 <dd>
-<div class="block">The org.apache.hadoop.fs.FilterFileSystem does not yet support
+<div class="block">The org.apache.hadoop.fs.FilterFileSystem does not yet support 
  createNonRecursive.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/zookeeper/ZKAssign.html#createOrForceNodeOffline(org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher,%20org.apache.hadoop.hbase.HRegionInfo,%20org.apache.hadoop.hbase.ServerName)">createOrForceNodeOffline(ZooKeeperWatcher, HRegionInfo, ServerName)</a></span> - Static method in class org.apache.hadoop.hbase.zookeeper.<a href="./org/apache/hadoop/hbase/zookeeper/ZKAssign.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKAssign</a></dt>
@@ -7549,7 +7549,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#DEFAULT_MEMSTORE_FLUSH_SIZE">DEFAULT_MEMSTORE_FLUSH_SIZE</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Constant that denotes the maximum default size of the memstore after which
+<div class="block">Constant that denotes the maximum default size of the memstore after which 
  the contents are flushed to the store files</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HColumnDescriptor.html#DEFAULT_MIN_VERSIONS">DEFAULT_MIN_VERSIONS</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase">HColumnDescriptor</a></dt>
@@ -7667,7 +7667,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#DEFERRED_LOG_FLUSH">DEFERRED_LOG_FLUSH</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata
+<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata 
  attribute which denotes if the deferred log flush option is enabled</div>
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/ipc/Delayable.html" title="interface in org.apache.hadoop.hbase.ipc"><span class="strong">Delayable</span></a> - Interface in <a href="./org/apache/hadoop/hbase/ipc/package-summary.html">org.apache.hadoop.hbase.ipc</a></dt>
@@ -8192,7 +8192,7 @@
 <div class="block">A filter for adding inter-column timestamp matching
  Only cells with a correspondingly timestamped entry in
  the target column will be retained
- Not compatible with Scan.setBatch as operations need
+ Not compatible with Scan.setBatch as operations need 
  full rows for correct filtering</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/filter/DependentColumnFilter.html#DependentColumnFilter()">DependentColumnFilter()</a></span> - Constructor for class org.apache.hadoop.hbase.filter.<a href="./org/apache/hadoop/hbase/filter/DependentColumnFilter.html" title="class in org.apache.hadoop.hbase.filter">DependentColumnFilter</a></dt>
@@ -13797,7 +13797,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#getClient()">getClient()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">If an RPC call is currently running, produces a String representation of
+<div class="block">If an RPC call is currently running, produces a String representation of 
  the connection from which it was received.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/thrift/generated/Hbase.Client.Factory.html#getClient(org.apache.thrift.protocol.TProtocol)">getClient(TProtocol)</a></span> - Method in class org.apache.hadoop.hbase.thrift.generated.<a href="./org/apache/hadoop/hbase/thrift/generated/Hbase.Client.Factory.html" title="class in org.apache.hadoop.hbase.thrift.generated">Hbase.Client.Factory</a></dt>
@@ -13941,7 +13941,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getColumnFamilies()">getColumnFamilies()</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Returns an array all the <a href="./org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase"><code>HColumnDescriptor</code></a> of the column families
+<div class="block">Returns an array all the <a href="./org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase"><code>HColumnDescriptor</code></a> of the column families 
  of the table.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/thrift/generated/Hbase.createTable_args.html#getColumnFamilies()">getColumnFamilies()</a></span> - Method in class org.apache.hadoop.hbase.thrift.generated.<a href="./org/apache/hadoop/hbase/thrift/generated/Hbase.createTable_args.html" title="class in org.apache.hadoop.hbase.thrift.generated">Hbase.createTable_args</a></dt>
@@ -14211,8 +14211,8 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/compactions/CompactSelection.html#getCompactSelectionRatio()">getCompactSelectionRatio()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.compactions.<a href="./org/apache/hadoop/hbase/regionserver/compactions/CompactSelection.html" title="class in org.apache.hadoop.hbase.regionserver.compactions">CompactSelection</a></dt>
 <dd>
-<div class="block">If the current hour falls in the off peak times and there are no
- outstanding off peak compactions, the current compaction is
+<div class="block">If the current hour falls in the off peak times and there are no 
+ outstanding off peak compactions, the current compaction is 
  promoted to an off peak compaction.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html#getCompactSplitThread()">getCompactSplitThread()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
@@ -15235,7 +15235,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getFamilies()">getFamilies()</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Returns an unmodifiable collection of all the <a href="./org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase"><code>HColumnDescriptor</code></a>
+<div class="block">Returns an unmodifiable collection of all the <a href="./org/apache/hadoop/hbase/HColumnDescriptor.html" title="class in org.apache.hadoop.hbase"><code>HColumnDescriptor</code></a> 
  of all the column families of the table.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getFamiliesKeys()">getFamiliesKeys()</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
@@ -15248,7 +15248,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getFamily(byte[])">getFamily(byte[])</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Returns the HColumnDescriptor for a specific column family with name as
+<div class="block">Returns the HColumnDescriptor for a specific column family with name as 
  specified by the parameter column.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/KeyValue.html#getFamily()">getFamily()</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/KeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue</a></dt>
@@ -15986,7 +15986,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Operation.html#getFingerprint()">getFingerprint()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Operation.html" title="class in org.apache.hadoop.hbase.client">Operation</a></dt>
 <dd>
-<div class="block">Produces a Map containing a fingerprint which identifies the type and
+<div class="block">Produces a Map containing a fingerprint which identifies the type and 
  the static schema components of a query (i.e.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Scan.html#getFingerprint()">getFingerprint()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Scan.html" title="class in org.apache.hadoop.hbase.client">Scan</a></dt>
@@ -17592,7 +17592,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/fs/HFileSystem.html#getNoChecksumFs()">getNoChecksumFs()</a></span> - Method in class org.apache.hadoop.hbase.fs.<a href="./org/apache/hadoop/hbase/fs/HFileSystem.html" title="class in org.apache.hadoop.hbase.fs">HFileSystem</a></dt>
 <dd>
-<div class="block">Returns the filesystem that is specially setup for
+<div class="block">Returns the filesystem that is specially setup for 
  doing reads from storage.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html#getNode()">getNode()</a></span> - Method in class org.apache.hadoop.hbase.zookeeper.<a href="./org/apache/hadoop/hbase/zookeeper/ZKUtil.NodeAndData.html" title="class in org.apache.hadoop.hbase.zookeeper">ZKUtil.NodeAndData</a></dt>
@@ -18550,7 +18550,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getRegionSplitPolicyClassName()">getRegionSplitPolicyClassName()</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">This get the class associated with the region split policy which
+<div class="block">This get the class associated with the region split policy which 
  determines when a region split should occur.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/rest/TableResource.html#getRegionsResource()">getRegionsResource()</a></span> - Method in class org.apache.hadoop.hbase.rest.<a href="./org/apache/hadoop/hbase/rest/TableResource.html" title="class in org.apache.hadoop.hbase.rest">TableResource</a></dt>
@@ -19056,7 +19056,7 @@
 <dd>
 <div class="block"><span class="strong">Deprecated.</span>
 <div class="block"><i>As of version 0.92 this method is deprecated without
- replacement.
+ replacement.   
  getRowOrBefore is used internally to find entries in .META. and makes
  various assumptions about the table (which are true for .META. but not
  in general) to be efficient.</i></div>
@@ -19276,7 +19276,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#getRPCQueueTime()">getRPCQueueTime()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Accesses the queue time for the currently running RPC on the
+<div class="block">Accesses the queue time for the currently running RPC on the 
  monitored Handler.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html#getRpcServer()">getRpcServer()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
@@ -19289,7 +19289,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#getRPCStartTime()">getRPCStartTime()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Accesses the start time for the currently running RPC on the
+<div class="block">Accesses the start time for the currently running RPC on the 
  monitored Handler.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/ipc/HBaseRPC.html#getRpcTimeout()">getRpcTimeout()</a></span> - Static method in class org.apache.hadoop.hbase.ipc.<a href="./org/apache/hadoop/hbase/ipc/HBaseRPC.html" title="class in org.apache.hadoop.hbase.ipc">HBaseRPC</a></dt>
@@ -20000,7 +20000,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#getStatus()">getStatus()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Gets the status of this handler; if it is currently servicing an RPC,
+<div class="block">Gets the status of this handler; if it is currently servicing an RPC, 
  this status will include the RPC information.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html#getStatus()">getStatus()</a></span> - Method in interface org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a></dt>
@@ -20507,7 +20507,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#getTableDir(org.apache.hadoop.fs.Path,%20byte[])">getTableDir(Path, byte[])</a></span> - Static method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Returns the <code>Path</code> object representing the table directory under
+<div class="block">Returns the <code>Path</code> object representing the table directory under 
  path rootdir</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegion.html#getTableDir()">getTableDir()</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></dt>
@@ -20806,7 +20806,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/TaskMonitor.html#getTasks()">getTasks()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/TaskMonitor.html" title="class in org.apache.hadoop.hbase.monitoring">TaskMonitor</a></dt>
 <dd>
-<div class="block">Produces a list containing copies of the current state of all non-expired
+<div class="block">Produces a list containing copies of the current state of all non-expired 
  MonitoredTasks handled by this TaskMonitor.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/master/MasterFileSystem.html#getTempDir()">getTempDir()</a></span> - Method in class org.apache.hadoop.hbase.master.<a href="./org/apache/hadoop/hbase/master/MasterFileSystem.html" title="class in org.apache.hadoop.hbase.master">MasterFileSystem</a></dt>
@@ -22168,7 +22168,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/Hash.html#hash(byte[],%20int,%20int,%20int)">hash(byte[], int, int, int)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/Hash.html" title="class in org.apache.hadoop.hbase.util">Hash</a></dt>
 <dd>
-<div class="block">Calculate a hash using bytes from <code>offset</code> to <code>offset +
+<div class="block">Calculate a hash using bytes from <code>offset</code> to <code>offset + 
  length</code>, and the provided seed value.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/JenkinsHash.html#hash(byte[],%20int,%20int,%20int)">hash(byte[], int, int, int)</a></span> - Method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/JenkinsHash.html" title="class in org.apache.hadoop.hbase.util">JenkinsHash</a></dt>
@@ -25555,7 +25555,7 @@
 <dt><a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase"><span class="strong">HTableDescriptor</span></a> - Class in <a href="./org/apache/hadoop/hbase/package-summary.html">org.apache.hadoop.hbase</a></dt>
 <dd>
 <div class="block">HTableDescriptor contains the details about an HBase table  such as the descriptors of
- all the column families, is the table a catalog table, <code> -ROOT- </code> or
+ all the column families, is the table a catalog table, <code> -ROOT- </code> or 
  <code> .META.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#HTableDescriptor(byte[],%20org.apache.hadoop.hbase.HColumnDescriptor[])">HTableDescriptor(byte[], HColumnDescriptor[])</a></span> - Constructor for class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
@@ -26758,13 +26758,13 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#IS_META">IS_META</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata
+<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata 
  attribute which denotes if it is a catalog table, either
  <code> .META.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#IS_ROOT">IS_ROOT</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata
+<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata 
  attribute which denotes if the table is a -ROOT- region or not</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/Abortable.html#isAborted()">isAborted()</a></span> - Method in interface org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/Abortable.html" title="interface in org.apache.hadoop.hbase">Abortable</a></dt>
@@ -27241,7 +27241,7 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#isMetaTable(byte[])">isMetaTable(byte[])</a></span> - Static method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Checks of the tableName being passed represents either
+<div class="block">Checks of the tableName being passed represents either 
  <code > -ROOT- </code> or <code> .META.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/migration/HRegionInfo090x.html#isMetaTable()">isMetaTable()</a></span> - Method in class org.apache.hadoop.hbase.migration.<a href="./org/apache/hadoop/hbase/migration/HRegionInfo090x.html" title="class in org.apache.hadoop.hbase.migration">HRegionInfo090x</a></dt>
@@ -27276,7 +27276,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#isOperationRunning()">isOperationRunning()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Indicates to the client whether this task is monitoring a currently active
+<div class="block">Indicates to the client whether this task is monitoring a currently active 
  RPC call to a database command.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/protobuf/ProtobufUtil.html#isPBMagicPrefix(byte[])">isPBMagicPrefix(byte[])</a></span> - Static method in class org.apache.hadoop.hbase.protobuf.<a href="./org/apache/hadoop/hbase/protobuf/ProtobufUtil.html" title="class in org.apache.hadoop.hbase.protobuf">ProtobufUtil</a></dt>
@@ -27360,7 +27360,7 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#isRPCRunning()">isRPCRunning()</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Indicates to the client whether this task is monitoring a currently active
+<div class="block">Indicates to the client whether this task is monitoring a currently active 
  RPC call.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/FSHDFSUtils.html#isSameHdfs(org.apache.hadoop.conf.Configuration,%20org.apache.hadoop.fs.FileSystem,%20org.apache.hadoop.fs.FileSystem)">isSameHdfs(Configuration, FileSystem, FileSystem)</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/FSHDFSUtils.html" title="class in org.apache.hadoop.hbase.util">FSHDFSUtils</a></dt>
@@ -31092,8 +31092,8 @@
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#MAX_FILESIZE">MAX_FILESIZE</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata
- attribute which denotes the maximum size of the store file after which
+<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata 
+ attribute which denotes the maximum size of the store file after which 
  a region split occurs</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/hfile/HFile.html#MAX_FORMAT_VERSION">MAX_FORMAT_VERSION</a></span> - Static variable in class org.apache.hadoop.hbase.io.hfile.<a href="./org/apache/hadoop/hbase/io/hfile/HFile.html" title="class in org.apache.hadoop.hbase.io.hfile">HFile</a></dt>
@@ -31223,8 +31223,8 @@
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#MEMSTORE_FLUSHSIZE">MEMSTORE_FLUSHSIZE</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata
- attribute which represents the maximum size of the memstore after which
+<div class="block"><em>INTERNAL</em> Used by HBase Shell interface to access this metadata 
+ attribute which represents the maximum size of the memstore after which 
  its contents are flushed onto the disk</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegion.html#MEMSTORE_PERIODIC_FLUSH_INTERVAL">MEMSTORE_PERIODIC_FLUSH_INTERVAL</a></span> - Static variable in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegion.html" title="class in org.apache.hadoop.hbase.regionserver">HRegion</a></dt>
@@ -32010,12 +32010,12 @@
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandler.html" title="interface in org.apache.hadoop.hbase.monitoring"><span class="strong">MonitoredRPCHandler</span></a> - Interface in <a href="./org/apache/hadoop/hbase/monitoring/package-summary.html">org.apache.hadoop.hbase.monitoring</a></dt>
 <dd>
-<div class="block">A MonitoredTask implementation optimized for use with RPC Handlers
+<div class="block">A MonitoredTask implementation optimized for use with RPC Handlers 
  handling frequent, short duration tasks.</div>
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring"><span class="strong">MonitoredRPCHandlerImpl</span></a> - Class in <a href="./org/apache/hadoop/hbase/monitoring/package-summary.html">org.apache.hadoop.hbase.monitoring</a></dt>
 <dd>
-<div class="block">A MonitoredTask implementation designed for use with RPC Handlers
+<div class="block">A MonitoredTask implementation designed for use with RPC Handlers 
  handling frequent, short duration tasks.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#MonitoredRPCHandlerImpl()">MonitoredRPCHandlerImpl()</a></span> - Constructor for class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
@@ -32143,7 +32143,7 @@
 </dd>
 <dt><a href="./org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.html" title="class in org.apache.hadoop.hbase.mapreduce"><span class="strong">MultiTableInputFormat</span></a> - Class in <a href="./org/apache/hadoop/hbase/mapreduce/package-summary.html">org.apache.hadoop.hbase.mapreduce</a></dt>
 <dd>
-<div class="block">Convert HBase tabular data from multiple scanners into a format that
+<div class="block">Convert HBase tabular data from multiple scanners into a format that 
  is consumable by Map/Reduce.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.html#MultiTableInputFormat()">MultiTableInputFormat()</a></span> - Constructor for class org.apache.hadoop.hbase.mapreduce.<a href="./org/apache/hadoop/hbase/mapreduce/MultiTableInputFormat.html" title="class in org.apache.hadoop.hbase.mapreduce">MultiTableInputFormat</a></dt>
@@ -36156,7 +36156,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#readFields(java.io.DataInput)">readFields(DataInput)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em> INTERNAL </em> This method is a part of <code>WritableComparable</code> interface
+<div class="block"><em> INTERNAL </em> This method is a part of <code>WritableComparable</code> interface 
  and is used for de-serialization of the HTableDescriptor over RPC</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/HbaseMapWritable.html#readFields(java.io.DataInput)">readFields(DataInput)</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="./org/apache/hadoop/hbase/io/HbaseMapWritable.html" title="class in org.apache.hadoop.hbase.io">HbaseMapWritable</a></dt>
@@ -36256,7 +36256,7 @@ service.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#READONLY">READONLY</a></span> - Static variable in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata
+<div class="block"><em>INTERNAL</em> Used by rest interface to access this metadata 
  attribute which denotes if the table is Read Only</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.TableSchema.html#READONLY_FIELD_NUMBER">READONLY_FIELD_NUMBER</a></span> - Static variable in class org.apache.hadoop.hbase.rest.protobuf.generated.<a href="./org/apache/hadoop/hbase/rest/protobuf/generated/TableSchemaMessage.TableSchema.html" title="class in org.apache.hadoop.hbase.rest.protobuf.generated">TableSchemaMessage.TableSchema</a></dt>
@@ -37163,7 +37163,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#removeFamily(byte[])">removeFamily(byte[])</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Removes the HColumnDescriptor with name specified by the parameter column
+<div class="block">Removes the HColumnDescriptor with name specified by the parameter column 
  from the table descriptor</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html#removeFromOnlineRegions(java.lang.String)">removeFromOnlineRegions(String)</a></span> - Method in class org.apache.hadoop.hbase.regionserver.<a href="./org/apache/hadoop/hbase/regionserver/HRegionServer.html" title="class in org.apache.hadoop.hbase.regionserver">HRegionServer</a></dt>
@@ -39029,7 +39029,7 @@ service.</div>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html#seekToKeyInBlock(byte[],%20int,%20int,%20boolean)">seekToKeyInBlock(byte[], int, int, boolean)</a></span> - Method in interface org.apache.hadoop.hbase.io.encoding.<a href="./org/apache/hadoop/hbase/io/encoding/DataBlockEncoder.EncodedSeeker.html" title="interface in org.apache.hadoop.hbase.io.encoding">DataBlockEncoder.EncodedSeeker</a></dt>
 <dd>
 <div class="block">Moves the seeker position within the current block to:
-
+ 
  the last key that that is less than or equal to the given key if
  <code>seekBefore</code> is false
  the last key that is strictly less than the given key if <code>
@@ -41200,7 +41200,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#setMemStoreFlushSize(long)">setMemStoreFlushSize(long)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block">Represents the maximum size of the memstore after which the contents of the
+<div class="block">Represents the maximum size of the memstore after which the contents of the 
  memstore are flushed to the filesystem.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HServerLoad.RegionLoad.html#setMemStoreSizeMB(int)">setMemStoreSizeMB(int)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HServerLoad.RegionLoad.html" title="class in org.apache.hadoop.hbase">HServerLoad.RegionLoad</a></dt>
@@ -41235,7 +41235,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#setMetaRegion(boolean)">setMetaRegion(boolean)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em> INTERNAL </em> Used to denote if the current table represents
+<div class="block"><em> INTERNAL </em> Used to denote if the current table represents 
  <code> -ROOT- </code> or <code> .META.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/thrift2/generated/TTimeRange.html#setMinStamp(long)">setMinStamp(long)</a></span> - Method in class org.apache.hadoop.hbase.thrift2.generated.<a href="./org/apache/hadoop/hbase/thrift2/generated/TTimeRange.html" title="class in org.apache.hadoop.hbase.thrift2.generated">TTimeRange</a></dt>
@@ -41611,7 +41611,7 @@ service.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#setRootRegion(boolean)">setRootRegion(boolean)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em> INTERNAL </em> Used to denote if the current table represents
+<div class="block"><em> INTERNAL </em> Used to denote if the current table represents 
  <code> -ROOT- </code> region.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/KeyValue.SplitKeyValue.html#setRow(byte[])">setRow(byte[])</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/KeyValue.SplitKeyValue.html" title="class in org.apache.hadoop.hbase">KeyValue.SplitKeyValue</a></dt>
@@ -41891,7 +41891,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html#setRPCPacket(org.apache.hadoop.io.Writable)">setRPCPacket(Writable)</a></span> - Method in class org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredRPCHandlerImpl.html" title="class in org.apache.hadoop.hbase.monitoring">MonitoredRPCHandlerImpl</a></dt>
 <dd>
-<div class="block">Gives this instance a reference to the Writable received by the RPC, so
+<div class="block">Gives this instance a reference to the Writable received by the RPC, so 
  that it can later compute its size if asked for it.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/ipc/HBaseRPC.html#setRpcTimeout(int)">setRpcTimeout(int)</a></span> - Static method in class org.apache.hadoop.hbase.ipc.<a href="./org/apache/hadoop/hbase/ipc/HBaseRPC.html" title="class in org.apache.hadoop.hbase.ipc">HBaseRPC</a></dt>
@@ -43719,7 +43719,7 @@ service.</div>
 <dt><a href="./org/apache/hadoop/hbase/snapshot/SnapshotInfo.SnapshotStats.html" title="class in org.apache.hadoop.hbase.snapshot"><span class="strong">SnapshotInfo.SnapshotStats</span></a> - Class in <a href="./org/apache/hadoop/hbase/snapshot/package-summary.html">org.apache.hadoop.hbase.snapshot</a></dt>
 <dd>
 <div class="block">Statistics about the snapshot
-
+ 
   How many store files and logs are in the archive
   How many store files and logs are shared with the table
   Total store files and logs size and shared amount
@@ -46599,8 +46599,8 @@ service.</div>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/MultiPut.html#toMap(int)">toMap(int)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/MultiPut.html" title="class in org.apache.hadoop.hbase.client">MultiPut</a></dt>
 <dd>
 <div class="block"><span class="strong">Deprecated.</span></div>
-<div class="block">Compile the details beyond the scope of getFingerprint (mostly
- toMap from the Puts) into a Map along with the fingerprinted
+<div class="block">Compile the details beyond the scope of getFingerprint (mostly 
+ toMap from the Puts) into a Map along with the fingerprinted 
  information.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Mutation.html#toMap(int)">toMap(int)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Mutation.html" title="class in org.apache.hadoop.hbase.client">Mutation</a></dt>
@@ -46610,7 +46610,7 @@ service.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Operation.html#toMap(int)">toMap(int)</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Operation.html" title="class in org.apache.hadoop.hbase.client">Operation</a></dt>
 <dd>
-<div class="block">Produces a Map containing a summary of the details of a query
+<div class="block">Produces a Map containing a summary of the details of a query 
  beyond the scope of the fingerprint (i.e.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/client/Operation.html#toMap()">toMap()</a></span> - Method in class org.apache.hadoop.hbase.client.<a href="./org/apache/hadoop/hbase/client/Operation.html" title="class in org.apache.hadoop.hbase.client">Operation</a></dt>
@@ -46626,7 +46626,7 @@ service.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html#toMap()">toMap()</a></span> - Method in interface org.apache.hadoop.hbase.monitoring.<a href="./org/apache/hadoop/hbase/monitoring/MonitoredTask.html" title="interface in org.apache.hadoop.hbase.monitoring">MonitoredTask</a></dt>
 <dd>
-<div class="block">Creates a string map of internal details for extensible exposure of
+<div class="block">Creates a string map of internal details for extensible exposure of 
  monitored tasks.</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/util/Bytes.html#toShort(byte[])">toShort(byte[])</a></span> - Static method in class org.apache.hadoop.hbase.util.<a href="./org/apache/hadoop/hbase/util/Bytes.html" title="class in org.apache.hadoop.hbase.util">Bytes</a></dt>
@@ -51424,7 +51424,7 @@ the order they are declared.</div>
 <dd>&nbsp;</dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/HTableDescriptor.html#write(java.io.DataOutput)">write(DataOutput)</a></span> - Method in class org.apache.hadoop.hbase.<a href="./org/apache/hadoop/hbase/HTableDescriptor.html" title="class in org.apache.hadoop.hbase">HTableDescriptor</a></dt>
 <dd>
-<div class="block"><em> INTERNAL </em> This method is a part of <code>WritableComparable</code> interface
+<div class="block"><em> INTERNAL </em> This method is a part of <code>WritableComparable</code> interface 
  and is used for serialization of the HTableDescriptor over RPC</div>
 </dd>
 <dt><span class="strong"><a href="./org/apache/hadoop/hbase/io/DoubleOutputStream.html#write(int)">write(int)</a></span> - Method in class org.apache.hadoop.hbase.io.<a href="./org/apache/hadoop/hbase/io/DoubleOutputStream.html" title="class in org.apache.hadoop.hbase.io">DoubleOutputStream</a></dt>

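The HTableDescriptor entries in the index hunks above (MAX_FILESIZE, MEMSTORE_FLUSHSIZE, READONLY, removeFamily(byte[]), isMetaTable(byte[]), setMemStoreFlushSize(long)) all concern table-level metadata. The snippet below is a minimal, illustrative sketch against the 0.94 client API only; the table and family names are invented for the example, and setMaxFileSize/setReadOnly are assumed to be the setters behind the MAX_FILESIZE and READONLY attributes.

    import org.apache.hadoop.hbase.HColumnDescriptor;
    import org.apache.hadoop.hbase.HTableDescriptor;
    import org.apache.hadoop.hbase.util.Bytes;

    public class TableDescriptorSketch {
      public static void main(String[] args) {
        // Build a descriptor and populate the metadata attributes named in the index.
        HTableDescriptor htd = new HTableDescriptor("example_table");   // name is illustrative
        htd.addFamily(new HColumnDescriptor("cf"));
        htd.setMaxFileSize(10L * 1024 * 1024 * 1024);   // assumed setter for MAX_FILESIZE: split beyond ~10 GB
        htd.setMemStoreFlushSize(128L * 1024 * 1024);   // flush the memstore at ~128 MB (setMemStoreFlushSize above)
        htd.setReadOnly(false);                          // assumed setter for READONLY

        // Catalog check and family removal, as documented in the index entries.
        boolean isCatalog = HTableDescriptor.isMetaTable(htd.getName());
        htd.removeFamily(Bytes.toBytes("cf"));
        System.out.println(htd.getNameAsString() + " catalog=" + isCatalog);
      }
    }
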
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/org/apache/hadoop/hbase/Coprocessor.State.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/org/apache/hadoop/hbase/Coprocessor.State.html b/0.94/apidocs/org/apache/hadoop/hbase/Coprocessor.State.html
index d521dd5..566e120 100644
--- a/0.94/apidocs/org/apache/hadoop/hbase/Coprocessor.State.html
+++ b/0.94/apidocs/org/apache/hadoop/hbase/Coprocessor.State.html
@@ -291,7 +291,7 @@ for (Coprocessor.State c : Coprocessor.State.values())
 <pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/Coprocessor.State.html" title="enum in org.apache.hadoop.hbase">Coprocessor.State</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/Coprocessor.State.html#line.52">valueOf</a>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
-enum constant in this type.  (Extraneous whitespace characters are
+enum constant in this type.  (Extraneous whitespace characters are 
 not permitted.)</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name of the enum constant to be returned.</dd>
 <dt><span class="strong">Returns:</span></dt><dd>the enum constant with the specified name</dd>

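The Coprocessor.State page touched above documents the generated values() and valueOf(String) enum helpers. A minimal sketch of the idiom quoted in that hunk header, using only standard Java enum methods:

    import org.apache.hadoop.hbase.Coprocessor;

    public class CoprocessorStateSketch {
      public static void main(String[] args) {
        // Iterate the declared constants, then round-trip each name through valueOf().
        for (Coprocessor.State c : Coprocessor.State.values()) {
          Coprocessor.State same = Coprocessor.State.valueOf(c.name());
          System.out.println(same);
        }
        // valueOf() throws IllegalArgumentException for names that do not match exactly,
        // including names with extra whitespace, as the javadoc above notes.
      }
    }
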
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/org/apache/hadoop/hbase/HBaseFileSystem.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/org/apache/hadoop/hbase/HBaseFileSystem.html b/0.94/apidocs/org/apache/hadoop/hbase/HBaseFileSystem.html
index 4778538..08fcc46 100644
--- a/0.94/apidocs/org/apache/hadoop/hbase/HBaseFileSystem.html
+++ b/0.94/apidocs/org/apache/hadoop/hbase/HBaseFileSystem.html
@@ -390,7 +390,7 @@ extends <a href="http://docs.oracle.com/javase/6/docs/api/java/lang/Object.html?
                                                              boolean&nbsp;overwrite)
                                                                       throws <a href="http://docs.oracle.com/javase/6/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
 <div class="block">Creates a path on the file system. Checks whether the path exists already or not, and use it
- for retrying in case underlying fs throws an exception.
+ for retrying in case underlying fs throws an exception. 
  If the dir already exists and overwrite flag is false, the underlying FileSystem throws
   an IOE. It is not retried and the IOE is re-thrown to the caller.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>fs</code> - </dd><dd><code>dir</code> - </dd><dd><code>overwrite</code> - </dd>
@@ -410,7 +410,7 @@ extends <a href="http://docs.oracle.com/javase/6/docs/api/java/lang/Object.html?
                                                                       org.apache.hadoop.fs.permission.FsPermission&nbsp;perm,
                                                                       boolean&nbsp;overwrite)
                                                                                throws <a href="http://docs.oracle.com/javase/6/docs/api/java/io/IOException.html?is-external=true" title="class or interface in java.io">IOException</a></pre>
-<div class="block">Creates the specified file with the given permission.
+<div class="block">Creates the specified file with the given permission. 
  If the dir already exists and the overwrite flag is false, underlying FileSystem throws
  an IOE. It is not retried and the IOE is re-thrown to the caller.</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>fs</code> - </dd><dd><code>path</code> - </dd><dd><code>perm</code> - </dd><dd><code>overwrite</code> - </dd>

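The HBaseFileSystem hunks above describe create helpers that retry when the underlying FileSystem throws, but re-throw the IOException unretried when the target already exists and the overwrite flag is false. The sketch below illustrates that contract under stated assumptions: the static method names createPathOnFileSystem and createPathWithPermsOnFileSystem and their FSDataOutputStream return type are assumptions, since the hunks show only the parameter tails of the signatures.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsPermission;
    import org.apache.hadoop.hbase.HBaseFileSystem;

    public class CreatePathSketch {
      public static void main(String[] args) throws IOException {
        FileSystem fs = FileSystem.get(new Configuration());
        Path dir = new Path("/tmp/hbase-sketch");   // path is illustrative only

        // overwrite=false: if the path already exists, the underlying FileSystem's
        // IOException is re-thrown to the caller rather than retried.
        // Method name and return type are assumed, not shown in the hunk above.
        FSDataOutputStream out = HBaseFileSystem.createPathOnFileSystem(fs, dir, false);
        out.close();

        // Variant that also applies a permission to the created file (assumed name).
        FSDataOutputStream out2 = HBaseFileSystem.createPathWithPermsOnFileSystem(
            fs, new Path(dir, "file"), FsPermission.getDefault(), false);
        out2.close();
      }
    }
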
http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html b/0.94/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
index 9a0eb29..e3e614d 100644
--- a/0.94/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
+++ b/0.94/apidocs/org/apache/hadoop/hbase/HColumnDescriptor.html
@@ -1289,7 +1289,7 @@ public&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HColumnDescrip
 <li class="blockList">
 <h4>getCompactionCompression</h4>
 <pre>public&nbsp;<a href="../../../../org/apache/hadoop/hbase/io/hfile/Compression.Algorithm.html" title="enum in org.apache.hadoop.hbase.io.hfile">Compression.Algorithm</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HColumnDescriptor.html#line.525">getCompactionCompression</a>()</pre>
-<dl><dt><span class="strong">Returns:</span></dt><dd>compression type being used for the column family for major
+<dl><dt><span class="strong">Returns:</span></dt><dd>compression type being used for the column family for major 
       compression</dd></dl>
 </li>
 </ul>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/org/apache/hadoop/hbase/HConstants.Modify.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/org/apache/hadoop/hbase/HConstants.Modify.html b/0.94/apidocs/org/apache/hadoop/hbase/HConstants.Modify.html
index 239434b..7b367f0 100644
--- a/0.94/apidocs/org/apache/hadoop/hbase/HConstants.Modify.html
+++ b/0.94/apidocs/org/apache/hadoop/hbase/HConstants.Modify.html
@@ -291,7 +291,7 @@ for (HConstants.Modify c : HConstants.Modify.values())
 <pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.Modify.html" title="enum in org.apache.hadoop.hbase">HConstants.Modify</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.Modify.html#line.431">valueOf</a>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
-enum constant in this type.  (Extraneous whitespace characters are
+enum constant in this type.  (Extraneous whitespace characters are 
 not permitted.)</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name of the enum constant to be returned.</dd>
 <dt><span class="strong">Returns:</span></dt><dd>the enum constant with the specified name</dd>

http://git-wip-us.apache.org/repos/asf/hbase-site/blob/ecb8d8ba/0.94/apidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
----------------------------------------------------------------------
diff --git a/0.94/apidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html b/0.94/apidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
index 790028a..0a028af 100644
--- a/0.94/apidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
+++ b/0.94/apidocs/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html
@@ -279,7 +279,7 @@ for (HConstants.OperationStatusCode c : HConstants.OperationStatusCode.values())
 <pre>public static&nbsp;<a href="../../../../org/apache/hadoop/hbase/HConstants.OperationStatusCode.html" title="enum in org.apache.hadoop.hbase">HConstants.OperationStatusCode</a>&nbsp;<a href="../../../../src-html/org/apache/hadoop/hbase/HConstants.OperationStatusCode.html#line.741">valueOf</a>(<a href="http://docs.oracle.com/javase/6/docs/api/java/lang/String.html?is-external=true" title="class or interface in java.lang">String</a>&nbsp;name)</pre>
 <div class="block">Returns the enum constant of this type with the specified name.
 The string must match <i>exactly</i> an identifier used to declare an
-enum constant in this type.  (Extraneous whitespace characters are
+enum constant in this type.  (Extraneous whitespace characters are 
 not permitted.)</div>
 <dl><dt><span class="strong">Parameters:</span></dt><dd><code>name</code> - the name of the enum constant to be returned.</dd>
 <dt><span class="strong">Returns:</span></dt><dd>the enum constant with the specified name</dd>