Posted to common-commits@hadoop.apache.org by st...@apache.org on 2022/05/18 11:12:23 UTC

[hadoop] branch trunk updated: HADOOP-18229. Fix Hadoop-Common JavaDoc Errors (#4292)

This is an automated email from the ASF dual-hosted git repository.

stevel pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new f6fa5bd1aa0 HADOOP-18229. Fix Hadoop-Common JavaDoc Errors (#4292)
f6fa5bd1aa0 is described below

commit f6fa5bd1aa085a4d22f3450b545bb70063da9f51
Author: slfan1989 <55...@users.noreply.github.com>
AuthorDate: Wed May 18 04:12:04 2022 -0700

    HADOOP-18229. Fix Hadoop-Common JavaDoc Errors (#4292)
    
    
    
    Contributed by slfan1989
---
 hadoop-common-project/hadoop-common/pom.xml        |  10 +
 .../org/apache/hadoop/conf/ConfigRedactor.java     |   4 +-
 .../java/org/apache/hadoop/conf/Configuration.java |  52 ++--
 .../java/org/apache/hadoop/conf/Configured.java    |   4 +-
 .../org/apache/hadoop/conf/Reconfigurable.java     |   6 +
 .../org/apache/hadoop/conf/ReconfigurableBase.java |   3 +
 .../hadoop/conf/ReconfigurationException.java      |  10 +
 .../hadoop/conf/ReconfigurationTaskStatus.java     |   4 +-
 .../java/org/apache/hadoop/crypto/CryptoCodec.java |  10 +-
 .../apache/hadoop/crypto/CryptoInputStream.java    |   2 +-
 .../apache/hadoop/crypto/CryptoOutputStream.java   |   2 +-
 .../apache/hadoop/crypto/CryptoStreamUtils.java    |  35 ++-
 .../org/apache/hadoop/crypto/OpensslCipher.java    |  23 +-
 .../org/apache/hadoop/crypto/key/KeyProvider.java  |  48 +--
 .../crypto/key/KeyProviderCryptoExtension.java     |   9 +-
 .../key/KeyProviderDelegationTokenExtension.java   |   4 +-
 .../org/apache/hadoop/crypto/key/KeyShell.java     |   4 +-
 .../apache/hadoop/crypto/key/kms/ValueQueue.java   |  12 +-
 .../org/apache/hadoop/fs/AbstractFileSystem.java   | 277 +++++++++++++++--
 .../java/org/apache/hadoop/fs/AvroFSInput.java     |  13 +-
 .../apache/hadoop/fs/BatchedRemoteIterator.java    |   3 +
 .../java/org/apache/hadoop/fs/BlockLocation.java   |  45 +++
 .../java/org/apache/hadoop/fs/ByteBufferUtil.java  |   6 +
 .../org/apache/hadoop/fs/CachingGetSpaceUsed.java  |   9 +
 .../org/apache/hadoop/fs/ChecksumFileSystem.java   |  40 ++-
 .../main/java/org/apache/hadoop/fs/ChecksumFs.java |  37 ++-
 .../hadoop/fs/CommonConfigurationKeysPublic.java   |   7 +-
 .../apache/hadoop/fs/CompositeCrcFileChecksum.java |   8 +-
 .../java/org/apache/hadoop/fs/ContentSummary.java  |  26 +-
 .../main/java/org/apache/hadoop/fs/CreateFlag.java |   2 +
 .../src/main/java/org/apache/hadoop/fs/DF.java     |  10 +-
 .../apache/hadoop/fs/DelegationTokenRenewer.java   |  26 +-
 .../main/java/org/apache/hadoop/fs/FSBuilder.java  |  45 ++-
 .../hadoop/fs/FSDataOutputStreamBuilder.java       |  32 ++
 .../java/org/apache/hadoop/fs/FSInputChecker.java  |  10 +-
 .../java/org/apache/hadoop/fs/FSLinkResolver.java  |   2 +-
 .../java/org/apache/hadoop/fs/FSOutputSummer.java  |   8 +
 .../java/org/apache/hadoop/fs/FileChecksum.java    |  25 +-
 .../java/org/apache/hadoop/fs/FileContext.java     | 112 ++++---
 .../org/apache/hadoop/fs/FileEncryptionInfo.java   |   3 +
 .../main/java/org/apache/hadoop/fs/FileStatus.java |  14 +
 .../main/java/org/apache/hadoop/fs/FileSystem.java | 170 +++++++++--
 .../apache/hadoop/fs/FileSystemLinkResolver.java   |   8 +-
 .../main/java/org/apache/hadoop/fs/FileUtil.java   |  87 ++++--
 .../org/apache/hadoop/fs/FilterFileSystem.java     |   4 +-
 .../main/java/org/apache/hadoop/fs/FsShell.java    |   2 +-
 .../main/java/org/apache/hadoop/fs/FsStatus.java   |  23 +-
 .../java/org/apache/hadoop/fs/GlobExpander.java    |   4 +-
 .../apache/hadoop/fs/GlobalStorageStatistics.java  |   2 +
 .../java/org/apache/hadoop/fs/HarFileSystem.java   |   8 +-
 .../main/java/org/apache/hadoop/fs/HardLink.java   |   6 +
 .../org/apache/hadoop/fs/HasFileDescriptor.java    |   2 +-
 .../org/apache/hadoop/fs/LocalDirAllocator.java    |  31 +-
 .../java/org/apache/hadoop/fs/LocalFileSystem.java |   6 +-
 .../fs/MD5MD5CRC32CastagnoliFileChecksum.java      |   8 +-
 .../apache/hadoop/fs/MD5MD5CRC32FileChecksum.java  |  13 +-
 .../hadoop/fs/MD5MD5CRC32GzipFileChecksum.java     |   8 +-
 .../org/apache/hadoop/fs/MultipartUploader.java    |   3 +-
 .../apache/hadoop/fs/MultipartUploaderBuilder.java |  19 +-
 .../main/java/org/apache/hadoop/fs/Options.java    |   7 +-
 .../main/java/org/apache/hadoop/fs/QuotaUsage.java |  64 +++-
 .../org/apache/hadoop/fs/RawLocalFileSystem.java   |   7 +-
 .../main/java/org/apache/hadoop/fs/Seekable.java   |  14 +-
 .../src/main/java/org/apache/hadoop/fs/Stat.java   |   4 +-
 .../org/apache/hadoop/fs/StorageStatistics.java    |   5 +
 .../src/main/java/org/apache/hadoop/fs/Trash.java  |  43 ++-
 .../java/org/apache/hadoop/fs/TrashPolicy.java     |  16 +-
 .../main/java/org/apache/hadoop/fs/XAttrCodec.java |   6 +-
 .../hadoop/fs/impl/AbstractFSBuilderImpl.java      |   2 +
 .../hadoop/fs/impl/AbstractMultipartUploader.java  |   2 +-
 .../fs/impl/FutureDataInputStreamBuilderImpl.java  |   5 +
 .../org/apache/hadoop/fs/impl/FutureIOSupport.java |   2 +
 .../fs/impl/MultipartUploaderBuilderImpl.java      |   3 +
 .../org/apache/hadoop/fs/permission/AclStatus.java |   4 +-
 .../org/apache/hadoop/fs/permission/FsAction.java  |  20 +-
 .../apache/hadoop/fs/permission/FsCreateModes.java |   9 +-
 .../apache/hadoop/fs/permission/FsPermission.java  |  54 +++-
 .../hadoop/fs/permission/PermissionStatus.java     |  39 ++-
 .../java/org/apache/hadoop/fs/shell/Command.java   |  24 +-
 .../hadoop/fs/shell/CommandWithDestination.java    |   3 +
 .../java/org/apache/hadoop/fs/shell/PathData.java  |   3 +-
 .../hadoop/fs/shell/find/BaseExpression.java       |  19 +-
 .../apache/hadoop/fs/shell/find/Expression.java    |  15 +-
 .../apache/hadoop/fs/shell/find/FindOptions.java   |   1 +
 .../org/apache/hadoop/fs/shell/find/Result.java    |  21 +-
 .../hadoop/fs/statistics/IOStatisticsSnapshot.java |   8 +-
 .../hadoop/fs/statistics/IOStatisticsSupport.java  |   1 +
 .../apache/hadoop/fs/statistics/MeanStatistic.java |   1 +
 .../fs/statistics/impl/IOStatisticsBinding.java    |   4 +
 .../org/apache/hadoop/fs/store/DataBlocks.java     |   4 +
 .../hadoop/fs/store/audit/AuditingFunctions.java   |   2 +
 .../org/apache/hadoop/fs/viewfs/ConfigUtil.java    |  56 ++--
 .../java/org/apache/hadoop/fs/viewfs/FsGetter.java |   9 +
 .../org/apache/hadoop/fs/viewfs/InodeTree.java     |  53 ++--
 .../hadoop/fs/viewfs/MountTableConfigLoader.java   |   1 +
 .../apache/hadoop/fs/viewfs/ViewFileSystem.java    |  18 +-
 .../fs/viewfs/ViewFileSystemOverloadScheme.java    |  10 +-
 .../hadoop/fs/viewfs/ViewFileSystemUtil.java       |   5 +-
 .../java/org/apache/hadoop/fs/viewfs/ViewFs.java   |   2 +-
 .../org/apache/hadoop/ha/ActiveStandbyElector.java |  31 +-
 .../main/java/org/apache/hadoop/ha/HAAdmin.java    |   3 +
 .../org/apache/hadoop/ha/HAServiceProtocol.java    |   8 +-
 .../java/org/apache/hadoop/ha/HAServiceTarget.java |   8 +-
 .../java/org/apache/hadoop/ha/HealthMonitor.java   |   3 +
 .../org/apache/hadoop/ha/ZKFailoverController.java |   2 +
 .../java/org/apache/hadoop/http/HtmlQuoting.java   |   1 +
 .../java/org/apache/hadoop/http/HttpServer2.java   |  36 ++-
 .../org/apache/hadoop/io/AbstractMapWritable.java  |  22 +-
 .../main/java/org/apache/hadoop/io/ArrayFile.java  |  68 ++++-
 .../apache/hadoop/io/ArrayPrimitiveWritable.java   |   4 +-
 .../org/apache/hadoop/io/BinaryComparable.java     |   9 +
 .../java/org/apache/hadoop/io/BloomMapFile.java    |   2 +-
 .../java/org/apache/hadoop/io/BooleanWritable.java |   9 +-
 .../hadoop/io/BoundedByteArrayOutputStream.java    |  14 +-
 .../java/org/apache/hadoop/io/ByteWritable.java    |  10 +-
 .../java/org/apache/hadoop/io/BytesWritable.java   |   4 +
 .../org/apache/hadoop/io/CompressedWritable.java   |  13 +-
 .../java/org/apache/hadoop/io/DataInputBuffer.java |  23 +-
 .../org/apache/hadoop/io/DataOutputBuffer.java     |  32 +-
 .../java/org/apache/hadoop/io/EnumSetWritable.java |  15 +-
 .../java/org/apache/hadoop/io/FloatWritable.java   |  10 +-
 .../java/org/apache/hadoop/io/GenericWritable.java |   4 +-
 .../main/java/org/apache/hadoop/io/IOUtils.java    |  14 +-
 .../java/org/apache/hadoop/io/InputBuffer.java     |  23 +-
 .../java/org/apache/hadoop/io/IntWritable.java     |  10 +-
 .../java/org/apache/hadoop/io/LongWritable.java    |  10 +-
 .../main/java/org/apache/hadoop/io/MD5Hash.java    |  81 ++++-
 .../main/java/org/apache/hadoop/io/MapFile.java    | 230 ++++++++++++---
 .../org/apache/hadoop/io/MultipleIOException.java  |  11 +-
 .../java/org/apache/hadoop/io/NullWritable.java    |   5 +-
 .../java/org/apache/hadoop/io/ObjectWritable.java  |  61 +++-
 .../java/org/apache/hadoop/io/OutputBuffer.java    |  20 +-
 .../java/org/apache/hadoop/io/RawComparator.java   |   2 +-
 .../java/org/apache/hadoop/io/ReadaheadPool.java   |   2 +-
 .../java/org/apache/hadoop/io/SecureIOUtils.java   |  26 +-
 .../java/org/apache/hadoop/io/SequenceFile.java    | 328 +++++++++++++++------
 .../main/java/org/apache/hadoop/io/SetFile.java    |  75 ++++-
 .../java/org/apache/hadoop/io/ShortWritable.java   |   7 +-
 .../src/main/java/org/apache/hadoop/io/Text.java   |  77 ++++-
 .../src/main/java/org/apache/hadoop/io/UTF8.java   |  48 ++-
 .../java/org/apache/hadoop/io/VIntWritable.java    |   7 +-
 .../java/org/apache/hadoop/io/VLongWritable.java   |   7 +-
 .../org/apache/hadoop/io/VersionedWritable.java    |   2 +-
 .../main/java/org/apache/hadoop/io/Writable.java   |   4 +-
 .../org/apache/hadoop/io/WritableComparator.java   | 120 ++++++--
 .../org/apache/hadoop/io/WritableFactories.java    |  26 +-
 .../java/org/apache/hadoop/io/WritableFactory.java |   2 +-
 .../java/org/apache/hadoop/io/WritableName.java    |  32 +-
 .../java/org/apache/hadoop/io/WritableUtils.java   |  34 ++-
 .../org/apache/hadoop/io/compress/BZip2Codec.java  |   8 +-
 .../io/compress/BlockDecompressorStream.java       |   4 +-
 .../org/apache/hadoop/io/compress/CodecPool.java   |  10 +-
 .../hadoop/io/compress/CompressionCodec.java       |   8 +-
 .../io/compress/CompressionCodecFactory.java       |   5 +-
 .../hadoop/io/compress/CompressionInputStream.java |   8 +-
 .../io/compress/CompressionOutputStream.java       |   4 +-
 .../org/apache/hadoop/io/compress/Compressor.java  |   3 +
 .../apache/hadoop/io/compress/Decompressor.java    |   2 +-
 .../hadoop/io/compress/DecompressorStream.java     |   2 +-
 .../org/apache/hadoop/io/compress/Lz4Codec.java    |   8 +-
 .../org/apache/hadoop/io/compress/SnappyCodec.java |   8 +-
 .../io/compress/SplittableCompressionCodec.java    |   2 +
 .../apache/hadoop/io/compress/ZStandardCodec.java  |   8 +-
 .../hadoop/io/compress/bzip2/Bzip2Compressor.java  |   1 +
 .../io/compress/bzip2/Bzip2Decompressor.java       |   2 +
 .../io/compress/bzip2/CBZip2InputStream.java       |   8 +-
 .../io/compress/bzip2/CBZip2OutputStream.java      |  10 +-
 .../hadoop/io/compress/lz4/Lz4Decompressor.java    |   2 +-
 .../io/compress/snappy/SnappyDecompressor.java     |   2 +-
 .../hadoop/io/compress/zlib/ZlibCompressor.java    |   1 +
 .../hadoop/io/compress/zlib/ZlibDecompressor.java  |   2 +
 .../hadoop/io/compress/zlib/ZlibFactory.java       |   2 +-
 .../io/compress/zstd/ZStandardCompressor.java      |   2 +
 .../io/compress/zstd/ZStandardDecompressor.java    |   1 +
 .../apache/hadoop/io/erasurecode/CodecUtil.java    |   2 +
 .../hadoop/io/erasurecode/ErasureCodeNative.java   |   2 +
 .../hadoop/io/erasurecode/coder/ErasureCoder.java  |   1 +
 .../io/erasurecode/coder/ErasureCodingStep.java    |   5 +-
 .../io/erasurecode/coder/ErasureDecoder.java       |  10 +-
 .../io/erasurecode/coder/ErasureDecodingStep.java  |   6 +-
 .../io/erasurecode/coder/ErasureEncoder.java       |   2 +-
 .../io/erasurecode/coder/ErasureEncodingStep.java  |   6 +-
 .../io/erasurecode/coder/HHErasureCodingStep.java  |   4 +-
 .../coder/HHXORErasureDecodingStep.java            |   4 +-
 .../coder/HHXORErasureEncodingStep.java            |   4 +-
 .../io/erasurecode/coder/XORErasureDecoder.java    |   2 +-
 .../hadoop/io/erasurecode/coder/util/HHUtil.java   |   2 +
 .../io/erasurecode/grouper/BlockGrouper.java       |   6 +-
 .../io/erasurecode/rawcoder/DecodingValidator.java |   4 +-
 .../io/erasurecode/rawcoder/RawErasureDecoder.java |   2 +
 .../io/erasurecode/rawcoder/RawErasureEncoder.java |   5 +-
 .../io/erasurecode/rawcoder/util/DumpUtil.java     |  10 +-
 .../hadoop/io/erasurecode/rawcoder/util/GF256.java |  10 +-
 .../io/erasurecode/rawcoder/util/GaloisField.java  |  38 ++-
 .../io/erasurecode/rawcoder/util/RSUtil.java       |  15 +
 .../org/apache/hadoop/io/file/tfile/ByteArray.java |   2 +-
 .../org/apache/hadoop/io/file/tfile/TFile.java     |  95 +++---
 .../org/apache/hadoop/io/file/tfile/Utils.java     |  20 +-
 .../org/apache/hadoop/io/nativeio/NativeIO.java    |  46 ++-
 .../apache/hadoop/io/retry/AsyncCallHandler.java   |  11 +-
 .../org/apache/hadoop/io/retry/RetryPolicies.java  |  37 +++
 .../org/apache/hadoop/io/retry/RetryProxy.java     |   5 +
 .../org/apache/hadoop/io/retry/RetryUtils.java     |   4 +-
 .../apache/hadoop/io/serializer/Deserializer.java  |   7 +-
 .../io/serializer/DeserializerComparator.java      |   2 +-
 .../io/serializer/JavaSerializationComparator.java |   2 +-
 .../apache/hadoop/io/serializer/Serialization.java |   7 +-
 .../hadoop/io/serializer/SerializationFactory.java |   2 +
 .../apache/hadoop/io/serializer/Serializer.java    |   7 +-
 .../io/serializer/avro/AvroSerialization.java      |   6 +
 .../org/apache/hadoop/ipc/AlignmentContext.java    |   2 +-
 .../org/apache/hadoop/ipc/CallQueueManager.java    |   6 +
 .../main/java/org/apache/hadoop/ipc/Client.java    |  28 +-
 .../java/org/apache/hadoop/ipc/ClientCache.java    |   2 +
 .../main/java/org/apache/hadoop/ipc/ClientId.java  |  12 +-
 .../apache/hadoop/ipc/GenericRefreshProtocol.java  |   6 +-
 .../java/org/apache/hadoop/ipc/ProtobufHelper.java |   4 +-
 .../org/apache/hadoop/ipc/ProtobufRpcEngine.java   |   8 +
 .../org/apache/hadoop/ipc/ProtobufRpcEngine2.java  |   6 +
 .../apache/hadoop/ipc/ProtocolMetaInterface.java   |   2 +-
 .../java/org/apache/hadoop/ipc/ProtocolProxy.java  |   3 +-
 .../src/main/java/org/apache/hadoop/ipc/RPC.java   | 180 +++++++----
 .../hadoop/ipc/RefreshCallQueueProtocol.java       |   2 +-
 .../org/apache/hadoop/ipc/RefreshRegistry.java     |   1 +
 .../org/apache/hadoop/ipc/RemoteException.java     |   5 +-
 .../java/org/apache/hadoop/ipc/RetryCache.java     |  22 +-
 .../java/org/apache/hadoop/ipc/RpcClientUtil.java  |   6 +-
 .../main/java/org/apache/hadoop/ipc/RpcEngine.java |  37 ++-
 .../java/org/apache/hadoop/ipc/RpcScheduler.java   |   9 +-
 .../org/apache/hadoop/ipc/RpcServerException.java  |   4 +-
 .../main/java/org/apache/hadoop/ipc/Server.java    |  68 ++++-
 .../org/apache/hadoop/ipc/VersionedProtocol.java   |   1 +
 .../org/apache/hadoop/ipc/WritableRpcEngine.java   |  50 +++-
 .../metrics/DecayRpcSchedulerDetailedMetrics.java  |   7 +-
 .../java/org/apache/hadoop/jmx/JMXJsonServlet.java |  11 +-
 .../main/java/org/apache/hadoop/log/LogLevel.java  |   2 +
 .../org/apache/hadoop/log/LogThrottlingHelper.java |   8 +-
 .../org/apache/hadoop/metrics2/MetricsSystem.java  |   6 +-
 .../hadoop/metrics2/MetricsSystemMXBean.java       |  10 +-
 .../hadoop/metrics2/lib/MutableMetricsFactory.java |   4 +-
 .../metrics2/lib/MutableRollingAverages.java       |   4 +-
 .../apache/hadoop/metrics2/lib/MutableStat.java    |   2 +-
 .../org/apache/hadoop/metrics2/package-info.java   |  10 +-
 .../metrics2/sink/PrometheusMetricsSink.java       |   4 +
 .../metrics2/sink/ganglia/AbstractGangliaSink.java |   5 +-
 .../metrics2/sink/ganglia/GangliaSink30.java       |   2 +-
 .../metrics2/sink/ganglia/GangliaSink31.java       |   2 +-
 .../org/apache/hadoop/metrics2/util/MBeans.java    |   8 +-
 .../hadoop/metrics2/util/SampleQuantiles.java      |   2 +-
 .../src/main/java/org/apache/hadoop/net/DNS.java   |  10 +-
 .../org/apache/hadoop/net/DNSToSwitchMapping.java  |   2 +
 .../org/apache/hadoop/net/DomainNameResolver.java  |  13 +-
 .../main/java/org/apache/hadoop/net/InnerNode.java |   5 +-
 .../java/org/apache/hadoop/net/InnerNodeImpl.java  |  15 +-
 .../main/java/org/apache/hadoop/net/NetUtils.java  |  60 ++--
 .../org/apache/hadoop/net/NetworkTopology.java     |  24 +-
 .../org/apache/hadoop/net/ScriptBasedMapping.java  |   8 +-
 .../net/ScriptBasedMappingWithDependency.java      |   5 +-
 .../org/apache/hadoop/net/SocketInputStream.java   |   8 +-
 .../org/apache/hadoop/net/SocketOutputStream.java  |  13 +-
 .../org/apache/hadoop/net/unix/DomainSocket.java   |  13 +-
 .../org/apache/hadoop/security/Credentials.java    |  33 ++-
 .../security/GroupMappingServiceProvider.java      |   8 +-
 .../java/org/apache/hadoop/security/Groups.java    |   4 +-
 .../apache/hadoop/security/HadoopKerberosName.java |   4 +-
 .../java/org/apache/hadoop/security/KDiag.java     |   5 +-
 .../org/apache/hadoop/security/KerberosInfo.java   |   5 +-
 .../apache/hadoop/security/NullGroupsMapping.java  |   2 +-
 .../org/apache/hadoop/security/ProviderUtils.java  |   1 +
 .../security/RefreshUserMappingsProtocol.java      |   4 +-
 .../apache/hadoop/security/SaslInputStream.java    |   2 +-
 .../hadoop/security/SaslPropertiesResolver.java    |   2 +-
 .../org/apache/hadoop/security/SaslRpcClient.java  |  14 +-
 .../org/apache/hadoop/security/SaslRpcServer.java  |  25 +-
 .../org/apache/hadoop/security/SecurityUtil.java   |  14 +-
 .../hadoop/security/ShellBasedIdMapping.java       |   9 +-
 .../ShellBasedUnixGroupsNetgroupMapping.java       |   2 +
 .../hadoop/security/UserGroupInformation.java      |  46 +--
 .../hadoop/security/alias/CredentialProvider.java  |  13 +-
 .../hadoop/security/alias/CredentialShell.java     |   6 +-
 .../security/authorize/AccessControlList.java      |   1 +
 .../security/authorize/ImpersonationProvider.java  |   4 +-
 .../hadoop/security/authorize/ProxyUsers.java      |  12 +-
 .../RefreshAuthorizationPolicyProtocol.java        |   2 +-
 .../security/ssl/ReloadingX509KeystoreManager.java |   4 +-
 .../security/token/DelegationTokenIssuer.java      |  13 +
 .../apache/hadoop/security/token/DtFetcher.java    |  23 +-
 .../hadoop/security/token/DtFileOperations.java    |  18 +-
 .../apache/hadoop/security/token/DtUtilShell.java  |   4 +-
 .../org/apache/hadoop/security/token/Token.java    |  18 +-
 .../apache/hadoop/security/token/TokenInfo.java    |   6 +-
 .../apache/hadoop/security/token/TokenRenewer.java |  40 ++-
 .../AbstractDelegationTokenSecretManager.java      |  94 ++++--
 .../web/DelegationTokenAuthenticatedURL.java       |   6 +
 .../web/DelegationTokenAuthenticationFilter.java   |   1 +
 .../web/DelegationTokenAuthenticator.java          |   8 +
 .../org/apache/hadoop/service/AbstractService.java |   2 +-
 .../apache/hadoop/service/CompositeService.java    |   2 +-
 .../apache/hadoop/service/ServiceStateModel.java   |   3 +
 .../launcher/AbstractLaunchableService.java        |   2 +
 .../hadoop/service/launcher/ServiceLauncher.java   |   8 +-
 .../hadoop/service/launcher/package-info.java      |  27 +-
 .../java/org/apache/hadoop/tools/CommandShell.java |   4 +-
 .../org/apache/hadoop/tools/GetGroupsBase.java     |   6 +-
 .../hadoop/tools/GetUserMappingsProtocol.java      |   2 +-
 .../java/org/apache/hadoop/tools/TableListing.java |  11 +-
 .../org/apache/hadoop/util/AsyncDiskService.java   |   7 +-
 .../util/BlockingThreadPoolExecutorService.java    |   1 +
 .../java/org/apache/hadoop/util/CrcComposer.java   |  27 ++
 .../main/java/org/apache/hadoop/util/CrcUtil.java  |  36 +++
 .../main/java/org/apache/hadoop/util/Daemon.java   |  11 +-
 .../java/org/apache/hadoop/util/DataChecksum.java  |  55 +++-
 .../org/apache/hadoop/util/DirectBufferPool.java   |   3 +
 .../java/org/apache/hadoop/util/DiskChecker.java   |  16 +-
 .../apache/hadoop/util/DiskValidatorFactory.java   |   2 +
 .../java/org/apache/hadoop/util/GcTimeMonitor.java |  35 ++-
 .../apache/hadoop/util/GenericOptionsParser.java   |  38 ++-
 .../java/org/apache/hadoop/util/GenericsUtil.java  |   4 +
 .../main/java/org/apache/hadoop/util/IPList.java   |   2 +-
 .../java/org/apache/hadoop/util/IdGenerator.java   |   5 +-
 .../org/apache/hadoop/util/IdentityHashStore.java  |  11 +
 .../org/apache/hadoop/util/IndexedSortable.java    |   7 +
 .../java/org/apache/hadoop/util/IndexedSorter.java |   8 +
 .../org/apache/hadoop/util/InstrumentedLock.java   |   1 +
 .../apache/hadoop/util/IntrusiveCollection.java    |  23 ++
 .../org/apache/hadoop/util/JsonSerialization.java  |   4 +-
 .../org/apache/hadoop/util/JvmPauseMonitor.java    |   3 +
 .../org/apache/hadoop/util/LightWeightCache.java   |   7 +-
 .../org/apache/hadoop/util/LightWeightGSet.java    |  19 +-
 .../hadoop/util/LightWeightResizableGSet.java      |   2 +
 .../java/org/apache/hadoop/util/LineReader.java    |   6 +-
 .../main/java/org/apache/hadoop/util/Lists.java    |  30 +-
 .../java/org/apache/hadoop/util/MachineList.java   |   6 +-
 .../org/apache/hadoop/util/NativeCodeLoader.java   |   8 +-
 .../apache/hadoop/util/NativeLibraryChecker.java   |   3 +-
 .../org/apache/hadoop/util/OperationDuration.java  |   4 +-
 .../main/java/org/apache/hadoop/util/Options.java  |   2 +-
 .../org/apache/hadoop/util/PrintJarMainClass.java  |   2 +-
 .../java/org/apache/hadoop/util/PriorityQueue.java |  36 ++-
 .../java/org/apache/hadoop/util/ProgramDriver.java |  20 +-
 .../main/java/org/apache/hadoop/util/Progress.java |  49 ++-
 .../java/org/apache/hadoop/util/ProtoUtil.java     |   4 +
 .../java/org/apache/hadoop/util/QuickSort.java     |   3 +
 .../org/apache/hadoop/util/ReflectionUtils.java    |  15 +-
 .../main/java/org/apache/hadoop/util/RunJar.java   |   7 +-
 .../org/apache/hadoop/util/SequentialNumber.java   |  17 +-
 .../java/org/apache/hadoop/util/ServletUtil.java   |  16 +-
 .../src/main/java/org/apache/hadoop/util/Sets.java |  64 +++-
 .../main/java/org/apache/hadoop/util/Shell.java    |  98 +++++-
 .../apache/hadoop/util/ShutdownThreadsHelper.java  |   8 +-
 .../java/org/apache/hadoop/util/StopWatch.java     |   3 +
 .../org/apache/hadoop/util/StringInterner.java     |   3 +
 .../java/org/apache/hadoop/util/StringUtils.java   |  60 +++-
 .../src/main/java/org/apache/hadoop/util/Time.java |   2 +
 .../src/main/java/org/apache/hadoop/util/Tool.java |   6 +-
 .../java/org/apache/hadoop/util/ToolRunner.java    |  10 +-
 .../main/java/org/apache/hadoop/util/XMLUtils.java |   6 +-
 .../main/java/org/apache/hadoop/util/ZKUtil.java   |   1 +
 .../java/org/apache/hadoop/util/bloom/Key.java     |   4 +-
 .../apache/hadoop/util/concurrent/AsyncGet.java    |   8 +-
 .../hadoop/util/curator/ZKCuratorManager.java      |  13 +-
 .../util/functional/CommonCallableSupplier.java    |   5 +
 .../hadoop/util/functional/RemoteIterators.java    |  20 +-
 .../hadoop/util/functional/package-info.java       |   6 +-
 .../org/apache/hadoop/util/hash/JenkinsHash.java   |   2 +-
 .../java/org/apache/hadoop/util/TestShell.java     |   2 +-
 366 files changed, 4843 insertions(+), 1353 deletions(-)

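Nearly every hunk below follows the same pattern: a javadoc comment that was missing its @param, @return, or @throws tags (or carried a bare "@throws IOException" with no description) gets the tags filled in, and unbalanced <p> elements are closed, so that the javadoc tool's doclint checks pass. As a sketch of the target shape only (the method below is hypothetical, not taken from the patch), a doclint-clean comment carries one tag per parameter, a @return on non-void methods, and a described @throws per checked exception:

    /**
     * Return the value of <code>name</code>, or a default when unset.
     * (Hypothetical example; not part of this commit.)
     *
     * @param name property name.
     * @param defaultValue value to return when the property is unset.
     * @return the property value, or <code>defaultValue</code> when unset.
     * @throws IOException raised on errors performing I/O.
     */
    public String getOrDefault(String name, String defaultValue) throws IOException {
      String value = lookup(name);   // lookup(...) is assumed to exist
      return value == null ? defaultValue : value;
    }
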
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 6e762f567c1..d8e2dd35422 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -1171,6 +1171,16 @@
               </execution>
              </executions>
           </plugin>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-javadoc-plugin</artifactId>
+            <configuration>
+              <sourceFileExcludes>
+                <sourceFileExclude>**/FSProtos.java</sourceFileExclude>
+              </sourceFileExcludes>
+              <excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
+            </configuration>
+          </plugin>
         </plugins>
       </build>
     </profile>
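
The plugin block above narrows what the javadoc build sees: the generated protobuf source FSProtos.java and the *.proto, *.tracing and *.protobuf packages are skipped, so doclint runs only against hand-written code. One way to check the result locally, assuming a normal build environment, is the standard plugin goal mvn javadoc:javadoc run from hadoop-common-project/hadoop-common.
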
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
index 5b2d1449f9c..881a2ce811b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
@@ -57,8 +57,8 @@ public class ConfigRedactor {
    * Given a key / value pair, decides whether or not to redact and returns
    * either the original value or text indicating it has been redacted.
    *
-   * @param key
-   * @param value
+   * @param key param key.
+   * @param value param value, will return if conditions permit.
    * @return Original value, or text indicating it has been redacted
    */
   public String redact(String key, String value) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 1f809b7b547..5f720841d76 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -317,7 +317,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   private boolean loadDefaults = true;
 
   /**
-   * Configuration objects
+   * Configuration objects.
    */
   private static final WeakHashMap<Configuration,Object> REGISTRY = 
     new WeakHashMap<Configuration,Object>();
@@ -1908,6 +1908,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name Property name
    * @param vStr The string value with time unit suffix to be converted.
    * @param unit Unit to convert the stored property, if it exists.
+   * @return time duration in given time unit.
    */
   public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
     return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1922,6 +1923,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param vStr The string value with time unit suffix to be converted.
    * @param defaultUnit Unit to convert the stored property, if it exists.
    * @param returnUnit Unit for the returned value.
+   * @return time duration in given time unit.
    */
   private long getTimeDurationHelper(String name, String vStr,
       TimeUnit defaultUnit, TimeUnit returnUnit) {
@@ -2206,7 +2208,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
 
     /**
-     * Is the given value in the set of ranges
+     * Is the given value in the set of ranges.
      * @param value the value to check
      * @return is the value in the ranges?
      */
@@ -2263,7 +2265,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
 
   /**
-   * Parse the given attribute as a set of integer ranges
+   * Parse the given attribute as a set of integer ranges.
    * @param name the attribute name
    * @param defaultValue the default value if it is not set
    * @return a new set of ranges from the configured value
@@ -2482,7 +2484,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Fallback to clear text passwords in configuration.
-   * @param name
+   * @param name the property name.
    * @return clear text password or null
    */
   protected char[] getPasswordFromConfig(String name) {
@@ -2547,6 +2549,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Set the socket address for the <code>name</code> property as
    * a <code>host:port</code>.
+   * @param name property name.
+   * @param addr inetSocketAddress addr.
    */
   public void setSocketAddr(String name, InetSocketAddress addr) {
     set(name, NetUtils.getHostPortString(addr));
@@ -2724,6 +2728,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the conf key name.
    * @param defaultValue default value.
    * @param xface the interface implemented by the named class.
+   * @param <U> Interface class type.
    * @return property value as a <code>Class</code>, 
    *         or <code>defaultValue</code>.
    */
@@ -2753,6 +2758,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param name the property name.
    * @param xface the interface implemented by the classes named by
    *        <code>name</code>.
+   * @param <U> Interface class type.
    * @return a <code>List</code> of objects implementing <code>xface</code>.
    */
   @SuppressWarnings("unchecked")
@@ -2785,15 +2791,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     set(name, theClass.getName());
   }
 
-  /** 
+  /**
    * Get a local file under a directory named by <i>dirsProp</i> with
    * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
    * then one is chosen based on <i>path</i>'s hash code.  If the selected
    * directory does not exist, an attempt is made to create it.
-   * 
+   *
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPath(String dirsProp, String path)
     throws IOException {
@@ -2817,15 +2824,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     throw new IOException("No valid local directories in property: "+dirsProp);
   }
 
-  /** 
+  /**
    * Get a local file name under a directory named in <i>dirsProp</i> with
    * the given <i>path</i>.  If <i>dirsProp</i> contains multiple directories,
    * then one is chosen based on <i>path</i>'s hash code.  If the selected
    * directory does not exist, an attempt is made to create it.
-   * 
+   *
    * @param dirsProp directory in which to locate the file.
    * @param path file-path.
    * @return local file under the directory with the given path.
+   * @throws IOException raised on errors performing I/O.
    */
   public File getFile(String dirsProp, String path)
     throws IOException {
@@ -3437,7 +3445,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
-   * @param prop
+   * @param prop properties.
    */
   public void addTags(Properties prop) {
     // Get all system tags
@@ -3538,7 +3546,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
 
   /**
    * Print a warning if a property with a given name already exists with a
-   * different value
+   * different value.
    */
   private void checkForOverride(Properties properties, String name, String attr, String value) {
     String propertyValue = properties.getProperty(attr);
@@ -3548,11 +3556,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     }
   }
 
-  /** 
+  /**
    * Write out the non-default properties in this configuration to the given
    * {@link OutputStream} using UTF-8 encoding.
-   * 
+   *
    * @param out the output stream to write to.
+   * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(OutputStream out) throws IOException {
     writeXml(new OutputStreamWriter(out, "UTF-8"));
@@ -3582,7 +3591,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * the configuration, this method throws an {@link IllegalArgumentException}.
    * </li>
    * </ul>
+   * @param propertyName xml property name.
    * @param out the writer to write to.
+   * @throws IOException raised on errors performing I/O.
    */
   public void writeXml(@Nullable String propertyName, Writer out)
       throws IOException, IllegalArgumentException {
@@ -3736,7 +3747,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param config the configuration
    * @param propertyName property name
    * @param out the Writer to write to
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @throws IllegalArgumentException when property name is not
    *   empty and the property is not found in configuration
    **/
@@ -3783,7 +3794,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    *
    * @param config the configuration
    * @param out the Writer to write to
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void dumpConfiguration(Configuration config,
       Writer out) throws IOException {
@@ -3812,7 +3823,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
    * @param jsonGen json writer
    * @param config configuration
    * @param name property name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   private static void appendJSONProperty(JsonGenerator jsonGen,
       Configuration config, String name, ConfigRedactor redactor)
@@ -3894,7 +3905,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
     return this.quietmode;
   }
   
-  /** For debugging.  List non-default properties to the terminal and exit. */
+  /** For debugging.  List non-default properties to the terminal and exit.
+   * @param args the argument to be parsed.
+   * @throws Exception exception.
+   */
   public static void main(String[] args) throws Exception {
     new Configuration().writeXml(System.out);
   }
@@ -3928,8 +3942,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   }
   
   /**
-   * get keys matching the the regex 
-   * @param regex
+   * get keys matching the the regex.
+   * @param regex the regex to match against.
    * @return {@literal Map<String,String>} with matching keys
    */
   public Map<String,String> getValByRegex(String regex) {
@@ -3974,6 +3988,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
   /**
    * Returns whether or not a deprecated name has been warned. If the name is not
    * deprecated then always return false
+   * @param name proprties.
+   * @return true if name is a warned deprecation.
    */
   public static boolean hasWarnedDeprecation(String name) {
     DeprecationContext deprecations = deprecationContext.get();
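
The Configuration hunks above complete existing contracts rather than changing behavior. A short, self-contained sketch of three of the newly tagged methods in use (illustrative only; the property names are made up):

    import java.io.IOException;
    import java.util.Map;
    import java.util.concurrent.TimeUnit;
    import org.apache.hadoop.conf.Configuration;

    public class ConfigurationJavadocDemo {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // getTimeDurationHelper: "30s" parsed into the requested unit.
        long millis = conf.getTimeDurationHelper(
            "demo.interval", "30s", TimeUnit.MILLISECONDS);
        // getValByRegex: every key matching the regex, with its value.
        Map<String, String> ipcKeys = conf.getValByRegex("^ipc\\..*");
        // writeXml: non-default properties as XML; now declares its IOException.
        conf.writeXml(System.out);
        System.out.println(millis + " ms; " + ipcKeys.size() + " ipc keys");
      }
    }
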
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
index f06af2b98df..77a7117d196 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
@@ -33,7 +33,9 @@ public class Configured implements Configurable {
     this(null);
   }
   
-  /** Construct a Configured. */
+  /** Construct a Configured.
+   * @param conf the Configuration object.
+   */
   public Configured(Configuration conf) {
     setConf(conf);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
index c93dc31a881..915faf4c237 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
@@ -33,6 +33,9 @@ public interface Reconfigurable extends Configurable {
    * (or null if it was not previously set). If newVal is null, set the property
    * to its default value;
    *
+   * @param property property name.
+   * @param newVal new value.
+   * @throws ReconfigurationException if there was an error applying newVal.
    * If the property cannot be changed, throw a 
    * {@link ReconfigurationException}.
    */
@@ -45,11 +48,14 @@ public interface Reconfigurable extends Configurable {
    * If isPropertyReconfigurable returns true for a property,
    * then changeConf should not throw an exception when changing
    * this property.
+   * @param property property name.
+   * @return true if property reconfigurable; false if not.
    */
   boolean isPropertyReconfigurable(String property);
 
   /**
    * Return all the properties that can be changed at run time.
+   * @return reconfigurable propertys.
    */
   Collection<String> getReconfigurableProperties();
 }
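
A minimal sketch of the contract the new Reconfigurable javadoc spells out: isPropertyReconfigurable(p) is true exactly for the properties in getReconfigurableProperties(), and changing any of those must not throw. (Illustrative, not Hadoop code, and deliberately not implementing the full interface, whose property-change method lies outside this hunk.)

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    class ReconfigurableSketch {
      private final Set<String> reconfigurable =
          new HashSet<>(Arrays.asList("demo.cache.size", "demo.log.level"));

      public boolean isPropertyReconfigurable(String property) {
        return reconfigurable.contains(property);
      }

      public Collection<String> getReconfigurableProperties() {
        return reconfigurable;
      }
    }
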
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index 35dfeb99f0b..1c451ca6d30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -79,6 +79,7 @@ public abstract class ReconfigurableBase
   /**
    * Construct a ReconfigurableBase with the {@link Configuration}
    * conf.
+   * @param conf configuration.
    */
   public ReconfigurableBase(Configuration conf) {
     super((conf == null) ? new Configuration() : conf);
@@ -91,6 +92,7 @@ public abstract class ReconfigurableBase
 
   /**
    * Create a new configuration.
+   * @return configuration.
    */
   protected abstract Configuration getNewConf();
 
@@ -162,6 +164,7 @@ public abstract class ReconfigurableBase
 
   /**
    * Start a reconfiguration task to reload configuration in background.
+   * @throws IOException raised on errors performing I/O.
    */
   public void startReconfigurationTask() throws IOException {
     synchronized (reconfigLock) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
index 0935bf025fd..b22af76c9eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
@@ -59,6 +59,10 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
+   * @param property property name.
+   * @param newVal new value.
+   * @param oldVal old value.
+   * @param cause original exception.
    */
   public ReconfigurationException(String property, 
                                   String newVal, String oldVal,
@@ -71,6 +75,9 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Create a new instance of {@link ReconfigurationException}.
+   * @param property property name.
+   * @param newVal new value.
+   * @param oldVal old value.
    */
   public ReconfigurationException(String property, 
                                   String newVal, String oldVal) {
@@ -82,6 +89,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get property that cannot be changed.
+   * @return property info.
    */
   public String getProperty() {
     return property;
@@ -89,6 +97,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get value to which property was supposed to be changed.
+   * @return new value.
    */
   public String getNewValue() {
     return newVal;
@@ -96,6 +105,7 @@ public class ReconfigurationException extends Exception {
 
   /**
    * Get old value of property that cannot be changed.
+   * @return old value.
    */
   public String getOldValue() {
     return oldVal;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
index 05ec90758e5..ca9ddb61566 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
@@ -42,7 +42,8 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if
    *   - A reconfiguration task has finished or
-   *   - an active reconfiguration task is running
+   *   - an active reconfiguration task is running.
+   * @return true if startTime &gt; 0; false if not.
    */
   public boolean hasTask() {
     return startTime > 0;
@@ -51,6 +52,7 @@ public class ReconfigurationTaskStatus {
   /**
    * Return true if the latest reconfiguration task has finished and there is
    * no another active task running.
+   * @return true if endTime &gt; 0; false if not.
    */
   public boolean stopped() {
     return endTime > 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
index 64c754faa59..e6813b96a26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
@@ -145,14 +145,18 @@ public abstract class CryptoCodec implements Configurable, Closeable {
   public abstract CipherSuite getCipherSuite();
 
   /**
-   * Create a {@link org.apache.hadoop.crypto.Encryptor}. 
-   * @return Encryptor the encryptor
+   * Create a {@link org.apache.hadoop.crypto.Encryptor}.
+   *
+   * @return Encryptor the encryptor.
+   * @throws GeneralSecurityException thrown if create encryptor error.
    */
   public abstract Encryptor createEncryptor() throws GeneralSecurityException;
-  
+
   /**
    * Create a {@link org.apache.hadoop.crypto.Decryptor}.
+   *
    * @return Decryptor the decryptor
+   * @throws GeneralSecurityException thrown if create decryptor error.
    */
   public abstract Decryptor createDecryptor() throws GeneralSecurityException;
   
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
index 5ab5d341fb8..067abde9dfb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
@@ -157,7 +157,7 @@ public class CryptoInputStream extends FilterInputStream implements
    * @param off the buffer offset.
    * @param len the maximum number of decrypted data bytes to read.
    * @return int the total number of decrypted data bytes read into the buffer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public int read(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
index 8e752211255..2a1335b6e74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
@@ -146,7 +146,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
    * @param b the data.
    * @param off the start offset in the data.
    * @param len the number of bytes to write.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public synchronized void write(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index 318975fd6ce..dad4d20df2a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -39,7 +39,11 @@ public class CryptoStreamUtils {
   private static final Logger LOG =
       LoggerFactory.getLogger(CryptoStreamUtils.class);
 
-  /** Forcibly free the direct buffer. */
+  /**
+   * Forcibly free the direct buffer.
+   *
+   * @param buffer buffer.
+   */
   public static void freeDB(ByteBuffer buffer) {
     if (CleanerUtil.UNMAP_SUPPORTED) {
       try {
@@ -52,13 +56,22 @@ public class CryptoStreamUtils {
     }
   }
 
-  /** Read crypto buffer size */
+  /**
+   * Read crypto buffer size.
+   *
+   * @param conf configuration.
+   * @return hadoop.security.crypto.buffer.size.
+   */
   public static int getBufferSize(Configuration conf) {
     return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY, 
         HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
   }
-  
-  /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
+
+  /**
+   * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
+   *
+   * @param codec crypto codec.
+   */
   public static void checkCodec(CryptoCodec codec) {
     if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
             codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -67,17 +80,27 @@ public class CryptoStreamUtils {
     }
   }
 
-  /** Check and floor buffer size */
+  /**
+   * Check and floor buffer size.
+   *
+   * @param codec crypto codec.
+   * @param bufferSize the size of the buffer to be used.
+   * @return calc buffer size.
+   */
   public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
     Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE, 
         "Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
     return bufferSize - bufferSize % codec.getCipherSuite()
         .getAlgorithmBlockSize();
   }
-  
+
   /**
    * If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
    * current position, otherwise return 0;
+   *
+   * @param in wrapper.
+   * @return current position, otherwise return 0.
+   * @throws IOException raised on errors performing I/O.
    */
   public static long getInputStreamOffset(InputStream in) throws IOException {
     if (in instanceof Seekable) {
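
For the checkBufferSize flooring above: the method returns the largest multiple of the cipher's algorithm block size that does not exceed the request, so with a 16-byte block size a request of 8197 bytes comes back as 8192 (8197 - 8197 % 16).
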
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 0c65b74b291..b166cfc8611 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -225,34 +225,33 @@ public final class OpensslCipher {
     output.position(output.position() + len);
     return len;
   }
-  
+
   /**
    * Finishes a multiple-part operation. The data is encrypted or decrypted,
    * depending on how this cipher was initialized.
    * <p>
-   * 
    * The result is stored in the output buffer. Upon return, the output buffer's
    * position will have advanced by n, where n is the value returned by this
    * method; the output buffer's limit will not have changed.
-   * <p>
-   * 
+   * </p>
    * If <code>output.remaining()</code> bytes are insufficient to hold the result,
    * a <code>ShortBufferException</code> is thrown.
    * <p>
-   * 
    * Upon finishing, this method resets this cipher object to the state it was
    * in when previously initialized. That is, the object is available to encrypt
    * or decrypt more data.
-   * <p>
-   * 
-   * If any exception is thrown, this cipher object need to be reset before it 
+   * </p>
+   * If any exception is thrown, this cipher object need to be reset before it
    * can be used again.
-   * 
+   *
    * @param output the output ByteBuffer
    * @return int number of bytes stored in <code>output</code>
-   * @throws ShortBufferException
-   * @throws IllegalBlockSizeException
-   * @throws BadPaddingException
+   * @throws ShortBufferException      if there is insufficient space in the output buffer.
+   * @throws IllegalBlockSizeException This exception is thrown when the length
+   *                                   of data provided to a block cipher is incorrect.
+   * @throws BadPaddingException       This exception is thrown when a particular
+   *                                   padding mechanism is expected for the input
+   *                                   data but the data is not padded properly.
    */
   public int doFinal(ByteBuffer output) throws ShortBufferException, 
       IllegalBlockSizeException, BadPaddingException {
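
The rewritten doFinal javadoc above pins down the usual ByteBuffer convention, which the caller-side sketch below illustrates (cipher stands for an already-initialized OpensslCipher; exception handling is elided):

    ByteBuffer output = ByteBuffer.allocateDirect(4096);
    int start = output.position();    // where doFinal begins writing
    int n = cipher.doFinal(output);   // position advances by n; limit is unchanged
    output.flip();                    // limit = old position, position = 0
    output.position(start);           // output.remaining() == n result bytes
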
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index dafdaf7e15b..4d1674bd7b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -242,7 +242,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Serialize the metadata to a set of bytes.
      * @return the serialized bytes
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     protected byte[] serialize() throws IOException {
       ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ public abstract class KeyProvider implements Closeable {
     /**
      * Deserialize a new metadata object from a set of bytes.
      * @param bytes the serialized metadata
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     protected Metadata(byte[] bytes) throws IOException {
       String cipher = null;
@@ -450,7 +450,7 @@ public abstract class KeyProvider implements Closeable {
    * when decrypting data.
    * @param versionName the name of a specific version of the key
    * @return the key material
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion getKeyVersion(String versionName
                                             ) throws IOException;
@@ -458,14 +458,15 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Get the key names for all keys.
    * @return the list of key names
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract List<String> getKeys() throws IOException;
 
   /**
    * Get key metadata in bulk.
    * @param names the names of the keys to get
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
+   * @return Metadata Array.
    */
   public Metadata[] getKeysMetadata(String... names) throws IOException {
     Metadata[] result = new Metadata[names.length];
@@ -477,8 +478,10 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Get the key material for all versions of a specific key name.
+   *
+   * @param name the base name of the key.
    * @return the list of key material
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
 
@@ -488,7 +491,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @return the version name of the current version of the key or null if the
    *    key version doesn't exist
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public KeyVersion getCurrentKey(String name) throws IOException {
     Metadata meta = getMetadata(name);
@@ -502,7 +505,7 @@ public abstract class KeyProvider implements Closeable {
    * Get metadata about the key.
    * @param name the basename of the key
    * @return the key's metadata or null if the key doesn't exist
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract Metadata getMetadata(String name) throws IOException;
 
@@ -512,7 +515,7 @@ public abstract class KeyProvider implements Closeable {
    * @param material the key material for the first version of the key.
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion createKey(String name, byte[] material,
                                        Options options) throws IOException;
@@ -537,7 +540,7 @@ public abstract class KeyProvider implements Closeable {
    * @param size length of the key.
    * @param algorithm algorithm to use for generating the key.
    * @return the generated key.
-   * @throws NoSuchAlgorithmException
+   * @throws NoSuchAlgorithmException no such algorithm exception.
    */
   protected byte[] generateKey(int size, String algorithm)
       throws NoSuchAlgorithmException {
@@ -558,8 +561,8 @@ public abstract class KeyProvider implements Closeable {
    * @param name the base name of the key
    * @param options the options for the new key.
    * @return the version name of the first version of the key.
-   * @throws IOException
-   * @throws NoSuchAlgorithmException
+   * @throws IOException raised on errors performing I/O.
+   * @throws NoSuchAlgorithmException no such algorithm exception.
    */
   public KeyVersion createKey(String name, Options options)
       throws NoSuchAlgorithmException, IOException {
@@ -570,7 +573,7 @@ public abstract class KeyProvider implements Closeable {
   /**
    * Delete the given key.
    * @param name the name of the key to delete
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteKey(String name) throws IOException;
 
@@ -579,7 +582,7 @@ public abstract class KeyProvider implements Closeable {
    * @param name the basename of the key
    * @param material the new key material
    * @return the name of the new version of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract KeyVersion rollNewVersion(String name,
                                              byte[] material
@@ -601,7 +604,10 @@ public abstract class KeyProvider implements Closeable {
    *
    * @param name the basename of the key
    * @return the name of the new version of the key
-   * @throws IOException
+   * @throws IOException              raised on errors performing I/O.
+   * @throws NoSuchAlgorithmException This exception is thrown when a particular
+   *                                  cryptographic algorithm is requested
+   *                                  but is not available in the environment.
    */
   public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
                                                        IOException {
@@ -620,7 +626,7 @@ public abstract class KeyProvider implements Closeable {
    * version of the given key.
    *
    * @param name the basename of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void invalidateCache(String name) throws IOException {
     // NOP
@@ -628,7 +634,7 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Ensures that any changes to the keys are written to persistent store.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void flush() throws IOException;
 
@@ -637,7 +643,7 @@ public abstract class KeyProvider implements Closeable {
    * "/aaa/bbb".
    * @param versionName the version name to split
    * @return the base name of the key
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static String getBaseName(String versionName) throws IOException {
     int div = versionName.lastIndexOf('@');
@@ -660,9 +666,11 @@ public abstract class KeyProvider implements Closeable {
 
   /**
    * Find the provider with the given key.
+   *
    * @param providerList the list of providers
-   * @param keyName the key name we are looking for
+   * @param keyName the key name we are looking for.
    * @return the KeyProvider that has the key
+   * @throws IOException raised on errors performing I/O.
    */
   public static KeyProvider findProvider(List<KeyProvider> providerList,
                                          String keyName) throws IOException {
@@ -680,7 +688,7 @@ public abstract class KeyProvider implements Closeable {
    * means. If true, the password should be provided by the caller using
    * setPassword().
    * @return Whether or not the provider requires a password
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public boolean needsPassword() throws IOException {
     return false;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index 3f3c367fc39..d706e5ef100 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -178,6 +178,7 @@ public class KeyProviderCryptoExtension extends
      * Calls to this method allows the underlying KeyProvider to warm-up any
      * implementation specific caches used to store the Encrypted Keys.
      * @param keyNames Array of Key Names
+     * @throws IOException thrown if the key material could not be encrypted.
      */
     public void warmUpEncryptedKeys(String... keyNames)
         throws IOException;
@@ -474,8 +475,9 @@ public class KeyProviderCryptoExtension extends
   /**
    * This constructor is to be used by sub classes that provide
    * delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
-   * @param keyProvider
-   * @param extension
+   *
+   * @param keyProvider key provider.
+   * @param extension crypto extension.
    */
   protected KeyProviderCryptoExtension(KeyProvider keyProvider,
       CryptoExtension extension) {
@@ -486,6 +488,7 @@ public class KeyProviderCryptoExtension extends
    * Notifies the Underlying CryptoExtension implementation to warm up any
    * implementation specific caches for the specified KeyVersions
    * @param keyNames Arrays of key Names
+   * @throws IOException raised on errors performing I/O.
    */
   public void warmUpEncryptedKeys(String... keyNames)
       throws IOException {
@@ -557,7 +560,7 @@ public class KeyProviderCryptoExtension extends
    * Calls {@link CryptoExtension#drain(String)} for the given key name on the
    * underlying {@link CryptoExtension}.
    *
-   * @param keyName
+   * @param keyName key name.
    */
   public void drain(String keyName) {
     getExtension().drain(keyName);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
index 1fdc2fe1245..3c1af424eb7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
@@ -48,14 +48,14 @@ public class KeyProviderDelegationTokenExtension extends
      * Renews the given token.
      * @param token The token to be renewed.
      * @return The token's lifetime after renewal, or 0 if it can't be renewed.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     long renewDelegationToken(final Token<?> token) throws IOException;
 
     /**
      * Cancels the given token.
      * @param token The token to be cancelled.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     Void cancelDelegationToken(final Token<?> token) throws IOException;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
index a75f7d3aa63..c18d0d41bc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
    * </pre>
    * @param args Command line arguments.
    * @return 0 on success, 1 on failure.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ public class KeyShell extends CommandShell {
    * success and 1 for failure.
    *
    * @param args Command line arguments.
-   * @throws Exception
+   * @throws Exception if the command fails.
    */
   public static void main(String[] args) throws Exception {
     int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
index be2db05842c..65eded918d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
@@ -63,7 +63,7 @@ public class ValueQueue <E> {
      * @param keyName Key name
      * @param keyQueue Queue that needs to be filled
      * @param numValues number of Values to be added to the queue.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public void fillQueueForKey(String keyName,
         Queue<E> keyQueue, int numValues) throws IOException;
@@ -268,7 +268,7 @@ public class ValueQueue <E> {
    * Initializes the Value Queues for the provided keys by calling the
    * fill Method with "numInitValues" values
    * @param keyNames Array of key Names
-   * @throws ExecutionException
+   * @throws ExecutionException if initializing a value queue fails.
    */
   public void initializeQueuesForKeys(String... keyNames)
       throws ExecutionException {
@@ -285,8 +285,8 @@ public class ValueQueue <E> {
    * function to add 1 value to Queue and then drain it.
    * @param keyName String key name
    * @return E the next value in the Queue
-   * @throws IOException
-   * @throws ExecutionException
+   * @throws IOException raised on errors performing I/O.
+   * @throws ExecutionException if filling the value queue fails.
    */
   public E getNext(String keyName)
       throws IOException, ExecutionException {
@@ -344,8 +344,8 @@ public class ValueQueue <E> {
    * @param keyName String key name
    * @param num Minimum number of values to return.
    * @return {@literal List<E>} values returned
-   * @throws IOException
-   * @throws ExecutionException
+   * @throws IOException raised on errors performing I/O.
+   * @throws ExecutionException if filling the value queue fails.
    */
   public List<E> getAtMost(String keyName, int num) throws IOException,
       ExecutionException {
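
A hedged usage sketch, assuming a ValueQueue<byte[]> named queue was built
with a suitable QueueRefiller:

    byte[] one = queue.getNext("demoKey");               // fills on demand
    List<byte[]> batch = queue.getAtMost("demoKey", 5);  // may return fewer
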
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9818b472f0..a4737c548c8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -272,7 +272,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param supportedScheme the scheme supported by the implementor
    * @param authorityNeeded if true then theURI must have authority, if false
    *          then the URI must have null authority.
-   *
+   * @param defaultPort default port to use if port is not specified in the URI.
    * @throws URISyntaxException <code>uri</code> has syntax error
    */
   public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
     myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
     statistics = getStatistics(uri); 
   }
-  
+
   /**
-   * Check that the Uri's scheme matches
-   * @param uri
-   * @param supportedScheme
+   * Check that the URI's scheme matches.
+   *
+   * @param uri name URI of the FS.
+   * @param supportedScheme supported scheme.
    */
   public void checkScheme(URI uri, String supportedScheme) {
     String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * If the path is fully qualified URI, then its scheme and authority
    * matches that of this file system. Otherwise the path must be 
    * slash-relative name.
-   * 
+   * @param path the path.
    * @throws InvalidPathException if the path is invalid
    */
   public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   
   /**
    * Make the path fully qualified to this file system
-   * @param path
+   * @param path the path.
    * @return the qualified path
    */
   public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * through any internal symlinks or mount point
    * @param p path to be resolved
    * @return fully qualified path 
-   * @throws FileNotFoundException
-   * @throws AccessControlException
-   * @throws IOException
+   * @throws FileNotFoundException if the path does not exist.
+   * @throws AccessControlException if access is denied.
+   * @throws IOException raised on errors performing I/O.
    * @throws UnresolvedLinkException if symbolic link on path cannot be
    * resolved internally
    */
@@ -513,6 +514,18 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
    * that the Path f must be fully qualified and the permission is absolute
    * (i.e. umask has been applied).
+   *
+   * @param f the path.
+   * @param createFlag create flags.
+   * @param opts create options.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnsupportedFileSystemException unsupported file system exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return output stream.
    */
   public final FSDataOutputStream create(final Path f,
       final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
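
A hedged caller sketch: per the javadoc above, the path must be fully
qualified and the permission absolute. Here afs is any AbstractFileSystem
instance; the URI and permission are illustrative.

    FSDataOutputStream out = afs.create(
        new Path("hdfs://nn:8020/tmp/demo.txt"),
        EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
        Options.CreateOpts.perms(FsPermission.getFileDefault()));
    out.close();
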
@@ -630,6 +643,24 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
    * have been declared explicitly.
+   *
+   * @param f the path.
+   * @param flag create flag.
+   * @param absolutePermission absolute permission.
+   * @param bufferSize buffer size.
+   * @param replication replication factor.
+   * @param blockSize block size.
+   * @param progress progress reporter.
+   * @param checksumOpt checksum option.
+   * @param createParent create parent flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnsupportedFileSystemException unsupported filesystem exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return output stream.
    */
   public abstract FSDataOutputStream createInternal(Path f,
       EnumSet<CreateFlag> flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
    * f must be fully qualified and the permission is absolute (i.e. 
    * umask has been applied).
+   * @param dir directory.
+   * @param permission permission.
+   * @param createParent create parent flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void mkdir(final Path dir, final FsPermission permission,
       final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#delete(Path, boolean)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @param recursive recursive flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the delete succeeded, false otherwise.
    */
   public abstract boolean delete(final Path f, final boolean recursive)
       throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {
@@ -673,6 +727,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#open(Path, int)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @param bufferSize buffer size.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return input stream.
    */
   public abstract FSDataInputStream open(final Path f, int bufferSize)
       throws AccessControlException, FileNotFoundException,
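
Sketch of the two-argument open(); the 4096-byte buffer size is arbitrary:

    try (FSDataInputStream in = afs.open(new Path("/tmp/demo.txt"), 4096)) {
      int firstByte = in.read();
    }
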
@@ -682,6 +744,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#truncate(Path, long)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @param newLength new length.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the file has been truncated to the desired length, false otherwise.
    */
   public boolean truncate(Path f, long newLength)
       throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setReplication(Path, short)} except that Path f must be
    * for this file system.
+   *
+   * @param f the path.
+   * @param replication replication.
+   * @return true if replication was set successfully, false otherwise.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract boolean setReplication(final Path f,
       final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
+   *
+   * @param src source path.
+   * @param dst destination path.
+   * @param options rename options.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public final void rename(final Path src, final Path dst,
       final Options.Rename... options) throws AccessControlException,
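
Sketch of a rename with explicit overwrite semantics (paths hypothetical):

    afs.rename(new Path("/tmp/a"), new Path("/tmp/b"), Options.Rename.OVERWRITE);
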
@@ -727,6 +815,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * File systems that do not have a built in overwrite need implement only this
    * method and can take advantage of the default impl of the other
    * {@link #renameInternal(Path, Path, boolean)}
+   *
+   * @param src source path.
+   * @param dst destination path.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void renameInternal(final Path src, final Path dst)
       throws AccessControlException, FileAlreadyExistsException,
@@ -737,6 +834,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
    * f must be for this file system.
+   *
+   * @param src source path.
+   * @param dst destination path.
+   * @param overwrite overwrite flag.
+   * @throws AccessControlException access control exception.
+   * @throws FileAlreadyExistsException file already exists exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws ParentNotDirectoryException parent not directory exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public void renameInternal(final Path src, final Path dst,
       boolean overwrite) throws AccessControlException,
@@ -800,6 +907,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of  
    * {@link FileContext#createSymlink(Path, Path, boolean)};
+   *
+   * @param target target.
+   * @param link link.
+   * @param createParent create parent flag.
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnresolvedLinkException unresolved link exception.
    */
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws IOException, UnresolvedLinkException {
@@ -810,6 +923,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Partially resolves the path. This is used during symlink resolution in
    * {@link FSLinkResolver}, and differs from the similarly named method
    * {@link FileContext#getLinkTarget(Path)}.
+   * @param f the path.
+   * @return target path.
    * @throws IOException subclass implementations may throw IOException 
    */
   public Path getLinkTarget(final Path f) throws IOException {
@@ -822,6 +937,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setPermission(Path, FsPermission)} except that Path f
    * must be for this file system.
+   *
+   * @param f the path.
+   * @param permission permission.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setPermission(final Path f,
       final FsPermission permission) throws AccessControlException,
@@ -831,6 +953,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setOwner(Path, String, String)} except that Path f must
    * be for this file system.
+   *
+   * @param f the path.
+   * @param username username.
+   * @param groupname groupname.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setOwner(final Path f, final String username,
       final String groupname) throws AccessControlException,
@@ -840,6 +970,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setTimes(Path, long, long)} except that Path f must be
    * for this file system.
+   *
+   * @param f the path.
+   * @param mtime modification time.
+   * @param atime access time.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setTimes(final Path f, final long mtime,
     final long atime) throws AccessControlException, FileNotFoundException,
@@ -849,6 +987,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileChecksum(Path)} except that Path f must be for
    * this file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file checksum.
    */
   public abstract FileChecksum getFileChecksum(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -859,6 +1004,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#getFileStatus(Path)} 
    * except that an UnresolvedLinkException may be thrown if a symlink is 
    * encountered in the path.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file status.
    */
   public abstract FileStatus getFileStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -870,8 +1022,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * In some FileSystem implementations such as HDFS metadata
    * synchronization is essential to guarantee consistency of read requests
    * particularly in HA setting.
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnsupportedOperationException if the operation is unsupported.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -883,6 +1035,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * {@link FileContext#access(Path, FsAction)}
    * except that an UnresolvedLinkException may be thrown if a symlink is
    * encountered in the path.
+   *
+   * @param path the path.
+   * @param mode the requested FsAction mode.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
   public void access(Path path, FsAction mode) throws AccessControlException,
@@ -897,6 +1056,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * encountered in the path leading up to the final path component.
    * If the file system does not support symlinks then the behavior is
    * equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnsupportedFileSystemException unsupported file system exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file status.
    */
   public FileStatus getFileLinkStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -908,6 +1074,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFileBlockLocations(Path, long, long)} except that
    * Path f must be for this file system.
+   *
+   * @param f the path.
+   * @param start start.
+   * @param len length.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return array of block locations.
    */
   public abstract BlockLocation[] getFileBlockLocations(final Path f,
       final long start, final long len) throws AccessControlException,
@@ -917,6 +1092,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file system status.
    */
   public FsStatus getFsStatus(final Path f) throws AccessControlException,
       FileNotFoundException, UnresolvedLinkException, IOException {
@@ -927,6 +1109,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#getFsStatus(Path)}.
+   *
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return file system status.
    */
   public abstract FsStatus getFsStatus() throws AccessControlException,
       FileNotFoundException, IOException;
@@ -935,6 +1122,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#listStatus(Path)} except that Path f must be for this
    * file system.
+   *
+   * @param f path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return iterator over file statuses.
    */
   public RemoteIterator<FileStatus> listStatusIterator(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -967,6 +1161,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * will have different formats for replicated and erasure coded file. Please
    * refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
    * for more details.
+   *
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return iterator over located file statuses.
    */
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -999,6 +1200,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext.Util#listStatus(Path)} except that Path f must be 
    * for this file system.
+   * @param f the path.
+   * @throws AccessControlException access control exception.
+   * @throws FileNotFoundException file not found exception.
+   * @throws UnresolvedLinkException unresolved link exception.
+   * @throws IOException raised on errors performing I/O.
+   * @return array of file statuses.
    */
   public abstract FileStatus[] listStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -1007,7 +1214,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
-   * @throws IOException
+   * @param path the path.
+   * @throws IOException raised on errors performing I/O.
    */
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -1020,6 +1228,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * The specification of this method matches that of
    * {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
    * must be for this file system.
+   *
+   * @param verifyChecksum verify checksum flag.
+   * @throws AccessControlException access control exception.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void setVerifyChecksum(final boolean verifyChecksum)
       throws AccessControlException, IOException;
@@ -1041,7 +1253,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param renewer the account name that is allowed to renew the token.
    * @return List of delegation tokens.
    *   If delegation tokens not supported then return a list of size zero.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(String renewer) throws IOException {
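
Hedged sketch: the renewer principal is hypothetical, and per the javadoc an
empty list is returned when delegation tokens are not supported.

    List<Token<?>> tokens = afs.getDelegationTokens("yarn");
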
@@ -1141,7 +1353,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to modify
    * @param name xattr name.
    * @param value xattr value.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void setXAttr(Path path, String name, byte[] value)
       throws IOException {
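
A hedged xattr round trip; this needs an implementation that supports xattrs
(the AbstractFileSystem default throws UnsupportedOperationException), and
names carry a namespace prefix such as "user.".

    Path p = new Path("/tmp/demo.txt");
    afs.setXAttr(p, "user.origin", "import".getBytes(StandardCharsets.UTF_8));
    byte[] origin = afs.getXAttr(p, "user.origin");
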
@@ -1160,7 +1372,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param name xattr name.
    * @param value xattr value.
    * @param flag xattr set flag
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void setXAttr(Path path, String name, byte[] value,
       EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -1178,7 +1390,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attribute
    * @param name xattr name.
    * @return byte[] xattr value.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public byte[] getXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1196,7 +1408,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Map<String, byte[]> getXAttrs(Path path) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1214,7 +1426,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param names XAttr names.
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Map<String, byte[]> getXAttrs(Path path, List<String> names)
       throws IOException {
@@ -1232,7 +1444,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return {@literal Map<String, byte[]>} describing the XAttrs of the file
    * or directory
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public List<String> listXAttrs(Path path)
           throws IOException {
@@ -1249,7 +1461,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param path Path to remove extended attribute
    * @param name xattr name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void removeXAttr(Path path, String name) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1259,6 +1471,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#createSnapshot(Path, String)}.
+   *
+   * @param path the path.
+   * @param snapshotName snapshot name.
+   * @throws IOException raised on errors performing I/O.
+   * @return the snapshot path.
    */
   public Path createSnapshot(final Path path, final String snapshotName)
       throws IOException {
@@ -1269,6 +1486,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#renameSnapshot(Path, String, String)}.
+   *
+   * @param path the path.
+   * @param snapshotOldName snapshot old name.
+   * @param snapshotNewName snapshot new name.
+   * @throws IOException raised on errors performing I/O.
    */
   public void renameSnapshot(final Path path, final String snapshotOldName,
       final String snapshotNewName) throws IOException {
@@ -1279,6 +1501,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * The specification of this method matches that of
    * {@link FileContext#deleteSnapshot(Path, String)}.
+   *
+   * @param snapshotDir snapshot dir.
+   * @param snapshotName snapshot name.
+   * @throws IOException raised on errors performing I/O.
    */
   public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
       throws IOException {
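
A hedged sketch of the snapshot lifecycle covered by the three methods above;
on HDFS the directory must first be made snapshottable.

    Path snap = afs.createSnapshot(new Path("/data"), "s1");
    afs.renameSnapshot(new Path("/data"), "s1", "s2");
    afs.deleteSnapshot(new Path("/data"), "s2");
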
@@ -1289,7 +1515,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void satisfyStoragePolicy(final Path path) throws IOException {
     throw new UnsupportedOperationException(
@@ -1303,6 +1529,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * @param policyName the name of the target storage policy. The list
    *                   of supported Storage policies can be retrieved
    *                   via {@link #getAllStoragePolicies}.
+   * @throws IOException raised on errors performing I/O.
    */
   public void setStoragePolicy(final Path path, final String policyName)
       throws IOException {
@@ -1314,7 +1541,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
   /**
    * Unset the storage policy set for a given file or directory.
    * @param src file or directory path.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void unsetStoragePolicy(final Path src) throws IOException {
     throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1326,7 +1553,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    *
    * @param src file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public BlockStoragePolicySpi getStoragePolicy(final Path src)
       throws IOException {
@@ -1338,7 +1565,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
    * Retrieve all the storage policies supported by this file system.
    *
    * @return all storage policies supported by this filesystem.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
       throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
index 213fbc24c4d..7518dd2f7ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
@@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput {
   private final FSDataInputStream stream;
   private final long len;
 
-  /** Construct given an {@link FSDataInputStream} and its length. */
+  /**
+   * Construct given an {@link FSDataInputStream} and its length.
+   *
+   * @param in input stream.
+   * @param len length.
+   */
   public AvroFSInput(final FSDataInputStream in, final long len) {
     this.stream = in;
     this.len = len;
   }
 
-  /** Construct given a {@link FileContext} and a {@link Path}. */
+  /** Construct given a {@link FileContext} and a {@link Path}.
+   * @param fc the file context.
+   * @param p the path.
+   * @throws IOException If an I/O error occurred.
+   */
   public AvroFSInput(final FileContext fc, final Path p) throws IOException {
     FileStatus status = fc.getFileStatus(p);
     this.len = status.getLen();
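
Usage sketch for the FileContext constructor (path hypothetical):

    FileContext fc = FileContext.getFileContext(new Configuration());
    AvroFSInput avroIn = new AvroFSInput(fc, new Path("/tmp/data.avro"));
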
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
index 607fffbcc70..e693bcbfe89 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
@@ -68,6 +68,7 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
    * 
    * @param prevKey The key to send.
    * @return A list of replies.
+   * @throws IOException If an I/O error occurred.
    */
   public abstract BatchedEntries<E> makeRequest(K prevKey) throws IOException;
 
@@ -102,6 +103,8 @@ public abstract class BatchedRemoteIterator<K, E> implements RemoteIterator<E> {
 
   /**
    * Return the next list key associated with an element.
+   * @param element element.
+   * @return the next list key.
    */
   public abstract K elementToPrevKey(E element);
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index 29358dd7d10..67687c1f0e0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -85,6 +85,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Copy constructor.
+   * @param that the BlockLocation to copy.
    */
   public BlockLocation(BlockLocation that) {
     this.hosts = that.hosts;
@@ -100,6 +101,10 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, offset and length.
+   * @param names names array.
+   * @param hosts host array.
+   * @param offset offset.
+   * @param length length.
    */
   public BlockLocation(String[] names, String[] hosts, long offset, 
                        long length) {
@@ -108,6 +113,11 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, offset, length and corrupt flag.
+   * @param names names.
+   * @param hosts hosts.
+   * @param offset offset.
+   * @param length length.
+   * @param corrupt corrupt flag.
    */
   public BlockLocation(String[] names, String[] hosts, long offset, 
                        long length, boolean corrupt) {
@@ -116,6 +126,11 @@ public class BlockLocation implements Serializable {
 
   /**
    * Constructor with host, name, network topology, offset and length.
+   * @param names names.
+   * @param hosts hosts.
+   * @param topologyPaths topologyPaths.
+   * @param offset offset.
+   * @param length length.
    */
   public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
                        long offset, long length) {
@@ -125,6 +140,12 @@ public class BlockLocation implements Serializable {
   /**
    * Constructor with host, name, network topology, offset, length 
    * and corrupt flag.
+   * @param names names.
+   * @param hosts hosts.
+   * @param topologyPaths topologyPaths.
+   * @param offset offset.
+   * @param length length.
+   * @param corrupt corrupt flag.
    */
   public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
                        long offset, long length, boolean corrupt) {
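
Illustrative construction with the documented parameters; all values are
hypothetical.

    BlockLocation loc = new BlockLocation(
        new String[] {"10.0.0.1:9866"},         // names (IP:xferPort)
        new String[] {"host1.example.com"},     // hosts
        new String[] {"/rack1/10.0.0.1:9866"},  // topology paths
        0L, 134217728L, false);                 // offset, length, corrupt
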
@@ -177,6 +198,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of hosts (hostname) hosting this block.
+   * @return hosts array.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getHosts() throws IOException {
     return hosts;
@@ -184,6 +207,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of hosts (hostname) hosting a cached replica of the block.
+   * @return cached hosts.
    */
   public String[] getCachedHosts() {
     return cachedHosts;
@@ -191,6 +215,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the list of names (IP:xferPort) hosting this block.
+   * @return names array.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getNames() throws IOException {
     return names;
@@ -199,6 +225,8 @@ public class BlockLocation implements Serializable {
   /**
    * Get the list of network topology paths for each of the hosts.
    * The last component of the path is the "name" (IP:xferPort).
+   * @return topology paths.
+   * @throws IOException If an I/O error occurred.
    */
   public String[] getTopologyPaths() throws IOException {
     return topologyPaths;
@@ -206,6 +234,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the storageID of each replica of the block.
+   * @return storage ids.
    */
   public String[] getStorageIds() {
     return storageIds;
@@ -213,6 +242,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the storage type of each replica of the block.
+   * @return storage type of each replica of the block.
    */
   public StorageType[] getStorageTypes() {
     return storageTypes;
@@ -220,6 +250,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the start offset of file associated with this block.
+   * @return start offset of file associated with this block.
    */
   public long getOffset() {
     return offset;
@@ -227,6 +258,7 @@ public class BlockLocation implements Serializable {
   
   /**
    * Get the length of the block.
+   * @return length of the block.
    */
   public long getLength() {
     return length;
@@ -234,6 +266,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Get the corrupt flag.
+   * @return corrupt flag.
    */
   public boolean isCorrupt() {
     return corrupt;
@@ -241,6 +274,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Return true if the block is striped (erasure coded).
+   * @return true if the block is striped, false otherwise.
    */
   public boolean isStriped() {
     return false;
@@ -248,6 +282,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the start offset of file associated with this block.
+   * @param offset start offset.
    */
   public void setOffset(long offset) {
     this.offset = offset;
@@ -255,6 +290,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the length of block.
+   * @param length length of block.
    */
   public void setLength(long length) {
     this.length = length;
@@ -262,6 +298,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the corrupt flag.
+   * @param corrupt corrupt flag.
    */
   public void setCorrupt(boolean corrupt) {
     this.corrupt = corrupt;
@@ -269,6 +306,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the hosts hosting this block.
+   * @param hosts hosts array.
+   * @throws IOException If an I/O error occurred.
    */
   public void setHosts(String[] hosts) throws IOException {
     if (hosts == null) {
@@ -280,6 +319,7 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the hosts hosting a cached replica of this block.
+   * @param cachedHosts cached hosts.
    */
   public void setCachedHosts(String[] cachedHosts) {
     if (cachedHosts == null) {
@@ -291,6 +331,8 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the names (host:port) hosting this block.
+   * @param names names.
+   * @throws IOException If an I/O error occurred.
    */
   public void setNames(String[] names) throws IOException {
     if (names == null) {
@@ -302,6 +344,9 @@ public class BlockLocation implements Serializable {
 
   /**
    * Set the network topology paths of the hosts.
+   *
+   * @param topologyPaths topology paths.
+   * @throws IOException If an I/O error occurred.
    */
   public void setTopologyPaths(String[] topologyPaths) throws IOException {
     if (topologyPaths == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
index 6576fe5827d..f577649dd5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
@@ -47,6 +47,12 @@ public final class ByteBufferUtil {
 
   /**
    * Perform a fallback read.
+   *
+   * @param stream input stream.
+   * @param bufferPool bufferPool.
+   * @param maxLength maxLength.
+   * @throws IOException raised on errors performing I/O.
+   * @return byte buffer.
    */
   public static ByteBuffer fallbackRead(
       InputStream stream, ByteBufferPool bufferPool, int maxLength)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
index 362d125b09d..d7b61346d4e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
@@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
   /**
    * This is the constructor used by the builder.
    * All overriding classes should implement this.
+   *
+   * @param builder builder.
+   * @throws IOException raised on errors performing I/O.
    */
   public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
       throws IOException {
@@ -140,6 +143,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
 
   /**
    * Increment the cached value of used space.
+   *
+   * @param value dfs used value.
    */
   public void incDfsUsed(long value) {
     used.addAndGet(value);
@@ -154,6 +159,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
 
   /**
    * How long in between runs of the background refresh.
+   *
+   * @return refresh interval.
    */
   @VisibleForTesting
   public long getRefreshInterval() {
@@ -163,6 +170,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
   /**
    * Randomize the refresh interval timing by this amount, the actual interval will be chosen
    * uniformly between {@code interval-jitter} and {@code interval+jitter}.
+   *
+   * @return the jitter amount.
    */
   @VisibleForTesting
   public long getJitter() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 59ffe00bcb2..0efcdc8022f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -102,25 +102,44 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     return fs;
   }
 
-  /** Return the name of the checksum file associated with a file.*/
+  /**
+   * Return the name of the checksum file associated with a file.
+   *
+   * @param file the file path.
+   * @return name of the checksum file associated with a file.
+   */
   public Path getChecksumFile(Path file) {
     return new Path(file.getParent(), "." + file.getName() + ".crc");
   }
 
-  /** Return true iff file is a checksum file name.*/
+  /**
+   * Return true if file is a checksum file name.
+   *
+   * @param file the file path.
+   * @return true if the file is a checksum file, false otherwise.
+   */
   public static boolean isChecksumFile(Path file) {
     String name = file.getName();
     return name.startsWith(".") && name.endsWith(".crc");
   }
 
-  /** Return the length of the checksum file given the size of the 
+  /**
+   * Return the length of the checksum file given the size of the
    * actual file.
-   **/
+   *
+   * @param file the file path.
+   * @param fileSize file size.
+   * @return checksum length.
+   */
   public long getChecksumFileLength(Path file, long fileSize) {
     return getChecksumLength(fileSize, getBytesPerSum());
   }
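
A hedged sketch tying the helpers above together on the local filesystem
(LocalFileSystem extends ChecksumFileSystem); the data path is hypothetical.

    LocalFileSystem lfs = FileSystem.getLocal(new Configuration());
    Path crc = lfs.getChecksumFile(new Path("/tmp/data.txt")); // /tmp/.data.txt.crc
    boolean isCrc = ChecksumFileSystem.isChecksumFile(crc);    // true
    long crcLen = lfs.getChecksumFileLength(new Path("/tmp/data.txt"), 1 << 20);
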
 
-  /** Return the bytes Per Checksum */
+  /**
+   * Return the bytes Per Checksum.
+   *
+   * @return bytes per checksum.
+   */
   public int getBytesPerSum() {
     return bytesPerChecksum;
   }
@@ -362,6 +381,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Opens an FSDataInputStream at the indicated Path.
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public FSDataInputStream open(Path f, int bufferSize) throws IOException {
@@ -669,7 +689,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
@@ -754,7 +774,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @param f
    *          given path
    * @return the statuses of the files/directories in the given path
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public FileStatus[] listStatus(Path f) throws IOException {
@@ -775,7 +795,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * @param f
    *          given path
    * @return the statuses of the files/directories in the given patch
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    */
   @Override
   public RemoteIterator<LocatedFileStatus> listLocatedStatus(Path f)
@@ -811,6 +831,10 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * Copy it from FS control to the local dst name.
    * If src and dst are directories, the copyCrc parameter
    * determines whether to copy CRC files.
+   * @param src source path.
+   * @param dst destination path.
+   * @param copyCrc whether to copy CRC files.
+   * @throws IOException if an I/O error occurs.
    */
   @SuppressWarnings("deprecation")
   public void copyToLocalFile(Path src, Path dst, boolean copyCrc)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index bc1122c56a2..4820c5c3045 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -70,30 +70,53 @@ public abstract class ChecksumFs extends FilterFs {
     this.verifyChecksum = inVerifyChecksum;
   }
 
-  /** get the raw file system. */
+  /**
+   * Get the raw file system.
+   *
+   * @return the raw AbstractFileSystem.
+   */
   public AbstractFileSystem getRawFs() {
     return getMyFs();
   }
 
-  /** Return the name of the checksum file associated with a file.*/
+  /**
+   * Return the name of the checksum file associated with a file.
+   *
+   * @param file the file path.
+   * @return the checksum file associated with a file.
+   */
   public Path getChecksumFile(Path file) {
     return new Path(file.getParent(), "." + file.getName() + ".crc");
   }
 
-  /** Return true iff file is a checksum file name.*/
+  /**
+   * Return true if the file is a checksum file name.
+   *
+   * @param file the file path.
+   * @return true if the file is a checksum file, false otherwise.
+   */
   public static boolean isChecksumFile(Path file) {
     String name = file.getName();
     return name.startsWith(".") && name.endsWith(".crc");
   }
 
-  /** Return the length of the checksum file given the size of the 
+  /**
+   * Return the length of the checksum file given the size of the
    * actual file.
-   **/
+   *
+   * @param file the file path.
+   * @param fileSize file size.
+   * @return checksum file length.
+   */
   public long getChecksumFileLength(Path file, long fileSize) {
     return getChecksumLength(fileSize, getBytesPerSum());
   }
 
-  /** Return the bytes Per Checksum. */
+  /**
+   * Return the bytes Per Checksum.
+   *
+   * @return bytes per checksum.
+   */
   public int getBytesPerSum() {
     return defaultBytesPerChecksum;
   }
@@ -433,7 +456,7 @@ public abstract class ChecksumFs extends FilterFs {
    * Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException if an I/O error occurs.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
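
To make the documented convention concrete, a minimal sketch of how these
accessors relate (assuming checksumFs is a concrete ChecksumFs instance;
paths and sizes are illustrative):

    Path file = new Path("/data/part-0");
    Path crc = checksumFs.getChecksumFile(file);     // /data/.part-0.crc
    boolean isCrc = ChecksumFs.isChecksumFile(crc);  // true
    // length of the .crc file for a 1 MB data file, derived from
    // getBytesPerSum():
    long crcLen = checksumFs.getChecksumFileLength(file, 1024 * 1024);
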
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index a799e883bcf..52252365092 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {
 
   /**
    * Number of filesystems instances can be created in parallel.
-   * <p></p>
+   * <p>
    * A higher number here does not necessarily improve performance, especially
    * for object stores, where multiple threads may be attempting to create an FS
    * instance for the same URI.
-   * <p></p>
+   * </p>
    * Default value: {@value}.
    */
   public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
 
   /**
    * Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
-   * <p></p>
+   * <p>
    * Default value: {@value}.
+   * </p>
    */
   public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
       64;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
index e1ed5cbcfca..bdbc8f3a33f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
@@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum {
   private DataChecksum.Type crcType;
   private int bytesPerCrc;
 
-  /** Create a CompositeCrcFileChecksum. */
+  /**
+   * Create a CompositeCrcFileChecksum.
+   *
+   * @param crc the composite CRC value.
+   * @param crcType the CRC algorithm type.
+   * @param bytesPerCrc number of data bytes per CRC.
+   */
   public CompositeCrcFileChecksum(
       int crc, DataChecksum.Type crcType, int bytesPerCrc) {
     this.crc = crc;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
index 79850e1a2f2..9f97a12fa60 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
@@ -149,17 +149,31 @@ public class ContentSummary extends QuotaUsage implements Writable{
   @Deprecated
   public ContentSummary() {}
   
-  /** Constructor, deprecated by ContentSummary.Builder
+  /**
+   *  Constructor, deprecated by ContentSummary.Builder.
   *  This constructor implicitly sets spaceConsumed the same as length.
    *  spaceConsumed and length must be set explicitly with
-   *  ContentSummary.Builder
+   *  ContentSummary.Builder.
+   *
+   * @param length length.
+   * @param fileCount file count.
+   * @param directoryCount directory count.
    * */
   @Deprecated
   public ContentSummary(long length, long fileCount, long directoryCount) {
     this(length, fileCount, directoryCount, -1L, length, -1L);
   }
 
-  /** Constructor, deprecated by ContentSummary.Builder */
+  /**
+   * Constructor, deprecated by ContentSummary.Builder.
+   *
+   * @param length length.
+   * @param fileCount file count.
+   * @param directoryCount directory count.
+   * @param quota quota.
+   * @param spaceConsumed space consumed.
+   * @param spaceQuota space quota.
+   * */
   @Deprecated
   public ContentSummary(
       long length, long fileCount, long directoryCount, long quota,
@@ -172,7 +186,11 @@ public class ContentSummary extends QuotaUsage implements Writable{
     setSpaceQuota(spaceQuota);
   }
 
-  /** Constructor for ContentSummary.Builder*/
+  /**
+   * Constructor for ContentSummary.Builder.
+   *
+   * @param builder builder.
+   */
   private ContentSummary(Builder builder) {
     super(builder);
     this.length = builder.length;
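
Since the constructors above are deprecated in favour of
ContentSummary.Builder, a hedged sketch of the replacement (builder setter
names assumed to mirror the constructor parameters; values illustrative):

    ContentSummary summary = new ContentSummary.Builder()
        .length(1024L)
        .fileCount(1L)
        .directoryCount(0L)
        .spaceConsumed(3072L)   // e.g. 3x replication
        .build();
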
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
index 71993713ad2..ca008e53693 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
@@ -189,6 +189,8 @@ public enum CreateFlag {
   /**
    * Validate the CreateFlag for the append operation. The flag must contain
    * APPEND, and cannot contain OVERWRITE.
+   *
+   * @param flag the create flags to validate.
    */
   public static void validateForAppend(EnumSet<CreateFlag> flag) {
     validate(flag);
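
A short illustration of the documented contract (a minimal sketch; the
exception on failure is whatever validate() raises):

    // valid: contains APPEND and no OVERWRITE
    CreateFlag.validateForAppend(EnumSet.of(CreateFlag.APPEND));
    // invalid: OVERWRITE present, so validation fails
    CreateFlag.validateForAppend(
        EnumSet.of(CreateFlag.APPEND, CreateFlag.OVERWRITE));
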
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index da4636b2c0f..c5a052f3de4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -65,7 +65,10 @@ public class DF extends Shell {
     return dirPath;
   }
 
-  /** @return a string indicating which filesystem volume we're checking. */
+  /**
+   * @return a string indicating which filesystem volume we're checking.
+   * @throws IOException raised on errors performing I/O.
+   */
   public String getFilesystem() throws IOException {
     if (Shell.WINDOWS) {
       this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
@@ -100,7 +103,10 @@ public class DF extends Shell {
     return (int) (used * 100.0 / cap);
   }
 
-  /** @return the filesystem mount point for the indicated volume */
+  /**
+   * @return the filesystem mount point for the indicated volume.
+   * @throws IOException raised on errors performing I/O.
+   */
   public String getMount() throws IOException {
     // Abort early if specified path does not exist
     if (!dirFile.exists()) {
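
Usage sketch for the two accessors documented above (the (File,
Configuration) constructor is assumed; values illustrative):

    DF df = new DF(new File("/"), new Configuration());
    String volume = df.getFilesystem();  // e.g. "/dev/sda1", or "C:" on Windows
    String mount = df.getMount();        // e.g. "/"
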
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 33905dcbb77..6f6e3041065 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -47,7 +47,11 @@ public class DelegationTokenRenewer
     /** @return the renew token. */
     public Token<?> getRenewToken();
 
-    /** Set delegation token. */
+    /**
+     * Set delegation token.
+     * @param <T> generic type T.
+     * @param token token.
+     */
     public <T extends TokenIdentifier> void setDelegationToken(Token<T> token);
   }
 
@@ -172,7 +176,11 @@ public class DelegationTokenRenewer
   /** Queue to maintain the RenewActions to be processed by the {@link #run()} */
   private volatile DelayQueue<RenewAction<?>> queue = new DelayQueue<RenewAction<?>>();
   
-  /** For testing purposes */
+  /**
+   * For testing purposes.
+   *
+   * @return renew queue length.
+   */
   @VisibleForTesting
   protected int getRenewQueueLength() {
     return queue.size();
@@ -211,7 +219,13 @@ public class DelegationTokenRenewer
     }
   }
   
-  /** Add a renew action to the queue. */
+  /**
+   * Add a renew action to the queue.
+   *
+   * @param <T> generic type T.
+   * @param fs file system.
+   * @return renew action.
+   * */
   @SuppressWarnings("static-access")
   public <T extends FileSystem & Renewable> RenewAction<T> addRenewAction(final T fs) {
     synchronized (this) {
@@ -230,8 +244,10 @@ public class DelegationTokenRenewer
 
   /**
    * Remove the associated renew action from the queue
-   * 
-   * @throws IOException
+   *
+   * @param <T> generic type T.
+   * @param fs file system.
+   * @throws IOException raised on errors performing I/O.
    */
   public <T extends FileSystem & Renewable> void removeRenewAction(
       final T fs) throws IOException {
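
The bound <T extends FileSystem & Renewable> means the argument must be a
FileSystem that also implements Renewable. A hedged sketch (MyTokenFs is a
hypothetical such class; getInstance() assumed as the singleton accessor):

    DelegationTokenRenewer renewer = DelegationTokenRenewer.getInstance();
    MyTokenFs fs = new MyTokenFs();
    renewer.addRenewAction(fs);      // schedule periodic renewal
    // later, e.g. on close():
    renewer.removeRenewAction(fs);   // may throw IOException on cancel
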
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
index a4c7254cfeb..56ef51f128d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
@@ -37,12 +37,17 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
 
   /**
    * Set optional Builder parameter.
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    */
   B opt(@Nonnull String key, @Nonnull String value);
 
   /**
    * Set optional boolean parameter for the Builder.
-   *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, boolean value);
@@ -50,6 +55,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional int parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, int value);
@@ -57,6 +65,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional float parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, float value);
@@ -64,6 +75,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional long parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, long value);
@@ -71,6 +85,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set optional double parameter for the Builder.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, double value);
@@ -78,6 +95,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set an array of string values as optional parameter for the Builder.
    *
+   * @param key key.
+   * @param values values.
+   * @return generic type B.
    * @see #opt(String, String)
    */
   B opt(@Nonnull String key, @Nonnull String... values);
@@ -87,12 +107,19 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
    *
    * If the option is not supported or unavailable,
    * the client should expect {@link #build()} throws IllegalArgumentException.
+   *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    */
   B must(@Nonnull String key, @Nonnull String value);
 
   /**
    * Set mandatory boolean option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, boolean value);
@@ -100,6 +127,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory int option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, int value);
@@ -107,6 +137,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory float option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, float value);
@@ -114,6 +147,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory long option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, long value);
@@ -121,6 +157,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set mandatory double option.
    *
+   * @param key key.
+   * @param value value.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, double value);
@@ -128,6 +167,9 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
   /**
    * Set a string array as mandatory option.
    *
+   * @param key key.
+   * @param values values.
+   * @return generic type B.
    * @see #must(String, String)
    */
   B must(@Nonnull String key, @Nonnull String... values);
@@ -139,6 +181,7 @@ public interface FSBuilder<S, B extends FSBuilder<S, B>> {
    * @throws UnsupportedOperationException if the filesystem does not support
    * the specific operation.
    * @throws IOException on filesystem IO errors.
+   * @return generic type S.
    */
   S build() throws IllegalArgumentException,
       UnsupportedOperationException, IOException;
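
The opt()/must() split in practice: an unrecognised opt() key may simply be
ignored, while an unrecognised must() key is expected to make build() throw
IllegalArgumentException. A sketch against a createFile() builder (key name
illustrative):

    FSDataOutputStream out = fs.createFile(path)
        .opt("fs.example.hint", "value")   // best-effort, dropped if unknown
        .build();
    // Replacing opt() with must("fs.example.hint", "value") on a filesystem
    // that does not support the key makes build() fail instead.
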
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index c96d499d17b..16938a83a69 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -123,6 +123,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Constructor.
+   *
+   * @param fileSystem file system.
+   * @param p the path.
    */
   protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
       @Nonnull Path p) {
@@ -149,6 +152,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set permission for the file.
+   *
+   * @param perm permission.
+   * @return Generics Type B.
    */
   public B permission(@Nonnull final FsPermission perm) {
     checkNotNull(perm);
@@ -162,6 +168,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set the size of the buffer to be used.
+   *
+   * @param bufSize buffer size.
+   * @return Generics Type B.
    */
   public B bufferSize(int bufSize) {
     bufferSize = bufSize;
@@ -174,6 +183,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set replication factor.
+   *
+   * @param replica replica.
+   * @return Generics Type B.
    */
   public B replication(short replica) {
     replication = replica;
@@ -186,6 +198,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set block size.
+   *
+   * @param blkSize block size.
+   * @return Generics Type B.
    */
   public B blockSize(long blkSize) {
     blockSize = blkSize;
@@ -194,6 +209,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Return true to create the parent directories if they do not exist.
+   *
+   * @return true if missing parent directories will be created, false otherwise.
    */
   protected boolean isRecursive() {
     return recursive;
@@ -201,6 +218,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Create the parent directory if they do not exist.
+   *
+   * @return Generics Type B.
    */
   public B recursive() {
     recursive = true;
@@ -213,6 +232,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set the facility of reporting progress.
+   *
+   * @param prog progress.
+   * @return Generics Type B.
    */
   public B progress(@Nonnull final Progressable prog) {
     checkNotNull(prog);
@@ -226,6 +248,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Create an FSDataOutputStream at the specified path.
+   *
+   * @return Generics Type B.
    */
   public B create() {
     flags.add(CreateFlag.CREATE);
@@ -236,6 +260,9 @@ public abstract class FSDataOutputStreamBuilder
    * Set to true to overwrite the existing file.
    * Set it to false, an exception will be thrown when calling {@link #build()}
    * if the file exists.
+   *
+   * @param overwrite overwrite flag.
+   * @return Generics Type B.
    */
   public B overwrite(boolean overwrite) {
     if (overwrite) {
@@ -248,6 +275,8 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Append to an existing file (optional operation).
+   *
+   * @return Generics Type B.
    */
   public B append() {
     flags.add(CreateFlag.APPEND);
@@ -260,6 +289,9 @@ public abstract class FSDataOutputStreamBuilder
 
   /**
    * Set checksum opt.
+   *
+   * @param chksumOpt the checksum option.
+   * @return Generics Type B.
    */
   public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
     checkNotNull(chksumOpt);
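
Putting the setters above together, a typical chained call (values
illustrative):

    FSDataOutputStream out = fs.createFile(new Path("/tmp/data.bin"))
        .permission(FsPermission.getFileDefault())
        .bufferSize(4096)
        .replication((short) 3)
        .blockSize(128 * 1024 * 1024)
        .recursive()        // create missing parent directories
        .overwrite(true)
        .build();
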
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index de66eab713a..ee16ca8a2cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -82,6 +82,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @param sum the type of Checksum engine
   * @param chunkSize maximum chunk size
    * @param checksumSize the number byte of each checksum
+   * @param verifyChecksum whether to verify the checksum.
    */
   protected FSInputChecker( Path file, int numOfRetries, 
       boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -118,6 +119,7 @@ abstract public class FSInputChecker extends FSInputStream {
    * @param len maximum number of bytes to read
    * @param checksum the data buffer into which to write checksums
    * @return number of bytes read
+   * @throws IOException raised on errors performing I/O.
    */
   abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
       byte[] checksum) throws IOException;
@@ -129,7 +131,10 @@ abstract public class FSInputChecker extends FSInputStream {
    */
   abstract protected long getChunkPosition(long pos);
 
-  /** Return true if there is a need for checksum verification */
+  /**
+   * Return true if there is a need for checksum verification.
+   * @return true if checksum verification is needed, false otherwise.
+   */
   protected synchronized boolean needChecksum() {
     return verifyChecksum && sum != null;
   }
@@ -357,6 +362,9 @@ abstract public class FSInputChecker extends FSInputStream {
    * Convert a checksum byte array to a long
    * This is deprecated since 0.22 since it is no longer in use
    * by this class.
+   *
+   * @param checksum the checksum byte array.
+   * @return the checksum as a long.
    */
   @Deprecated
   static public long checksum2long(byte[] checksum) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
index ffe4b34ca5f..f85cf7a8581 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
@@ -74,7 +74,7 @@ public abstract class FSLinkResolver<T> {
    * @param fc FileContext used to access file systems.
    * @param path The path to resolve symlinks on.
    * @return Generic type determined by the implementation of next.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public T resolve(final FileContext fc, final Path path) throws IOException {
     int count = 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 6de026b9d17..4ef512dc257 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -186,6 +186,8 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Return the number of valid bytes currently in the buffer.
+   *
+   * @return buffer data size.
    */
   protected synchronized int getBufferedDataSize() {
     return count;
@@ -227,6 +229,10 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Converts a checksum integer value to a byte stream
+   *
+   * @param sum the checksum engine.
+   * @param checksumSize the checksum size in bytes.
+   * @return the checksum as a byte array.
    */
   static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
     return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -245,6 +251,8 @@ abstract public class FSOutputSummer extends OutputStream implements
 
   /**
    * Resets existing buffer with a new one of the specified size.
+   *
+   * @param size size.
    */
   protected synchronized void setChecksumBufSize(int size) {
     this.buf = new byte[size];
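
A hedged sketch of convertToByteStream() with a JDK checksum engine (byte
order follows the class's int2byte() helper; data is assumed to be a
populated byte[]):

    java.util.zip.CRC32 sum = new java.util.zip.CRC32();
    sum.update(data, 0, data.length);
    // low 32 bits of the CRC packed into a 4-byte array
    byte[] checksumBytes = FSOutputSummer.convertToByteStream(sum, 4);
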
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 6822fa48562..62d2e3af786 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -28,20 +28,37 @@ import org.apache.hadoop.io.Writable;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public abstract class FileChecksum implements Writable {
-  /** The checksum algorithm name */
+  /**
+   * The checksum algorithm name.
+   *
+   * @return algorithm name.
+   */
   public abstract String getAlgorithmName();
 
-  /** The length of the checksum in bytes */
+  /**
+   * The length of the checksum in bytes.
+   *
+   * @return length.
+   */
   public abstract int getLength();
 
-  /** The value of the checksum in bytes */
+  /**
+   * The value of the checksum in bytes.
+   *
+   * @return byte array.
+   */
   public abstract byte[] getBytes();
 
   public ChecksumOpt getChecksumOpt() {
     return null;
   }
 
-  /** Return true if both the algorithms and the values are the same. */
+  /**
+   * Return true if both the algorithms and the values are the same.
+   *
+   * @param other the object to compare against.
+   * @return true if equal, false otherwise.
+   */
   @Override
   public boolean equals(Object other) {
     if (other == this) {
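
The equality contract above lets callers compare files, e.g. after a copy
(a minimal sketch; getFileChecksum() may return null when checksums are
unsupported):

    FileChecksum c1 = fs.getFileChecksum(srcPath);
    FileChecksum c2 = fs.getFileChecksum(dstPath);
    if (c1 != null && c1.equals(c2)) {
      // same algorithm name and same checksum bytes
    }
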
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index f3004ce7e03..298570bb55f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -366,8 +366,8 @@ public class FileContext implements PathCapabilities {
    * Create a FileContext with specified FS as default using the specified
    * config.
    * 
-   * @param defFS
-   * @param aConf
+   * @param defFS default fs.
+   * @param aConf configuration.
    * @return new FileContext with specified FS as default.
    */
   public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified file system using the default config.
    * 
-   * @param defaultFS
+   * @param defaultFS default fs.
    * @return a FileContext with the specified AbstractFileSystem
    *                 as the default FS.
    */
@@ -411,6 +411,7 @@ public class FileContext implements PathCapabilities {
    * 
    * @throws UnsupportedFileSystemException If the file system from the default
    *           configuration is not supported
+   * @return file context.
    */
   public static FileContext getFileContext()
       throws UnsupportedFileSystemException {
@@ -430,7 +431,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified URI using the default config.
    * 
-   * @param defaultFsUri
+   * @param defaultFsUri the default FS URI.
    * @return a FileContext with the specified URI as the default FS.
    * 
    * @throws UnsupportedFileSystemException If the file system for
@@ -444,8 +445,8 @@ public class FileContext implements PathCapabilities {
   /**
    * Create a FileContext for specified default URI using the specified config.
    * 
-   * @param defaultFsUri
-   * @param aConf
+   * @param defaultFsUri the default FS URI.
+   * @param aConf configuration.
    * @return new FileContext for specified uri
    * @throws UnsupportedFileSystemException If the file system with specified is
    *           not supported
@@ -476,7 +477,7 @@ public class FileContext implements PathCapabilities {
    * {@link #getFileContext(URI, Configuration)} instead of this one.
    * 
    * 
-   * @param aConf
+   * @param aConf configuration.
    * @return new FileContext
    * @throws UnsupportedFileSystemException If file system in the config
    *           is not supported
@@ -554,6 +555,7 @@ public class FileContext implements PathCapabilities {
   
   /**
    * Gets the working directory for wd-relative names (such a "foo/bar").
+   * @return the path.
    */
   public Path getWorkingDirectory() {
     return workingDir;
@@ -600,13 +602,14 @@ public class FileContext implements PathCapabilities {
    * @throws FileNotFoundException  If <code>f</code> does not exist
    * @throws AccessControlException if access denied
    * @throws IOException If an IO Error occurred
-   * 
+   * @throws UnresolvedLinkException If an unresolved link is encountered.
+   *
    * Exceptions applicable to file systems accessed over RPC:
    * @throws RpcClientException If an exception occurred in the RPC client
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws
    *           undeclared exception to RPC server
-   * 
+   *
    * RuntimeExceptions:
    * @throws InvalidPathException If path <code>f</code> is not valid
    */
@@ -620,7 +623,7 @@ public class FileContext implements PathCapabilities {
    * A Fully-qualified path has scheme and authority specified and an absolute
    * path.
    * Use the default file system and working dir in this FileContext to qualify.
-   * @param path
+   * @param path the path.
    * @return qualified path
    */
   public Path makeQualified(final Path path) {
@@ -759,6 +762,7 @@ public class FileContext implements PathCapabilities {
    *
    * Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
    * same exceptions as create(Path, EnumSet, CreateOpts...).
+   * @throws IOException If an I/O error occurred.
    */
   public FSDataOutputStreamBuilder<FSDataOutputStream, ?> create(final Path f)
       throws IOException {
@@ -832,6 +836,8 @@ public class FileContext implements PathCapabilities {
    * 
    * RuntimeExceptions:
    * @throws InvalidPathException If path <code>f</code> is invalid
+   *
+   * @return true if delete succeeded, false otherwise.
    */
   public boolean delete(final Path f, final boolean recursive)
       throws AccessControlException, FileNotFoundException,
@@ -862,6 +868,7 @@ public class FileContext implements PathCapabilities {
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws 
    *           undeclared exception to RPC server
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f) throws AccessControlException,
       FileNotFoundException, UnsupportedFileSystemException, IOException {
@@ -892,6 +899,7 @@ public class FileContext implements PathCapabilities {
    * @throws RpcServerException If an exception occurred in the RPC server
    * @throws UnexpectedServerException If server implementation throws 
    *           undeclared exception to RPC server
+   * @return input stream.
    */
   public FSDataInputStream open(final Path f, final int bufferSize)
       throws AccessControlException, FileNotFoundException,
@@ -1001,6 +1009,7 @@ public class FileContext implements PathCapabilities {
    * 
    * @param src path to be renamed
    * @param dst new path after rename
+   * @param options rename options.
    * 
    * @throws AccessControlException If access is denied
    * @throws FileAlreadyExistsException If <code>dst</code> already exists and
@@ -1052,7 +1061,7 @@ public class FileContext implements PathCapabilities {
   
   /**
    * Set permission of a path.
-   * @param f
+   * @param f the path.
    * @param permission - the new absolute permission (umask is not applied)
    *
    * @throws AccessControlException If access is denied
@@ -1196,7 +1205,7 @@ public class FileContext implements PathCapabilities {
    * Set the verify checksum flag for the  file system denoted by the path.
    * This is only applicable if the 
    * corresponding FileSystem supports checksum. By default doesn't do anything.
-   * @param verifyChecksum
+   * @param verifyChecksum whether to verify checksums.
    * @param f set the verifyChecksum for the Filesystem containing this path
    *
    * @throws AccessControlException If access is denied
@@ -1251,8 +1260,9 @@ public class FileContext implements PathCapabilities {
   /**
    * Synchronize client metadata state.
    *
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException If an I/O error occurred.
+   * @throws UnsupportedOperationException If file system for <code>f</code> is
+   *                                       not supported.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     defaultFS.msync();
@@ -1613,9 +1623,12 @@ public class FileContext implements PathCapabilities {
   }
 
   /**
+   * List corrupt file blocks.
+   *
+   * @param path the path.
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public RemoteIterator<Path> listCorruptFileBlocks(Path path)
     throws IOException {
@@ -1739,6 +1752,7 @@ public class FileContext implements PathCapabilities {
      * @throws RpcServerException If an exception occurred in the RPC server
      * @throws UnexpectedServerException If server implementation throws 
      *           undeclared exception to RPC server
+     * @return true if f exists, false otherwise.
      */
     public boolean exists(final Path f) throws AccessControlException,
       UnsupportedFileSystemException, IOException {
@@ -1799,6 +1813,12 @@ public class FileContext implements PathCapabilities {
     
     /**
      * See {@link #listStatus(Path[], PathFilter)}
+     *
+     * @param files files.
+     * @throws AccessControlException If access is denied.
+     * @throws FileNotFoundException If <code>files</code> does not exist.
+     * @throws IOException If an I/O error occurred.
+     * @return file status array.
      */
     public FileStatus[] listStatus(Path[] files) throws AccessControlException,
         FileNotFoundException, IOException {
@@ -2054,36 +2074,29 @@ public class FileContext implements PathCapabilities {
      *    <dt> <tt> ? </tt>
      *    <dd> Matches any single character.
      *
-     *    <p>
      *    <dt> <tt> * </tt>
      *    <dd> Matches zero or more characters.
      *
-     *    <p>
      *    <dt> <tt> [<i>abc</i>] </tt>
      *    <dd> Matches a single character from character set
      *     <tt>{<i>a,b,c</i>}</tt>.
      *
-     *    <p>
      *    <dt> <tt> [<i>a</i>-<i>b</i>] </tt>
      *    <dd> Matches a single character from the character range
      *     <tt>{<i>a...b</i>}</tt>. Note: character <tt><i>a</i></tt> must be
      *     lexicographically less than or equal to character <tt><i>b</i></tt>.
      *
-     *    <p>
      *    <dt> <tt> [^<i>a</i>] </tt>
      *    <dd> Matches a single char that is not from character set or range
      *     <tt>{<i>a</i>}</tt>.  Note that the <tt>^</tt> character must occur
      *     immediately to the right of the opening bracket.
      *
-     *    <p>
      *    <dt> <tt> \<i>c</i> </tt>
      *    <dd> Removes (escapes) any special meaning of character <i>c</i>.
      *
-     *    <p>
      *    <dt> <tt> {ab,cd} </tt>
      *    <dd> Matches a string from the string set <tt>{<i>ab, cd</i>} </tt>
-     *    
-     *    <p>
+     *
      *    <dt> <tt> {ab,c{de,fh}} </tt>
      *    <dd> Matches a string from string set <tt>{<i>ab, cde, cfh</i>}</tt>
      *
@@ -2144,6 +2157,18 @@ public class FileContext implements PathCapabilities {
     /**
      * Copy file from src to dest. See
      * {@link #copy(Path, Path, boolean, boolean)}
+     *
+     * @param src src.
+     * @param dst dst.
+     * @throws AccessControlException If access is denied.
+     * @throws FileAlreadyExistsException If <code>dst</code> already exists.
+     * @throws FileNotFoundException If <code>src</code> does not exist.
+     * @throws ParentNotDirectoryException If parent of <code>dst</code> is not
+     * a directory.
+     * @throws UnsupportedFileSystemException If file system for
+     * <code>src/dst</code> is not supported.
+     * @throws IOException If an I/O error occurred.
+     * @return true if the copy succeeded, false otherwise.
      */
     public boolean copy(final Path src, final Path dst)
         throws AccessControlException, FileAlreadyExistsException,
@@ -2154,8 +2179,8 @@ public class FileContext implements PathCapabilities {
     
     /**
      * Copy from src to dst, optionally deleting src and overwriting dst.
-     * @param src
-     * @param dst
+     * @param src src.
+     * @param dst dst.
      * @param deleteSource - delete src if true
      * @param overwrite  overwrite dst if true; throw IOException if dst exists
      *         and overwrite is false.
@@ -2276,7 +2301,7 @@ public class FileContext implements PathCapabilities {
    * Are qualSrc and qualDst of the same file system?
    * @param qualPath1 - fully qualified path
    * @param qualPath2 - fully qualified path
-   * @return
+   * @return true if both paths are on the same file system, false otherwise.
    */
   private static boolean isSameFS(Path qualPath1, Path qualPath2) {
     URI srcUri = qualPath1.toUri();
@@ -2299,6 +2324,13 @@ public class FileContext implements PathCapabilities {
   /**
    * Resolves all symbolic links in the specified path.
    * Returns the new path object.
+   *
+   * @param f the path.
+   * @throws FileNotFoundException If <code>f</code> does not exist.
+   * @throws UnresolvedLinkException If an unresolved link is encountered.
+   * @throws AccessControlException If access is denied.
+   * @throws IOException If an I/O error occurred.
+   * @return the resolved path.
    */
   protected Path resolve(final Path f) throws FileNotFoundException,
       UnresolvedLinkException, AccessControlException, IOException {
@@ -2316,6 +2348,7 @@ public class FileContext implements PathCapabilities {
    * to, but not including the final path component.
    * @param f path to resolve
    * @return the new path object.
+   * @throws IOException If an I/O error occurred.
    */
   protected Path resolveIntermediate(final Path f) throws IOException {
     return new FSLinkResolver<FileStatus>() {
@@ -2334,7 +2367,7 @@ public class FileContext implements PathCapabilities {
    * @param f
    *          Path which needs to be resolved
    * @return List of AbstractFileSystems accessed in the path
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   Set<AbstractFileSystem> resolveAbstractFileSystems(final Path f)
       throws IOException {
@@ -2395,7 +2428,7 @@ public class FileContext implements PathCapabilities {
    * @param p Path for which delegations tokens are requested.
    * @param renewer the account name that is allowed to renew the token.
    * @return List of delegation tokens.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   @InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
   public List<Token<?>> getDelegationTokens(
@@ -2547,7 +2580,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to modify
    * @param name xattr name.
    * @param value xattr value.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void setXAttr(Path path, String name, byte[] value)
       throws IOException {
@@ -2566,7 +2599,7 @@ public class FileContext implements PathCapabilities {
    * @param name xattr name.
    * @param value xattr value.
    * @param flag xattr set flag
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void setXAttr(Path path, final String name, final byte[] value,
       final EnumSet<XAttrSetFlag> flag) throws IOException {
@@ -2591,7 +2624,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attribute
    * @param name xattr name.
    * @return byte[] xattr value.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public byte[] getXAttr(Path path, final String name) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2614,7 +2647,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
    * of the file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Map<String, byte[]> getXAttrs(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2638,7 +2671,7 @@ public class FileContext implements PathCapabilities {
    * @param names XAttr names.
    * @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
    * of the file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Map<String, byte[]> getXAttrs(Path path, final List<String> names)
       throws IOException {
@@ -2661,7 +2694,7 @@ public class FileContext implements PathCapabilities {
    *
    * @param path Path to remove extended attribute
    * @param name xattr name
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void removeXAttr(Path path, final String name) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2685,7 +2718,7 @@ public class FileContext implements PathCapabilities {
    * @param path Path to get extended attributes
    * @return List{@literal <}String{@literal >} of the XAttr names of the
    * file or directory
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public List<String> listXAttrs(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2802,7 +2835,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void satisfyStoragePolicy(final Path path)
       throws IOException {
@@ -2824,6 +2857,7 @@ public class FileContext implements PathCapabilities {
    * @param policyName the name of the target storage policy. The list
    *                   of supported Storage policies can be retrieved
    *                   via {@link #getAllStoragePolicies}.
+   * @throws IOException If an I/O error occurred.
    */
   public void setStoragePolicy(final Path path, final String policyName)
       throws IOException {
@@ -2841,7 +2875,7 @@ public class FileContext implements PathCapabilities {
   /**
    * Unset the storage policy set for a given file or directory.
    * @param src file or directory path.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void unsetStoragePolicy(final Path src) throws IOException {
     final Path absF = fixRelativePart(src);
@@ -2860,7 +2894,7 @@ public class FileContext implements PathCapabilities {
    *
    * @param path file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
     final Path absF = fixRelativePart(path);
@@ -2878,7 +2912,7 @@ public class FileContext implements PathCapabilities {
    * Retrieve all the storage policies supported by this file system.
    *
    * @return all storage policies supported by this filesystem.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
       throws IOException {
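
A short sketch tying several of the operations above together (paths, xattr
name and policy name are illustrative; xattr names require a namespace
prefix such as "user."):

    FileContext fc = FileContext.getFileContext(new Configuration());
    Path p = new Path("/user/alice/data");
    fc.setXAttr(p, "user.origin", "etl-job".getBytes(StandardCharsets.UTF_8));
    byte[] origin = fc.getXAttr(p, "user.origin");
    fc.setStoragePolicy(p, "COLD");  // must name one of getAllStoragePolicies()
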
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index 9260b9a62c6..f50c06cec38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -52,6 +52,7 @@ public class FileEncryptionInfo implements Serializable {
    * @param keyName name of the key used for the encryption zone
    * @param ezKeyVersionName name of the KeyVersion used to encrypt the
    *                         encrypted data encryption key.
+   * @param version version.
    */
   public FileEncryptionInfo(final CipherSuite suite,
       final CryptoProtocolVersion version, final byte[] edek,
@@ -134,6 +135,8 @@ public class FileEncryptionInfo implements Serializable {
    *
    * NOTE:
    * Currently this method is used by CLI for backward compatibility.
+   *
+   * @return a stable string representation.
    */
   public String toStringStable() {
     StringBuilder builder = new StringBuilder("{")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
index d7ca8f172f8..fcef578b072 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -116,6 +116,17 @@ public class FileStatus implements Writable, Comparable<Object>,
 
   /**
    * Constructor for file systems on which symbolic links are not supported
+   *
+   * @param length length.
+   * @param isdir isdir.
+   * @param block_replication block replication.
+   * @param blocksize block size.
+   * @param modification_time modification time.
+   * @param access_time access_time.
+   * @param permission permission.
+   * @param owner owner.
+   * @param group group.
+   * @param path the path.
    */
   public FileStatus(long length, boolean isdir,
                     int block_replication,
@@ -182,6 +193,7 @@ public class FileStatus implements Writable, Comparable<Object>,
    * Copy constructor.
    *
    * @param other FileStatus to copy
+   * @throws IOException raised on errors performing I/O.
    */
   public FileStatus(FileStatus other) throws IOException {
     // It's important to call the getters here instead of directly accessing the
@@ -375,6 +387,8 @@ public class FileStatus implements Writable, Comparable<Object>,
 
   /**
    * @return The contents of the symbolic link.
+   *
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getSymlink() throws IOException {
     if (!isSymlink()) {
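
For reference, the ten-argument constructor documented above, fully
populated (values illustrative):

    FileStatus status = new FileStatus(
        1024L,                        // length
        false,                        // isdir
        3,                            // block_replication
        128L * 1024 * 1024,           // blocksize
        System.currentTimeMillis(),   // modification_time
        System.currentTimeMillis(),   // access_time
        FsPermission.getFileDefault(),
        "alice", "users",             // owner, group
        new Path("/user/alice/data"));
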
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index aa194e84a35..0bc419b0353 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -104,13 +104,13 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * All user code that may potentially use the Hadoop Distributed
  * File System should be written to use a FileSystem object or its
  * successor, {@link FileContext}.
- *
+ * </p>
  * <p>
  * The local implementation is {@link LocalFileSystem} and distributed
  * implementation is DistributedFileSystem. There are other implementations
  * for object stores and (outside the Apache Hadoop codebase),
  * third party filesystems.
- * <p>
+ * </p>
  * Notes
  * <ol>
  * <li>The behaviour of the filesystem is
@@ -133,13 +133,12 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * New methods may be marked as Unstable or Evolving for their initial release,
  * as a warning that they are new and may change based on the
  * experience of use in applications.
- * <p></p>
+ * <p>
  * <b>Important note for developers</b>
- * <p></p>
+ * </p>
  * If you are making changes here to the public API or protected methods,
  * you must review the following subclasses and make sure that
  * they are filtering/passing through new methods as appropriate.
- * <p></p>
  *
  * {@link FilterFileSystem}: methods are passed through. If not,
  * then {@code TestFilterFileSystem.MustNotImplement} must be
@@ -148,21 +147,22 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
  * {@link #hasPathCapability(Path, String)} then
  * {@link FilterFileSystem#hasPathCapability(Path, String)}
  * must return false, always.
- * <p></p>
+ * <p>
  * {@link ChecksumFileSystem}: checksums are created and
  * verified.
- * <p></p>
+ * </p>
  * {@code TestHarFileSystem} will need its {@code MustNotImplement}
  * interface updated.
- * <p></p>
  *
+ * <p>
  * There are some external places your changes will break things.
  * Do co-ordinate changes here.
- * <p></p>
+ * </p>
  *
  * HBase: HBoss
- * <p></p>
+ * <p>
  * Hive: HiveShim23
+ * </p>
  * {@code shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java}
  *
  *****************************************************************/
@@ -281,6 +281,8 @@ public abstract class FileSystem extends Configured
   /**
    * Returns the configured FileSystem implementation.
    * @param conf the configuration to use
+   * @return FileSystem.
+   * @throws IOException If an I/O error occurred.
    */
   public static FileSystem get(Configuration conf) throws IOException {
     return get(getDefaultUri(conf), conf);
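
Usage of the get() overloads (URI illustrative):

    Configuration conf = new Configuration();
    FileSystem defaultFs = FileSystem.get(conf);   // uses fs.defaultFS
    FileSystem hdfs =
        FileSystem.get(URI.create("hdfs://namenode:8020/"), conf);
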
@@ -375,6 +377,7 @@ public abstract class FileSystem extends Configured
    * implement that method.
    *
    * @see #canonicalizeUri(URI)
+   * @return the URI of this filesystem.
    */
   protected URI getCanonicalUri() {
     return canonicalizeUri(getUri());
@@ -391,6 +394,7 @@ public abstract class FileSystem extends Configured
    * not specified and if {@link #getDefaultPort()} returns a
    * default port.
    *
+   * @param uri the URI to canonicalize.
    * @return URI
    * @see NetUtils#getCanonicalUri(URI, int)
    */
@@ -454,11 +458,21 @@ public abstract class FileSystem extends Configured
       : null;
   }
 
-  /** @deprecated call {@link #getUri()} instead.*/
+  /**
+   * @return the URI as a string.
+   * @deprecated call {@link #getUri()} instead.
+   */
   @Deprecated
   public String getName() { return getUri().toString(); }
 
-  /** @deprecated call {@link #get(URI, Configuration)} instead. */
+  /**
+   * @deprecated call {@link #get(URI, Configuration)} instead.
+   *
+   * @param name name.
+   * @param conf configuration.
+   * @return file system.
+   * @throws IOException If an I/O error occurred.
+   */
   @Deprecated
   public static FileSystem getNamed(String name, Configuration conf)
     throws IOException {
@@ -513,6 +527,9 @@ public abstract class FileSystem extends Configured
    *   configuration and URI, cached and returned to the caller.
    * </li>
    * </ol>
+   * @param uri uri of the filesystem.
+   * @param conf configuration.
+   * @return filesystem instance.
    * @throws IOException if the FileSystem cannot be instantiated.
    */
   public static FileSystem get(URI uri, Configuration conf) throws IOException {
@@ -542,7 +559,7 @@ public abstract class FileSystem extends Configured
   /**
    * Returns the FileSystem for this URI's scheme and authority and the
    * given user. Internally invokes {@link #newInstance(URI, Configuration)}
-   * @param uri of the filesystem
+   * @param uri uri of the filesystem.
    * @param conf the configuration to use
    * @param user to perform the get as
    * @return filesystem instance
@@ -860,6 +877,7 @@ public abstract class FileSystem extends Configured
    * @param start offset into the given file
    * @param len length for which to get locations for
    * @throws IOException IO failure
+   * @return block location array.
    */
   public BlockLocation[] getFileBlockLocations(FileStatus file,
       long start, long len) throws IOException {
@@ -900,6 +918,7 @@ public abstract class FileSystem extends Configured
    * @param len length for which to get locations for
    * @throws FileNotFoundException when the path does not exist
    * @throws IOException IO failure
+   * @return block location array.
    */
   public BlockLocation[] getFileBlockLocations(Path p,
       long start, long len) throws IOException {
@@ -962,6 +981,7 @@ public abstract class FileSystem extends Configured
    * @param f the file name to open
    * @param bufferSize the size of the buffer to be used.
    * @throws IOException IO failure
+   * @return input stream.
    */
   public abstract FSDataInputStream open(Path f, int bufferSize)
     throws IOException;
@@ -970,6 +990,7 @@ public abstract class FileSystem extends Configured
    * Opens an FSDataInputStream at the indicated Path.
    * @param f the file to open
    * @throws IOException IO failure
+   * @return input stream.
    */
   public FSDataInputStream open(Path f) throws IOException {
     return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -987,6 +1008,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException If {@link #open(PathHandle, int)}
    *                                       not overridden by subclass
+   * @return input stream.
    */
   public FSDataInputStream open(PathHandle fd) throws IOException {
     return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1004,6 +1026,7 @@ public abstract class FileSystem extends Configured
    *                                    not satisfied
    * @throws IOException IO failure
    * @throws UnsupportedOperationException If not overridden by subclass
+   * @return input stream.
    */
   public FSDataInputStream open(PathHandle fd, int bufferSize)
       throws IOException {
@@ -1021,6 +1044,7 @@ public abstract class FileSystem extends Configured
    *         not overridden by subclass.
    * @throws UnsupportedOperationException If this FileSystem cannot enforce
    *         the specified constraints.
+   * @return path handle.
    */
   public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) {
     // method is final with a default so clients calling getPathHandle(stat)
@@ -1036,6 +1060,7 @@ public abstract class FileSystem extends Configured
    * @param stat Referent in the target FileSystem
    * @param opt Constraints that determine the validity of the
    *            {@link PathHandle} reference.
+   * @return path handle.
    */
   protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) {
     throw new UnsupportedOperationException();
@@ -1046,6 +1071,7 @@ public abstract class FileSystem extends Configured
    * Files are overwritten by default.
    * @param f the file to create
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f) throws IOException {
     return create(f, true);
@@ -1057,6 +1083,7 @@ public abstract class FileSystem extends Configured
    * @param overwrite if a file with this name already exists, then if true,
    *   the file will be overwritten, and if false an exception will be thrown.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, boolean overwrite)
       throws IOException {
@@ -1074,6 +1101,7 @@ public abstract class FileSystem extends Configured
    * @param f the file to create
    * @param progress to report progress
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, Progressable progress)
       throws IOException {
@@ -1090,6 +1118,7 @@ public abstract class FileSystem extends Configured
    * @param f the file to create
    * @param replication the replication factor
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, short replication)
       throws IOException {
@@ -1108,6 +1137,7 @@ public abstract class FileSystem extends Configured
    * @param replication the replication factor
    * @param progress to report progress
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f, short replication,
       Progressable progress) throws IOException {
@@ -1125,6 +1155,7 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                    boolean overwrite,
@@ -1144,7 +1175,9 @@ public abstract class FileSystem extends Configured
    * @param overwrite if a file with this name already exists, then if true,
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
+   * @param progress to report progress.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                    boolean overwrite,
@@ -1164,7 +1197,9 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @param replication required block replication for the file.
+   * @param blockSize the block size to be used.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       boolean overwrite,
@@ -1182,7 +1217,10 @@ public abstract class FileSystem extends Configured
    *   the file will be overwritten, and if false an error will be thrown.
    * @param bufferSize the size of the buffer to be used.
    * @param replication required block replication for the file.
+   * @param blockSize the block size to be used.
+   * @param progress to report progress.
    * @throws IOException IO failure
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
                                             boolean overwrite,
@@ -1209,6 +1247,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public abstract FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1230,6 +1269,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       FsPermission permission,
@@ -1256,6 +1296,7 @@ public abstract class FileSystem extends Configured
    *        found in conf will be used.
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream create(Path f,
       FsPermission permission,
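
    [Editorial sketch, not part of the patch: the convenience create() overloads
    above ultimately delegate to the permission-taking variant. A minimal usage
    sketch; the path, payload and sizes are illustrative, and all types come from
    org.apache.hadoop.fs and org.apache.hadoop.conf.]

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Overload taking (path, overwrite, bufferSize, replication, blockSize).
        try (FSDataOutputStream out = fs.create(new Path("/tmp/demo.txt"),
            true, 4096, (short) 3, 128L * 1024 * 1024)) {
          out.writeBytes("hello\n");
        }
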
@@ -1277,6 +1318,16 @@ public abstract class FileSystem extends Configured
    * the permission with umask before calling this method.
    * This a temporary method added to support the transition from FileSystem
    * to FileContext for user applications.
+   *
+   * @param f path.
+   * @param absolutePermission permission.
+   * @param flag create flag.
+   * @param bufferSize buffer size.
+   * @param replication replication factor.
+   * @param blockSize block size.
+   * @param progress progress reporter.
+   * @param checksumOpt checksum option.
+   * @return output stream.
    * @throws IOException IO failure
    */
   @Deprecated
@@ -1331,6 +1382,11 @@ public abstract class FileSystem extends Configured
    * with umask before calling this method.
    * This a temporary method added to support the transition from FileSystem
    * to FileContext for user applications.
+   *
+   * @param f the path.
+   * @param absolutePermission permission.
+   * @param createParent whether to create the parent directories.
+   * @throws IOException IO failure.
    */
   @Deprecated
   protected void primitiveMkdir(Path f, FsPermission absolutePermission,
@@ -1370,6 +1426,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
   public FSDataOutputStream createNonRecursive(Path f,
       boolean overwrite,
@@ -1393,6 +1450,7 @@ public abstract class FileSystem extends Configured
    * @param progress the progress reporter
    * @throws IOException IO failure
    * @see #setPermission(Path, FsPermission)
+   * @return output stream.
    */
    public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
        boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -1416,6 +1474,7 @@ public abstract class FileSystem extends Configured
     * @param progress the progress reporter
     * @throws IOException IO failure
     * @see #setPermission(Path, FsPermission)
+    * @return output stream.
     */
     public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
         EnumSet<CreateFlag> flags, int bufferSize, short replication, long blockSize,
@@ -1430,6 +1489,7 @@ public abstract class FileSystem extends Configured
    * <i>Important: the default implementation is not atomic</i>
    * @param f path to use for create
    * @throws IOException IO failure
+   * @return true if the file was created, false if it already exists.
    */
   public boolean createNewFile(Path f) throws IOException {
     if (exists(f)) {
@@ -1450,6 +1510,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public FSDataOutputStream append(Path f) throws IOException {
     return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1464,6 +1525,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
     return append(f, bufferSize, null);
@@ -1477,6 +1539,7 @@ public abstract class FileSystem extends Configured
    * @throws IOException IO failure
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default).
+   * @return output stream.
    */
   public abstract FSDataOutputStream append(Path f, int bufferSize,
       Progressable progress) throws IOException;
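
    [Editorial sketch: append() is an optional operation and may throw
    UnsupportedOperationException, so callers typically guard it. The path is
    illustrative.]

        FileSystem fs = FileSystem.get(new Configuration());
        // Throws UnsupportedOperationException on filesystems without append.
        try (FSDataOutputStream out = fs.append(new Path("/logs/app.log"))) {
          out.writeBytes("one more line\n");
        }
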
@@ -1515,7 +1578,7 @@ public abstract class FileSystem extends Configured
    * This is the default behavior.
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException an IO failure.
    * @return true if successful, or the feature in unsupported;
    *         false if replication is supported but the file does not exist,
    *         or is a directory
@@ -1544,11 +1607,12 @@ public abstract class FileSystem extends Configured
    * <p>
    * If OVERWRITE option is not passed as an argument, rename fails
    * if the dst already exists.
+   * </p>
    * <p>
    * If OVERWRITE option is passed as an argument, rename overwrites
    * the dst if it is a file or an empty directory. Rename fails if dst is
    * a non-empty directory.
-   * <p>
+   * </p>
    * Note that atomicity of rename is dependent on the file system
    * implementation. Please refer to the file system documentation for
    * details. This default implementation is non atomic.
@@ -1556,9 +1620,11 @@ public abstract class FileSystem extends Configured
    * This method is deprecated since it is a temporary method added to
    * support the transition from FileSystem to FileContext for user
    * applications.
+   * </p>
    *
    * @param src path to be renamed
    * @param dst new path after rename
+   * @param options rename options.
    * @throws FileNotFoundException src path does not exist, or the parent
    * path of dst does not exist.
    * @throws FileAlreadyExistsException dest path exists and is a file
@@ -1653,6 +1719,9 @@ public abstract class FileSystem extends Configured
 
   /**
    * Delete a file/directory.
+   * @param f the path.
+   * @throws IOException IO failure.
+   * @return true if delete is successful, else false.
    * @deprecated Use {@link #delete(Path, boolean)} instead.
    */
   @Deprecated
@@ -1769,6 +1838,7 @@ public abstract class FileSystem extends Configured
    * @param f path to check
    * @throws IOException IO failure
    * @deprecated Use {@link #getFileStatus(Path)} instead
+   * @return true if f is a directory, else false.
    */
   @Deprecated
   public boolean isDirectory(Path f) throws IOException {
@@ -1786,6 +1856,7 @@ public abstract class FileSystem extends Configured
    * @param f path to check
    * @throws IOException IO failure
    * @deprecated Use {@link #getFileStatus(Path)} instead
+   * @return true if f is a file, else false.
    */
   @Deprecated
   public boolean isFile(Path f) throws IOException {
@@ -1798,6 +1869,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * The number of bytes in a file.
+   * @param f the path.
    * @return the number of bytes; 0 for a directory
    * @deprecated Use {@link #getFileStatus(Path)} instead.
    * @throws FileNotFoundException if the path does not resolve
@@ -1812,6 +1884,7 @@ public abstract class FileSystem extends Configured
    * @param f path to use
    * @throws FileNotFoundException if the path does not resolve
    * @throws IOException IO failure
+   * @return content summary.
    */
   public ContentSummary getContentSummary(Path f) throws IOException {
     FileStatus status = getFileStatus(f);
@@ -1946,9 +2019,9 @@ public abstract class FileSystem extends Configured
    * @param f Path to list
    * @param token opaque iteration token returned by previous call, or null
    *              if this is the first call.
-   * @return
-   * @throws FileNotFoundException
-   * @throws IOException
+   * @return directory entries.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws IOException If an I/O error occurred.
    */
   @InterfaceAudience.Private
   protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws
@@ -1979,6 +2052,8 @@ public abstract class FileSystem extends Configured
 
   /**
    * List corrupted file blocks.
+   *
+   * @param path the path.
    * @return an iterator over the corrupt files under the given path
    * (may contain duplicates if a file has more than one corrupt block)
    * @throws UnsupportedOperationException if the operation is unsupported
@@ -2072,36 +2147,29 @@ public abstract class FileSystem extends Configured
    *    <dt> <tt> ? </tt>
    *    <dd> Matches any single character.
    *
-   *    <p>
    *    <dt> <tt> * </tt>
    *    <dd> Matches zero or more characters.
    *
-   *    <p>
    *    <dt> <tt> [<i>abc</i>] </tt>
    *    <dd> Matches a single character from character set
    *     <tt>{<i>a,b,c</i>}</tt>.
    *
-   *    <p>
    *    <dt> <tt> [<i>a</i>-<i>b</i>] </tt>
    *    <dd> Matches a single character from the character range
    *     <tt>{<i>a...b</i>}</tt>.  Note that character <tt><i>a</i></tt> must be
    *     lexicographically less than or equal to character <tt><i>b</i></tt>.
    *
-   *    <p>
    *    <dt> <tt> [^<i>a</i>] </tt>
    *    <dd> Matches a single character that is not from character set or range
    *     <tt>{<i>a</i>}</tt>.  Note that the <tt>^</tt> character must occur
    *     immediately to the right of the opening bracket.
    *
-   *    <p>
    *    <dt> <tt> \<i>c</i> </tt>
    *    <dd> Removes (escapes) any special meaning of character <i>c</i>.
    *
-   *    <p>
    *    <dt> <tt> {ab,cd} </tt>
    *    <dd> Matches a string from the string set <tt>{<i>ab, cd</i>} </tt>
    *
-   *    <p>
    *    <dt> <tt> {ab,c{de,fh}} </tt>
    *    <dd> Matches a string from the string set <tt>{<i>ab, cde, cfh</i>}</tt>
    *
@@ -2332,6 +2400,7 @@ public abstract class FileSystem extends Configured
 
   /** Return the current user's home directory in this FileSystem.
    * The default implementation returns {@code "/user/$USER/"}.
+   * @return the user's home directory path.
    */
   public Path getHomeDirectory() {
     String username;
@@ -2394,6 +2463,7 @@ public abstract class FileSystem extends Configured
    * @param f path to create
    * @param permission to apply to f
    * @throws IOException IO failure
+   * @return true if the directory was created, else false.
    */
   public abstract boolean mkdirs(Path f, FsPermission permission
       ) throws IOException;
@@ -2441,6 +2511,7 @@ public abstract class FileSystem extends Configured
    * @param delSrc whether to delete the src
    * @param src path
    * @param dst path
+   * @throws IOException IO failure.
    */
   public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
     throws IOException {
@@ -2555,6 +2626,7 @@ public abstract class FileSystem extends Configured
    * @param fsOutputFile path of output file
    * @param tmpLocalFile path of local tmp file
    * @throws IOException IO failure
+   * @return the path to write the output to.
    */
   public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
     throws IOException {
@@ -2602,6 +2674,7 @@ public abstract class FileSystem extends Configured
   /**
    * Return the total size of all files in the filesystem.
    * @throws IOException IO failure
+   * @return the number of bytes used.
    */
   public long getUsed() throws IOException {
     Path path = new Path("/");
@@ -2610,7 +2683,9 @@ public abstract class FileSystem extends Configured
 
   /**
    * Return the total size of all files from a specified path.
+   * @param path the path.
    * @throws IOException IO failure
+   * @return the number of bytes used under the path.
    */
   public long getUsed(Path path) throws IOException {
     return getContentSummary(path).getLength();
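
    [Editorial sketch: getUsed(path) is a convenience over getContentSummary(),
    which also carries file and directory counts. The path is illustrative.]

        FileSystem fs = FileSystem.get(new Configuration());
        ContentSummary cs = fs.getContentSummary(new Path("/user/alice"));
        System.out.println(cs.getLength() + " bytes in " + cs.getFileCount()
            + " files and " + cs.getDirectoryCount() + " directories");
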
@@ -2633,6 +2708,7 @@ public abstract class FileSystem extends Configured
    * Return the number of bytes that large input files should be optimally
    * be split into to minimize I/O time.
    * @deprecated use {@link #getDefaultBlockSize(Path)} instead
+   * @return default block size.
    */
   @Deprecated
   public long getDefaultBlockSize() {
@@ -2685,8 +2761,8 @@ public abstract class FileSystem extends Configured
    * In some FileSystem implementations such as HDFS metadata
    * synchronization is essential to guarantee consistency of read requests
    * particularly in HA setting.
-   * @throws IOException
-   * @throws UnsupportedOperationException
+   * @throws IOException If an I/O error occurred.
+   * @throws UnsupportedOperationException if the operation is unsupported.
    */
   public void msync() throws IOException, UnsupportedOperationException {
     throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -2762,6 +2838,8 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#fixRelativePart}.
+   * @param p the path.
+   * @return the absolute version of the path.
    */
   protected Path fixRelativePart(Path p) {
     if (p.isUriPathAbsolute()) {
@@ -2773,6 +2851,18 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#createSymlink(Path, Path, boolean)}.
+   *
+   * @param target target path.
+   * @param link the link path to create.
+   * @param createParent whether to create the parent directories.
+   * @throws AccessControlException if access is denied.
+   * @throws FileAlreadyExistsException if the link already exists.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws ParentNotDirectoryException if the parent path of dest is not
+   *                                     a directory.
+   * @throws UnsupportedFileSystemException if there was no known implementation
+   *                                        for the scheme.
+   * @throws IOException raised on errors performing I/O.
    */
   public void createSymlink(final Path target, final Path link,
       final boolean createParent) throws AccessControlException,
@@ -2786,8 +2876,14 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#getFileLinkStatus(Path)}.
-   * @throws FileNotFoundException when the path does not exist
-   * @throws IOException see specific implementation
+   *
+   * @param f the path.
+   * @throws AccessControlException if access is denied.
+   * @throws FileNotFoundException when the path does not exist.
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnsupportedFileSystemException if there was no known implementation
+   *                                        for the scheme.
+   * @return file status.
    */
   public FileStatus getFileLinkStatus(final Path f)
       throws AccessControlException, FileNotFoundException,
@@ -2798,6 +2894,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link AbstractFileSystem#supportsSymlinks()}.
+   * @return true if the file system supports symlinks, else false.
    */
   public boolean supportsSymlinks() {
     return false;
@@ -2805,8 +2902,11 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link FileContext#getLinkTarget(Path)}.
+   * @param f the path.
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default outcome).
+   * @throws IOException IO failure.
+   * @return the link target path.
    */
   public Path getLinkTarget(Path f) throws IOException {
     // Supporting filesystems should override this method
@@ -2816,8 +2916,11 @@ public abstract class FileSystem extends Configured
 
   /**
    * See {@link AbstractFileSystem#getLinkTarget(Path)}.
+   * @param f the path.
    * @throws UnsupportedOperationException if the operation is unsupported
    *         (default outcome).
+   * @throws IOException IO failure.
+   * @return the link target path.
    */
   protected Path resolveLink(Path f) throws IOException {
     // Supporting filesystems should override this method
@@ -3221,7 +3324,7 @@ public abstract class FileSystem extends Configured
   /**
    * Set the source path to satisfy storage policy.
    * @param path The source path referring to either a directory or a file.
-   * @throws IOException
+   * @throws IOException If an I/O error occurred.
    */
   public void satisfyStoragePolicy(final Path path) throws IOException {
     throw new UnsupportedOperationException(
@@ -3529,7 +3632,7 @@ public abstract class FileSystem extends Configured
      * @param conf configuration
      * @param key key to store/retrieve this FileSystem in the cache
      * @return a cached or newly instantiated FileSystem.
-     * @throws IOException
+     * @throws IOException If an I/O error occurred.
      */
     private FileSystem getInternal(URI uri, Configuration conf, Key key)
         throws IOException{
@@ -4024,6 +4127,7 @@ public abstract class FileSystem extends Configured
 
     /**
      * Get or create the thread-local data associated with the current thread.
+     * @return statistics data.
      */
     public StatisticsData getThreadStatistics() {
       StatisticsData data = threadData.get();
@@ -4382,6 +4486,7 @@ public abstract class FileSystem extends Configured
   /**
    * Return the FileSystem classes that have Statistics.
    * @deprecated use {@link #getGlobalStorageStatistics()}
+   * @return the list of statistics.
    */
   @Deprecated
   public static synchronized List<Statistics> getAllStatistics() {
@@ -4390,6 +4495,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Get the statistics for a particular file system.
+   * @param scheme the scheme of the FileSystem.
    * @param cls the class to lookup
    * @return a statistics object
    * @deprecated use {@link #getGlobalStorageStatistics()}
@@ -4424,6 +4530,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Print all statistics for all file systems to {@code System.out}
+   * @throws IOException If an I/O error occurred.
    */
   public static synchronized
   void printStatistics() throws IOException {
@@ -4464,6 +4571,7 @@ public abstract class FileSystem extends Configured
 
   /**
    * Get the global storage statistics.
+   * @return global storage statistics.
    */
   public static GlobalStorageStatistics getGlobalStorageStatistics() {
     return GlobalStorageStatistics.INSTANCE;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
index 7eec0eb7cec..593495a1daa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
@@ -38,8 +38,8 @@ public abstract class FileSystemLinkResolver<T> {
    * an UnresolvedLinkException if called on an unresolved {@link Path}.
    * @param p Path on which to perform an operation
    * @return Generic type returned by operation
-   * @throws IOException
-   * @throws UnresolvedLinkException
+   * @throws IOException raised on errors performing I/O.
+   * @throws UnresolvedLinkException unresolved link exception.
    */
   abstract public T doCall(final Path p) throws IOException,
       UnresolvedLinkException;
@@ -54,7 +54,7 @@ public abstract class FileSystemLinkResolver<T> {
    * @param p
    *          Resolved Target of path
    * @return Generic type determined by implementation
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   abstract public T next(final FileSystem fs, final Path p) throws IOException;
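
    [Editorial sketch: the doCall()/next() contract is normally exercised through
    a small anonymous subclass. Here fs and path are assumed to be in scope and
    effectively final; types come from org.apache.hadoop.fs.]

        FileStatus status = new FileSystemLinkResolver<FileStatus>() {
          @Override
          public FileStatus doCall(Path p)
              throws IOException, UnresolvedLinkException {
            return fs.getFileStatus(p);     // first attempt, may hit a symlink
          }
          @Override
          public FileStatus next(FileSystem linkFs, Path p) throws IOException {
            return linkFs.getFileStatus(p); // retried against the resolved target
          }
        }.resolve(fs, path);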
 
@@ -66,7 +66,7 @@ public abstract class FileSystemLinkResolver<T> {
    * @param filesys FileSystem with which to try call
    * @param path Path with which to try call
    * @return Generic type determined by implementation
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public T resolve(final FileSystem filesys, final Path path)
       throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 7400ca36daa..2af0a7b9e74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -162,6 +162,8 @@ public class FileUtil {
    * (3) If dir is a normal file, it is deleted.
    * (4) If dir is a normal directory, then dir and all its contents recursively
    *     are deleted.
+   * @param dir the file or directory to delete.
+   * @return true if the delete succeeded, else false.
    */
   public static boolean fullyDelete(final File dir) {
     return fullyDelete(dir, false);
@@ -257,6 +259,9 @@ public class FileUtil {
    * we return false, the directory may be partially-deleted.
    * If dir is a symlink to a directory, all the contents of the actual
    * directory pointed to by dir will be deleted.
+   *
+   * @param dir the directory whose contents are to be deleted.
+   * @return true if all contents were deleted, else false.
    */
   public static boolean fullyDeleteContents(final File dir) {
     return fullyDeleteContents(dir, false);
@@ -267,8 +272,11 @@ public class FileUtil {
    * we return false, the directory may be partially-deleted.
    * If dir is a symlink to a directory, all the contents of the actual
    * directory pointed to by dir will be deleted.
+   *
+   * @param dir the directory whose contents are to be deleted.
    * @param tryGrantPermissions if 'true', try grant +rwx permissions to this
    * and all the underlying directories before trying to delete their contents.
+   * @return true if all contents were deleted, else false.
    */
   public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) {
     if (tryGrantPermissions) {
@@ -311,7 +319,7 @@ public class FileUtil {
    *
    * @param fs {@link FileSystem} on which the path is present
    * @param dir directory to recursively delete
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link FileSystem#delete(Path, boolean)}
    */
   @Deprecated
@@ -343,7 +351,17 @@ public class FileUtil {
     }
   }
 
-  /** Copy files between FileSystems. */
+  /**
+   * Copy files between FileSystems.
+   * @param srcFS the source FileSystem.
+   * @param src the source path.
+   * @param dstFS the destination FileSystem.
+   * @param dst the destination path.
+   * @param deleteSource whether to delete the source after a successful copy.
+   * @param conf configuration.
+   * @return true if the copy succeeded, else false.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -391,7 +409,19 @@ public class FileUtil {
     return returnVal;
   }
 
-  /** Copy files between FileSystems. */
+  /**
+   * Copy files between FileSystems.
+   *
+   * @param srcFS the source FileSystem.
+   * @param src the source path.
+   * @param dstFS the destination FileSystem.
+   * @param dst the destination path.
+   * @param deleteSource whether to delete the source after a successful copy.
+   * @param overwrite whether to overwrite an existing destination file.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -403,20 +433,21 @@ public class FileUtil {
 
   /**
    * Copy a file/directory tree within/between filesystems.
-   * <p></p>
+   * <p>
    * returns true if the operation succeeded. When deleteSource is true,
    * this means "after the copy, delete(source) returned true"
    * If the destination is a directory, and mkdirs (dest) fails,
    * the operation will return false rather than raise any exception.
-   * <p></p>
+   * </p>
    * The overwrite flag is about overwriting files; it has no effect about
    * handing an attempt to copy a file atop a directory (expect an IOException),
    * or a directory over a path which contains a file (mkdir will fail, so
    * "false").
-   * <p></p>
+   * <p>
    * The operation is recursive, and the deleteSource operation takes place
    * as each subdirectory is copied. Therefore, if an operation fails partway
    * through, the source tree may be partially deleted.
+   * </p>
    * @param srcFS source filesystem
    * @param srcStatus status of source
    * @param dstFS destination filesystem
@@ -471,7 +502,17 @@ public class FileUtil {
 
   }
 
-  /** Copy local files to a FileSystem. */
+  /**
+   * Copy local files to a FileSystem.
+   *
+   * @param src the local source file.
+   * @param dstFS the destination FileSystem.
+   * @param dst the destination path.
+   * @param deleteSource whether to delete the source after a successful copy.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(File src,
                              FileSystem dstFS, Path dst,
                              boolean deleteSource,
@@ -514,7 +555,17 @@ public class FileUtil {
     }
   }
 
-  /** Copy FileSystem files to local files. */
+  /**
+   * Copy FileSystem files to local files.
+   *
+   * @param srcFS the source FileSystem.
+   * @param src the source path.
+   * @param dst the local destination file.
+   * @param deleteSource whether to delete the source after a successful copy.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return true if the operation succeeded.
+   */
   public static boolean copy(FileSystem srcFS, Path src,
                              File dst, boolean deleteSource,
                              Configuration conf) throws IOException {
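
    [Editorial sketch of the FileSystem-to-FileSystem variant; paths are
    illustrative. Per the javadoc above, a failed mkdirs on the destination
    yields false rather than an exception.]

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        boolean ok = FileUtil.copy(fs, new Path("/data/in"),
            fs, new Path("/data/out"), false /* deleteSource */, conf);
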
@@ -958,7 +1009,7 @@ public class FileUtil {
    *
    * @param inFile The tar file as input.
    * @param untarDir The untar directory where to untar the tar file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void unTar(File inFile, File untarDir) throws IOException {
     if (!untarDir.mkdirs()) {
@@ -1169,6 +1220,7 @@ public class FileUtil {
    * @param target the target for symlink
    * @param linkname the symlink
    * @return 0 on success
+   * @throws IOException raised on errors performing I/O.
    */
   public static int symLink(String target, String linkname) throws IOException{
 
@@ -1230,8 +1282,8 @@ public class FileUtil {
    * @param filename the name of the file to change
    * @param perm the permission string
    * @return the exit code from the command
-   * @throws IOException
-   * @throws InterruptedException
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException command interrupted.
    */
   public static int chmod(String filename, String perm
                           ) throws IOException, InterruptedException {
@@ -1245,7 +1297,7 @@ public class FileUtil {
    * @param perm permission string
    * @param recursive true, if permissions should be changed recursively
    * @return the exit code from the command.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static int chmod(String filename, String perm, boolean recursive)
                             throws IOException {
@@ -1271,7 +1323,7 @@ public class FileUtil {
    * @param file the file to change
    * @param username the new user owner name
    * @param groupname the new group owner name
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void setOwner(File file, String username,
       String groupname) throws IOException {
@@ -1288,7 +1340,7 @@ public class FileUtil {
    * Platform independent implementation for {@link File#setReadable(boolean)}
    * File#setReadable does not work as expected on Windows.
    * @param f input file
-   * @param readable
+   * @param readable whether the file should be readable.
    * @return true on success, false otherwise
    */
   public static boolean setReadable(File f, boolean readable) {
@@ -1309,7 +1361,7 @@ public class FileUtil {
    * Platform independent implementation for {@link File#setWritable(boolean)}
    * File#setWritable does not work as expected on Windows.
    * @param f input file
-   * @param writable
+   * @param writable whether the file should be writable.
    * @return true on success, false otherwise
    */
   public static boolean setWritable(File f, boolean writable) {
@@ -1333,7 +1385,7 @@ public class FileUtil {
    * behavior on Windows as on Unix platforms. Creating, deleting or renaming
    * a file within that folder will still succeed on Windows.
    * @param f input file
-   * @param executable
+   * @param executable whether the file should be executable.
    * @return true on success, false otherwise
    */
   public static boolean setExecutable(File f, boolean executable) {
@@ -1412,7 +1464,7 @@ public class FileUtil {
    * of forking if group == other.
    * @param f the file to change
    * @param permission the new permissions
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void setPermission(File f, FsPermission permission
                                    ) throws IOException {
@@ -1717,6 +1769,7 @@ public class FileUtil {
    * wildcard path to return all jars from the directory to use in a classpath.
    *
    * @param path the path to the directory. The path may include the wildcard.
+   * @param useLocal whether the directory is on the local filesystem.
    * @return the list of jars as URLs, or an empty list if there are no jars, or
    * the directory does not exist
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 607aa263622..cdbe51e3307 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -233,7 +233,7 @@ public class FilterFileSystem extends FileSystem {
    * 
    * @param src file name
    * @param replication new replication
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @return true if successful;
    *         false if file does not exist or is a directory
    */
@@ -304,7 +304,7 @@ public class FilterFileSystem extends FileSystem {
    * Set the current working directory for the given file system. All relative
    * paths will be resolved relative to it.
    * 
-   * @param newDir
+   * @param newDir the new working directory.
    */
   @Override
   public void setWorkingDirectory(Path newDir) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 7275b70227f..73258661ec1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -130,7 +130,7 @@ public class FsShell extends Configured implements Tool {
    * Returns the current trash location for the path specified
    * @param path to be deleted
    * @return path to the trash
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getCurrentTrashDir(Path path) throws IOException {
     return getTrash().getCurrentTrashDir(path);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index d392c7d765d..c4bc341bf4f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -35,24 +35,39 @@ public class FsStatus implements Writable {
   private long used;
   private long remaining;
 
-  /** Construct a FsStatus object, using the specified statistics */
+  /**
+   * Construct a FsStatus object, using the specified statistics.
+   *
+   * @param capacity the capacity of the filesystem in bytes.
+   * @param used the number of bytes used.
+   * @param remaining the number of bytes remaining.
+   */
   public FsStatus(long capacity, long used, long remaining) {
     this.capacity = capacity;
     this.used = used;
     this.remaining = remaining;
   }
 
-  /** Return the capacity in bytes of the file system */
+  /**
+   * Return the capacity in bytes of the file system.
+   * @return the capacity in bytes.
+   */
   public long getCapacity() {
     return capacity;
   }
 
-  /** Return the number of bytes used on the file system */
+  /**
+   * Return the number of bytes used on the file system.
+   * @return the number of bytes used.
+   */
   public long getUsed() {
     return used;
   }
 
-  /** Return the number of remaining bytes on the file system */
+  /**
+   * Return the number of remaining bytes on the file system.
+   * @return the number of bytes remaining.
+   */
   public long getRemaining() {
     return remaining;
   }
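
    [Editorial sketch: an FsStatus is usually obtained from
    FileSystem#getStatus(); this computes percent used.]

        FileSystem fs = FileSystem.get(new Configuration());
        FsStatus status = fs.getStatus();
        long pct = 100L * status.getUsed() / Math.max(1L, status.getCapacity());
        System.out.println(pct + "% of " + status.getCapacity() + " bytes used");
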
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
index cb430ed3f62..c87444c6c87 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
@@ -56,9 +56,9 @@ public class GlobExpander {
    * {a,b}/{c/\d}        - {a,b}/c/d
    * </pre>
    * 
-   * @param filePattern
+   * @param filePattern file pattern.
    * @return expanded file patterns
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static List<String> expand(String filePattern) throws IOException {
     List<String> fullyExpanded = new ArrayList<String>();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
index 30ce07a422e..d9433903444 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
@@ -104,6 +104,8 @@ public enum GlobalStorageStatistics {
   /**
    * Get an iterator that we can use to iterate throw all the global storage
    * statistics objects.
+   *
+   * @return an iterator over the StorageStatistics objects.
    */
   synchronized public Iterator<StorageStatistics> iterator() {
     Entry<String, StorageStatistics> first = map.firstEntry();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 7e12d0a11e9..1d64b0bcbe9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -463,7 +463,7 @@ public class HarFileSystem extends FileSystem {
    * @param start the start of the desired range in the contained file
    * @param len the length of the desired range
    * @return block locations for this segment of file
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
@@ -525,7 +525,7 @@ public class HarFileSystem extends FileSystem {
    * Combine the status stored in the index and the underlying status. 
    * @param h status stored in the index
    * @return the combined file status
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   private FileStatus toFileStatus(HarStatus h) throws IOException {
     final Path p = h.isDir ? archivePath : new Path(archivePath, h.partName);
@@ -635,7 +635,7 @@ public class HarFileSystem extends FileSystem {
    * while creating a hadoop archive.
    * @param f the path in har filesystem
    * @return filestatus.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public FileStatus getFileStatus(Path f) throws IOException {
@@ -1104,7 +1104,7 @@ public class HarFileSystem extends FileSystem {
      * @param start the start position in the part file
      * @param length the length of valid data in the part file
      * @param bufsize the buffer size
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public HarFSDataInputStream(FileSystem fs, Path  p, long start, 
         long length, int bufsize) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
index 855fbb04e59..1624c5d395a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
@@ -156,6 +156,7 @@ public class HardLink {
    * Creates a hardlink.
    * @param file - existing source file
    * @param linkName - desired target link file
+   * @throws IOException raised on errors performing I/O.
    */
   public static void createHardLink(File file, File linkName)
       throws IOException {
@@ -177,6 +178,7 @@ public class HardLink {
    * @param fileBaseNames - list of path-less file names, as returned by 
    *                        parentDir.list()
    * @param linkDir - where the hardlinks should be put. It must already exist.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
       File linkDir) throws IOException {
@@ -204,6 +206,10 @@ public class HardLink {
 
    /**
    * Retrieves the number of links to the specified file.
+    *
+    * @param fileName file name.
+    * @throws IOException raised on errors performing I/O.
+    * @return link count.
    */
   public static int getLinkCount(File fileName) throws IOException {
     if (fileName == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
index bcf325ceca5..a0e89d6aeac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
@@ -33,7 +33,7 @@ public interface HasFileDescriptor {
 
   /**
    * @return the FileDescriptor
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public FileDescriptor getFileDescriptor() throws IOException;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 5f266a7b825..f6c9d3c7cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -78,8 +78,9 @@ public class LocalDirAllocator {
 
   private final DiskValidator diskValidator;
 
-  /**Create an allocator object
-   * @param contextCfgItemName
+  /**
+   * Create an allocator object.
+   * @param contextCfgItemName the configuration item naming the local dirs.
    */
   public LocalDirAllocator(String contextCfgItemName) {
     this.contextCfgItemName = contextCfgItemName;
@@ -123,7 +124,7 @@ public class LocalDirAllocator {
    *  available disk)
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, 
       Configuration conf) throws IOException {
@@ -139,7 +140,7 @@ public class LocalDirAllocator {
    *  @param size the size of the file that is going to be written
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, long size, 
       Configuration conf) throws IOException {
@@ -156,7 +157,7 @@ public class LocalDirAllocator {
    *  @param conf the Configuration object
    *  @param checkWrite ensure that the path is writable
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathForWrite(String pathStr, long size, 
                                    Configuration conf,
@@ -171,7 +172,7 @@ public class LocalDirAllocator {
    *  @param pathStr the requested file (this will be searched)
    *  @param conf the Configuration object
    *  @return the complete path to the file on a local disk
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public Path getLocalPathToRead(String pathStr, 
       Configuration conf) throws IOException {
@@ -184,7 +185,7 @@ public class LocalDirAllocator {
    * @param pathStr the path underneath the roots
    * @param conf the configuration to look up the roots in
    * @return all of the paths that exist under any of the roots
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Iterable<Path> getAllLocalPathsToRead(String pathStr, 
                                                Configuration conf
@@ -205,7 +206,7 @@ public class LocalDirAllocator {
    *  @param size the size of the file that is going to be written
    *  @param conf the Configuration object
    *  @return a unique temporary file
-   *  @throws IOException
+   *  @throws IOException raised on errors performing I/O.
    */
   public File createTmpFileForWrite(String pathStr, long size, 
       Configuration conf) throws IOException {
@@ -213,8 +214,9 @@ public class LocalDirAllocator {
     return context.createTmpFileForWrite(pathStr, size, conf);
   }
   
-  /** Method to check whether a context is valid
-   * @param contextCfgItemName
+  /**
+   * Method to check whether a context is valid.
+   * @param contextCfgItemName the configuration item naming the local dirs.
    * @return true/false
    */
   public static boolean isContextValid(String contextCfgItemName) {
@@ -224,9 +226,9 @@ public class LocalDirAllocator {
   }
   
   /**
-   * Removes the context from the context config items
+   * Removes the context from the context config items.
    * 
-   * @param contextCfgItemName
+   * @param contextCfgItemName the configuration item naming the local dirs.
    */
   @Deprecated
   @InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -236,8 +238,9 @@ public class LocalDirAllocator {
     }
   }
     
-  /** We search through all the configured dirs for the file's existence
-   *  and return true when we find  
+  /**
+   *  We search through all the configured dirs for the file's existence
+   *  and return true when we find it.
    *  @param pathStr the requested file (this will be searched)
    *  @param conf the Configuration object
    *  @return true if files exist. false otherwise
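
    [Editorial sketch of allocator usage; the configuration key here is
    hypothetical and must name a comma-separated list of local directories.]

        Configuration conf = new Configuration();
        LocalDirAllocator alloc = new LocalDirAllocator("my.app.local.dirs");
        // Picks a configured local directory with enough free space.
        Path scratch = alloc.getLocalPathForWrite("spill-0.tmp", 1 << 20, conf);
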
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
index c41190a7b36..590cbd9a49e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
@@ -71,7 +71,11 @@ public class LocalFileSystem extends ChecksumFileSystem {
     super(rawLocalFileSystem);
   }
     
-  /** Convert a path to a File. */
+  /**
+   * Convert a path to a File.
+   * @param path the path.
+   * @return the local File corresponding to the path.
+   */
   public File pathToFile(Path path) {
     return ((RawLocalFileSystem)fs).pathToFile(path);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
index 5a4a6a97cc4..354e4a6b465 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
@@ -28,7 +28,13 @@ public class MD5MD5CRC32CastagnoliFileChecksum extends MD5MD5CRC32FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC number of bytes per CRC.
+   * @param crcPerBlock number of CRCs per block.
+   * @param md5 the MD5 digest.
+   */
   public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     super(bytesPerCRC, crcPerBlock, md5);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
index 3fdb7e98262..c5ac381f782 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
@@ -44,7 +44,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC number of bytes per CRC.
+   * @param crcPerBlock number of CRCs per block.
+   * @param md5 the MD5 digest.
+   */
   public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     this.bytesPerCRC = bytesPerCRC;
     this.crcPerBlock = crcPerBlock;
@@ -76,7 +82,10 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
     return WritableUtils.toByteArray(this);
   }
 
-  /** returns the CRC type */
+  /**
+   * Returns the CRC type.
+   * @return the data checksum type.
+   */
   public DataChecksum.Type getCrcType() {
     // default to the one that is understood by all releases.
     return DataChecksum.Type.CRC32;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
index 5164d0200d2..f7996c86237 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
@@ -28,7 +28,13 @@ public class MD5MD5CRC32GzipFileChecksum extends MD5MD5CRC32FileChecksum {
     this(0, 0, null);
   }
 
-  /** Create a MD5FileChecksum */
+  /**
+   * Create a MD5FileChecksum.
+   *
+   * @param bytesPerCRC number of bytes per CRC.
+   * @param crcPerBlock number of CRCs per block.
+   * @param md5 the MD5 digest.
+   */
   public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
     super(bytesPerCRC, crcPerBlock, md5);
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
index dcb76b50b34..5e4eda26c7f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
@@ -31,10 +31,11 @@ import org.apache.hadoop.fs.statistics.IOStatisticsSource;
 /**
  * MultipartUploader is an interface for copying files multipart and across
  * multiple nodes.
- * <p></p>
+ * <p>
  * The interface extends {@link IOStatisticsSource} so that there is no
  * need to cast an instance to see if is a source of statistics.
  * However, implementations MAY return null for their actual statistics.
+ * </p>
  */
 @InterfaceAudience.Public
 @InterfaceStability.Unstable
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
index 381bfaa07f6..e7b0865063e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
@@ -25,34 +25,43 @@ import org.apache.hadoop.fs.permission.FsPermission;
 
 /**
  * Builder interface for Multipart readers.
- * @param <S>
- * @param <B>
+ * @param <S> MultipartUploader Generic Type.
+ * @param <B> MultipartUploaderBuilder Generic Type.
  */
 public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends MultipartUploaderBuilder<S, B>>
     extends FSBuilder<S, B> {
 
   /**
    * Set permission for the file.
+   * @param perm permission.
+   * @return generic type B.
    */
   B permission(@Nonnull FsPermission perm);
 
   /**
    * Set the size of the buffer to be used.
+   * @param bufSize buffer size.
+   * @return generic type B.
    */
   B bufferSize(int bufSize);
 
   /**
    * Set replication factor.
+   * @param replica replication factor.
+   * @return generic type B.
    */
   B replication(short replica);
 
   /**
    * Set block size.
+   * @param blkSize block size.
+   * @return generic type B.
    */
   B blockSize(long blkSize);
 
   /**
    * Create an FSDataOutputStream at the specified path.
+   * @return generic type B.
    */
   B create();
 
@@ -60,16 +69,21 @@ public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends
    * Set to true to overwrite the existing file.
    * Set it to false, an exception will be thrown when calling {@link #build()}
    * if the file exists.
+   * @param overwrite whether to overwrite the existing file.
+   * @return generic type B.
    */
   B overwrite(boolean overwrite);
 
   /**
    * Append to an existing file (optional operation).
+   * @return generic type B.
    */
   B append();
 
   /**
    * Set checksum opt.
+   * @param chksumOpt checksum option.
+   * @return generic type B.
    */
   B checksumOpt(@Nonnull Options.ChecksumOpt chksumOpt);
 
@@ -78,6 +92,7 @@ public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends
    *
    * @throws IllegalArgumentException if the parameters are not valid.
    * @throws IOException on errors when file system creates or appends the file.
+   * @return generic type S.
    */
   S build() throws IllegalArgumentException, IOException;
 }
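
    [Editorial sketch of the builder flow. It assumes a FileSystem whose
    createMultipartUploader(Path) hands back one of these builders, which not
    all filesystems support; fs is assumed to be in scope.]

        MultipartUploader uploader =
            fs.createMultipartUploader(new Path("/uploads"))
                .permission(FsPermission.getFileDefault())
                .blockSize(128L * 1024 * 1024)
                .build();
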
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
index 9b457272fcb..9ef7de657dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Options.java
@@ -280,7 +280,9 @@ public final class Options {
     }
 
     /**
-     * Create a ChecksumOpts that disables checksum
+     * Create a ChecksumOpts that disables checksum.
+     *
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt createDisabled() {
       return new ChecksumOpt(DataChecksum.Type.NULL, -1);
@@ -295,6 +297,7 @@ public final class Options {
      * @param userOpt User-specified checksum option. Ignored if null.
      * @param userBytesPerChecksum User-specified bytesPerChecksum
      *                Ignored if {@literal <} 0.
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt, 
         ChecksumOpt userOpt, int userBytesPerChecksum) {
@@ -330,6 +333,8 @@ public final class Options {
      *
      * @param defaultOpt Default checksum option
      * @param userOpt User-specified checksum option
+     *
+     * @return ChecksumOpt.
      */
     public static ChecksumOpt processChecksumOpt(ChecksumOpt defaultOpt,
         ChecksumOpt userOpt) {
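
    [Editorial sketch of option resolution; per the javadoc above, a null
    userOpt (or a negative bytes-per-checksum) falls back to the defaults.
    DataChecksum is org.apache.hadoop.util.DataChecksum.]

        Options.ChecksumOpt defaults =
            new Options.ChecksumOpt(DataChecksum.Type.CRC32C, 512);
        Options.ChecksumOpt resolved =
            Options.ChecksumOpt.processChecksumOpt(defaults, null);
        // resolved now carries CRC32C with 512 bytes per checksum.
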
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
index b00a31891c8..b0103562123 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
@@ -105,7 +105,9 @@ public class QuotaUsage {
   // Make it protected for the deprecated ContentSummary constructor.
   protected QuotaUsage() { }
 
-  /** Build the instance based on the builder. */
+  /** Build the instance based on the builder.
+   * @param builder the builder.
+   */
   protected QuotaUsage(Builder builder) {
     this.fileAndDirectoryCount = builder.fileAndDirectoryCount;
     this.quota = builder.quota;
@@ -127,37 +129,67 @@ public class QuotaUsage {
     this.spaceQuota = spaceQuota;
   }
 
-  /** Return the directory count. */
+  /**
+   * Return the directory count.
+   *
+   * @return file and directory count.
+   */
   public long getFileAndDirectoryCount() {
     return fileAndDirectoryCount;
   }
 
-  /** Return the directory quota. */
+  /**
+   * Return the directory quota.
+   *
+   * @return quota.
+   */
   public long getQuota() {
     return quota;
   }
 
-  /** Return (disk) space consumed. */
+  /**
+   * Return (disk) space consumed.
+   *
+   * @return space consumed.
+   */
   public long getSpaceConsumed() {
     return spaceConsumed;
   }
 
-  /** Return (disk) space quota. */
+  /**
+   * Return (disk) space quota.
+   *
+   * @return space quota.
+   */
   public long getSpaceQuota() {
     return spaceQuota;
   }
 
-  /** Return storage type quota. */
+  /**
+   * Return storage type quota.
+   *
+   * @param type storage type.
+   * @return type quota.
+   */
   public long getTypeQuota(StorageType type) {
     return (typeQuota != null) ? typeQuota[type.ordinal()] : -1L;
   }
 
-  /** Return storage type consumed. */
+  /**
+   * Return storage type consumed.
+   *
+   * @param type storage type.
+   * @return type consumed.
+   */
   public long getTypeConsumed(StorageType type) {
     return (typeConsumed != null) ? typeConsumed[type.ordinal()] : 0L;
   }
 
-  /** Return true if any storage type quota has been set. */
+  /**
+   * Return true if any storage type quota has been set.
+   *
+   * @return true if any storage type quota has been set, else false.
+   */
   public boolean isTypeQuotaSet() {
     if (typeQuota != null) {
       for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -169,7 +201,12 @@ public class QuotaUsage {
     return false;
   }
 
-  /** Return true if any storage type consumption information is available. */
+  /**
+   * Return true if any storage type consumption information is available.
+   *
+   * @return true if any storage type consumption information
+   * is available, else false.
+   */
   public boolean isTypeConsumedAvailable() {
     if (typeConsumed != null) {
       for (StorageType t : StorageType.getTypesSupportingQuota()) {
@@ -271,12 +308,15 @@ public class QuotaUsage {
     return toString(hOption, false, null);
   }
 
-  /** Return the string representation of the object in the output format.
+  /**
+   * Return the string representation of the object in the output format.
    * if hOption is false file sizes are returned in bytes
    * if hOption is true file sizes are returned in human readable
    *
    * @param hOption a flag indicating if human readable output if to be used
-   * @return the string representation of the object
+   * @param tOption a flag indicating if storage type information is to be shown.
+   * @param types the storage types to include in the output.
+   * @return the string representation of the object.
    */
   public String toString(boolean hOption,
       boolean tOption, List<StorageType> types) {
@@ -328,7 +368,7 @@ public class QuotaUsage {
   /**
    * return the header of with the StorageTypes.
    *
-   * @param storageTypes
+   * @param storageTypes the storage types to include in the header.
    * @return storage header string
    */
   public static String getStorageTypeHeader(List<StorageType> storageTypes) {
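
    [Editorial sketch: a QuotaUsage is typically obtained via
    FileSystem#getQuotaUsage(Path). The path is illustrative, and a negative
    quota conventionally means none is set.]

        FileSystem fs = FileSystem.get(new Configuration());
        QuotaUsage usage = fs.getQuotaUsage(new Path("/user/alice"));
        if (usage.getQuota() >= 0) {
          System.out.println(usage.getFileAndDirectoryCount()
              + " of " + usage.getQuota() + " namespace objects used");
        }
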
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index edcc4a8b99e..468b37a885d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -100,7 +100,12 @@ public class RawLocalFileSystem extends FileSystem {
     }
   }
   
-  /** Convert a path to a File. */
+  /**
+   * Convert a path to a File.
+   *
+   * @param path the path.
+   * @return the local File corresponding to the path.
+   */
   public File pathToFile(Path path) {
     checkPath(path);
     if (!path.isAbsolute()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
index 919c857ffa6..f7546d58e60 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
@@ -32,17 +32,27 @@ public interface Seekable {
    * Seek to the given offset from the start of the file.
    * The next read() will be from that location.  Can't
    * seek past the end of the file.
+   *
+   * @param pos offset from the start of the file.
+   * @throws IOException raised on errors performing I/O.
    */
   void seek(long pos) throws IOException;
-  
+
   /**
    * Return the current offset from the start of the file
+   *
+   * @return offset from the start of the file.
+   * @throws IOException raised on errors performing I/O.
    */
   long getPos() throws IOException;
 
   /**
-   * Seeks a different copy of the data.  Returns true if 
+   * Seeks a different copy of the data.  Returns true if
    * found a new source, false otherwise.
+   *
+   * @param targetPos target position.
+   * @return true if found a new source, false otherwise.
+   * @throws IOException raised on errors performing I/O.
    */
   @InterfaceAudience.Private
   boolean seekToNewSource(long targetPos) throws IOException;
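
To illustrate the contract above, here is a minimal sketch of a Seekable
implementation backed by a local RandomAccessFile; "LocalSeekable" is a
hypothetical class, not part of this patch:

    import java.io.IOException;
    import java.io.RandomAccessFile;
    import org.apache.hadoop.fs.Seekable;

    class LocalSeekable implements Seekable {
      private final RandomAccessFile file;

      LocalSeekable(RandomAccessFile file) {
        this.file = file;
      }

      @Override
      public void seek(long pos) throws IOException {
        if (pos < 0 || pos > file.length()) {
          throw new IOException("cannot seek past end of file: " + pos);
        }
        file.seek(pos);
      }

      @Override
      public long getPos() throws IOException {
        return file.getFilePointer();
      }

      @Override
      public boolean seekToNewSource(long targetPos) throws IOException {
        return false; // a local file has no alternate copies of the data
      }
    }
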
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
index 07f05132900..72a45309b17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
@@ -73,8 +73,8 @@ public class Stat extends Shell {
   }
 
   /**
-   * Whether Stat is supported on the current platform
-   * @return
+   * Whether Stat is supported on the current platform.
+   * @return true if Stat is available on this platform, false otherwise.
    */
   public static boolean isAvailable() {
     if (Shell.LINUX || Shell.FREEBSD || Shell.MAC) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
index 2efe4566344..b4a86ab7812 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
@@ -127,6 +127,7 @@ public abstract class StorageStatistics {
 
   /**
    * Get the name of this StorageStatistics object.
+   * @return name of this StorageStatistics object
    */
   public String getName() {
     return name;
@@ -145,12 +146,15 @@ public abstract class StorageStatistics {
    *
    * The values returned will depend on the type of FileSystem or FileContext
    * object.  The values do not necessarily reflect a snapshot in time.
+   *
+   * @return LongStatistic Iterator.
    */
   public abstract Iterator<LongStatistic> getLongStatistics();
 
   /**
    * Get the value of a statistic.
    *
+   * @param key key.
    * @return         null if the statistic is not being tracked or is not a
    *                 long statistic. The value of the statistic, otherwise.
    */
@@ -159,6 +163,7 @@ public abstract class StorageStatistics {
   /**
    * Return true if a statistic is being tracked.
    *
+   * @param key key.
    * @return         True only if the statistic is being tracked.
    */
   public abstract boolean isTracked(String key);
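
A minimal sketch of walking these statistics, assuming an initialized
FileSystem "fs" and an import of java.util.Iterator (key names vary by
implementation; "bytesRead" is only an example):

    StorageStatistics stats = fs.getStorageStatistics();
    Iterator<StorageStatistics.LongStatistic> it = stats.getLongStatistics();
    while (it.hasNext()) {
      StorageStatistics.LongStatistic s = it.next();
      System.out.println(stats.getName() + ": " + s.getName()
          + " = " + s.getValue());
    }
    if (stats.isTracked("bytesRead")) {
      System.out.println("bytesRead = " + stats.getLong("bytesRead"));
    }
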
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index e29cb9a4e0e..a58a1a3cb8e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -43,6 +43,7 @@ public class Trash extends Configured {
   /** 
    * Construct a trash can accessor.
    * @param conf a Configuration
+   * @throws IOException raised on errors performing I/O.
    */
   public Trash(Configuration conf) throws IOException {
     this(FileSystem.get(conf), conf);
@@ -52,6 +53,7 @@ public class Trash extends Configured {
    * Construct a trash can accessor for the FileSystem provided.
    * @param fs the FileSystem
    * @param conf a Configuration
+   * @throws IOException raised on errors performing I/O.
    */
   public Trash(FileSystem fs, Configuration conf) throws IOException {
     super(conf);
@@ -97,47 +99,74 @@ public class Trash extends Configured {
   }
   
   /**
-   * Returns whether the trash is enabled for this filesystem
+   * Returns whether the trash is enabled for this filesystem.
+   *
+   * @return true if trash is enabled, false otherwise.
    */
   public boolean isEnabled() {
     return trashPolicy.isEnabled();
   }
 
   /** Move a file or directory to the current trash directory.
+   *
+   * @param path the path.
    * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException raised on errors performing I/O.
    */ 
   public boolean moveToTrash(Path path) throws IOException {
     return trashPolicy.moveToTrash(path);
   }
 
-  /** Create a trash checkpoint. */
+  /**
+   * Create a trash checkpoint.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void checkpoint() throws IOException {
     trashPolicy.createCheckpoint();
   }
 
-  /** Delete old checkpoint(s). */
+  /**
+   * Delete old checkpoint(s).
+   * @throws IOException raised on errors performing I/O.
+   */
   public void expunge() throws IOException {
     trashPolicy.deleteCheckpoint();
   }
 
-  /** Delete all trash immediately. */
+  /**
+   * Delete all trash immediately.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void expungeImmediately() throws IOException {
     trashPolicy.createCheckpoint();
     trashPolicy.deleteCheckpointsImmediately();
   }
 
-  /** get the current working directory */
+  /**
+   * Get the current trash directory.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return the current trash directory.
+   */
   Path getCurrentTrashDir() throws IOException {
     return trashPolicy.getCurrentTrashDir();
   }
 
-  /** get the configured trash policy */
+  /**
+   * Get the configured trash policy.
+   *
+   * @return TrashPolicy.
+   */
   TrashPolicy getTrashPolicy() {
     return trashPolicy;
   }
 
-  /** Return a {@link Runnable} that periodically empties the trash of all
+  /**
+   * Return a {@link Runnable} that periodically empties the trash of all
    * users, intended to be run by the superuser.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return a Runnable that periodically empties the trash.
    */
   public Runnable getEmptier() throws IOException {
     return trashPolicy.getEmptier();
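
Typical use of the accessor, as a sketch (assumes an initialized Configuration
"conf" and a Path "path"; falls back to direct deletion when trash is
disabled):

    FileSystem fs = FileSystem.get(conf);
    Trash trash = new Trash(fs, conf);
    if (trash.isEnabled()) {
      boolean moved = trash.moveToTrash(path); // false if already in trash
      System.out.println("moved to trash: " + moved);
    } else {
      fs.delete(path, true); // no trash interval configured: delete directly
    }
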
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index 64fb81be99e..35e51f9e1cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -60,27 +60,34 @@ public abstract class TrashPolicy extends Configured {
 
   /**
    * Returns whether the Trash Policy is enabled for this filesystem.
+   *
+   * @return true if trash is enabled, false otherwise.
    */
   public abstract boolean isEnabled();
 
   /** 
    * Move a file or directory to the current trash directory.
+   * @param path the path.
    * @return false if the item is already in the trash or trash is disabled
+   * @throws IOException raised on errors performing I/O.
    */ 
   public abstract boolean moveToTrash(Path path) throws IOException;
 
   /** 
-   * Create a trash checkpoint. 
+   * Create a trash checkpoint.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void createCheckpoint() throws IOException;
 
   /** 
    * Delete old trash checkpoint(s).
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteCheckpoint() throws IOException;
 
   /**
    * Delete all checkpoints immediately, ie empty trash.
+   * @throws IOException raised on errors performing I/O.
    */
   public abstract void deleteCheckpointsImmediately() throws IOException;
 
@@ -94,6 +101,8 @@ public abstract class TrashPolicy extends Configured {
    * TrashPolicy#getCurrentTrashDir(Path path).
    * It returns the trash location correctly for the path specified no matter
    * the path is in encryption zone or not.
+   *
+   * @return the current trash directory.
    */
   public abstract Path getCurrentTrashDir();
 
@@ -102,7 +111,7 @@ public abstract class TrashPolicy extends Configured {
    * Policy
    * @param path path to be deleted
    * @return current trash directory for the path to be deleted
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public Path getCurrentTrashDir(Path path) throws IOException {
     throw new UnsupportedOperationException();
@@ -111,6 +120,9 @@ public abstract class TrashPolicy extends Configured {
   /** 
    * Return a {@link Runnable} that periodically empties the trash of all
    * users, intended to be run by the superuser.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @return a Runnable that periodically empties the trash.
    */
   public abstract Runnable getEmptier() throws IOException;
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
index 3d65275e673..df878d99870 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
@@ -67,7 +67,7 @@ public enum XAttrCodec {
    * the given string is treated as text. 
    * @param value string representation of the value.
    * @return byte[] the value
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static byte[] decodeValue(String value) throws IOException {
     byte[] result = null;
@@ -102,9 +102,9 @@ public enum XAttrCodec {
    * while strings encoded as hexadecimal and base64 are prefixed with 
    * 0x and 0s, respectively.
    * @param value byte[] value
-   * @param encoding
+   * @param encoding encoding.
    * @return String string representation of value
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static String encodeValue(byte[] value, XAttrCodec encoding) 
       throws IOException {
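
A sketch of the round trip described by these two methods (values are
illustrative; both calls throw IOException on malformed input):

    byte[] raw = XAttrCodec.decodeValue("0x4869");               // hex-prefixed
    String hex = XAttrCodec.encodeValue(raw, XAttrCodec.HEX);    // 0x-prefixed
    String b64 = XAttrCodec.encodeValue(raw, XAttrCodec.BASE64); // 0s-prefixed
    String txt = XAttrCodec.encodeValue(raw, XAttrCodec.TEXT);   // quoted text
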
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
index 9d3a46d6332..4256522b2a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
@@ -340,12 +340,14 @@ public abstract class
 
   /**
    * Get all the keys that are set as mandatory keys.
+   * @return mandatory keys.
    */
   public Set<String> getMandatoryKeys() {
     return Collections.unmodifiableSet(mandatoryKeys);
   }
   /**
    * Get all the keys that are set as optional keys.
+   * @return optional keys.
    */
   public Set<String> getOptionalKeys() {
     return Collections.unmodifiableSet(optionalKeys);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
index 416924e18d8..f9ae9f55cc1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
@@ -127,7 +127,7 @@ public abstract class AbstractMultipartUploader implements MultipartUploader {
    * {@inheritDoc}.
    * @param path path to abort uploads under.
    * @return a future to -1.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public CompletableFuture<Integer> abortUploadsUnderPath(Path path)
       throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
index 70e39de7388..833c21ec1a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
@@ -126,6 +126,9 @@ public abstract class FutureDataInputStreamBuilderImpl
 
   /**
    * Set the size of the buffer to be used.
+   *
+   * @param bufSize buffer size.
+   * @return FutureDataInputStreamBuilder.
    */
   public FutureDataInputStreamBuilder bufferSize(int bufSize) {
     bufferSize = bufSize;
@@ -137,6 +140,8 @@ public abstract class FutureDataInputStreamBuilderImpl
    * This must be used after the constructor has been invoked to create
    * the actual builder: it allows for subclasses to do things after
    * construction.
+   *
+   * @return FutureDataInputStreamBuilder.
    */
   public FutureDataInputStreamBuilder builder() {
     return getThisBuilder();
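
The builder is normally obtained from FileSystem.openFile(); a minimal sketch
of the flow, assuming an initialized FileSystem "fs", a Path "path", and the
usual imports (java.util.concurrent.CompletableFuture,
org.apache.hadoop.fs.FSDataInputStream,
org.apache.hadoop.util.functional.FutureIO):

    CompletableFuture<FSDataInputStream> future = fs.openFile(path).build();
    try (FSDataInputStream in = FutureIO.awaitFuture(future)) {
      byte[] buf = new byte[4096];
      int bytesRead = in.read(buf); // sequential read from the opened stream
    }
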
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
index f47e5f4fbfb..0a080426c2b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
@@ -75,6 +75,8 @@ public final class FutureIOSupport {
    * See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}.
    * @param future future to evaluate
    * @param <T> type of the result.
+   * @param timeout wait timeout.
+   * @param unit time unit of the timeout.
    * @return the result, if all went well.
    * @throws InterruptedIOException future was interrupted
    * @throws IOException if something went wrong
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
index 5584e647849..665bcc6a956 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
@@ -88,6 +88,9 @@ public abstract class MultipartUploaderBuilderImpl
 
   /**
    * Constructor.
+   *
+   * @param fileSystem fileSystem.
+   * @param p path.
    */
   protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem,
       @Nonnull Path p) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
index 25b9ba65904..260ee7e570c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
@@ -185,7 +185,8 @@ public class AclStatus {
 
     /**
      * Sets the permission for the file.
-     * @param permission
+     * @param permission permission.
+     * @return Builder.
      */
     public Builder setPermission(FsPermission permission) {
       this.permission = permission;
@@ -224,6 +225,7 @@ public class AclStatus {
   /**
    * Get the effective permission for the AclEntry
    * @param entry AclEntry to get the effective action
+   * @return FsAction.
    */
   public FsAction getEffectivePermission(AclEntry entry) {
     return getEffectivePermission(entry, permission);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
index 97dcf816c16..746e0e1e238 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
@@ -48,7 +48,8 @@ public enum FsAction {
 
   /**
    * Return true if this action implies that action.
-   * @param that
+   * @param that the other FsAction.
+   * @return true if this action implies that action, false otherwise.
    */
   public boolean implies(FsAction that) {
     if (that != null) {
@@ -57,15 +58,26 @@ public enum FsAction {
     return false;
   }
 
-  /** AND operation. */
+  /**
+   * AND operation.
+   * @param that the other FsAction.
+   * @return the AND of the two actions.
+   */
   public FsAction and(FsAction that) {
     return vals[ordinal() & that.ordinal()];
   }
-  /** OR operation. */
+  /**
+   * OR operation.
+   * @param that the other FsAction.
+   * @return the OR of the two actions.
+   */
   public FsAction or(FsAction that) {
     return vals[ordinal() | that.ordinal()];
   }
-  /** NOT operation. */
+  /**
+   * NOT operation.
+   * @return the complement of this action.
+   */
   public FsAction not() {
     return vals[7 - ordinal()];
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index 2bd6f1f3b91..ff3b4f6d65a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -35,7 +35,10 @@ public final class FsCreateModes extends FsPermission {
   /**
    * Create from unmasked mode and umask.
    *
-   * If the mode is already an FsCreateModes object, return it.
+   * @param mode the unmasked mode.
+   * @param umask the umask to apply.
+   * @return the masked mode; if the mode is already
+   * an FsCreateModes object, it is returned as-is.
    */
   public static FsPermission applyUMask(FsPermission mode,
                                         FsPermission umask) {
@@ -47,6 +50,10 @@ public final class FsCreateModes extends FsPermission {
 
   /**
    * Create from masked and unmasked modes.
+   *
+   * @param masked the masked mode.
+   * @param unmasked the unmasked mode.
+   * @return an FsCreateModes holding both modes.
    */
   public static FsCreateModes create(FsPermission masked,
                                      FsPermission unmasked) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index 51c113af270..33fed1d3039 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -56,7 +56,11 @@ public class FsPermission implements Writable, Serializable,
   /** Maximum acceptable length of a permission string to parse */
   public static final int MAX_PERMISSION_LENGTH = 10;
 
-  /** Create an immutable {@link FsPermission} object. */
+  /**
+   * Create an immutable {@link FsPermission} object.
+   * @param permission permission.
+   * @return FsPermission.
+   */
   public static FsPermission createImmutable(short permission) {
     return new ImmutableFsPermission(permission);
   }
@@ -85,7 +89,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Construct by the given mode.
-   * @param mode
+   * @param mode mode.
    * @see #toShort()
    */
   public FsPermission(short mode) { fromShort(mode); }
@@ -145,13 +149,19 @@ public class FsPermission implements Writable, Serializable,
     this(new RawParser(mode).getPermission());
   }
 
-  /** Return user {@link FsAction}. */
+  /**
+   * @return the user {@link FsAction}.
+   */
   public FsAction getUserAction() {return useraction;}
 
-  /** Return group {@link FsAction}. */
+  /**
+   * @return the group {@link FsAction}.
+   */
   public FsAction getGroupAction() {return groupaction;}
 
-  /** Return other {@link FsAction}. */
+  /**
+   * @return the other {@link FsAction}.
+   */
   public FsAction getOtherAction() {return otheraction;}
 
   private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
@@ -180,6 +190,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get masked permission if exists.
+   * @return masked.
    */
   public FsPermission getMasked() {
     return null;
@@ -187,6 +198,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get unmasked permission if exists.
+   * @return unmasked.
    */
   public FsPermission getUnmasked() {
     return null;
@@ -194,6 +206,10 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Create and initialize a {@link FsPermission} from {@link DataInput}.
+   *
+   * @param in data input.
+   * @throws IOException raised on errors performing I/O.
+   * @return FsPermission.
    */
   public static FsPermission read(DataInput in) throws IOException {
     FsPermission p = new FsPermission();
@@ -203,6 +219,7 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Encode the object to a short.
+   * @return the object encoded as a short.
    */
   public short toShort() {
     int s =  (stickyBit ? 1 << 9 : 0)     |
@@ -301,6 +318,9 @@ public class FsPermission implements Writable, Serializable,
    * '-' sets bits in the mask.
    * 
    * Octal umask, the specified bits are set in the file mode creation mask.
+   *
+   * @param conf configuration.
+   * @return the umask as an FsPermission.
    */
   public static FsPermission getUMask(Configuration conf) {
     int umask = DEFAULT_UMASK;
@@ -346,7 +366,11 @@ public class FsPermission implements Writable, Serializable,
   }
 
   /**
-   * Returns true if the file is encrypted or directory is in an encryption zone
+   * Returns true if the file is encrypted or directory is in an encryption zone.
+   *
+   * @return true if the file is encrypted or the directory
+   * is in an encryption zone, false otherwise.
+   *
    * @deprecated Get encryption bit from the
    * {@link org.apache.hadoop.fs.FileStatus} object.
    */
@@ -357,6 +381,9 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Returns true if the file or directory is erasure coded.
+   *
+   * @return true if the file or directory
+   * is erasure coded, false otherwise.
    * @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus}
    * object.
    */
@@ -365,7 +392,11 @@ public class FsPermission implements Writable, Serializable,
     return false;
   }
 
-  /** Set the user file creation mask (umask) */
+  /**
+   * Set the user file creation mask (umask).
+   * @param conf configuration.
+   * @param umask umask.
+   */
   public static void setUMask(Configuration conf, FsPermission umask) {
     conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));
   }
@@ -379,6 +410,8 @@ public class FsPermission implements Writable, Serializable,
    * {@link FsPermission#getDirDefault()} for directory, and use
    * {@link FsPermission#getFileDefault()} for file.
    * This method is kept for compatibility.
+   *
+   * @return the default FsPermission.
    */
   public static FsPermission getDefault() {
     return new FsPermission((short)00777);
@@ -386,6 +419,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for directory.
+   *
+   * @return the default directory FsPermission.
    */
   public static FsPermission getDirDefault() {
     return new FsPermission((short)00777);
@@ -393,6 +428,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for file.
+   *
+   * @return the default file FsPermission.
    */
   public static FsPermission getFileDefault() {
     return new FsPermission((short)00666);
@@ -400,6 +437,8 @@ public class FsPermission implements Writable, Serializable,
 
   /**
    * Get the default permission for cache pools.
+   *
+   * @return the default cache pool FsPermission.
    */
   public static FsPermission getCachePoolDefault() {
     return new FsPermission((short)00755);
@@ -408,6 +447,7 @@ public class FsPermission implements Writable, Serializable,
   /**
    * Create a FsPermission from a Unix symbolic permission string
    * @param unixSymbolicPermission e.g. "-rw-rw-rw-"
+   * @return FsPermission.
    */
   public static FsPermission valueOf(String unixSymbolicPermission) {
     if (unixSymbolicPermission == null) {
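
A short sketch of the permission operations documented above (values are
illustrative; note the octal literal for the umask):

    FsPermission p = FsPermission.valueOf("-rw-rw-rw-");  // 0666
    FsPermission umask = new FsPermission((short) 022);   // octal 022
    FsPermission masked = p.applyUMask(umask);            // 0644
    System.out.println(masked.getUserAction());           // READ_WRITE
    System.out.println(masked.getGroupAction());          // READ
    System.out.println(String.format("%03o", masked.toShort())); // 644
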
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
index 3c3693f613b..be4beb506a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
@@ -39,7 +39,13 @@ public class PermissionStatus implements Writable {
     WritableFactories.setFactory(PermissionStatus.class, FACTORY);
   }
 
-  /** Create an immutable {@link PermissionStatus} object. */
+  /**
+   * Create an immutable {@link PermissionStatus} object.
+   * @param user user.
+   * @param group group.
+   * @param permission permission.
+   * @return PermissionStatus.
+   */
   public static PermissionStatus createImmutable(
       String user, String group, FsPermission permission) {
     return new PermissionStatus(user, group, permission) {
@@ -56,20 +62,35 @@ public class PermissionStatus implements Writable {
 
   private PermissionStatus() {}
 
-  /** Constructor */
+  /**
+   * Constructor.
+   *
+   * @param user user.
+   * @param group group.
+   * @param permission permission.
+   */
   public PermissionStatus(String user, String group, FsPermission permission) {
     username = user;
     groupname = group;
     this.permission = permission;
   }
 
-  /** Return user name */
+  /**
+   * Return user name.
+   * @return user name.
+   */
   public String getUserName() {return username;}
 
-  /** Return group name */
+  /**
+   * Return group name.
+   * @return group name.
+   */
   public String getGroupName() {return groupname;}
 
-  /** Return permission */
+  /**
+   * Return permission.
+   * @return FsPermission.
+   */
   public FsPermission getPermission() {return permission;}
 
   @Override
@@ -86,6 +107,9 @@ public class PermissionStatus implements Writable {
 
   /**
    * Create and initialize a {@link PermissionStatus} from {@link DataInput}.
+   * @param in data input.
+   * @throws IOException raised on errors performing I/O.
+   * @return PermissionStatus.
    */
   public static PermissionStatus read(DataInput in) throws IOException {
     PermissionStatus p = new PermissionStatus();
@@ -95,6 +119,11 @@ public class PermissionStatus implements Writable {
 
   /**
    * Serialize a {@link PermissionStatus} from its base components.
+   * @param out data output.
+   * @param username username.
+   * @param groupname groupname.
+   * @param permission FsPermission.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void write(DataOutput out,
                            String username, 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index 0bdb47730a9..7858238ee71 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -77,7 +77,11 @@ abstract public class Command extends Configured {
     err = System.err;
   }
   
-  /** Constructor */
+  /**
+   * Constructor.
+   *
+   * @param conf configuration.
+   */
   protected Command(Configuration conf) {
     super(conf);
   }
@@ -109,7 +113,7 @@ abstract public class Command extends Configured {
    * Execute the command on the input path data. Commands can override to make
    * use of the resolved filesystem.
    * @param pathData The input path with resolved filesystem
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void run(PathData pathData) throws IOException {
     run(pathData.path);
@@ -136,11 +140,19 @@ abstract public class Command extends Configured {
     return exitCode;
   }
 
-  /** sets the command factory for later use */
+  /**
+   * Sets the command factory for later use.
+   * @param factory factory.
+   */
   public void setCommandFactory(CommandFactory factory) {
     this.commandFactory = factory;
   }
-  /** retrieves the command factory */
+
+  /**
+   * Retrieves the command factory.
+   *
+   * @return command factory.
+   */
   protected CommandFactory getCommandFactory() {
     return this.commandFactory;
   }
@@ -201,7 +213,7 @@ abstract public class Command extends Configured {
    * IllegalArgumentException is thrown, the FsShell object will print the
    * short usage of the command.
    * @param args the command line arguments
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void processOptions(LinkedList<String> args) throws IOException {}
 
@@ -211,7 +223,7 @@ abstract public class Command extends Configured {
    * {@link #expandArguments(LinkedList)} and pass the resulting list to
    * {@link #processArguments(LinkedList)} 
    * @param args the list of argument strings
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected void processRawArguments(LinkedList<String> args)
   throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
index 678225f81e0..69a418c1925 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
@@ -119,6 +119,8 @@ abstract class CommandWithDestination extends FsCommand {
    * owner, group and permission information of the source
    * file will be preserved as far as target {@link FileSystem}
    * implementation allows.
+   *
+   * @param preserve preserve.
    */
   protected void setPreserve(boolean preserve) {
     if (preserve) {
@@ -175,6 +177,7 @@ abstract class CommandWithDestination extends FsCommand {
    *  The last arg is expected to be a local path, if only one argument is
    *  given then the destination will be the current directory 
    *  @param args is the list of arguments
+   * @throws IOException raised on errors performing I/O.
    */
   protected void getLocalDestination(LinkedList<String> args)
   throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index 2071a16799a..da99ac21256 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -610,10 +610,11 @@ public class PathData implements Comparable<PathData> {
 
   /**
    * Open a file for sequential IO.
-   * <p></p>
+   * <p>
    * This uses FileSystem.openFile() to request sequential IO;
    * the file status is also passed in.
    * Filesystems may use to optimize their IO.
+   * </p>
    * @return an input stream
    * @throws IOException failure
    */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index 5069d2d34e5..cd9bbe2bc88 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -38,12 +38,18 @@ public abstract class BaseExpression implements Expression, Configurable {
   private String[] usage = { "Not yet implemented" };
   private String[] help = { "Not yet implemented" };
 
-  /** Sets the usage text for this {@link Expression} */
+  /**
+   * Sets the usage text for this {@link Expression}.
+   * @param usage usage array.
+   */
   protected void setUsage(String[] usage) {
     this.usage = usage;
   }
 
-  /** Sets the help text for this {@link Expression} */
+  /**
+   * Sets the help text for this {@link Expression}.
+   * @param help help.
+   */
   protected void setHelp(String[] help) {
     this.help = help;
   }
@@ -92,7 +98,10 @@ public abstract class BaseExpression implements Expression, Configurable {
   /** Children of this expression. */
   private LinkedList<Expression> children = new LinkedList<Expression>();
 
-  /** Return the options to be used by this expression. */
+  /**
+   * Return the options to be used by this expression.
+   * @return options.
+   */
   protected FindOptions getOptions() {
     return (this.options == null) ? new FindOptions() : this.options;
   }
@@ -265,6 +274,7 @@ public abstract class BaseExpression implements Expression, Configurable {
    * @param depth
    *          current depth in the process directories
    * @return FileStatus
+   * @throws IOException raised on errors performing I/O.
    */
   protected FileStatus getFileStatus(PathData item, int depth)
       throws IOException {
@@ -285,6 +295,8 @@ public abstract class BaseExpression implements Expression, Configurable {
    * @param item
    *          PathData
    * @return Path
+   *
+   * @throws IOException raised on errors performing I/O.
    */
   protected Path getPath(PathData item) throws IOException {
     return item.path;
@@ -295,6 +307,7 @@ public abstract class BaseExpression implements Expression, Configurable {
    *
    * @param item PathData
    * @return FileSystem
+   * @throws IOException raised on errors performing I/O.
    */
   protected FileSystem getFileSystem(PathData item) throws IOException {
     return item.fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
index ccad631028c..353fe685cc9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
@@ -30,13 +30,15 @@ public interface Expression {
   /**
    * Set the options for this expression, called once before processing any
    * items.
+   * @param options options.
+   * @throws IOException raised on errors performing I/O.
    */
   public void setOptions(FindOptions options) throws IOException;
 
   /**
    * Prepares the expression for execution, called once after setting options
    * and before processing any options.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void prepare() throws IOException;
 
@@ -46,13 +48,14 @@ public interface Expression {
    * @param item {@link PathData} item to be processed
    * @param depth distance of the item from the command line argument
    * @return {@link Result} of applying the expression to the item
+   * @throws IOException raised on errors performing I/O.
    */
   public Result apply(PathData item, int depth) throws IOException;
 
   /**
    * Finishes the expression, called once after processing all items.
    *
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public void finish() throws IOException;
 
@@ -76,15 +79,21 @@ public interface Expression {
   /**
    * Indicates whether this expression performs an action, i.e. provides output
    * back to the user.
+   * @return true if this expression is an action, false otherwise.
    */
   public boolean isAction();
 
-  /** Identifies the expression as an operator rather than a primary. */
+  /**
+   * Identifies the expression as an operator rather than a primary.
+   * @return true if this expression is an operator, false otherwise.
+   */
   public boolean isOperator();
 
   /**
    * Returns the precedence of this expression
    * (only applicable to operators).
+   *
+   * @return precedence.
    */
   public int getPrecedence();
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
index b0f1be5c35c..c6051862305 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
@@ -264,6 +264,7 @@ public class FindOptions {
 
   /**
   * Return the {@link Configuration}.
+   * @return configuration.
    */
   public Configuration getConfiguration() {
     return this.configuration;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
index 2ef9cb4a801..a242681acd0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
@@ -35,23 +35,36 @@ public final class Result {
     this.descend = recurse;
   }
 
-  /** Should further directories be descended. */
+  /**
+   * Whether further directories should be descended.
+   * @return true if descending should continue, false otherwise.
+   */
   public boolean isDescend() {
     return this.descend;
   }
 
-  /** Should processing continue. */
+  /**
+   * Whether processing should continue.
+   * @return true if processing should continue, false otherwise.
+   */
   public boolean isPass() {
     return this.success;
   }
 
-  /** Returns the combination of this and another result. */
+  /**
+   * Returns the combination of this and another result.
+   * @param other the other result.
+   * @return the combined result.
+   */
   public Result combine(Result other) {
     return new Result(this.isPass() && other.isPass(), this.isDescend()
         && other.isDescend());
   }
 
-  /** Negate this result. */
+  /**
+   * Negate this result.
+   * @return the negated result.
+   */
   public Result negate() {
     return new Result(!this.isPass(), this.isDescend());
   }
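
A sketch of how the combinators behave, using the PASS/FAIL/STOP constants
this class defines:

    Result r = Result.PASS.combine(Result.FAIL);
    System.out.println(r.isPass());    // false: both operands must pass
    System.out.println(r.isDescend()); // true: both operands still descend
    Result stopped = Result.PASS.combine(Result.STOP);
    System.out.println(stopped.isDescend()); // false: STOP halts descent
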
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
index 63d37e97c98..88606eb4b30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
@@ -53,7 +53,7 @@ import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.snapshotM
  * deserialized. If for some reason this is required, use
  * {@link #requiredSerializationClasses()} to get the list of classes
  * used when deserializing instances of this object.
- * <p>
+ * </p>
  * <p>
  * It is annotated for correct serializations with jackson2.
  * </p>
@@ -238,6 +238,8 @@ public final class IOStatisticsSnapshot
   /**
    * Serialize by converting each map to a TreeMap, and saving that
    * to the stream.
+   * @param s ObjectOutputStream.
+   * @throws IOException raised on errors performing I/O.
    */
   private synchronized void writeObject(ObjectOutputStream s)
       throws IOException {
@@ -253,6 +255,10 @@ public final class IOStatisticsSnapshot
   /**
    * Deserialize by loading each TreeMap, and building concurrent
    * hash maps from them.
+   *
+   * @param s ObjectInputStream.
+   * @throws IOException raised on errors performing I/O.
+   * @throws ClassNotFoundException if a serialized class cannot be found.
    */
   private void readObject(final ObjectInputStream s)
       throws IOException, ClassNotFoundException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
index 75977047c0f..bb4d9a44587 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
@@ -71,6 +71,7 @@ public final class IOStatisticsSupport {
    * Returns null if the source isn't of the write type
    * or the return value of
    * {@link IOStatisticsSource#getIOStatistics()} was null.
+   * @param source source.
    * @return an IOStatistics instance or null
    */
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
index d9ff0c25c6a..369db496543 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
@@ -207,6 +207,7 @@ public final class MeanStatistic implements Serializable, Cloneable {
   /**
    * Add another MeanStatistic.
    * @param other other value
+   * @return mean statistic.
    */
   public synchronized MeanStatistic add(final MeanStatistic other) {
     if (other.isEmpty()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
index c45dfc21a1b..6a5d01fb3b0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
@@ -141,6 +141,7 @@ public final class IOStatisticsBinding {
   /**
    * Convert entry values to the string format used in logging.
    *
+   * @param <E> type of values.
    * @param name statistic name
    * @param value stat value
    * @return formatted string
@@ -178,6 +179,8 @@ public final class IOStatisticsBinding {
   /**
    * A passthrough copy operation suitable for immutable
    * types, including numbers.
+   *
+   * @param <E> type of values.
    * @param src source object
    * @return the source object
    */
@@ -437,6 +440,7 @@ public final class IOStatisticsBinding {
    * @param input input callable.
    * @param <B> return type.
    * @return the result of the operation.
+   * @throws IOException raised on errors performing I/O.
    */
   public static <B> B trackDuration(
       DurationTrackerFactory factory,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
index d9d3850ef4e..c70d0ee91e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
@@ -107,6 +107,7 @@ public final class DataBlocks {
    * @param len number of bytes to be written.
    * @throws NullPointerException      for a null buffer
    * @throws IndexOutOfBoundsException if indices are out of range
+   * @throws IOException raised on errors performing I/O.
    */
   public static void validateWriteArgs(byte[] b, int off, int len)
       throws IOException {
@@ -287,6 +288,7 @@ public final class DataBlocks {
      * @param limit      limit of the block.
      * @param statistics stats to work with
      * @return a new block.
+     * @throws IOException raised on errors performing I/O.
      */
     public abstract DataBlock create(long index, int limit,
         BlockUploadStatistics statistics)
@@ -482,6 +484,8 @@ public final class DataBlocks {
 
     /**
      * Inner close logic for subclasses to implement.
+     *
+     * @throws IOException raised on errors performing I/O.
      */
     protected void innerClose() throws IOException {
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
index acc82766be1..9ad727f5a2d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
@@ -86,6 +86,8 @@ public final class AuditingFunctions {
    * activates and deactivates the span around the inner one.
    * @param auditSpan audit span
    * @param operation operation
+   * @param <T> type of the function input.
+   * @param <R> type of the function result.
    * @return a new invocation.
    */
   public static <T, R> FunctionRaisingIOE<T, R> withinAuditSpan(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
index ead2a365f3a..c9ee5e232d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
@@ -48,7 +48,7 @@ public class ConfigUtil {
   /**
    * Add a link to the config for the specified mount table
    * @param conf - add the link to this conf
-   * @param mountTableName
+   * @param mountTableName mount table name.
    * @param src - the src path name
    * @param target - the target URI link
    */
@@ -71,9 +71,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMergeSlash to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param target
+   *
+   * @param conf configuration.
+   * @param mountTableName mount table name.
+   * @param target target URI.
    */
   public static void addLinkMergeSlash(Configuration conf,
       final String mountTableName, final URI target) {
@@ -83,8 +84,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMergeSlash to the config for the default mount table.
-   * @param conf
-   * @param target
+   *
+   * @param conf configuration.
+   * @param target target URI.
    */
   public static void addLinkMergeSlash(Configuration conf, final URI target) {
     addLinkMergeSlash(conf, getDefaultMountTableName(conf), target);
@@ -92,9 +94,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkFallback to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param target
+   *
+   * @param conf configuration.
+   * @param mountTableName mount table name.
+   * @param target target URI.
    */
   public static void addLinkFallback(Configuration conf,
       final String mountTableName, final URI target) {
@@ -104,8 +107,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkFallback to the config for the default mount table.
-   * @param conf
-   * @param target
+   *
+   * @param conf configuration.
+   * @param target target URI.
    */
   public static void addLinkFallback(Configuration conf, final URI target) {
     addLinkFallback(conf, getDefaultMountTableName(conf), target);
@@ -113,9 +117,10 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMerge to the config for the specified mount table.
-   * @param conf
-   * @param mountTableName
-   * @param targets
+   *
+   * @param conf configuration.
+   * @param mountTableName mount table name.
+   * @param targets targets.
    */
   public static void addLinkMerge(Configuration conf,
       final String mountTableName, final URI[] targets) {
@@ -125,8 +130,9 @@ public class ConfigUtil {
 
   /**
    * Add a LinkMerge to the config for the default mount table.
-   * @param conf
-   * @param targets
+   *
+   * @param conf configuration.
+   * @param targets targets array.
    */
   public static void addLinkMerge(Configuration conf, final URI[] targets) {
     addLinkMerge(conf, getDefaultMountTableName(conf), targets);
@@ -134,6 +140,12 @@ public class ConfigUtil {
 
   /**
    * Add nfly link to configuration for the given mount table.
+   *
+   * @param conf configuration.
+   * @param mountTableName mount table.
+   * @param src src.
+   * @param settings settings.
+   * @param targets targets.
    */
   public static void addLinkNfly(Configuration conf, String mountTableName,
       String src, String settings, final String targets) {
@@ -144,12 +156,13 @@ public class ConfigUtil {
   }
 
   /**
+   * Add nfly link to configuration for the given mount table.
    *
-   * @param conf
-   * @param mountTableName
-   * @param src
-   * @param settings
-   * @param targets
+   * @param conf configuration.
+   * @param mountTableName mount table.
+   * @param src src.
+   * @param settings settings.
+   * @param targets targets.
    */
   public static void addLinkNfly(Configuration conf, String mountTableName,
       String src, String settings, final URI ... targets) {
@@ -202,6 +215,7 @@ public class ConfigUtil {
    * Add config variable for homedir the specified mount table
    * @param conf - add to this conf
    * @param homedir - the home dir path starting with slash
+   * @param mountTableName - the mount table.
    */
   public static void setHomeDirConf(final Configuration conf,
               final String mountTableName, final String homedir) {
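
A minimal sketch of assembling a viewfs mount table with these helpers (URIs
and paths are illustrative; assumes imports of Configuration, FileSystem and
java.net.URI):

    Configuration conf = new Configuration();
    ConfigUtil.addLink(conf, "/data", URI.create("hdfs://nn1/data"));
    ConfigUtil.addLinkFallback(conf, URI.create("hdfs://nn1/"));
    FileSystem viewFs = FileSystem.get(URI.create("viewfs:///"), conf);
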
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
index c72baac25fb..f723f238e19 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
@@ -34,6 +34,10 @@ public class FsGetter {
 
   /**
    * Gets new file system instance of given uri.
+   * @param uri uri.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return file system.
    */
   public FileSystem getNewInstance(URI uri, Configuration conf)
       throws IOException {
@@ -42,6 +46,11 @@ public class FsGetter {
 
   /**
    * Gets file system instance of given uri.
+   *
+   * @param uri uri.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
+   * @return FileSystem.
    */
   public FileSystem get(URI uri, Configuration conf) throws IOException {
     return FileSystem.get(uri, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
index a90084ad8f4..5360d55e106 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
  * @param <T> is AbstractFileSystem or FileSystem
  *
  * The two main methods are
- * {@link #InodeTree(Configuration, String)} // constructor
+ * {@link #InodeTree(Configuration, String, URI, boolean)} // constructor
  * {@link #resolve(String, boolean)}
  */
 
@@ -325,8 +325,8 @@ public abstract class InodeTree<T> {
 
    * A merge dir link is  a merge (junction) of links to dirs:
    * example : merge of 2 dirs
-   *     /users -> hdfs:nn1//users
-   *     /users -> hdfs:nn2//users
+   *     /users -&gt; hdfs:nn1//users
+   *     /users -&gt; hdfs:nn2//users
    *
    * For a merge, each target is checked to be dir when created but if target
    * is changed later it is then ignored (a dir with null entries)
@@ -364,6 +364,8 @@ public abstract class InodeTree<T> {
     /**
      * Get the target of the link. If a merge link then it returned
      * as "," separated URI list.
+     *
+     * @return the target link path.
      */
     public Path getTargetLink() {
       StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
@@ -387,7 +389,7 @@ public abstract class InodeTree<T> {
     /**
      * Get the instance of FileSystem to use, creating one if needed.
      * @return An Initialized instance of T
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public T getTargetFileSystem() throws IOException {
       if (targetFileSystem != null) {
@@ -500,7 +502,7 @@ public abstract class InodeTree<T> {
   /**
    * The user of this class must subclass and implement the following
    * 3 abstract methods.
-   * @throws IOException
+   * @return a function that initializes and returns the target filesystem for the given URI.
    */
   protected abstract Function<URI, T> initAndGetTargetFs();
 
@@ -591,14 +593,21 @@ public abstract class InodeTree<T> {
   }
 
   /**
-   * Create Inode Tree from the specified mount-table specified in Config
-   * @param config - the mount table keys are prefixed with
-   *       FsConstants.CONFIG_VIEWFS_PREFIX
-   * @param viewName - the name of the mount table - if null use defaultMT name
-   * @throws UnsupportedFileSystemException
-   * @throws URISyntaxException
-   * @throws FileAlreadyExistsException
-   * @throws IOException
+   * Create Inode Tree from the specified mount-table specified in Config.
+   *
+   * @param config the mount table keys are prefixed with
+   *               FsConstants.CONFIG_VIEWFS_PREFIX.
+   * @param viewName the name of the mount table
+   *                 if null use defaultMT name.
+   * @param theUri the URI of this mount table.
+   * @param initingUriAsFallbackOnNoMounts use the initializing URI as a fallback when no mounts are configured.
+   * @throws UnsupportedFileSystemException file system for <code>uri</code> is
+   *                                        not found.
+   * @throws URISyntaxException if the URI does not have an authority
+   *                            it is badly formed.
+   * @throws FileAlreadyExistsException there is a file at the path specified
+   *                                    or is discovered on one of its ancestors.
+   * @throws IOException raised on errors performing I/O.
    */
   protected InodeTree(final Configuration config, final String viewName,
       final URI theUri, boolean initingUriAsFallbackOnNoMounts)
@@ -872,9 +881,9 @@ public abstract class InodeTree<T> {
   /**
    * Resolve the pathname p relative to root InodeDir.
    * @param p - input path
-   * @param resolveLastComponent
+   * @param resolveLastComponent whether to resolve the last path component.
    * @return ResolveResult which allows further resolution of the remaining path
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public ResolveResult<T> resolve(final String p, final boolean resolveLastComponent)
       throws IOException {
@@ -996,14 +1005,14 @@ public abstract class InodeTree<T> {
   /**
    * Walk through all regex mount points to see
    * whether the path match any regex expressions.
-   *  E.g. link: ^/user/(?<username>\\w+) => s3://$user.apache.com/_${user}
+   *  E.g. link: ^/user/(?&lt;username&gt;\\w+) =&gt; s3://$user.apache.com/_${user}
    *  srcPath: is /user/hadoop/dir1
    *  resolveLastComponent: true
    *  then return value is s3://hadoop.apache.com/_hadoop
    *
-   * @param srcPath
-   * @param resolveLastComponent
-   * @return
+   * @param srcPath the source path to resolve.
+   * @param resolveLastComponent whether to resolve the last path component.
+   * @return the resolve result, or null if no regex mount point matches.
    */
   protected ResolveResult<T> tryResolveInRegexMountpoint(final String srcPath,
       final boolean resolveLastComponent) {
@@ -1021,7 +1030,7 @@ public abstract class InodeTree<T> {
    * Build resolve result.
    * Here's an example
    * Mountpoint: fs.viewfs.mounttable.mt
-   *     .linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\w+)
+   *     .linkRegex.replaceresolveddstpath:_:-#.^/user/(?&lt;username&gt;\w+)
    * Value: /targetTestRoot/$username
    * Dir path to test:
    * viewfs://mt/user/hadoop_user1/hadoop_dir1
@@ -1030,6 +1039,10 @@ public abstract class InodeTree<T> {
    * targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
    * remainingPath: /hadoop_dir1
    *
+   * @param resultKind the kind of the resolved target.
+   * @param resolvedPathStr the resolved path in the mount table.
+   * @param targetOfResolvedPathStr the target of the resolved path.
+   * @param remainingPath the path remaining after resolution.
    * @return targetFileSystem or null on exceptions.
    */
   protected ResolveResult<T> buildResolveResultForRegexMountPoint(
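
A minimal sketch of the regex mount point configuration documented above; the
key and value strings are lifted from the javadoc example, while the wrapper
class and method are invented for illustration:

    import org.apache.hadoop.conf.Configuration;

    public class RegexMountPointSketch {
      public static Configuration build() {
        // Mount table "mt": map /user/<username> to /targetTestRoot/$username,
        // replacing "_" with "-" in the resolved destination path.
        Configuration conf = new Configuration();
        conf.set("fs.viewfs.mounttable.mt"
            + ".linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\\w+)",
            "/targetTestRoot/$username");
        // viewfs://mt/user/hadoop_user1/hadoop_dir1 then resolves to
        // /targetTestRoot/hadoop-user1 with remaining path /hadoop_dir1.
        return conf;
      }
    }
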
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
index bc2c3ea93c5..5fcd77cd291 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
@@ -38,6 +38,7 @@ public interface MountTableConfigLoader {
    *          a directory in the case of multiple versions of mount-table
    *          files(Recommended option).
    * @param conf - Configuration object to add mount table.
+   * @throws IOException raised on errors performing I/O.
    */
   void load(String mountTableConfigPath, Configuration conf)
       throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 8f4631b0e83..da3955b125e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -107,6 +107,8 @@ public class ViewFileSystem extends FileSystem {
 
   /**
    * Gets file system creator instance.
+   *
+   * @return the FsGetter used to create file system instances.
    */
   protected FsGetter fsGetter() {
     return new FsGetter();
@@ -273,7 +275,7 @@ public class ViewFileSystem extends FileSystem {
    * {@link FileSystem#createFileSystem(URI, Configuration)}
    *
    * After this constructor is called initialize() is called.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public ViewFileSystem() throws IOException {
     ugi = UserGroupInformation.getCurrentUser();
@@ -382,10 +384,10 @@ public class ViewFileSystem extends FileSystem {
   }
 
   /**
-   * Convenience Constructor for apps to call directly
+   * Convenience Constructor for apps to call directly.
    * @param theUri which must be that of ViewFileSystem
-   * @param conf
-   * @throws IOException
+   * @param conf the configuration.
+   * @throws IOException raised on errors performing I/O.
    */
   ViewFileSystem(final URI theUri, final Configuration conf)
       throws IOException {
@@ -394,9 +396,9 @@ public class ViewFileSystem extends FileSystem {
   }
 
   /**
-   * Convenience Constructor for apps to call directly
-   * @param conf
-   * @throws IOException
+   * Convenience Constructor for apps to call directly.
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
    */
   public ViewFileSystem(final Configuration conf) throws IOException {
     this(FsConstants.VIEWFS_URI, conf);
@@ -1314,7 +1316,7 @@ public class ViewFileSystem extends FileSystem {
    * Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH} is supported and is a valid
    * mount point. Else, throw NotInMountpointException.
    *
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Override
   public long getUsed() throws IOException {
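
A minimal sketch of reaching these constructors through the normal FileSystem
lookup, assuming mount links are already configured (class name and config
comments are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;

    public class ViewFsClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumes mount links such as fs.viewfs.mounttable.default.link./data
        // are already present in conf.
        FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
        // getUsed(), as documented above, works when "/" resolves to an
        // internal dir or a valid merge-slash mount point.
        System.out.println("used: " + viewFs.getUsed());
      }
    }
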
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
index e91b66512d5..1c25a9536e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
@@ -139,6 +139,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
 
   /**
    * Sets whether to add fallback automatically when no mount points found.
+   *
+   * @param addAutoFallbackOnNoMounts whether to add a fallback automatically when no mount points are found.
    */
   public void setSupportAutoAddingFallbackOnNoMounts(
       boolean addAutoFallbackOnNoMounts) {
@@ -320,7 +322,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
    *
    * @param path - fs uri path
    * @param conf - configuration
-   * @throws IOException
+   * @return the raw file system for the given path.
+   * @throws IOException raised on errors performing I/O.
    */
   public FileSystem getRawFileSystem(Path path, Configuration conf)
       throws IOException {
@@ -339,6 +342,11 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
   /**
    * Gets the mount path info, which contains the target file system and
    * remaining path to pass to the target file system.
+   *
+   * @param path the path.
+   * @param conf configuration.
+   * @return mount path info.
+   * @throws IOException raised on errors performing I/O.
    */
   public MountPathInfo<FileSystem> getMountPathInfo(Path path,
       Configuration conf) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
index f486a10b4c8..c9c6767097b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
@@ -44,7 +44,7 @@ public final class ViewFileSystemUtil {
   /**
    * Check if the FileSystem is a ViewFileSystem.
    *
-   * @param fileSystem
+   * @param fileSystem file system.
    * @return true if the fileSystem is ViewFileSystem
    */
   public static boolean isViewFileSystem(final FileSystem fileSystem) {
@@ -54,7 +54,7 @@ public final class ViewFileSystemUtil {
   /**
    * Check if the FileSystem is a ViewFileSystemOverloadScheme.
    *
-   * @param fileSystem
+   * @param fileSystem file system.
    * @return true if the fileSystem is ViewFileSystemOverloadScheme
    */
   public static boolean isViewFileSystemOverloadScheme(
@@ -101,6 +101,7 @@ public final class ViewFileSystemUtil {
    * @param fileSystem - ViewFileSystem on which mount point exists
    * @param path - URI for which FsStatus is requested
    * @return Map of ViewFsMountPoint and FsStatus
+   * @throws IOException raised on errors performing I/O.
    */
   public static Map<MountPoint, FsStatus> getStatus(
       FileSystem fileSystem, Path path) throws IOException {
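
A short sketch combining the check and getStatus above; it assumes the
returned map is keyed by ViewFileSystem.MountPoint (names are illustrative):

    import java.util.Map;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ViewFileSystem;
    import org.apache.hadoop.fs.viewfs.ViewFileSystemUtil;

    public class MountStatusSketch {
      static void printStatus(FileSystem fs, Path path) throws Exception {
        if (!ViewFileSystemUtil.isViewFileSystem(fs)) {
          return; // getStatus is only meaningful for ViewFileSystem.
        }
        Map<ViewFileSystem.MountPoint, FsStatus> statuses =
            ViewFileSystemUtil.getStatus(fs, path);
        for (FsStatus st : statuses.values()) {
          System.out.println(st.getUsed() + " of " + st.getCapacity());
        }
      }
    }
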
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
index d98082fe5c1..5f54c9cdd06 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
@@ -909,7 +909,7 @@ public class ViewFs extends AbstractFileSystem {
    *
    * @param src file or directory path.
    * @return storage policy for give file.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public BlockStoragePolicySpi getStoragePolicy(final Path src)
       throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 041f8cab49c..edd15af534a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -91,6 +91,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
      * 
      * Callback implementations are expected to manage their own
      * timeouts (e.g. when making an RPC to a remote node).
+     *
+     * @throws ServiceFailedException if the service fails to become active.
      */
     void becomeActive() throws ServiceFailedException;
 
@@ -119,6 +121,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
      * If there is any fatal error (e.g. wrong ACL's, unexpected Zookeeper
      * errors or Zookeeper persistent unavailability) then notifyFatalError is
      * called to notify the app about it.
+     *
+     * @param errorMessage error message.
      */
     void notifyFatalError(String errorMessage);
 
@@ -204,8 +208,12 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    *                 ZK connection
    * @param app
    *          reference to callback interface object
-   * @throws IOException
+   * @param maxRetryNum the maximum number of retries for the ZK connection.
+   * @throws IOException raised on errors performing I/O.
    * @throws HadoopIllegalArgumentException
+   *         if valid data is not supplied.
+   * @throws KeeperException
+   *         other zookeeper operation errors.
    */
   public ActiveStandbyElector(String zookeeperHostPorts,
       int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -245,8 +253,13 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    *          reference to callback interface object
    * @param failFast
    *          whether need to add the retry when establishing ZK connection.
+   * @param maxRetryNum the maximum number of retries for the ZK connection.
    * @throws IOException
+   *          raised on errors performing I/O.
    * @throws HadoopIllegalArgumentException
+   *          if valid data is not supplied.
+   * @throws KeeperException
+   *          other zookeeper operation errors.
    */
   public ActiveStandbyElector(String zookeeperHostPorts,
       int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -312,6 +325,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   
   /**
    * @return true if the configured parent znode exists
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
    */
   public synchronized boolean parentZNodeExists()
       throws IOException, InterruptedException {
@@ -327,6 +342,10 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
   /**
    * Utility function to ensure that the configured base znode exists.
    * This recursively creates the znode as well as all of its parents.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
+   * @throws KeeperException other zookeeper operation errors.
    */
   public synchronized void ensureParentZNode()
       throws IOException, InterruptedException, KeeperException {
@@ -371,6 +390,9 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * This recursively deletes everything within the znode as well as the
    * parent znode itself. It should only be used when it's certain that
    * no electors are currently participating in the election.
+   *
+   * @throws IOException raised on errors performing I/O.
+   * @throws InterruptedException interrupted exception.
    */
   public synchronized void clearParentZNode()
       throws IOException, InterruptedException {
@@ -435,6 +457,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * @throws KeeperException
    *           other zookeeper operation errors
    * @throws InterruptedException
+   *           interrupted exception.
    * @throws IOException
    *           when ZooKeeper connection could not be established
    */
@@ -684,7 +707,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * inherit and mock out the zookeeper instance
    * 
    * @return new zookeeper client instance
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @throws KeeperException zookeeper connectionloss exception
    */
   protected synchronized ZooKeeper connectToZooKeeper() throws IOException,
@@ -714,7 +737,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * inherit and pass in a mock object for zookeeper
    *
    * @return new zookeeper client instance
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   protected ZooKeeper createZooKeeper() throws IOException {
     return new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
@@ -781,6 +804,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
    * Sleep for the given number of milliseconds.
    * This is non-static, and separated out, so that unit tests
    * can override the behavior not to sleep.
+   *
+   * @param sleepMs the number of milliseconds to sleep.
    */
   @VisibleForTesting
   protected void sleepFor(int sleepMs) {
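
A partial sketch of an elector callback covering only the two methods whose
javadoc is touched above; the real ActiveStandbyElectorCallback interface
declares further methods (e.g. becomeStandby) that a concrete implementation
must also provide:

    import org.apache.hadoop.ha.ServiceFailedException;

    class ElectorCallbackSketch {
      void becomeActive() throws ServiceFailedException {
        // Promote the local service; per the contract above, manage our
        // own timeouts when making RPCs to remote nodes.
      }

      void notifyFatalError(String errorMessage) {
        // Unrecoverable ZK failure (bad ACLs, persistent unavailability):
        // log and abort rather than continue in an unknown state.
      }
    }
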
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index c6949e561e2..9eeaacd76bc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -326,6 +326,9 @@ public abstract class HAAdmin extends Configured implements Tool {
   /**
    * Return the serviceId as is, we are assuming it was
    * given as a service address of form {@literal <}host:ipcport{@literal >}.
+   *
+   * @param serviceId the service id.
+   * @return the service address.
    */
   protected String getServiceAddr(String serviceId) {
     return serviceId;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
index 74a3d121a1a..56c848617ff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
@@ -118,7 +118,8 @@ public interface HAServiceProtocol {
   /**
    * Request service to transition to active state. No operation, if the
    * service is already in active state.
-   * 
+   *
+   * @param reqInfo the state change request info.
    * @throws ServiceFailedException
    *           if transition from standby to active fails.
    * @throws AccessControlException
@@ -135,7 +136,8 @@ public interface HAServiceProtocol {
   /**
    * Request service to transition to standby state. No operation, if the
    * service is already in standby state.
-   * 
+   *
+   * @param reqInfo the state change request info.
    * @throws ServiceFailedException
    *           if transition from active to standby fails.
    * @throws AccessControlException
@@ -153,6 +155,7 @@ public interface HAServiceProtocol {
    * Request service to transition to observer state. No operation, if the
    * service is already in observer state.
    *
+   * @param reqInfo the state change request info.
    * @throws ServiceFailedException
    *           if transition from standby to observer fails.
    * @throws AccessControlException
@@ -176,6 +179,7 @@ public interface HAServiceProtocol {
    * @throws IOException
    *           if other errors happen
    * @see HAServiceStatus
+   * @return the current status of the HA service.
    */
   @Idempotent
   public HAServiceStatus getServiceStatus() throws AccessControlException,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
index 2e6b1fe1134..288a9dcbe0e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
@@ -93,6 +93,9 @@ public abstract class HAServiceTarget {
   
   /**
    * @return a proxy to connect to the target HA Service.
+   * @param conf the configuration.
+   * @param timeoutMs timeout in milliseconds.
+   * @throws IOException raised on errors performing I/O.
    */
   public HAServiceProtocol getProxy(Configuration conf, int timeoutMs)
       throws IOException {
@@ -115,7 +118,7 @@ public abstract class HAServiceTarget {
    * returned proxy defaults to using {@link #getAddress()}, which means this
    * method's behavior is identical to {@link #getProxy(Configuration, int)}.
    *
-   * @param conf Configuration
+   * @param conf configuration.
    * @param timeoutMs timeout in milliseconds
    * @return a proxy to connect to the target HA service for health monitoring
    * @throws IOException if there is an error
@@ -154,6 +157,9 @@ public abstract class HAServiceTarget {
 
   /**
    * @return a proxy to the ZKFC which is associated with this HA service.
+   * @param conf configuration.
+   * @param timeoutMs timeout in milliseconds.
+   * @throws IOException raised on errors performing I/O.
    */
   public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs)
       throws IOException {
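
A minimal sketch tying getProxy to the getServiceStatus call documented in
HAServiceProtocol above (the timeout value and helper name are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ha.HAServiceProtocol;
    import org.apache.hadoop.ha.HAServiceStatus;
    import org.apache.hadoop.ha.HAServiceTarget;

    public class HaStatusSketch {
      static HAServiceStatus query(HAServiceTarget target) throws Exception {
        // 5s RPC timeout; the proxy connects to getAddress() by default.
        HAServiceProtocol proxy = target.getProxy(new Configuration(), 5000);
        return proxy.getServiceStatus();
      }
    }
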
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
index 7e90fb77a07..d222d52e373 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
@@ -184,6 +184,9 @@ public class HealthMonitor {
   
   /**
    * Connect to the service to be monitored. Stubbed out for easier testing.
+   *
+   * @return a proxy to the service to be monitored.
+   * @throws IOException raised on errors performing I/O.
    */
   protected HAServiceProtocol createProxy() throws IOException {
     return targetToMonitor.getHealthMonitorProxy(conf, rpcTimeout, rpcConnectRetries);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index 87a80b868cd..d24d5630c59 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -153,6 +153,8 @@ public abstract class ZKFailoverController {
    * the ZKFC will do all of its work. This is so that multiple federated
    * nameservices can run on the same ZK quorum without having to manually
    * configure them to separate subdirectories.
+   *
+   * @return the scope (znode name) under the parent node.
    */
   protected abstract String getScopeInsideParentNode();
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
index 51db21c185f..5f47ddb3392 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
@@ -80,6 +80,7 @@ public class HtmlQuoting {
    * @param buffer the byte array to take the characters from
    * @param off the index of the first byte to quote
    * @param len the number of bytes to quote
+   * @throws IOException raised on errors performing I/O.
    */
   public static void quoteHtmlChars(OutputStream output, byte[] buffer,
                                     int off, int len) throws IOException {
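
A small self-contained sketch of the quoting call (the helper class is
invented for illustration):

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.http.HtmlQuoting;

    public class QuoteSketch {
      static String quote(String raw) throws IOException {
        byte[] bytes = raw.getBytes(StandardCharsets.UTF_8);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Escapes characters such as < > & " so they render safely in HTML.
        HtmlQuoting.quoteHtmlChars(out, bytes, 0, bytes.length);
        return out.toString(StandardCharsets.UTF_8.name());
      }
    }
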
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 49807ac4b45..2928f885982 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -270,6 +270,7 @@ public final class HttpServer2 implements FilterContainer {
      *          specifies the binding address, and the port specifies the
      *          listening port. Unspecified or zero port means that the server
      *          can listen to any port.
+     * @return this Builder.
      */
     public Builder addEndpoint(URI endpoint) {
       endpoints.add(endpoint);
@@ -280,6 +281,9 @@ public final class HttpServer2 implements FilterContainer {
      * Set the hostname of the http server. The host name is used to resolve the
      * _HOST field in Kerberos principals. The hostname of the first listener
      * will be used if the name is unspecified.
+     *
+     * @param hostName the host name.
+     * @return this Builder.
      */
     public Builder hostName(String hostName) {
       this.hostName = hostName;
@@ -308,6 +312,9 @@ public final class HttpServer2 implements FilterContainer {
     /**
      * Specify whether the server should authorize the client in SSL
      * connections.
+     *
+     * @param value true to require client auth.
+     * @return this Builder.
      */
     public Builder needsClientAuth(boolean value) {
       this.needsClientAuth = value;
@@ -332,6 +339,9 @@ public final class HttpServer2 implements FilterContainer {
     /**
      * Specify the SSL configuration to load. This API provides an alternative
      * to keyStore/keyPassword/trustStore.
+     *
+     * @param sslCnf the SSL configuration.
+     * @return this Builder.
      */
     public Builder setSSLConf(Configuration sslCnf) {
       this.sslConf = sslCnf;
@@ -898,8 +908,11 @@ public final class HttpServer2 implements FilterContainer {
 
   /**
    * Add default apps.
+   *
+   * @param parent the parent ContextHandlerCollection.
    * @param appDir The application directory
-   * @throws IOException
+   * @param conf configuration.
+   * @throws IOException raised on errors performing I/O.
    */
   protected void addDefaultApps(ContextHandlerCollection parent,
       final String appDir, Configuration conf) throws IOException {
@@ -1180,6 +1193,12 @@ public final class HttpServer2 implements FilterContainer {
 
   /**
    * Define a filter for a context and set up default url mappings.
+   *
+   * @param ctx the servlet context handler.
+   * @param name the filter name.
+   * @param classname the filter class name.
+   * @param parameters the filter init parameters.
+   * @param urls the url patterns to map the filter to.
    */
   public static void defineFilter(ServletContextHandler ctx, String name,
       String classname, Map<String,String> parameters, String[] urls) {
@@ -1290,6 +1309,7 @@ public final class HttpServer2 implements FilterContainer {
   /**
    * Get the address that corresponds to a particular connector.
    *
+   * @param index the index of the connector.
    * @return the corresponding address for the connector, or null if there's no
    *         such connector or the connector is not bounded or was closed.
    */
@@ -1309,6 +1329,9 @@ public final class HttpServer2 implements FilterContainer {
 
   /**
    * Set the min, max number of worker threads (simultaneous connections).
+   *
+   * @param min the minimum number of worker threads.
+   * @param max the maximum number of worker threads.
    */
   public void setThreads(int min, int max) {
     QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
@@ -1335,6 +1358,8 @@ public final class HttpServer2 implements FilterContainer {
 
   /**
    * Start the server. Does not wait for the server to start.
+   *
+   * @throws IOException raised on errors performing I/O.
    */
   public void start() throws IOException {
     try {
@@ -1509,7 +1534,9 @@ public final class HttpServer2 implements FilterContainer {
   }
 
   /**
-   * stop the server
+   * Stop the server.
+   *
+   * @throws Exception if the server fails to stop cleanly.
    */
   public void stop() throws Exception {
     MultiException exception = null;
@@ -1610,6 +1637,7 @@ public final class HttpServer2 implements FilterContainer {
    * @param request the servlet request.
    * @param response the servlet response.
    * @return TRUE/FALSE based on the logic described above.
+   * @throws IOException raised on errors performing I/O.
    */
   public static boolean isInstrumentationAccessAllowed(
     ServletContext servletContext, HttpServletRequest request,
@@ -1631,9 +1659,11 @@ public final class HttpServer2 implements FilterContainer {
    * Does the user sending the HttpServletRequest have the administrator ACLs? If
    * it isn't the case, response will be modified to send an error to the user.
    *
+   * @param servletContext the servlet context.
+   * @param request the servlet request.
    * @param response used to send the error response if user does not have admin access.
    * @return true if admin-authorized, false otherwise
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static boolean hasAdministratorAccess(
       ServletContext servletContext, HttpServletRequest request,
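
The Builder methods touched above chain naturally; a sketch, assuming
setName(String) and build() from the same Builder (both are assumptions of
this example, not part of the hunk):

    import java.net.URI;
    import org.apache.hadoop.http.HttpServer2;

    public class EmbeddedHttpSketch {
      public static void main(String[] args) throws Exception {
        HttpServer2 server = new HttpServer2.Builder()
            .setName("example")
            .addEndpoint(URI.create("http://localhost:0")) // port 0 = any
            .hostName("localhost")
            .needsClientAuth(false)
            .build();
        server.start(); // does not wait for the server to start
        server.stop();
      }
    }
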
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index eef74628e16..8cf82f42509 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -84,7 +84,10 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
     idToClassMap.put(id, clazz);
   }
   
-  /** Add a Class to the maps if it is not already present. */ 
+  /**
+   * Add a Class to the maps if it is not already present.
+   * @param clazz the class to add.
+   */
   protected synchronized void addToMap(Class<?> clazz) {
     if (classToIdMap.containsKey(clazz)) {
       return;
@@ -97,17 +100,28 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
     addToMap(clazz, id);
   }
 
-  /** @return the Class class for the specified id */
+  /**
+   * Return the class for the specified id.
+   * @param id the id.
+   * @return the class for the specified id.
+   */
   protected Class<?> getClass(byte id) {
     return idToClassMap.get(id);
   }
 
-  /** @return the id for the specified Class */
+  /**
+   * Return the id for the specified class.
+   * @param clazz the class.
+   * @return the id, or -1 if the class is not in the map.
+   */
   protected byte getId(Class<?> clazz) {
     return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
   }
 
-  /** Used by child copy constructors. */
+  /**
+   * Used by child copy constructors.
+   * @param other the instance to copy from.
+   */
   protected synchronized void copy(Writable other) {
     if (other != null) {
       try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
index bee5fd2cb43..313caa63608 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
@@ -38,7 +38,15 @@ public class ArrayFile extends MapFile {
   public static class Writer extends MapFile.Writer {
     private LongWritable count = new LongWritable(0);
 
-    /** Create the named file for values of the named class. */
+    /**
+     * Create the named file for values of the named class.
+     *
+     * @param conf configuration.
+     * @param fs file system.
+     * @param file the file name.
+     * @param valClass the class of the values.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Writer(Configuration conf, FileSystem fs,
                   String file, Class<? extends Writable> valClass)
       throws IOException {
@@ -46,7 +54,17 @@ public class ArrayFile extends MapFile {
             valueClass(valClass));
     }
 
-    /** Create the named file for values of the named class. */
+    /**
+     * Create the named file for values of the named class.
+     *
+     * @param conf configuration.
+     * @param fs file system.
+     * @param file the file name.
+     * @param valClass the class of the values.
+     * @param compress the compression type.
+     * @param progress the progress callback.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Writer(Configuration conf, FileSystem fs,
                   String file, Class<? extends Writable> valClass,
                   CompressionType compress, Progressable progress)
@@ -58,7 +76,11 @@ public class ArrayFile extends MapFile {
             progressable(progress));
     }
 
-    /** Append a value to the file. */
+    /**
+     * Append a value to the file.
+     * @param value the value to append.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized void append(Writable value) throws IOException {
       super.append(count, value);                 // add to map
       count.set(count.get()+1);                   // increment count
@@ -69,31 +91,59 @@ public class ArrayFile extends MapFile {
   public static class Reader extends MapFile.Reader {
     private LongWritable key = new LongWritable();
 
-    /** Construct an array reader for the named file.*/
+    /**
+     * Construct an array reader for the named file.
+     * @param fs FileSystem.
+     * @param file the file name.
+     * @param conf configuration.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Reader(FileSystem fs, String file, 
                   Configuration conf) throws IOException {
       super(new Path(file), conf);
     }
 
-    /** Positions the reader before its <code>n</code>th value. */
+    /**
+     * Positions the reader before its <code>n</code>th value.
+     *
+     * @param n the index to position before.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized void seek(long n) throws IOException {
       key.set(n);
       seek(key);
     }
 
-    /** Read and return the next value in the file. */
+    /**
+     * Read and return the next value in the file.
+     *
+     * @param value the value to read into.
+     * @return the value, or null if there are no more values.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized Writable next(Writable value) throws IOException {
       return next(key, value) ? value : null;
     }
 
-    /** Returns the key associated with the most recent call to {@link
+    /**
+     * Returns the key associated with the most recent call to {@link
      * #seek(long)}, {@link #next(Writable)}, or {@link
-     * #get(long,Writable)}. */
+     * #get(long,Writable)}.
+     *
+     * @return the key.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized long key() throws IOException {
       return key.get();
     }
 
-    /** Return the <code>n</code>th value in the file. */
+    /**
+     * Return the <code>n</code>th value in the file.
+     * @param n the index of the value to fetch.
+     * @param value the value to read into.
+     * @return the value, or null if none exists at that index.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized Writable get(long n, Writable value)
       throws IOException {
       key.set(n);
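
A round-trip sketch using only the Writer/Reader members documented in this
hunk (the temp path is illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.ArrayFile;
    import org.apache.hadoop.io.Text;

    public class ArrayFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        ArrayFile.Writer writer =
            new ArrayFile.Writer(conf, fs, "/tmp/array.demo", Text.class);
        writer.append(new Text("zero"));  // stored at index 0
        writer.append(new Text("one"));   // stored at index 1
        writer.close();

        ArrayFile.Reader reader =
            new ArrayFile.Reader(fs, "/tmp/array.demo", conf);
        Text value = new Text();
        reader.get(1, value);             // seeks to and reads index 1
        System.out.println(value);        // prints "one"
        reader.close();
      }
    }
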
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
index 2b6f3166bc2..ce7813e7483 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
@@ -106,7 +106,9 @@ public class ArrayPrimitiveWritable implements Writable {
   
   /**
    * Construct an instance of known type but no value yet
-   * for use with type-specific wrapper classes
+   * for use with type-specific wrapper classes.
+   *
+   * @param componentType the component type of the array.
    */
   public ArrayPrimitiveWritable(Class<?> componentType) {
     checkPrimitive(componentType);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
index a32c44c8e50..a78ff8b6c58 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
@@ -31,11 +31,15 @@ public abstract class BinaryComparable implements Comparable<BinaryComparable> {
 
   /**
    * Return n such that bytes 0..n-1 from {@link #getBytes()} are valid.
+   *
+   * @return the number of valid bytes.
    */
   public abstract int getLength();
 
   /**
    * Return representative byte array for this instance.
+   *
+   * @return the byte array.
    */
   public abstract byte[] getBytes();
 
@@ -53,6 +57,11 @@ public abstract class BinaryComparable implements Comparable<BinaryComparable> {
 
   /**
    * Compare bytes from {@link #getBytes()} to those provided.
+   *
+   * @param other the bytes to compare to.
+   * @param off the offset into other.
+   * @param len the number of bytes to compare.
+   * @return the result of the byte comparison.
    */
   public int compareTo(byte[] other, int off, int len) {
     return WritableComparator.compareBytes(getBytes(), 0, getLength(),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
index 519fcd74cbb..91ea07d5de4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
@@ -259,7 +259,7 @@ public class BloomMapFile {
      * probability of false positives.
      * @param key key to check
      * @return  false iff key doesn't exist, true if key probably exists.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public boolean probablyHasKey(WritableComparable key) throws IOException {
       if (bloomFilter == null) {
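
Usage sketch: the bloom filter lets callers skip the disk lookup when the key
is provably absent (the reader is assumed to be already open; get() comes
from the parent MapFile.Reader):

    import org.apache.hadoop.io.BloomMapFile;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class BloomCheckSketch {
      static Writable lookup(BloomMapFile.Reader reader, Text key, Text val)
          throws Exception {
        if (!reader.probablyHasKey(key)) {
          return null;                 // definitely not present
        }
        return reader.get(key, val);   // may still be a bloom false positive
      }
    }
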
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
index 0079079a792..789b866255b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
@@ -35,21 +35,24 @@ public class BooleanWritable implements WritableComparable<BooleanWritable> {
    */
   public BooleanWritable() {};
 
-  /** 
+  /**
+   * @param value the initial value.
    */
   public BooleanWritable(boolean value) {
     set(value);
   }
 
   /** 
-   * Set the value of the BooleanWritable
+   * Set the value of the BooleanWritable.
+   * @param value the value to set.
    */    
   public void set(boolean value) {
     this.value = value;
   }
 
   /**
-   * Returns the value of the BooleanWritable
+   * Returns the value of the BooleanWritable.
+   * @return the value of the BooleanWritable.
    */
   public boolean get() {
     return value;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
index c27449d3618..542721f318d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
@@ -114,20 +114,28 @@ public class BoundedByteArrayOutputStream extends OutputStream {
     this.currentPointer = startOffset;
   }
 
-  /** Return the current limit */
+  /**
+   * Return the current limit.
+   * @return the current limit.
+   */
   public int getLimit() {
     return limit;
   }
 
-  /** Returns the underlying buffer.
+  /**
+   * Returns the underlying buffer.
    *  Data is only valid to {@link #size()}.
+   * @return the underlying buffer.
    */
   public byte[] getBuffer() {
     return buffer;
   }
 
-  /** Returns the length of the valid data 
+  /**
+   * Returns the length of the valid data
    * currently in the buffer.
+   *
+   * @return the length of the valid data.
    */
   public int size() {
     return currentPointer - startOffset;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
index ffcdea2c9a3..c4b88f4b5c9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
@@ -33,10 +33,16 @@ public class ByteWritable implements WritableComparable<ByteWritable> {
 
   public ByteWritable(byte value) { set(value); }
 
-  /** Set the value of this ByteWritable. */
+  /**
+   * Set the value of this ByteWritable.
+   * @param value the value to set.
+   */
   public void set(byte value) { this.value = value; }
 
-  /** Return the value of this ByteWritable. */
+  /**
+   * Return the value of this ByteWritable.
+   * @return the value of this ByteWritable.
+   */
   public byte get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
index c5538c9e56e..80a23f86ce8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
@@ -77,6 +77,8 @@ public class BytesWritable extends BinaryComparable
   /**
    * Get a copy of the bytes that is exactly the length of the data.
    * See {@link #getBytes()} for faster access to the underlying array.
+   *
+   * @return a new byte array copy of the data.
    */
   public byte[] copyBytes() {
     return Arrays.copyOf(bytes, size);
@@ -95,6 +97,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Get the data from the BytesWritable.
    * @deprecated Use {@link #getBytes()} instead.
+   * @return data from the BytesWritable.
    */
   @Deprecated
   public byte[] get() {
@@ -112,6 +115,7 @@ public class BytesWritable extends BinaryComparable
   /**
    * Get the current size of the buffer.
    * @deprecated Use {@link #getLength()} instead.
+   * @return current size of the buffer.
    */
   @Deprecated
   public int getSize() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
index 6550e1f2fde..c0315ab828c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
@@ -67,7 +67,11 @@ public abstract class CompressedWritable implements Writable {
     }
   }
 
-  /** Subclasses implement this instead of {@link #readFields(DataInput)}. */
+  /**
+   * Subclasses implement this instead of {@link #readFields(DataInput)}.
+   * @param in data input.
+   * @throws IOException raised on errors performing I/O.
+   */
   protected abstract void readFieldsCompressed(DataInput in)
     throws IOException;
 
@@ -87,7 +91,12 @@ public abstract class CompressedWritable implements Writable {
     out.write(compressed);
   }
 
-  /** Subclasses implement this instead of {@link #write(DataOutput)}. */
+  /**
+   * Subclasses implement this instead of {@link #write(DataOutput)}.
+   *
+   * @param out data output.
+   * @throws IOException raised on errors performing I/O.
+   */
   protected abstract void writeCompressed(DataOutput out) throws IOException;
 
 }
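
A subclass sketch showing the two hooks documented above; the base class
handles (de)compressing the record around them:

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.CompressedWritable;

    public class CompressedPoint extends CompressedWritable {
      private int x;
      private int y;

      @Override
      protected void writeCompressed(DataOutput out) throws IOException {
        out.writeInt(x);   // raw fields; the superclass compresses them
        out.writeInt(y);
      }

      @Override
      protected void readFieldsCompressed(DataInput in) throws IOException {
        x = in.readInt();  // called once the record is lazily inflated
        y = in.readInt();
      }
    }
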
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
index 63c41c2e750..85e905d8700 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
@@ -140,12 +140,23 @@ public class DataInputBuffer extends DataInputStream {
     this.buffer = buffer;
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   *
+   * @param input the new input data.
+   * @param length the number of valid bytes.
+   */
   public void reset(byte[] input, int length) {
     buffer.reset(input, 0, length);
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   *
+   * @param input the new input data.
+   * @param start the start offset.
+   * @param length the number of valid bytes.
+   */
   public void reset(byte[] input, int start, int length) {
     buffer.reset(input, start, length);
   }
@@ -154,12 +165,18 @@ public class DataInputBuffer extends DataInputStream {
     return buffer.getData();
   }
 
-  /** Returns the current position in the input. */
+  /**
+   * Returns the current position in the input.
+   *
+   * @return the current position.
+   */
   public int getPosition() { return buffer.getPosition(); }
 
   /**
    * Returns the index one greater than the last valid character in the input
    * stream buffer.
+   *
+   * @return the length of the input buffer.
    */
   public int getLength() { return buffer.getLength(); }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
index 1d86b89701c..4c1fa41e149 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
@@ -99,27 +99,45 @@ public class DataOutputBuffer extends DataOutputStream {
     this.buffer = buffer;
   }
 
-  /** Returns the current contents of the buffer.
+  /**
+   * Returns the current contents of the buffer.
    *  Data is only valid to {@link #getLength()}.
+   *
+   * @return the buffer contents.
    */
   public byte[] getData() { return buffer.getData(); }
 
-  /** Returns the length of the valid data currently in the buffer. */
+  /**
+   * Returns the length of the valid data currently in the buffer.
+   * @return the length of the valid data.
+   */
   public int getLength() { return buffer.getLength(); }
 
-  /** Resets the buffer to empty. */
+  /**
+   * Resets the buffer to empty.
+   * @return this buffer.
+   */
   public DataOutputBuffer reset() {
     this.written = 0;
     buffer.reset();
     return this;
   }
 
-  /** Writes bytes from a DataInput directly into the buffer. */
+  /**
+   * Writes bytes from a DataInput directly into the buffer.
+   * @param in data input.
+   * @param length the number of bytes to read.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void write(DataInput in, int length) throws IOException {
     buffer.write(in, length);
   }
 
-  /** Write to a file stream */
+  /**
+   * Write to a file stream.
+   * @param out the output stream to write to.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void writeTo(OutputStream out) throws IOException {
     buffer.writeTo(out);
   }
@@ -128,6 +146,10 @@ public class DataOutputBuffer extends DataOutputStream {
    * Overwrite an integer into the internal buffer. Note that this call can only
    * be used to overwrite existing data in the buffer, i.e., buffer#count cannot
    * be increased, and DataOutputStream#written cannot be increased.
+   *
+   * @param v the int value to write.
+   * @param offset the offset at which to write.
+   * @throws IOException raised on errors performing I/O.
    */
   public void writeInt(int v, int offset) throws IOException {
     Preconditions.checkState(offset + 4 <= buffer.getLength());
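
A serialization round trip using only the buffer methods documented in these
two files:

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.IntWritable;

    public class BufferRoundTripSketch {
      public static void main(String[] args) throws Exception {
        DataOutputBuffer out = new DataOutputBuffer();
        new IntWritable(42).write(out);           // serialize into buffer
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength()); // data valid to getLength()
        IntWritable copy = new IntWritable();
        copy.readFields(in);                      // copy.get() == 42
        out.reset();                              // reuse without reallocating
      }
    }
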
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
index be86159519b..4b1dc7513d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
@@ -64,8 +64,8 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    * the argument <tt>value</tt>'s size is bigger than zero, the argument
   * <tt>elementType</tt> is not used.
    * 
-   * @param value
-   * @param elementType
+   * @param value the EnumSet value.
+   * @param elementType the element type.
    */
   public EnumSetWritable(EnumSet<E> value, Class<E> elementType) {
     set(value, elementType);
@@ -75,7 +75,7 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    * Construct a new EnumSetWritable. Argument <tt>value</tt> should not be null
    * or empty.
    * 
-   * @param value
+   * @param value the EnumSet value.
    */
   public EnumSetWritable(EnumSet<E> value) {
     this(value, null);
@@ -88,8 +88,8 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
    * null. If the argument <tt>value</tt>'s size is bigger than zero, the
   * argument <tt>elementType</tt> is not used.
    * 
-   * @param value
-   * @param elementType
+   * @param value the EnumSet value.
+   * @param elementType the element type.
    */
   public void set(EnumSet<E> value, Class<E> elementType) {
     if ((value == null || value.size() == 0)
@@ -106,7 +106,10 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
     }
   }
 
-  /** Return the value of this EnumSetWritable. */
+  /**
+   * Return the value of this EnumSetWritable.
+   * @return the EnumSet value.
+   */
   public EnumSet<E> get() {
     return value;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
index 367fc946da1..864bb8752f5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
@@ -33,10 +33,16 @@ public class FloatWritable implements WritableComparable<FloatWritable> {
 
   public FloatWritable(float value) { set(value); }
 
-  /** Set the value of this FloatWritable. */
+  /**
+   * Set the value of this FloatWritable.
+   * @param value the value to set.
+   */
   public void set(float value) { this.value = value; }
 
-  /** Return the value of this FloatWritable. */
+  /**
+   * Return the value of this FloatWritable.
+   * @return value.
+   */
   public float get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
index 7cfeed7f931..6de927467e4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
@@ -90,7 +90,7 @@ public abstract class GenericWritable implements Writable, Configurable {
   /**
    * Set the instance that is wrapped.
    * 
-   * @param obj
+   * @param obj the instance to wrap.
    */
   public void set(Writable obj) {
     instance = obj;
@@ -109,6 +109,7 @@ public abstract class GenericWritable implements Writable, Configurable {
 
   /**
    * Return the wrapped instance.
+   * @return the wrapped instance.
    */
   public Writable get() {
     return instance;
@@ -145,6 +146,7 @@ public abstract class GenericWritable implements Writable, Configurable {
   /**
    * Return all classes that may be wrapped.  Subclasses should implement this
    * to return a constant array of classes.
+   * @return all classes that may be wrapped.
    */
   abstract protected Class<? extends Writable>[] getTypes();
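
A minimal subclass sketch: getTypes() supplies the constant array of
wrappable classes, and the inherited set()/get() then wrap and unwrap
instances of those types:

    import org.apache.hadoop.io.GenericWritable;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class MyGenericWritable extends GenericWritable {
      @SuppressWarnings("unchecked")
      private static final Class<? extends Writable>[] TYPES =
          new Class[] {IntWritable.class, Text.class};

      @Override
      protected Class<? extends Writable>[] getTypes() {
        return TYPES;
      }
    }
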
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
index 121af64b011..f0a9b0b6952 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ -59,7 +59,8 @@ public class IOUtils {
    * @param out OutputStream to write to
    * @param buffSize the size of the buffer 
    * @param close whether or not close the InputStream and 
-   * OutputStream at the end. The streams are closed in the finally clause.  
+   * OutputStream at the end. The streams are closed in the finally clause.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void copyBytes(InputStream in, OutputStream out,
                                int buffSize, boolean close)
@@ -85,7 +86,8 @@ public class IOUtils {
    * 
    * @param in InputStrem to read from
    * @param out OutputStream to write to
-   * @param buffSize the size of the buffer 
+   * @param buffSize the size of the buffer.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void copyBytes(InputStream in, OutputStream out, int buffSize) 
     throws IOException {
@@ -107,7 +109,8 @@ public class IOUtils {
    *
    * @param in InputStrem to read from
    * @param out OutputStream to write to
-   * @param conf the Configuration object 
+   * @param conf the Configuration object.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void copyBytes(InputStream in, OutputStream out, Configuration conf)
     throws IOException {
@@ -123,6 +126,7 @@ public class IOUtils {
    * @param conf the Configuration object
    * @param close whether or not close the InputStream and 
    * OutputStream at the end. The streams are closed in the finally clause.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void copyBytes(InputStream in, OutputStream out, Configuration conf, boolean close)
     throws IOException {
@@ -181,6 +185,7 @@ public class IOUtils {
    * @param off - offset within buf
    * @param len - amount of data to be read
    * @return number of bytes read
+   * @throws IOException raised on errors performing I/O.
    */
   public static int wrappedReadForCompressedData(InputStream is, byte[] buf,
       int off, int len) throws IOException {
@@ -407,6 +412,7 @@ public class IOUtils {
    * once the sync is done.<br>
    * Borrowed from Uwe Schindler in LUCENE-5588
    * @param fileToSync the file to fsync
+   * @throws IOException raised on errors performing I/O.
    */
   public static void fsync(File fileToSync) throws IOException {
     if (!fileToSync.exists()) {
@@ -440,7 +446,7 @@ public class IOUtils {
    * @param isDir if true, the given file is a directory (Channel should be
    *          opened for read and ignore IOExceptions, because not all file
    *          systems and operating systems allow to fsync on a directory)
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static void fsync(FileChannel channel, boolean isDir)
       throws IOException {
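
Typical copyBytes usage with close=true, as documented above (the file names
come from argv and are purely illustrative):

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import org.apache.hadoop.io.IOUtils;

    public class CopySketch {
      public static void main(String[] args) throws Exception {
        // close=true: both streams are closed in a finally clause,
        // even if the copy fails part way through.
        IOUtils.copyBytes(new FileInputStream(args[0]),
            new FileOutputStream(args[1]), 4096, true);
      }
    }
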
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
index 0d084b8396f..686b359f57d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
@@ -75,20 +75,35 @@ public class InputBuffer extends FilterInputStream {
     this.buffer = buffer;
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   * @param input the new input data.
+   * @param length the number of valid bytes.
+   */
   public void reset(byte[] input, int length) {
     buffer.reset(input, 0, length);
   }
 
-  /** Resets the data that the buffer reads. */
+  /**
+   * Resets the data that the buffer reads.
+   * @param input the new input data.
+   * @param start the start offset.
+   * @param length the number of valid bytes.
+   */
   public void reset(byte[] input, int start, int length) {
     buffer.reset(input, start, length);
   }
 
-  /** Returns the current position in the input. */
+  /**
+   * Returns the current position in the input.
+   * @return the current position in the input.
+   */
   public int getPosition() { return buffer.getPosition(); }
 
-  /** Returns the length of the input. */
+  /**
+   * Returns the length of the input.
+   * @return length of the input.
+   */
   public int getLength() { return buffer.getLength(); }
 
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
index f656d028cb0..ffcf93946d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
@@ -36,10 +36,16 @@ public class IntWritable implements WritableComparable<IntWritable> {
 
   public IntWritable(int value) { set(value); }
 
-  /** Set the value of this IntWritable. */
+  /**
+   * Set the value of this IntWritable.
+   * @param value input value.
+   */
   public void set(int value) { this.value = value; }
 
-  /** Return the value of this IntWritable. */
+  /**
+   * Return the value of this IntWritable.
+   * @return value of this IntWritable.
+   */
   public int get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
index b77ca6781a6..9262af87bc2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
@@ -36,10 +36,16 @@ public class LongWritable implements WritableComparable<LongWritable> {
 
   public LongWritable(long value) { set(value); }
 
-  /** Set the value of this LongWritable. */
+  /**
+   * Set the value of this LongWritable.
+   * @param value the value to set.
+   */
   public void set(long value) { this.value = value; }
 
-  /** Return the value of this LongWritable. */
+  /**
+   * Return the value of this LongWritable.
+   * @return value of this LongWritable.
+   */
   public long get() { return value; }
 
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
index 99c17acdd43..edfcf6e1e77 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
@@ -54,12 +54,18 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     this.digest = new byte[MD5_LEN];
   }
 
-  /** Constructs an MD5Hash from a hex string. */
+  /**
+   * Constructs an MD5Hash from a hex string.
+   * @param hex the hex string.
+   */
   public MD5Hash(String hex) {
     setDigest(hex);
   }
   
-  /** Constructs an MD5Hash with a specified value. */
+  /**
+   * Constructs an MD5Hash with a specified value.
+   * @param digest the digest bytes; must be of length MD5_LEN.
+   */
   public MD5Hash(byte[] digest) {
     if (digest.length != MD5_LEN)
       throw new IllegalArgumentException("Wrong length: " + digest.length);
@@ -72,7 +78,12 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     in.readFully(digest);
   }
 
-  /** Constructs, reads and returns an instance. */
+  /**
+   * Constructs, reads and returns an instance.
+   * @param in the DataInput to read from.
+   * @return the MD5Hash that was read.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static MD5Hash read(DataInput in) throws IOException {
     MD5Hash result = new MD5Hash();
     result.readFields(in);
@@ -85,21 +96,32 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     out.write(digest);
   }
 
-  /** Copy the contents of another instance into this instance. */
+  /**
+   * Copy the contents of another instance into this instance.
+   * @param that the instance to copy from.
+   */
   public void set(MD5Hash that) {
     System.arraycopy(that.digest, 0, this.digest, 0, MD5_LEN);
   }
 
-  /** Returns the digest bytes. */
+  /**
+   * Returns the digest bytes.
+   * @return digest.
+   */
   public byte[] getDigest() { return digest; }
 
-  /** Construct a hash value for a byte array. */
+  /**
+   * Construct a hash value for a byte array.
+   * @param data the bytes to hash.
+   * @return MD5Hash.
+   */
   public static MD5Hash digest(byte[] data) {
     return digest(data, 0, data.length);
   }
 
   /**
-   * Create a thread local MD5 digester
+   * Create a thread local MD5 digester.
+   * @return MessageDigest.
    */
   public static MessageDigest getDigester() {
     MessageDigest digester = DIGESTER_FACTORY.get();
@@ -107,7 +129,12 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     return digester;
   }
 
-  /** Construct a hash value for the content from the InputStream. */
+  /**
+   * Construct a hash value for the content from the InputStream.
+   * @param in input stream.
+   * @return MD5Hash.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static MD5Hash digest(InputStream in) throws IOException {
     final byte[] buffer = new byte[4*1024]; 
 
@@ -119,7 +146,13 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     return new MD5Hash(digester.digest());
   }
 
-  /** Construct a hash value for a byte array. */
+  /**
+   * Construct a hash value for a byte array.
+   * @param data the bytes to hash.
+   * @param start the offset of the first byte to hash.
+   * @param len the number of bytes to hash.
+   * @return MD5Hash.
+   */
   public static MD5Hash digest(byte[] data, int start, int len) {
     byte[] digest;
     MessageDigest digester = getDigester();
@@ -128,7 +161,13 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     return new MD5Hash(digest);
   }
 
-  /** Construct a hash value for an array of byte array. */
+  /**
+   * Construct a hash value for an array of byte array.
+   * @param dataArr the byte arrays to hash.
+   * @param start the start offset.
+   * @param len the number of bytes to hash.
+   * @return MD5Hash.
+   */
   public static MD5Hash digest(byte[][] dataArr, int start, int len) {
     byte[] digest;
     MessageDigest digester = getDigester();
@@ -139,17 +178,28 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     return new MD5Hash(digest);
   }
 
-  /** Construct a hash value for a String. */
+  /**
+   * Construct a hash value for a String.
+   * @param string the String to hash.
+   * @return MD5Hash.
+   */
   public static MD5Hash digest(String string) {
     return digest(UTF8.getBytes(string));
   }
 
-  /** Construct a hash value for a String. */
+  /**
+   * Construct a hash value for a String.
+   * @param utf8 the UTF8 to hash.
+   * @return MD5Hash.
+   */
   public static MD5Hash digest(UTF8 utf8) {
     return digest(utf8.getBytes(), 0, utf8.getLength());
   }
 
-  /** Construct a half-sized version of this MD5.  Fits in a long **/
+  /**
+   * Construct a half-sized version of this MD5.  Fits in a long.
+   * @return the half-sized digest, packed into a long.
+   */
   public long halfDigest() {
     long value = 0;
     for (int i = 0; i < 8; i++)
@@ -226,7 +276,10 @@ public class MD5Hash implements WritableComparable<MD5Hash> {
     return buf.toString();
   }
 
-  /** Sets the digest value from a hex string. */
+  /**
+   * Sets the digest value from a hex string.
+   * @param hex the hex string to parse.
+   */
   public void setDigest(String hex) {
     if (hex.length() != MD5_LEN*2)
       throw new IllegalArgumentException("Wrong length: " + hex.length());
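
A short sketch of the MD5Hash surface documented above (illustrative only;
the input string is arbitrary):

    import org.apache.hadoop.io.MD5Hash;

    public class MD5HashExample {
      public static void main(String[] args) throws Exception {
        MD5Hash h = MD5Hash.digest("hello".getBytes("UTF-8"));
        String hex = h.toString();             // 32-character hex form
        MD5Hash copy = new MD5Hash(hex);       // round-trip via setDigest(hex)
        System.out.println(h.equals(copy));    // true
        System.out.println(h.halfDigest());    // digest folded into a long
      }
    }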
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 51db0b3f0af..7b3cd78e3cc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -98,8 +98,16 @@ public class MapFile {
     private long lastIndexKeyCount = Long.MIN_VALUE;
 
 
-    /** Create the named map for keys of the named class. 
+    /**
+     * Create the named map for keys of the named class.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     *
+     * @param conf configuration.
+     * @param fs filesystem.
+     * @param dirName dirName.
+     * @param keyClass keyClass.
+     * @param valClass valClass.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -108,8 +116,18 @@ public class MapFile {
       this(conf, new Path(dirName), keyClass(keyClass), valueClass(valClass));
     }
 
-    /** Create the named map for keys of the named class. 
+    /**
+     * Create the named map for keys of the named class.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     *
+     * @param conf configuration.
+     * @param fs fs.
+     * @param dirName dirName.
+     * @param keyClass keyClass.
+     * @param valClass valClass.
+     * @param compress compress.
+     * @param progress progress.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -120,8 +138,19 @@ public class MapFile {
            compression(compress), progressable(progress));
     }
 
-    /** Create the named map for keys of the named class. 
+    /**
+     * Create the named map for keys of the named class.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     *
+     * @param conf configuration.
+     * @param fs FileSystem.
+     * @param dirName dirName.
+     * @param keyClass keyClass.
+     * @param valClass valClass.
+     * @param compress compress.
+     * @param codec codec.
+     * @param progress progress.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -132,8 +161,16 @@ public class MapFile {
            compression(compress, codec), progressable(progress));
     }
 
-    /** Create the named map for keys of the named class. 
+    /**
+     * Create the named map for keys of the named class.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     * @param conf configuration.
+     * @param fs fs.
+     * @param dirName dirName.
+     * @param keyClass keyClass.
+     * @param valClass valClass.
+     * @param compress compress.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -145,6 +182,12 @@ public class MapFile {
 
     /** Create the named map using the named key comparator. 
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     * @param conf configuration.
+     * @param fs fs.
+     * @param dirName dirName.
+     * @param comparator comparator.
+     * @param valClass valClass.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -154,7 +197,14 @@ public class MapFile {
            valueClass(valClass));
     }
 
-    /** Create the named map using the named key comparator. 
+    /** Create the named map using the named key comparator.
+     * @param conf configuration.
+     * @param fs filesystem.
+     * @param dirName dirName.
+     * @param comparator comparator.
+     * @param valClass valClass.
+     * @param compress compress.
+     * @throws IOException raised on errors performing I/O.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
      */
     @Deprecated
@@ -165,8 +215,18 @@ public class MapFile {
            valueClass(valClass), compression(compress));
     }
 
-    /** Create the named map using the named key comparator. 
+    /**
+     * Create the named map using the named key comparator.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     *
+     * @param conf configuration.
+     * @param fs filesystem.
+     * @param dirName dirName.
+     * @param comparator comparator.
+     * @param valClass valClass.
+     * @param compress CompressionType.
+     * @param progress progress.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -178,8 +238,19 @@ public class MapFile {
            progressable(progress));
     }
 
-    /** Create the named map using the named key comparator. 
+    /**
+     * Create the named map using the named key comparator.
      * @deprecated Use Writer(Configuration, Path, Option...) instead.
+     *
+     * @param conf configuration.
+     * @param fs FileSystem.
+     * @param dirName dirName.
+     * @param comparator comparator.
+     * @param valClass valClass.
+     * @param compress CompressionType.
+     * @param codec codec.
+     * @param progress progress.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -285,16 +356,26 @@ public class MapFile {
       this.index = SequenceFile.createWriter(conf, indexOptions);      
     }
 
-    /** The number of entries that are added before an index entry is added.*/
+    /**
+     * The number of entries that are added before an index entry is added.
+     * @return the index interval.
+     */
     public int getIndexInterval() { return indexInterval; }
 
-    /** Sets the index interval.
+    /**
+     * Sets the index interval.
      * @see #getIndexInterval()
+     *
+     * @param interval interval.
      */
     public void setIndexInterval(int interval) { indexInterval = interval; }
 
-    /** Sets the index interval and stores it in conf
+    /**
+     * Sets the index interval and stores it in conf.
      * @see #getIndexInterval()
+     *
+     * @param conf configuration.
+     * @param interval interval.
      */
     public static void setIndexInterval(Configuration conf, int interval) {
       conf.setInt(INDEX_INTERVAL, interval);
@@ -307,8 +388,14 @@ public class MapFile {
       index.close();
     }
 
-    /** Append a key/value pair to the map.  The key must be greater or equal
-     * to the previous key added to the map. */
+    /**
+     * Append a key/value pair to the map.  The key must be greater than or
+     * equal to the previous key added to the map.
+     *
+     * @param key key.
+     * @param val value.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized void append(WritableComparable key, Writable val)
       throws IOException {
 
@@ -370,10 +457,18 @@ public class MapFile {
     private WritableComparable[] keys;
     private long[] positions;
 
-    /** Returns the class of keys in this file. */
+    /**
+     * Returns the class of keys in this file.
+     *
+     * @return the class of keys in this file.
+     */
     public Class<?> getKeyClass() { return data.getKeyClass(); }
 
-    /** Returns the class of values in this file. */
+    /**
+     * Returns the class of values in this file.
+     *
+     * @return the class of values in this file.
+     */
     public Class<?> getValueClass() { return data.getValueClass(); }
 
     public static interface Option extends SequenceFile.Reader.Option {}
@@ -403,8 +498,14 @@ public class MapFile {
       open(dir, comparator, conf, opts);
     }
  
-    /** Construct a map reader for the named map.
+    /**
+     * Construct a map reader for the named map.
      * @deprecated
+     *
+     * @param fs FileSystem.
+     * @param dirName dirName.
+     * @param conf configuration.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Reader(FileSystem fs, String dirName, 
@@ -412,8 +513,15 @@ public class MapFile {
       this(new Path(dirName), conf);
     }
 
-    /** Construct a map reader for the named map using the named comparator.
+    /**
+     * Construct a map reader for the named map using the named comparator.
      * @deprecated
+     *
+     * @param fs FileSystem.
+     * @param dirName dirName.
+     * @param comparator WritableComparator.
+     * @param conf Configuration.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Reader(FileSystem fs, String dirName, WritableComparator comparator, 
@@ -450,6 +558,12 @@ public class MapFile {
     /**
      * Override this method to specialize the type of
      * {@link SequenceFile.Reader} returned.
+     *
+     * @param dataFile data file.
+     * @param conf configuration.
+     * @param options options.
+     * @throws IOException raised on errors performing I/O.
+     * @return SequenceFile.Reader.
      */
     protected SequenceFile.Reader 
       createDataFileReader(Path dataFile, Configuration conf,
@@ -516,13 +630,21 @@ public class MapFile {
       }
     }
 
-    /** Re-positions the reader before its first key. */
+    /**
+     * Re-positions the reader before its first key.
+     *
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized void reset() throws IOException {
       data.seek(firstPosition);
     }
 
-    /** Get the key at approximately the middle of the file. Or null if the
-     *  file is empty. 
+    /**
+     * Get the key at approximately the middle of the file, or null if the
+     * file is empty.
+     *
+     * @return the middle key, or null if the file is empty.
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized WritableComparable midKey() throws IOException {
 
@@ -534,9 +656,11 @@ public class MapFile {
       return keys[(count - 1) / 2];
     }
     
-    /** Reads the final key from the file.
+    /**
+     * Reads the final key from the file.
      *
      * @param key key to read into
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized void finalKey(WritableComparable key)
       throws IOException {
@@ -556,9 +680,14 @@ public class MapFile {
       }
     }
 
-    /** Positions the reader at the named key, or if none such exists, at the
+    /**
+     * Positions the reader at the named key, or if none such exists, at the
      * first entry after the named key.  Returns true iff the named key exists
      * in this map.
+     *
+     * @param key the key to seek to.
+     * @return true iff the named key exists in this map.
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized boolean seek(WritableComparable key) throws IOException {
       return seekInternal(key) == 0;
@@ -669,15 +798,28 @@ public class MapFile {
       return -(low + 1);                          // key not found.
     }
 
-    /** Read the next key/value pair in the map into <code>key</code> and
+    /**
+     * Read the next key/value pair in the map into <code>key</code> and
      * <code>val</code>.  Returns true if such a pair exists and false when at
-     * the end of the map */
+     * the end of the map.
+     *
+     * @param key the key to read into.
+     * @param val the value to read into.
+     * @return true if such a pair exists, false at the end of the map.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized boolean next(WritableComparable key, Writable val)
       throws IOException {
       return data.next(key, val);
     }
 
-    /** Return the value for the named key, or null if none exists. */
+    /**
+     * Return the value for the named key, or null if none exists.
+     * @param key the key to look up.
+     * @param val the value to read into.
+     * @return val if the key exists, or null if none exists.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized Writable get(WritableComparable key, Writable val)
       throws IOException {
       if (seek(key)) {
@@ -692,9 +834,10 @@ public class MapFile {
      * Returns <code>key</code> or if it does not exist, at the first entry
      * after the named key.
      * 
-     * @param key       - key that we're trying to find
-     * @param val       - data value if key is found
-     * @return          - the key that was the closest match or null if eof.
+     * @param key key that we're trying to find.
+     * @param val data value if key is found.
+     * @return the key that was the closest match or null if eof.
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized WritableComparable getClosest(WritableComparable key,
       Writable val)
@@ -711,6 +854,7 @@ public class MapFile {
      * the first entry that falls just before the <code>key</code>.  Otherwise,
      * return the record that sorts just after.
      * @return          - the key that was the closest match or null if eof.
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized WritableComparable getClosest(WritableComparable key,
         Writable val, final boolean before)
@@ -730,7 +874,10 @@ public class MapFile {
       return nextKey;
     }
 
-    /** Close the map. */
+    /**
+     * Close the map.
+     * @throws IOException raised on errors performing I/O.
+     */
     @Override
     public synchronized void close() throws IOException {
       if (!indexClosed) {
@@ -741,7 +888,13 @@ public class MapFile {
 
   }
 
-  /** Renames an existing map directory. */
+  /**
+   * Renames an existing map directory.
+   * @param fs fs.
+   * @param oldName oldName.
+   * @param newName newName.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static void rename(FileSystem fs, String oldName, String newName)
     throws IOException {
     Path oldDir = new Path(oldName);
@@ -751,7 +904,12 @@ public class MapFile {
     }
   }
 
-  /** Deletes the named map file. */
+  /**
+   * Deletes the named map file.
+   * @param fs input fs.
+   * @param name input name.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static void delete(FileSystem fs, String name) throws IOException {
     Path dir = new Path(name);
     Path data = new Path(dir, DATA_FILE_NAME);
@@ -769,8 +927,9 @@ public class MapFile {
    * @param keyClass key class (has to be a subclass of Writable)
    * @param valueClass value class (has to be a subclass of Writable)
    * @param dryrun do not perform any changes, just report what needs to be done
+   * @param conf configuration.
    * @return number of valid entries in this MapFile, or -1 if no fixing was needed
-   * @throws Exception
+   * @throws Exception if an error occurs while fixing the MapFile.
    */
   public static long fix(FileSystem fs, Path dir,
                          Class<? extends Writable> keyClass,
@@ -870,11 +1029,12 @@ public class MapFile {
     }
 
     /**
-     * Merge multiple MapFiles to one Mapfile
+     * Merge multiple MapFiles into one MapFile.
      *
-     * @param inMapFiles
-     * @param outMapFile
-     * @throws IOException
+     * @param inMapFiles the MapFiles to merge.
+     * @param deleteInputs whether to delete the input files once merged.
+     * @param outMapFile the merged output MapFile.
+     * @throws IOException raised on errors performing I/O.
      */
     public void merge(Path[] inMapFiles, boolean deleteInputs,
         Path outMapFile) throws IOException {
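
Since most of the fixed constructors above are deprecated in favour of the
option-based Writer, a minimal write/read round trip may help orientation
(a sketch only; the /tmp directory and the key/value types are arbitrary):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.MapFile;
    import org.apache.hadoop.io.Text;

    public class MapFileExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path dir = new Path("/tmp/example.map");
        try (MapFile.Writer writer = new MapFile.Writer(conf, dir,
            MapFile.Writer.keyClass(IntWritable.class),
            MapFile.Writer.valueClass(Text.class))) {
          writer.append(new IntWritable(1), new Text("one"));  // keys must
          writer.append(new IntWritable(2), new Text("two"));  // stay sorted
        }
        try (MapFile.Reader reader = new MapFile.Reader(dir, conf)) {
          Text val = new Text();
          reader.get(new IntWritable(2), val);  // seeks via the index
          System.out.println(val);              // two
        }
      }
    }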
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
index c9d7ade4306..452965b7c82 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
@@ -42,7 +42,11 @@ public class MultipleIOException extends IOException {
   /** @return the underlying exceptions */
   public List<IOException> getExceptions() {return exceptions;}
 
-  /** A convenient method to create an {@link IOException}. */
+  /**
+   * A convenient method to create an {@link IOException}.
+   * @param exceptions the underlying exceptions.
+   * @return a single IOException wrapping them, or null if the list is null or empty.
+   */
   public static IOException createIOException(List<IOException> exceptions) {
     if (exceptions == null || exceptions.isEmpty()) {
       return null;
@@ -60,7 +64,10 @@ public class MultipleIOException extends IOException {
   public static class Builder {
     private List<IOException> exceptions;
     
-    /** Add the given {@link Throwable} to the exception list. */
+    /**
+     * Add the given {@link Throwable} to the exception list.
+     * @param t Throwable.
+     */
     public void add(Throwable t) {
       if (exceptions == null) {
         exceptions = new ArrayList<>();
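
A brief sketch of how createIOException is meant to be consumed
(illustrative only):

    import java.io.IOException;
    import java.util.Arrays;
    import java.util.List;
    import org.apache.hadoop.io.MultipleIOException;

    public class MultipleIOExceptionExample {
      public static void main(String[] args) {
        List<IOException> errors = Arrays.asList(
            new IOException("first failure"),
            new IOException("second failure"));
        // Null comes back for an empty list; several exceptions are
        // wrapped together into one IOException.
        IOException combined = MultipleIOException.createIOException(errors);
        System.out.println(combined.getMessage());
      }
    }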
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
index 77c590fdb63..d6e4846264f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
@@ -32,7 +32,10 @@ public class NullWritable implements WritableComparable<NullWritable> {
 
   private NullWritable() {}                       // no public ctor
 
-  /** Returns the single instance of this class. */
+  /**
+   * Returns the single instance of this class.
+   * @return the single instance of this class.
+   */
   public static NullWritable get() { return THIS; }
   
   @Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
index b35a32f288b..29c06a01ad6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
@@ -54,13 +54,22 @@ public class ObjectWritable implements Writable, Configurable {
     this.instance = instance;
   }
 
-  /** Return the instance, or null if none. */
+  /**
+   * Return the instance, or null if none.
+   * @return the instance, or null if none.
+   */
   public Object get() { return instance; }
   
-  /** Return the class this is meant to be. */
+  /**
+   * Return the class this is meant to be.
+   * @return the class this is meant to be.
+   */
   public Class getDeclaredClass() { return declaredClass; }
   
-  /** Reset the instance. */
+  /**
+   * Reset the instance.
+   * @param instance instance.
+   */
   public void set(Object instance) {
     this.declaredClass = instance.getClass();
     this.instance = instance;
@@ -120,8 +129,16 @@ public class ObjectWritable implements Writable, Configurable {
     }
   }
 
-  /** Write a {@link Writable}, {@link String}, primitive type, or an array of
-   * the preceding. */
+  /**
+   * Write a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   *
+   * @param out the DataOutput to write to.
+   * @param instance the instance to write.
+   * @param declaredClass the declared class of the instance.
+   * @param conf the Configuration in use.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static void writeObject(DataOutput out, Object instance,
                                  Class declaredClass, 
                                  Configuration conf) throws IOException {
@@ -137,6 +154,13 @@ public class ObjectWritable implements Writable, Configurable {
      * usages, to preserve the ability to interchange files with other clusters 
      * that may not be running the same version of software.  Sometime in ~2013 
      * we can consider removing this parameter and always using the compact format.
+     *
+     * @param out the DataOutput to write to.
+     * @param instance the instance to write.
+     * @param declaredClass the declared class of the instance.
+     * @param conf the Configuration in use.
+     * @param allowCompactArrays whether compact array serialization is allowed.
+     * @throws IOException raised on errors performing I/O.
      */
     public static void writeObject(DataOutput out, Object instance,
         Class declaredClass, Configuration conf, boolean allowCompactArrays) 
@@ -210,15 +234,30 @@ public class ObjectWritable implements Writable, Configurable {
   }
   
   
-  /** Read a {@link Writable}, {@link String}, primitive type, or an array of
-   * the preceding. */
+  /**
+   * Read a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   *
+   * @param in the DataInput to read from.
+   * @param conf the Configuration in use.
+   * @return the object that was read.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static Object readObject(DataInput in, Configuration conf)
     throws IOException {
     return readObject(in, null, conf);
   }
     
-  /** Read a {@link Writable}, {@link String}, primitive type, or an array of
-   * the preceding. */
+  /**
+   * Read a {@link Writable}, {@link String}, primitive type, or an array of
+   * the preceding.
+   *
+   * @param in DataInput.
+   * @param objectWritable objectWritable.
+   * @param conf configuration.
+   * @return the object that was read.
+   * @throws IOException raised on errors performing I/O.
+   */
   @SuppressWarnings("unchecked")
   public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
     throws IOException {
@@ -365,6 +404,10 @@ public class ObjectWritable implements Writable, Configurable {
    * Find and load the class with given name <tt>className</tt> by first finding
    * it in the specified <tt>conf</tt>. If the specified <tt>conf</tt> is null,
    * try load it directly.
+   *
+   * @param conf the Configuration to consult first, may be null.
+   * @param className the name of the class to load.
+   * @return the loaded Class.
    */
   public static Class<?> loadClass(Configuration conf, String className) {
     Class<?> declaredClass = null;
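
The writeObject/readObject pair above forms a symmetric round trip; a
minimal sketch (illustrative only):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.ObjectWritable;
    import org.apache.hadoop.io.Text;

    public class ObjectWritableExample {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        ObjectWritable.writeObject(new DataOutputStream(bytes),
            new Text("payload"), Text.class, conf);
        DataInputStream in = new DataInputStream(
            new ByteArrayInputStream(bytes.toByteArray()));
        Text restored = (Text) ObjectWritable.readObject(in, conf);
        System.out.println(restored);  // payload
      }
    }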
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
index 15a396dc2bf..f80c0a71883 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
@@ -77,21 +77,33 @@ public class OutputBuffer extends FilterOutputStream {
     this.buffer = buffer;
   }
 
-  /** Returns the current contents of the buffer.
+  /**
+   * Returns the current contents of the buffer.
    *  Data is only valid to {@link #getLength()}.
+   *
+   * @return the current contents of the buffer.
    */
   public byte[] getData() { return buffer.getData(); }
 
-  /** Returns the length of the valid data currently in the buffer. */
+  /**
+   * Returns the length of the valid data currently in the buffer.
+   * @return the length of the valid data currently in the buffer.
+   */
   public int getLength() { return buffer.getLength(); }
 
-  /** Resets the buffer to empty. */
+  /**
+   * Resets the buffer to empty.
+   * @return this buffer.
+   */
   public OutputBuffer reset() {
     buffer.reset();
     return this;
   }
 
-  /** Writes bytes from a InputStream directly into the buffer. */
+  /**
+   * Writes bytes from an InputStream directly into the buffer.
+   * @param in the InputStream to read from.
+   * @param length the number of bytes to write.
+   * @throws IOException raised on errors performing I/O.
+   */
   public void write(InputStream in, int length) throws IOException {
     buffer.write(in, length);
   }
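
A minimal sketch of driving OutputBuffer (illustrative only):

    import java.io.ByteArrayInputStream;
    import org.apache.hadoop.io.OutputBuffer;

    public class OutputBufferExample {
      public static void main(String[] args) throws Exception {
        OutputBuffer out = new OutputBuffer();
        byte[] src = "abcdef".getBytes("UTF-8");
        out.write(new ByteArrayInputStream(src), 3);  // copy 3 bytes in
        // Only the first getLength() bytes of getData() are valid.
        System.out.println(out.getLength());          // 3
        out.reset();                                  // empty for reuse
        System.out.println(out.getLength());          // 0
      }
    }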
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
index a52190db5f4..354dda964e9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.serializer.DeserializerComparator;
  * A {@link Comparator} that operates directly on byte representations of
  * objects.
  * </p>
- * @param <T>
+ * @param <T> generic type.
  * @see DeserializerComparator
  */
 @InterfaceAudience.Public
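
To make the raw-comparison contract concrete, a minimal implementation
sketch over serialized IntWritables (illustrative only; it leans on the
standard WritableComparator.readInt helper):

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.RawComparator;
    import org.apache.hadoop.io.WritableComparator;

    // Orders serialized IntWritables without deserializing them.
    public class RawIntComparator implements RawComparator<IntWritable> {
      @Override
      public int compare(byte[] b1, int s1, int l1,
                         byte[] b2, int s2, int l2) {
        return Integer.compare(WritableComparator.readInt(b1, s1),
                               WritableComparator.readInt(b2, s2));
      }

      @Override
      public int compare(IntWritable a, IntWritable b) {
        return a.compareTo(b);
      }
    }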
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
index 65e751eca41..2a6fafce545 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
@@ -50,7 +50,7 @@ public class ReadaheadPool {
   private static ReadaheadPool instance;
 
   /**
-   * Return the singleton instance for the current process.
+   * @return the singleton instance for the current process.
    */
   public static ReadaheadPool getInstance() {
     synchronized (ReadaheadPool.class) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
index 016daf9f352..cddddcc6c9a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
@@ -90,7 +90,7 @@ public class SecureIOUtils {
   private final static FileSystem rawFilesystem;
 
   /**
-   * Open the given File for random read access, verifying the expected user/
+   * @return the File opened for random read access, verifying the expected user/
    * group constraints if security is enabled.
    * 
    * Note that this function provides no additional security checks if hadoop
@@ -114,8 +114,14 @@ public class SecureIOUtils {
   }
 
   /**
-   * Same as openForRandomRead except that it will run even if security is off.
+   * Same as openForRandomRead, except that it will run even if security is off.
    * This is used by unit tests.
+   *
+   * @param f the file to open.
+   * @param mode the RandomAccessFile access mode.
+   * @param expectedOwner the expected user owner for the file.
+   * @param expectedGroup the expected group owner for the file.
+   * @return the opened RandomAccessFile.
+   * @throws IOException raised on errors performing I/O.
    */
   @VisibleForTesting
   protected static RandomAccessFile forceSecureOpenForRandomRead(File f,
@@ -145,6 +151,7 @@ public class SecureIOUtils {
    * @param expectedGroup the expected group owner for the file
    * @throws IOException if an IO Error occurred or the user/group does not
    * match if security is enabled
+   * @return FSDataInputStream.
    */
   public static FSDataInputStream openFSDataInputStream(File file,
       String expectedOwner, String expectedGroup) throws IOException {
@@ -157,6 +164,12 @@ public class SecureIOUtils {
   /**
    * Same as openFSDataInputStream except that it will run even if security is
    * off. This is used by unit tests.
+   *
+   * @param file input file.
+   * @param expectedOwner input expectedOwner.
+   * @param expectedGroup input expectedGroup.
+   * @throws IOException raised on errors performing I/O.
+   * @return FSDataInputStream.
    */
   @VisibleForTesting
   protected static FSDataInputStream forceSecureOpenFSDataInputStream(
@@ -182,7 +195,7 @@ public class SecureIOUtils {
    * Open the given File for read access, verifying the expected user/group
    * constraints if security is enabled.
    *
-   * Note that this function provides no additional checks if Hadoop
+   * @return the opened stream. Note that this function provides no additional checks if Hadoop
    * security is disabled, since doing the checks would be too expensive
    * when native libraries are not available.
    *
@@ -201,8 +214,12 @@ public class SecureIOUtils {
   }
 
   /**
-   * Same as openForRead() except that it will run even if security is off.
+   * Same as openForRead(), except that it will run even if security is off.
    * This is used by unit tests.
+   * @param f the file to open.
+   * @param expectedOwner the expected user owner for the file.
+   * @param expectedGroup the expected group owner for the file.
+   * @return the opened FileInputStream.
+   * @throws IOException raised on errors performing I/O.
    */
   @VisibleForTesting
   protected static FileInputStream forceSecureOpenForRead(File f, String expectedOwner,
@@ -251,6 +268,7 @@ public class SecureIOUtils {
    *
    * @throws AlreadyExistsException if the file already exists
    * @throws IOException if any other error occurred
+   * @return the opened FileOutputStream.
    */
   public static FileOutputStream createForWrite(File f, int permissions)
   throws IOException {
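
A short sketch of the common openForRead entry point (illustrative only;
the path, owner and group here are hypothetical):

    import java.io.File;
    import java.io.FileInputStream;
    import org.apache.hadoop.io.SecureIOUtils;

    public class SecureIOUtilsExample {
      public static void main(String[] args) throws Exception {
        File f = new File("/tmp/owned-file.txt");
        // Verifies the owner/group when security is enabled; otherwise it
        // degrades to a plain open, as the javadoc above notes.
        try (FileInputStream in = SecureIOUtils.openForRead(f, "alice", "hadoop")) {
          System.out.println("first byte: " + in.read());
        }
      }
    }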
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 890e7916ab0..a0b45814f1c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -269,7 +269,7 @@ public class SequenceFile {
    * @param conf the configuration to use
    * @param opts the options to create the file with
    * @return a new Writer
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static Writer createWriter(Configuration conf, Writer.Option... opts
                                     ) throws IOException {
@@ -301,7 +301,7 @@ public class SequenceFile {
    * @param keyClass The 'key' type.
    * @param valClass The 'value' type.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -323,7 +323,7 @@ public class SequenceFile {
    * @param valClass The 'value' type.
    * @param compressionType The compression type.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -348,7 +348,7 @@ public class SequenceFile {
    * @param compressionType The compression type.
    * @param progress The Progressable object to track progress.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -375,7 +375,7 @@ public class SequenceFile {
    * @param compressionType The compression type.
    * @param codec The compression codec.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -403,7 +403,7 @@ public class SequenceFile {
    * @param progress The Progressable object to track progress.
    * @param metadata The metadata of the file.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -437,7 +437,7 @@ public class SequenceFile {
    * @param progress The Progressable object to track progress.
    * @param metadata The metadata of the file.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -475,7 +475,7 @@ public class SequenceFile {
    * @param codec The compression codec.
    * @param metadata The metadata of the file.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   @Deprecated
   public static Writer
@@ -508,7 +508,7 @@ public class SequenceFile {
    * @param createFlag gives the semantics of create: overwrite, append etc.
    * @param opts file creation options; see {@link CreateOpts}.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    */
   public static Writer
   createWriter(FileContext fc, Configuration conf, Path name,
@@ -532,7 +532,7 @@ public class SequenceFile {
    * @param codec The compression codec.
    * @param progress The Progressable object to track progress.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -560,7 +560,7 @@ public class SequenceFile {
    * @param codec The compression codec.
    * @param metadata The metadata of the file.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
@@ -585,7 +585,7 @@ public class SequenceFile {
    * @param compressionType The compression type.
    * @param codec The compression codec.
    * @return Returns the handle to the constructed SequenceFile Writer.
-   * @throws IOException
+   * @throws IOException raised on errors performing I/O.
    * @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
    *     instead.
    */
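
All of the deprecation notices above point at the option-based factory; a
minimal sketch of that replacement pattern (illustrative only; the /tmp
path and the key/value types are arbitrary):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class SequenceFileExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Path file = new Path("/tmp/example.seq");
        try (SequenceFile.Writer writer = SequenceFile.createWriter(conf,
            SequenceFile.Writer.file(file),
            SequenceFile.Writer.keyClass(IntWritable.class),
            SequenceFile.Writer.valueClass(Text.class))) {
          writer.append(new IntWritable(1), new Text("one"));
        }
        try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
            SequenceFile.Reader.file(file))) {
          IntWritable key = new IntWritable();
          Text val = new Text();
          while (reader.next(key, val)) {
            System.out.println(key + "\t" + val);
          }
        }
      }
    }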
@@ -603,22 +603,26 @@ public class SequenceFile {
   /** The interface to 'raw' values of SequenceFiles. */
   public static interface ValueBytes {
 
-    /** Writes the uncompressed bytes to the outStream.
+    /**
+     * Writes the uncompressed bytes to the outStream.
      * @param outStream : Stream to write uncompressed bytes into.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public void writeUncompressedBytes(DataOutputStream outStream)
       throws IOException;
 
-    /** Write compressed bytes to outStream. 
+    /**
+     * Write compressed bytes to outStream.
      * Note: that it will NOT compress the bytes if they are not compressed.
      * @param outStream : Stream to write compressed bytes into.
+     * @throws IllegalArgumentException if the stored bytes are not compressed.
+     * @throws IOException raised on errors performing I/O.
      */
     public void writeCompressedBytes(DataOutputStream outStream) 
       throws IllegalArgumentException, IOException;
 
     /**
-     * Size of stored data.
+     * @return the size of the stored data.
      */
     public int getSize();
   }
@@ -1190,10 +1194,17 @@ public class SequenceFile {
           codec, metadata, syncInterval);
     }
 
-    /** Create the named file.
+    /**
+     * Create the named file.
      * @deprecated Use 
      *   {@link SequenceFile#createWriter(Configuration, Writer.Option...)} 
      *   instead.
+     * @param fs input filesystem.
+     * @param conf input configuration.
+     * @param name input name.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(FileSystem fs, Configuration conf, Path name, 
@@ -1203,10 +1214,19 @@ public class SequenceFile {
            new Metadata(), SYNC_INTERVAL);
     }
     
-    /** Create the named file with write-progress reporter.
+    /**
+     * Create the named file with write-progress reporter.
      * @deprecated Use 
      *   {@link SequenceFile#createWriter(Configuration, Writer.Option...)} 
      *   instead.
+     * @param fs input filesystem.
+     * @param conf input configuration.
+     * @param name input name.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @param progress input progress.
+     * @param metadata input metadata.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(FileSystem fs, Configuration conf, Path name, 
@@ -1217,10 +1237,22 @@ public class SequenceFile {
            null, metadata, SYNC_INTERVAL);
     }
     
-    /** Create the named file with write-progress reporter. 
+    /**
+     * Create the named file with write-progress reporter.
      * @deprecated Use 
      *   {@link SequenceFile#createWriter(Configuration, Writer.Option...)} 
      *   instead.
+     * @param fs input filesystem.
+     * @param conf input configuration.
+     * @param name input name.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @param bufferSize input bufferSize.
+     * @param replication input replication.
+     * @param blockSize input blockSize.
+     * @param progress input progress.
+     * @param metadata input metadata.
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public Writer(FileSystem fs, Configuration conf, Path name,
@@ -1321,16 +1353,19 @@ public class SequenceFile {
       }
     }
     
-    /** Returns the class of keys in this file. */
+    /** @return the class of keys in this file. */
     public Class getKeyClass() { return keyClass; }
 
-    /** Returns the class of values in this file. */
+    /** @return the class of values in this file. */
     public Class getValueClass() { return valClass; }
 
-    /** Returns the compression codec of data in this file. */
+    /** @return the compression codec of data in this file. */
     public CompressionCodec getCompressionCodec() { return codec; }
     
-    /** create a sync point */
+    /**
+     * create a sync point.
+     * @throws IOException raised on errors performing I/O.
+     */
     public void sync() throws IOException {
       if (sync != null && lastSyncPos != out.getPos()) {
         out.writeInt(SYNC_ESCAPE);                // mark the start of the sync
@@ -1340,8 +1375,9 @@ public class SequenceFile {
     }
 
     /**
-     * flush all currently written data to the file system
+     * flush all currently written data to the file system.
      * @deprecated Use {@link #hsync()} or {@link #hflush()} instead
+     * @throws IOException raised on errors performing I/O.
      */
     @Deprecated
     public void syncFs() throws IOException {
@@ -1413,13 +1449,23 @@ public class SequenceFile {
       }
     }
 
-    /** Append a key/value pair. */
+    /**
+     * Append a key/value pair.
+     * @param key input Writable key.
+     * @param val input Writable val.
+     * @throws IOException raised on errors performing I/O.
+     */
     public void append(Writable key, Writable val)
       throws IOException {
       append((Object) key, (Object) val);
     }
 
-    /** Append a key/value pair. */
+    /**
+     * Append a key/value pair.
+     * @param key input Object key.
+     * @param val input Object val.
+     * @throws IOException raised on errors performing I/O.
+     */
     @SuppressWarnings("unchecked")
     public synchronized void append(Object key, Object val)
       throws IOException {
@@ -1470,14 +1516,16 @@ public class SequenceFile {
       val.writeUncompressedBytes(out);            // value
     }
 
-    /** Returns the current length of the output file.
+    /** @return the current length of the output file.
      *
      * <p>This always returns a synchronized position.  In other words,
      * immediately after calling {@link SequenceFile.Reader#seek(long)} with a position
      * returned by this method, {@link SequenceFile.Reader#next(Writable)} may be called.  However
      * the key may be earlier in the file than key last written when this
      * method was called (e.g., with block-compression, it may be the first key
-     * in the block that was being written when this method was called).
+     * in the block that was being written when this method was called).</p>
+     *
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized long getLength() throws IOException {
       return out.getPos();
@@ -1888,7 +1936,7 @@ public class SequenceFile {
      * @param fs The file system used to open the file.
      * @param file The file being read.
      * @param conf Configuration
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      * @deprecated Use Reader(Configuration, Option...) instead.
      */
     @Deprecated
@@ -1904,7 +1952,7 @@ public class SequenceFile {
      * @param start The starting position.
      * @param length The length being read.
      * @param conf Configuration
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      * @deprecated Use Reader(Configuration, Reader.Option...) instead.
      */
     @Deprecated
@@ -1949,7 +1997,7 @@ public class SequenceFile {
      * @param length The length being read if it is {@literal >=} 0.
      *               Otherwise, the length is not available.
      * @return The opened stream.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     protected FSDataInputStream openFile(FileSystem fs, Path file,
         int bufferSize, long length) throws IOException {
@@ -2139,12 +2187,12 @@ public class SequenceFile {
       in.close();
     }
 
-    /** Returns the name of the key class. */
+    /** @return the name of the key class. */
     public String getKeyClassName() {
       return keyClassName;
     }
 
-    /** Returns the class of keys in this file. */
+    /** @return the class of keys in this file. */
     public synchronized Class<?> getKeyClass() {
       if (null == keyClass) {
         try {
@@ -2156,12 +2204,12 @@ public class SequenceFile {
       return keyClass;
     }
 
-    /** Returns the name of the value class. */
+    /** @return the name of the value class. */
     public String getValueClassName() {
       return valClassName;
     }
 
-    /** Returns the class of values in this file. */
+    /** @return the class of values in this file. */
     public synchronized Class<?> getValueClass() {
       if (null == valClass) {
         try {
@@ -2173,13 +2221,22 @@ public class SequenceFile {
       return valClass;
     }
 
-    /** Returns true if values are compressed. */
+    /**
+     * Returns true if values are compressed.
+     * @return true if values are compressed.
+     */
     public boolean isCompressed() { return decompress; }
     
-    /** Returns true if records are block-compressed. */
+    /**
+     * Returns true if records are block-compressed.
+     * @return true if records are block-compressed.
+     */
     public boolean isBlockCompressed() { return blockCompressed; }
     
-    /** Returns the compression codec of data in this file. */
+    /**
+     * Returns the compression codec of data in this file.
+     * @return CompressionCodec.
+     */
     public CompressionCodec getCompressionCodec() { return codec; }
     
     private byte[] getSync() {
@@ -2202,7 +2259,10 @@ public class SequenceFile {
       }
     }
 
-    /** Returns the metadata object of the file */
+    /**
+     * Returns the metadata object of the file.
+     * @return metadata.
+     */
     public Metadata getMetadata() {
       return this.metadata;
     }
@@ -2311,7 +2371,7 @@ public class SequenceFile {
     /**
      * Get the 'value' corresponding to the last read 'key'.
      * @param val : The 'value' to be read.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized void getCurrentValue(Writable val) 
       throws IOException {
@@ -2348,9 +2408,9 @@ public class SequenceFile {
     }
     
     /**
-     * Get the 'value' corresponding to the last read 'key'.
+     * @return the 'value' corresponding to the last read 'key'.
      * @param val : The 'value' to be read.
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized Object getCurrentValue(Object val) 
       throws IOException {
@@ -2392,8 +2452,13 @@ public class SequenceFile {
       return valDeserializer.deserialize(val);
     }
     
-    /** Read the next key in the file into <code>key</code>, skipping its
-     * value.  True if another entry exists, and false at end of file. */
+    /**
+     * Read the next key in the file into <code>key</code>, skipping its
+     * value.
+     *
+     * @param key the key to read into.
+     * @return true if another entry exists, false at end of file.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized boolean next(Writable key) throws IOException {
       if (key.getClass() != getKeyClass())
         throw new IOException("wrong key class: "+key.getClass().getName()
@@ -2440,9 +2505,16 @@ public class SequenceFile {
       return true;
     }
 
-    /** Read the next key/value pair in the file into <code>key</code> and
-     * <code>val</code>.  Returns true if such a pair exists and false when at
-     * end of file */
+    /**
+     * Read the next key/value pair in the file into <code>key</code> and
+     * <code>val</code>.
+     *
+     * @param key the key to read into.
+     * @param val the value to read into.
+     * @return true if such a pair exists, false at end of file.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized boolean next(Writable key, Writable val)
       throws IOException {
       if (val.getClass() != getValueClass())
@@ -2526,7 +2598,7 @@ public class SequenceFile {
      * @param key - The buffer into which the key is read
      * @param val - The 'raw' value
      * @return Returns the total record length or -1 for end of file
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized int nextRaw(DataOutputBuffer key, ValueBytes val) 
       throws IOException {
@@ -2585,7 +2657,7 @@ public class SequenceFile {
      * Read 'raw' keys.
      * @param key - The buffer into which the key is read
      * @return Returns the key length or -1 for end of file
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized int nextRawKey(DataOutputBuffer key) 
       throws IOException {
@@ -2624,8 +2696,14 @@ public class SequenceFile {
       
     }
 
-    /** Read the next key in the file, skipping its
-     * value.  Return null at end of file. */
+    /**
+     * Read the next key in the file, skipping its
+     * value.
+     *
+     * @param key the key to read into.
+     * @return the key that was read, or null at end of file.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized Object next(Object key) throws IOException {
       if (key != null && key.getClass() != getKeyClass()) {
         throw new IOException("wrong key class: "+key.getClass().getName()
@@ -2682,7 +2760,7 @@ public class SequenceFile {
      * Read 'raw' values.
      * @param val - The 'raw' value
      * @return Returns the value length
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized int nextRawValue(ValueBytes val) 
       throws IOException {
@@ -2722,16 +2800,20 @@ public class SequenceFile {
       }
     }
 
-    /** disables sync. often invoked for tmp files */
+    /** Disables sync. Often invoked for tmp files. */
     synchronized void ignoreSync() {
       sync = null;
     }
     
-    /** Set the current byte position in the input file.
+    /**
+     * Set the current byte position in the input file.
      *
      * <p>The position passed must be a position returned by {@link
      * SequenceFile.Writer#getLength()} when writing this file.  To seek to an arbitrary
-     * position, use {@link SequenceFile.Reader#sync(long)}.
+     * position, use {@link SequenceFile.Reader#sync(long)}. </p>
+     *
+     * @param position input position.
+     * @throws IOException raised on errors performing I/O.
      */
     public synchronized void seek(long position) throws IOException {
       in.seek(position);
@@ -2741,7 +2823,11 @@ public class SequenceFile {
       }
     }
 
-    /** Seek to the next sync mark past a given position.*/
+    /**
+     * Seek to the next sync mark past a given position.
+     * @param position position.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized void sync(long position) throws IOException {
       if (position+SYNC_SIZE >= end) {
         seek(end);
@@ -2777,10 +2863,13 @@ public class SequenceFile {
       }
     }
 
-    /** Returns true iff the previous call to next passed a sync mark.*/
+    /** @return true iff the previous call to next passed a sync mark. */
     public synchronized boolean syncSeen() { return syncSeen; }
 
-    /** Return the current byte position in the input file. */
+    /**
+     * @return the current byte position in the input file.
+     * @throws IOException raised on errors performing I/O.
+     */
     public synchronized long getPosition() throws IOException {
       return in.getPos();
     }
@@ -2822,19 +2911,40 @@ public class SequenceFile {
     
     private Progressable progressable = null;
 
-    /** Sort and merge files containing the named classes. */
+    /**
+     * Sort and merge files containing the named classes.
+     * @param fs input FileSystem.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @param conf input Configuration.
+     */
     public Sorter(FileSystem fs, Class<? extends WritableComparable> keyClass,
                   Class valClass, Configuration conf)  {
       this(fs, WritableComparator.get(keyClass, conf), keyClass, valClass, conf);
     }
 
-    /** Sort and merge using an arbitrary {@link RawComparator}. */
+    /**
+     * Sort and merge using an arbitrary {@link RawComparator}.
+     * @param fs input FileSystem.
+     * @param comparator input RawComparator.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @param conf input Configuration.
+     */
     public Sorter(FileSystem fs, RawComparator comparator, Class keyClass, 
                   Class valClass, Configuration conf) {
       this(fs, comparator, keyClass, valClass, conf, new Metadata());
     }
 
-    /** Sort and merge using an arbitrary {@link RawComparator}. */
+    /**
+     * Sort and merge using an arbitrary {@link RawComparator}.
+     * @param fs input FileSystem.
+     * @param comparator input RawComparator.
+     * @param keyClass input keyClass.
+     * @param valClass input valClass.
+     * @param conf input Configuration.
+     * @param metadata input metadata.
+     */
     @SuppressWarnings("deprecation")
     public Sorter(FileSystem fs, RawComparator comparator, Class keyClass,
                   Class valClass, Configuration conf, Metadata metadata) {
@@ -2863,19 +2973,28 @@ public class SequenceFile {
       this.metadata = metadata;
     }
 
-    /** Set the number of streams to merge at once.*/
+    /**
+     * Set the number of streams to merge at once.
+     * @param factor factor.
+     */
     public void setFactor(int factor) { this.factor = factor; }
 
-    /** Get the number of streams to merge at once.*/
+    /** @return the number of streams to merge at once. */
     public int getFactor() { return factor; }
 
-    /** Set the total amount of buffer memory, in bytes.*/
+    /**
+     * Set the total amount of buffer memory, in bytes.
+     * @param memory the total buffer memory, in bytes.
+     */
     public void setMemory(int memory) { this.memory = memory; }
 
-    /** Get the total amount of buffer memory, in bytes.*/
+    /** @return the total amount of buffer memory, in bytes. */
     public int getMemory() { return memory; }
 
-    /** Set the progressable object in order to report progress. */
+    /**
+     * Set the progressable object in order to report progress.
+     * @param progressable input Progressable.
+     */
     public void setProgressable(Progressable progressable) {
       this.progressable = progressable;
     }
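
As a rough usage sketch (paths and tuning values are purely illustrative), a Sorter is constructed with the key/value classes and then tuned via these setters before sorting:

    // Sketch only: assumes Text/Text records and the usual
    // org.apache.hadoop.{conf,fs,io} imports.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SequenceFile.Sorter sorter =
        new SequenceFile.Sorter(fs, Text.class, Text.class, conf);
    sorter.setFactor(100);               // merge up to 100 streams at once
    sorter.setMemory(64 * 1024 * 1024);  // 64 MB of sort buffer
    sorter.sort(new Path[] { new Path("/data/in.seq") },
        new Path("/data/sorted.seq"), false);  // false: keep the inputs
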
@@ -2885,6 +3004,7 @@ public class SequenceFile {
      * @param inFiles the files to be sorted
      * @param outFile the sorted output file
      * @param deleteInput should the input files be deleted as they are read?
+     * @throws IOException raised on errors performing I/O.
      */
     public void sort(Path[] inFiles, Path outFile,
                      boolean deleteInput) throws IOException {
@@ -2907,6 +3027,7 @@ public class SequenceFile {
      * @param tempDir the directory where temp files are created during sort
      * @param deleteInput should the input files be deleted as they are read?
      * @return iterator the RawKeyValueIterator
+     * @throws IOException raised on errors performing I/O.
      */
     public RawKeyValueIterator sortAndIterate(Path[] inFiles, Path tempDir, 
                                               boolean deleteInput) throws IOException {
@@ -2932,8 +3053,9 @@ public class SequenceFile {
 
     /**
      * The backwards compatible interface to sort.
-     * @param inFile the input file to sort
-     * @param outFile the sorted output file
+     * @param inFile the input file to sort.
+     * @param outFile the sorted output file.
+     * @throws IOException raised on errors performing I/O.
      */
     public void sort(Path inFile, Path outFile) throws IOException {
       sort(new Path[]{inFile}, outFile, false);
@@ -3151,27 +3273,32 @@ public class SequenceFile {
 
     /** The interface to iterate over raw keys/values of SequenceFiles. */
     public static interface RawKeyValueIterator {
-      /** Gets the current raw key
+      /**
+       * Gets the current raw key.
        * @return DataOutputBuffer
-       * @throws IOException
+       * @throws IOException raised on errors performing I/O.
        */
       DataOutputBuffer getKey() throws IOException; 
-      /** Gets the current raw value
+      /**
+       * Gets the current raw value.
        * @return ValueBytes 
-       * @throws IOException
+       * @throws IOException raised on errors performing I/O.
        */
       ValueBytes getValue() throws IOException; 
-      /** Sets up the current key and value (for getKey and getValue)
+      /**
+       * Sets up the current key and value (for getKey and getValue).
        * @return true if there exists a key/value, false otherwise 
-       * @throws IOException
+       * @throws IOException raised on errors performing I/O.
        */
       boolean next() throws IOException;
-      /** closes the iterator so that the underlying streams can be closed
-       * @throws IOException
+      /**
+       * Closes the iterator so that the underlying streams can be closed.
+       * @throws IOException raised on errors performing I/O.
        */
       void close() throws IOException;
-      /** Gets the Progress object; this has a float (0.0 - 1.0) 
-       * indicating the bytes processed by the iterator so far
+      /**
+       * @return the Progress object; this has a float (0.0 - 1.0)
+       * indicating the bytes processed by the iterator so far.
        */
       Progress getProgress();
     }    
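
A hedged sketch of consuming this interface, continuing the illustrative sorter above; sortAndIterate returns a RawKeyValueIterator over the sorted records:

    // Sketch only: also assumes org.apache.hadoop.io.DataOutputBuffer.
    SequenceFile.Sorter.RawKeyValueIterator it = sorter.sortAndIterate(
        new Path[] { new Path("/data/in.seq") }, new Path("/tmp/sort"), false);
    while (it.next()) {
      DataOutputBuffer rawKey = it.getKey();          // raw key bytes
      SequenceFile.ValueBytes rawVal = it.getValue(); // raw value bytes
      float done = it.getProgress().get();            // 0.0 - 1.0
      // consume rawKey.getData()[0 .. rawKey.getLength()) and rawVal here
    }
    it.close();
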
@@ -3181,7 +3308,7 @@ public class SequenceFile {
      * @param segments the list of SegmentDescriptors
      * @param tmpDir the directory to write temporary files into
      * @return RawKeyValueIterator
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public RawKeyValueIterator merge(List <SegmentDescriptor> segments, 
                                      Path tmpDir) 
@@ -3199,7 +3326,7 @@ public class SequenceFile {
      * unnecessary
      * @param tmpDir the directory to write temporary files into
      * @return RawKeyValueIteratorMergeQueue
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
                                      Path tmpDir) 
@@ -3217,7 +3344,7 @@ public class SequenceFile {
      * @param factor the factor that will be used as the maximum merge fan-in
      * @param tmpDir the directory to write temporary files into
      * @return RawKeyValueIteratorMergeQueue
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
                                      int factor, Path tmpDir) 
@@ -3243,7 +3370,7 @@ public class SequenceFile {
      * @param deleteInputs true if the input files should be deleted when 
      * unnecessary
      * @return RawKeyValueIteratorMergeQueue
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public RawKeyValueIterator merge(Path [] inNames, Path tempDir, 
                                      boolean deleteInputs) 
@@ -3274,7 +3401,7 @@ public class SequenceFile {
      * @param outputFile the path of the output file 
      * @param prog the Progressable to report status during the file write
      * @return Writer
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public Writer cloneFileAttributes(Path inputFile, Path outputFile, 
                                       Progressable prog) throws IOException {
@@ -3296,10 +3423,10 @@ public class SequenceFile {
 
     /**
      * Writes records from RawKeyValueIterator into a file represented by the 
-     * passed writer
+     * passed writer.
      * @param records the RawKeyValueIterator
      * @param writer the Writer created earlier 
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public void writeFile(RawKeyValueIterator records, Writer writer) 
       throws IOException {
@@ -3313,7 +3440,7 @@ public class SequenceFile {
     /** Merge the provided files.
      * @param inFiles the array of input path names
      * @param outFile the final output file
-     * @throws IOException
+     * @throws IOException raised on errors performing I/O.
      */
     public void merge(Path[] inFiles, Path outFile) throws IOException {
       if (fs.exists(outFile)) {
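
Tying the merge overloads together, a minimal sketch (paths illustrative) of merging pre-sorted files with the convenience form above:

    // Sketch only: the inputs must already be sorted, and outFile must
    // not exist.
    sorter.merge(new Path[] {
        new Path("/data/part-0.seq"), new Path("/data/part-1.seq") },
        new Path("/data/merged.seq"));
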
@@ -3649,10 +3776,13 @@ public class SequenceFile {
         this.segmentPathName = segmentPathName;
       }
       
-      /** Do the sync checks */
+      /** Do the sync checks. */
       public void doSync() {ignoreSync = false;}
       
-      /** Whether to delete the files when no longer needed */
+      /**
+       * Whether to delete the files when no longer needed.
+       * @param preserve true to preserve the input files rather than delete them.
+       */
       public void preserveInput(boolean preserve) {
         preserveInput = preserve;
       }
@@ -3694,9 +3824,10 @@ public class SequenceFile {
         return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
       }
 
-      /** Fills up the rawKey object with the key returned by the Reader
+      /**
+       * Fills up the rawKey object with the key returned by the Reader.
        * @return true if there is a key returned; false, otherwise
-       * @throws IOException
+       * @throws IOException raised on errors performing I/O.
        */
       public boolean nextRawKey() throws IOException {
         if (in == null) {
@@ -3725,18 +3856,19 @@ public class SequenceFile {
         return (keyLength >= 0);
       }
 
-      /** Fills up the passed rawValue with the value corresponding to the key
-       * read earlier
-       * @param rawValue
+      /**
+       * Fills up the passed rawValue with the value corresponding to the key
+       * read earlier.
+       * @param rawValue input ValueBytes rawValue.
        * @return the length of the value
-       * @throws IOException
+       * @throws IOException raised on errors performing I/O.
        */
       public int nextRawValue(ValueBytes rawValue) throws IOException {
         int valLength = in.nextRawValue(rawValue);
         return valLength;
       }
       
-      /** Returns the stored rawKey */
+      /** @return the stored rawKey. */
       public DataOutputBuffer getKey() {
         return rawKey;
       }
@@ -3747,8 +3879,10 @@ public class SequenceFile {
         this.in = null;
       }
 
-      /** The default cleanup. Subclasses can override this with a custom 
-       * cleanup 
+      /**
+       * The default cleanup. Subclasses can override this with a custom
+       * cleanup.
+       * @throws IOException raised on errors performing I/O.
        */
       public void cleanup() throws IOException {
         close();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
index 118cce75136..de75810df0f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
@@ -39,15 +39,29 @@ public class SetFile extends MapFile {
    */
   public static class Writer extends MapFile.Writer {
 
-    /** Create the named set for keys of the named class. 
-     *  @deprecated pass a Configuration too
+    /**
+     * Create the named set for keys of the named class.
+     * @deprecated pass a Configuration too
+     * @param fs input FileSystem.
+     * @param dirName input dirName.
+     * @param keyClass input keyClass.
+     * @throws IOException raised on errors performing I/O.
      */
     public Writer(FileSystem fs, String dirName,
 	Class<? extends WritableComparable> keyClass) throws IOException {
       super(new Configuration(), fs, dirName, keyClass, NullWritable.class);
     }
 
-    /** Create a set naming the element class and compression type. */
+    /**
+     * Create a set naming the element class and compression type.
+     *
+     * @param conf input Configuration.
+     * @param fs input FileSystem.
+     * @param dirName input dirName.
+     * @param keyClass input keyClass.
+     * @param compress input compress.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   Class<? extends WritableComparable> keyClass,
                   SequenceFile.CompressionType compress)
@@ -55,7 +69,16 @@ public class SetFile extends MapFile {
       this(conf, fs, dirName, WritableComparator.get(keyClass, conf), compress);
     }
 
-    /** Create a set naming the element comparator and compression type. */
+    /**
+     * Create a set naming the element comparator and compression type.
+     *
+     * @param conf input Configuration.
+     * @param fs input FileSystem.
+     * @param dirName input dirName.
+     * @param comparator input comparator.
+     * @param compress input compress.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Writer(Configuration conf, FileSystem fs, String dirName,
                   WritableComparator comparator,
                   SequenceFile.CompressionType compress) throws IOException {
@@ -65,8 +88,12 @@ public class SetFile extends MapFile {
             compression(compress));
     }
 
-    /** Append a key to a set.  The key must be strictly greater than the
-     * previous key added to the set. */
+    /**
+     * Append a key to a set.  The key must be strictly greater than the
+     * previous key added to the set.
+     * @param key input key.
+     * @throws IOException raised on errors performing I/O.
+     */
     public void append(WritableComparable key) throws IOException{
       append(key, NullWritable.get());
     }
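
A minimal writing sketch for this class, assuming Text keys and an illustrative directory name; note the strictly-increasing key requirement:

    // Sketch only: assumes the usual org.apache.hadoop.{conf,fs,io} imports.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    SetFile.Writer writer = new SetFile.Writer(conf, fs, "/data/myset",
        Text.class, SequenceFile.CompressionType.NONE);
    writer.append(new Text("alpha"));
    writer.append(new Text("beta"));   // must be strictly greater than "alpha"
    writer.close();
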
@@ -75,12 +102,25 @@ public class SetFile extends MapFile {
   /** Provide access to an existing set file. */
   public static class Reader extends MapFile.Reader {
 
-    /** Construct a set reader for the named set.*/
+    /**
+     * Construct a set reader for the named set.
+     * @param fs input FileSystem.
+     * @param dirName input dirName.
+     * @param conf input Configuration.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Reader(FileSystem fs, String dirName, Configuration conf) throws IOException {
       super(fs, dirName, conf);
     }
 
-    /** Construct a set reader for the named set using the named comparator.*/
+    /**
+     * Construct a set reader for the named set using the named comparator.
+     * @param fs input FileSystem.
+     * @param dirName input dirName.
+     * @param comparator input comparator.
+     * @param conf input Configuration.
+     * @throws IOException raised on errors performing I/O.
+     */
     public Reader(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf)
       throws IOException {
       super(new Path(dirName), conf, comparator(comparator));
@@ -93,15 +133,26 @@ public class SetFile extends MapFile {
       return super.seek(key);
     }
 
-    /** Read the next key in a set into <code>key</code>.  Returns
-     * true if such a key exists and false when at the end of the set. */
+    /**
+     * Read the next key in a set into <code>key</code>.
+     *
+     * @param key input key.
+     * @return true if such a key exists
+     *    and false when at the end of the set.
+     * @throws IOException raised on errors performing I/O.
+     */
     public boolean next(WritableComparable key)
       throws IOException {
       return next(key, NullWritable.get());
     }
 
-    /** Read the matching key from a set into <code>key</code>.
-     * Returns <code>key</code>, or null if no match exists. */
+    /**
+     * Read the matching key from a set into <code>key</code>.
+     *
+     * @param key input key.
+     * @return <code>key</code>, or null if no match exists.
+     * @throws IOException raised on errors performing I/O.
+     */
     public WritableComparable get(WritableComparable key)
       throws IOException {
       if (seek(key)) {
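
And the matching read-side sketch, probing the hypothetical set written above for membership and then iterating the remaining keys in order:

    // Sketch only: reuses fs and conf from the writer sketch.
    SetFile.Reader reader = new SetFile.Reader(fs, "/data/myset", conf);
    boolean present = reader.get(new Text("alpha")) != null; // null: no match
    Text key = new Text();
    while (reader.next(key)) {     // keys come back in sorted order
      System.out.println(key);
    }
    reader.close();
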
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
index be09df18017..96e6cacae87 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
@@ -38,12 +38,15 @@ public class ShortWritable implements WritableComparable<ShortWritable> {
     set(value);
   }
 
-  /** Set the value of this ShortWritable. */
+  /**
+   * Set the value of this ShortWritable.
+   * @param value input value.
+   */
   public void set(short value) {
     this.value = value;
   }
 
-  /** Return the value of this ShortWritable. */
+  /** @return the value of this ShortWritable. */
   public short get() {
     return value;
   }
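
For completeness, a small sketch of the serialization round trip these accessors participate in:

    // Sketch only: assumes org.apache.hadoop.io.{DataInputBuffer,
    // DataOutputBuffer, ShortWritable} are imported.
    ShortWritable w = new ShortWritable((short) 42);
    DataOutputBuffer out = new DataOutputBuffer();
    w.write(out);                              // Writable serialization
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    ShortWritable r = new ShortWritable();
    r.readFields(in);
    assert r.get() == 42;
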
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 5ca7f3c84ca..86fb1ff9a54 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -90,6 +90,7 @@ public class Text extends BinaryComparable
 
   /**
    * Construct from a string.
+   * @param string input string.
    */
   public Text(String string) {
     set(string);
@@ -97,6 +98,7 @@ public class Text extends BinaryComparable
 
   /**
    * Construct from another text.
+   * @param utf8 input utf8.
    */
   public Text(Text utf8) {
     set(utf8);
@@ -104,13 +106,15 @@ public class Text extends BinaryComparable
 
   /**
    * Construct from a byte array.
+   *
+   * @param utf8 input utf8.
    */
   public Text(byte[] utf8)  {
     set(utf8);
   }
 
   /**
-   * Get a copy of the bytes that is exactly the length of the data.
+   * @return a copy of the bytes that is exactly the length of the data.
    * See {@link #getBytes()} for faster access to the underlying array.
    */
   public byte[] copyBytes() {
@@ -136,7 +140,7 @@ public class Text extends BinaryComparable
   }
 
   /**
-   * Returns the length of this text. The length is equal to the number of
+   * @return the length of this text. The length is equal to the number of
    * Unicode code units in the text.
    */
   public int getTextLength() {
@@ -149,7 +153,9 @@ public class Text extends BinaryComparable
   /**
    * Returns the Unicode Scalar Value (32-bit integer value)
    * for the character at <code>position</code>. Note that this
-   * method avoids using the converter or doing String instantiation
+   * method avoids using the converter or doing String instantiation.
+   *
+   * @param position input position.
    * @return the Unicode scalar value at position or -1
    *          if the position is invalid or points to a
    *          trailing byte
@@ -172,6 +178,9 @@ public class Text extends BinaryComparable
    * position is measured in bytes and the return value is in
    * terms of byte position in the buffer. The backing buffer is
    * not converted to a string for this operation.
+   *
+   * @param what input what.
+   * @param start input start.
    * @return byte position of the first occurrence of the search
    *         string in the UTF-8 buffer or -1 if not found
    */
@@ -213,6 +222,8 @@ public class Text extends BinaryComparable
 
   /**
    * Set to contain the contents of a string.
+   *
+   * @param string input string.
    */
   public void set(String string) {
     try {
@@ -229,6 +240,8 @@ public class Text extends BinaryComparable
    * Set to a utf8 byte array. If the length of <code>utf8</code> is
    * <em>zero</em>, actually clear {@link #bytes} and any existing
    * data is lost.
+   *
+   * @param utf8 input utf8.
    */
   public void set(byte[] utf8) {
     if (utf8.length == 0) {
@@ -242,6 +255,7 @@ public class Text extends BinaryComparable
 
   /**
    * Copy a text.
+   * @param other other.
    */
   public void set(Text other) {
     set(other.getBytes(), 0, other.getLength());
@@ -349,6 +363,8 @@ public class Text extends BinaryComparable
 
   /**
    * Skips over one Text in the input.
+   * @param in input in.
+   * @throws IOException raised on errors performing I/O.
    */
   public static void skip(DataInput in) throws IOException {
     int length = WritableUtils.readVInt(in);
@@ -359,6 +375,10 @@ public class Text extends BinaryComparable
    * Read a Text object whose length is already known.
    * This allows creating Text from a stream which uses a different serialization
    * format.
+   *
+   * @param in input in.
+   * @param len input len.
+   * @throws IOException raised on errors performing I/O.
    */
   public void readWithKnownLength(DataInput in, int len) throws IOException {
     ensureCapacity(len);
@@ -426,9 +446,13 @@ public class Text extends BinaryComparable
 
   /// STATIC UTILITIES FROM HERE DOWN
   /**
    * Converts the provided byte array to a String using the
    * UTF-8 encoding. If the input is malformed,
    * replace by a default value.
+   *
+   * @param utf8 input utf8.
+   * @return the decoded String.
+   * @throws CharacterCodingException when a character
+   *                                  encoding or decoding error occurs.
    */
   public static String decode(byte[] utf8) throws CharacterCodingException {
     return decode(ByteBuffer.wrap(utf8), true);
@@ -440,11 +464,18 @@ public class Text extends BinaryComparable
   }
 
   /**
    * Converts the provided byte array to a String using the
    * UTF-8 encoding. If <code>replace</code> is true, then
    * malformed input is replaced with the
    * substitution character, which is U+FFFD. Otherwise the
    * method throws a MalformedInputException.
+   *
+   * @param utf8 input utf8.
+   * @param start input start.
+   * @param length input length.
+   * @param replace input replace.
+   * @return the decoded String.
+   * @throws CharacterCodingException when a character
+   *                                  encoding or decoding error occurs.
    */
   public static String decode(byte[] utf8, int start, int length, boolean replace) 
     throws CharacterCodingException {
@@ -472,8 +503,12 @@ public class Text extends BinaryComparable
    * Converts the provided String to bytes using the
    * UTF-8 encoding. If the input is malformed,
    * invalid chars are replaced by a default value.
+   *
+   * @param string input string.
    * @return ByteBuffer: bytes stores at ByteBuffer.array() 
    *                     and length is ByteBuffer.limit()
+   * @throws CharacterCodingException when a character
+   *                                  encoding or decoding error occurs.
    */
 
   public static ByteBuffer encode(String string)
@@ -487,8 +522,13 @@ public class Text extends BinaryComparable
    * malformed input is replaced with the
    * substitution character, which is U+FFFD. Otherwise the
    * method throws a MalformedInputException.
+   *
+   * @param string input string.
+   * @param replace input replace.
    * @return ByteBuffer: bytes stores at ByteBuffer.array() 
    *                     and length is ByteBuffer.limit()
+   * @throws CharacterCodingException when a character
+   *                                  encoding or decoding error occurs.
    */
   public static ByteBuffer encode(String string, boolean replace)
     throws CharacterCodingException {
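
A minimal round trip through these static codec helpers; remember that the encoded bytes live in ByteBuffer.array() up to ByteBuffer.limit():

    // Sketch only: assumes java.nio.ByteBuffer is imported; both calls
    // may throw CharacterCodingException.
    ByteBuffer bb = Text.encode("héllo");
    byte[] utf8 = new byte[bb.limit()];
    System.arraycopy(bb.array(), 0, utf8, 0, bb.limit());
    String back = Text.decode(utf8);   // "héllo" again
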
@@ -508,13 +548,20 @@ public class Text extends BinaryComparable
 
   static final public int DEFAULT_MAX_LEN = 1024 * 1024;
 
-  /** Read a UTF8 encoded string from in
+  /**
+   * Read a UTF8 encoded string from in.
+   * @param in the DataInput to read from.
+   * @return the string read.
+   * @throws IOException raised on errors performing I/O.
    */
   public static String readString(DataInput in) throws IOException {
     return readString(in, Integer.MAX_VALUE);
   }
 
-  /** Read a UTF8 encoded string with a maximum size
+  /**
+   * Read a UTF8 encoded string with a maximum size.
+   * @param in the DataInput to read from.
+   * @param maxLength the maximum permitted length, in bytes.
+   * @return the string read.
+   * @throws IOException raised on errors performing I/O.
    */
   public static String readString(DataInput in, int maxLength)
       throws IOException {
@@ -526,6 +573,11 @@ public class Text extends BinaryComparable
 
   /**
    * Write a UTF8 encoded string to out.
+   *
+   * @param out input out.
+   * @param s input s.
+   * @return the number of bytes written.
+   * @throws IOException raised on errors performing I/O.
    */
   public static int writeString(DataOutput out, String s) throws IOException {
     ByteBuffer bytes = encode(s);
@@ -536,7 +588,12 @@ public class Text extends BinaryComparable
   }
 
   /**
    * Write a UTF8 encoded string with a maximum size to out.
+   *
+   * @param out input out.
+   * @param s input s.
+   * @param maxLength input maxLength.
+   * @return the number of bytes written.
+   * @throws IOException raised on errors performing I/O.
    */
   public static int writeString(DataOutput out, String s, int maxLength)
       throws IOException {
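
A short sketch pairing the two helpers; writeString returns the number of bytes written (the vint length prefix plus the UTF-8 payload), and the maxLength overload guards against oversized input:

    // Sketch only: assumes org.apache.hadoop.io.{DataInputBuffer,
    // DataOutputBuffer, Text} are imported.
    DataOutputBuffer out = new DataOutputBuffer();
    int written = Text.writeString(out, "hello");   // vint length + bytes
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    String s = Text.readString(in, 1024);           // reject strings > 1 KB
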
@@ -670,9 +727,11 @@ public class Text extends BinaryComparable
     3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5 };
 
   /**
-   * Returns the next code point at the current position in
+   * @return the next code point at the current position in
    * the buffer. The buffer's position will be incremented.
    * Any mark set on this buffer will be changed by this method!
+   *
+   * @param bytes input bytes.
    */
   public static int bytesToCodePoint(ByteBuffer bytes) {
     bytes.mark();
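
A hedged sketch of walking a Text's code points with this helper; the buffer's position advances past each decoded code point:

    // Sketch only: assumes java.nio.ByteBuffer is imported.
    Text t = new Text("héllo");
    ByteBuffer buf = ByteBuffer.wrap(t.getBytes(), 0, t.getLength());
    while (buf.hasRemaining()) {
      int codePoint = Text.bytesToCodePoint(buf);  // advances buf.position()
      System.out.printf("U+%04X%n", codePoint);
    }
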
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
index f5d33a13005..fdee830e6fe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
@@ -63,27 +63,36 @@ public class UTF8 implements WritableComparable<UTF8> {
     //set("");
   }
 
-  /** Construct from a given string. */
+  /**
+   * Construct from a given string.
+   * @param string input string.
+   */
   public UTF8(String string) {
     set(string);
   }
 
-  /** Construct from a given string. */
+  /**
+   * Construct from another UTF8.
+   * @param utf8 input utf8.
+   */
   public UTF8(UTF8 utf8) {
     set(utf8);
   }
 
-  /** The raw bytes. */
+  /** @return The raw bytes. */
   public byte[] getBytes() {
     return bytes;
   }
 
-  /** The number of bytes in the encoded string. */
+  /** @return The number of bytes in the encoded string. */
   public int getLength() {
     return length;
   }
 
-  /** Set to contain the contents of a string. */
+  /**
+   * Set to contain the contents of a string.
+   * @param string input string.
+   */
   public void set(String string) {
     if (string.length() > 0xffff/3) {             // maybe too long
       LOG.warn("truncating long string: " + string.length()
@@ -108,7 +117,10 @@ public class UTF8 implements WritableComparable<UTF8> {
     }
   }
 
-  /** Set to contain the contents of a string. */
+  /**
+   * Set to contain the contents of another UTF8.
+   * @param other input other.
+   */
   public void set(UTF8 other) {
     length = other.length;
     if (bytes == null || length > bytes.length)   // grow buffer
@@ -124,7 +136,11 @@ public class UTF8 implements WritableComparable<UTF8> {
     in.readFully(bytes, 0, length);
   }
 
-  /** Skips over one UTF8 in the input. */
+  /**
+   * Skips over one UTF8 in the input.
+   * @param in datainput.
+   * @throws IOException raised on errors performing I/O.
+   */
   public static void skip(DataInput in) throws IOException {
     int length = in.readUnsignedShort();
... 9642 lines suppressed ...


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org