Posted to commits@commons.apache.org by gg...@apache.org on 2022/12/11 08:02:48 UTC

[commons-compress] branch master updated (01bfe335 -> 8e112661)

This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git


    from 01bfe335 Remove unused imports
     new f5a37de3 Sort members
     new 71579a20 Fix spelling
     new 32add381 Replace hack with standard code
     new da55b924 Normalize formatting
     new 8e112661 Use Files APIs and try-with-resources

The 5 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.
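
The last revision in the list, 8e112661, replaces legacy stream construction with java.nio.file.Files calls wrapped in try-with-resources. As a rough, self-contained sketch of that pattern (the file names are hypothetical and this is not code taken from the patch itself):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    public class FilesAndTryWithResources {
        public static void main(final String[] args) throws IOException {
            // Hypothetical file names, used only to illustrate the pattern.
            final Path source = Paths.get("input.bin");
            final Path target = Paths.get("output.bin");
            // Files.newInputStream/newOutputStream replace the FileInputStream and
            // FileOutputStream constructors; try-with-resources closes both streams
            // even if the copy fails part-way through.
            try (InputStream in = Files.newInputStream(source);
                 OutputStream out = Files.newOutputStream(target)) {
                final byte[] buffer = new byte[8192];
                int n;
                while ((n = in.read(buffer)) != -1) {
                    out.write(buffer, 0, n);
                }
            }
        }
    }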


Summary of changes:
 .../commons/compress/MemoryLimitException.java     |   16 +-
 .../commons/compress/archivers/ArchiveEntry.java   |   22 +-
 .../compress/archivers/ArchiveInputStream.java     |   88 +-
 .../compress/archivers/ArchiveOutputStream.java    |  118 +-
 .../compress/archivers/ArchiveStreamFactory.java   |  298 +-
 .../apache/commons/compress/archivers/Lister.java  |  100 +-
 .../compress/archivers/ar/ArArchiveEntry.java      |  108 +-
 .../archivers/ar/ArArchiveInputStream.java         |  364 +--
 .../archivers/ar/ArArchiveOutputStream.java        |  154 +-
 .../compress/archivers/arj/ArjArchiveEntry.java    |  140 +-
 .../archivers/arj/ArjArchiveInputStream.java       |  314 +-
 .../compress/archivers/arj/LocalFileHeader.java    |  126 +-
 .../commons/compress/archivers/arj/MainHeader.java |   44 +-
 .../compress/archivers/cpio/CpioArchiveEntry.java  |  390 +--
 .../archivers/cpio/CpioArchiveInputStream.java     |  222 +-
 .../archivers/cpio/CpioArchiveOutputStream.java    |  466 +--
 .../commons/compress/archivers/cpio/CpioUtil.java  |   14 +-
 .../commons/compress/archivers/dump/Dirent.java    |   22 +-
 .../archivers/dump/DumpArchiveConstants.java       |   72 +-
 .../compress/archivers/dump/DumpArchiveEntry.java  |  782 ++---
 .../archivers/dump/DumpArchiveException.java       |    6 +-
 .../archivers/dump/DumpArchiveInputStream.java     |  330 +--
 .../archivers/dump/DumpArchiveSummary.java         |  248 +-
 .../compress/archivers/dump/DumpArchiveUtil.java   |   76 +-
 .../compress/archivers/dump/TapeInputStream.java   |  290 +-
 .../compress/archivers/examples/Archiver.java      |   18 +-
 .../examples/CloseableConsumerAdapter.java         |   10 +-
 .../compress/archivers/jar/JarArchiveEntry.java    |   30 +-
 .../archivers/jar/JarArchiveInputStream.java       |   34 +-
 .../archivers/sevenz/AES256SHA256Decoder.java      |  144 +-
 .../commons/compress/archivers/sevenz/Archive.java |   16 +-
 .../BoundedSeekableByteChannelInputStream.java     |   10 +-
 .../commons/compress/archivers/sevenz/CLI.java     |   46 +-
 .../commons/compress/archivers/sevenz/Coders.java  |  228 +-
 .../compress/archivers/sevenz/DeltaDecoder.java    |   10 +-
 .../commons/compress/archivers/sevenz/Folder.java  |   46 +-
 .../compress/archivers/sevenz/LZMA2Decoder.java    |   46 +-
 .../compress/archivers/sevenz/LZMADecoder.java     |   26 +-
 .../compress/archivers/sevenz/SevenZFile.java      | 3036 ++++++++++----------
 .../archivers/sevenz/SevenZFileOptions.java        |  170 +-
 .../compress/archivers/sevenz/SevenZMethod.java    |   18 +-
 .../sevenz/SevenZMethodConfiguration.java          |   26 +-
 .../archivers/sevenz/SevenZOutputFile.java         |  872 +++---
 .../archivers/tar/TarArchiveInputStream.java       |  766 ++---
 .../archivers/tar/TarArchiveOutputStream.java      |  726 ++---
 .../archivers/tar/TarArchiveSparseEntry.java       |    8 +-
 .../archivers/tar/TarArchiveStructSparse.java      |   16 +-
 .../commons/compress/archivers/tar/TarFile.java    |  766 ++---
 .../commons/compress/archivers/tar/TarUtils.java   | 1004 +++----
 .../archivers/zip/AbstractUnicodeExtraField.java   |  124 +-
 .../compress/archivers/zip/AsiExtraField.java      |  234 +-
 .../commons/compress/archivers/zip/BinaryTree.java |  138 +-
 .../compress/archivers/zip/CircularBuffer.java     |   30 +-
 .../archivers/zip/ExplodingInputStream.java        |  110 +-
 .../compress/archivers/zip/ExtraFieldUtils.java    |  474 +--
 .../compress/archivers/zip/GeneralPurposeBit.java  |  214 +-
 .../zip/InflaterInputStreamWithStatistics.java     |   20 +-
 .../commons/compress/archivers/zip/JarMarker.java  |   64 +-
 .../compress/archivers/zip/NioZipEncoding.java     |  154 +-
 .../compress/archivers/zip/PKWareExtraHeader.java  |  322 +--
 .../archivers/zip/ParallelScatterZipCreator.java   |  120 +-
 .../archivers/zip/ResourceAlignmentExtraField.java |   44 +-
 .../archivers/zip/ScatterZipOutputStream.java      |  180 +-
 .../compress/archivers/zip/StreamCompressor.java   |  328 +--
 .../archivers/zip/UnicodeCommentExtraField.java    |   22 +-
 .../archivers/zip/UnicodePathExtraField.java       |   18 +-
 .../archivers/zip/UnparseableExtraFieldData.java   |   58 +-
 .../archivers/zip/UnrecognizedExtraField.java      |  122 +-
 .../archivers/zip/UnshrinkingInputStream.java      |   30 +-
 .../zip/UnsupportedZipFeatureException.java        |  122 +-
 .../archivers/zip/X0015_CertificateIdForFile.java  |   20 +-
 .../X0016_CertificateIdForCentralDirectory.java    |   20 +-
 .../zip/X0017_StrongEncryptionHeader.java          |   58 +-
 .../archivers/zip/X5455_ExtendedTimestamp.java     |  550 ++--
 .../compress/archivers/zip/X7875_NewUnix.java      |  318 +-
 .../zip/Zip64ExtendedInformationExtraField.java    |  192 +-
 .../archivers/zip/Zip64RequiredException.java      |   14 +-
 .../compress/archivers/zip/ZipArchiveEntry.java    | 1518 +++++-----
 .../archivers/zip/ZipArchiveEntryRequest.java      |   30 +-
 .../archivers/zip/ZipArchiveInputStream.java       | 1834 ++++++------
 .../archivers/zip/ZipArchiveOutputStream.java      | 2538 ++++++++--------
 .../archivers/zip/ZipEightByteInteger.java         |  156 +-
 .../compress/archivers/zip/ZipEncoding.java        |   14 +-
 .../compress/archivers/zip/ZipEncodingHelper.java  |   20 +-
 .../compress/archivers/zip/ZipExtraField.java      |   38 +-
 .../commons/compress/archivers/zip/ZipLong.java    |  154 +-
 .../commons/compress/archivers/zip/ZipMethod.java  |   28 +-
 .../commons/compress/archivers/zip/ZipShort.java   |  122 +-
 .../archivers/zip/ZipSplitOutputStream.java        |  210 +-
 .../zip/ZipSplitReadOnlySeekableByteChannel.java   |  256 +-
 .../commons/compress/archivers/zip/ZipUtil.java    |  364 +--
 .../apache/commons/compress/changes/Change.java    |   48 +-
 .../apache/commons/compress/changes/ChangeSet.java |   40 +-
 .../compress/changes/ChangeSetPerformer.java       |  242 +-
 .../commons/compress/changes/ChangeSetResults.java |   18 +-
 .../compressors/CompressorInputStream.java         |   30 +-
 .../compressors/CompressorStreamFactory.java       |  200 +-
 .../commons/compress/compressors/FileNameUtil.java |   48 +-
 .../brotli/BrotliCompressorInputStream.java        |   32 +-
 .../compress/compressors/brotli/BrotliUtils.java   |   28 +-
 .../bzip2/BZip2CompressorInputStream.java          | 1122 ++++----
 .../bzip2/BZip2CompressorOutputStream.java         |  792 ++---
 .../compress/compressors/bzip2/BZip2Utils.java     |   34 +-
 .../compress/compressors/bzip2/BlockSort.java      |  624 ++--
 .../commons/compress/compressors/bzip2/CRC.java    |   12 +-
 .../deflate/DeflateCompressorInputStream.java      |   88 +-
 .../deflate/DeflateCompressorOutputStream.java     |   36 +-
 .../compressors/deflate/DeflateParameters.java     |   42 +-
 .../deflate64/Deflate64CompressorInputStream.java  |   64 +-
 .../compressors/deflate64/HuffmanDecoder.java      |  728 ++---
 .../gzip/GzipCompressorInputStream.java            |   96 +-
 .../gzip/GzipCompressorOutputStream.java           |  154 +-
 .../compress/compressors/gzip/GzipParameters.java  |  102 +-
 .../compress/compressors/gzip/GzipUtils.java       |   36 +-
 .../lz4/BlockLZ4CompressorInputStream.java         |   94 +-
 .../lz4/BlockLZ4CompressorOutputStream.java        |  432 +--
 .../lz4/FramedLZ4CompressorInputStream.java        |  308 +-
 .../lz4/FramedLZ4CompressorOutputStream.java       |  196 +-
 .../commons/compress/compressors/lz4/XXHash32.java |  104 +-
 .../AbstractLZ77CompressorInputStream.java         |  152 +-
 .../compressors/lz77support/LZ77Compressor.java    |  452 +--
 .../compressors/lz77support/Parameters.java        |  322 +--
 .../lzma/LZMACompressorInputStream.java            |   74 +-
 .../lzma/LZMACompressorOutputStream.java           |   30 +-
 .../compress/compressors/lzma/LZMAUtils.java       |  100 +-
 .../compress/compressors/lzw/LZWInputStream.java   |  284 +-
 .../pack200/Pack200CompressorInputStream.java      |  248 +-
 .../pack200/Pack200CompressorOutputStream.java     |   50 +-
 .../compress/compressors/pack200/Pack200Utils.java |   46 +-
 .../compress/compressors/pack200/StreamBridge.java |    8 +-
 .../snappy/FramedSnappyCompressorInputStream.java  |  224 +-
 .../snappy/FramedSnappyCompressorOutputStream.java |   62 +-
 .../compressors/snappy/PureJavaCrc32C.java         |  134 +-
 .../snappy/SnappyCompressorInputStream.java        |   92 +-
 .../snappy/SnappyCompressorOutputStream.java       |  208 +-
 .../compressors/xz/XZCompressorInputStream.java    |   42 +-
 .../compressors/xz/XZCompressorOutputStream.java   |   28 +-
 .../commons/compress/compressors/xz/XZUtils.java   |  124 +-
 .../compressors/z/ZCompressorInputStream.java      |  100 +-
 .../zstandard/ZstdCompressorInputStream.java       |   32 +-
 .../zstandard/ZstdCompressorOutputStream.java      |   56 +-
 .../compress/compressors/zstandard/ZstdUtils.java  |   56 +-
 .../harmony/archive/internal/nls/Messages.java     |  144 +-
 .../commons/compress/harmony/pack200/Archive.java  |  348 +--
 .../harmony/pack200/AttributeDefinitionBands.java  |  162 +-
 .../compress/harmony/pack200/BHSDCodec.java        |  192 +-
 .../commons/compress/harmony/pack200/BandSet.java  |  742 ++---
 .../commons/compress/harmony/pack200/BcBands.java  |   84 +-
 .../commons/compress/harmony/pack200/CPClass.java  |   10 +-
 .../compress/harmony/pack200/CPMethodOrField.java  |   26 +-
 .../compress/harmony/pack200/CPNameAndType.java    |   18 +-
 .../compress/harmony/pack200/CPSignature.java      |   18 +-
 .../commons/compress/harmony/pack200/CPString.java |    8 +-
 .../commons/compress/harmony/pack200/CPUTF8.java   |    6 +-
 .../compress/harmony/pack200/ClassBands.java       | 2174 +++++++-------
 .../commons/compress/harmony/pack200/Codec.java    |   38 +-
 .../compress/harmony/pack200/CodecEncoding.java    |   12 +-
 .../commons/compress/harmony/pack200/CpBands.java  |  786 ++---
 .../compress/harmony/pack200/FileBands.java        |   30 +-
 .../commons/compress/harmony/pack200/IcBands.java  |  190 +-
 .../commons/compress/harmony/pack200/IntList.java  |   14 +-
 .../harmony/pack200/MetadataBandGroup.java         |  334 +--
 .../compress/harmony/pack200/NewAttribute.java     |  186 +-
 .../harmony/pack200/NewAttributeBands.java         | 1036 +++----
 .../compress/harmony/pack200/Pack200Adapter.java   |   24 +-
 .../harmony/pack200/Pack200ClassReader.java        |   36 +-
 .../harmony/pack200/Pack200PackerAdapter.java      |   62 +-
 .../compress/harmony/pack200/PackingUtils.java     |  104 +-
 .../compress/harmony/pack200/PopulationCodec.java  |   28 +-
 .../commons/compress/harmony/pack200/RunCodec.java |   70 +-
 .../commons/compress/harmony/pack200/Segment.java  |  894 +++---
 .../compress/harmony/pack200/SegmentHeader.java    |  312 +-
 .../compress/harmony/unpack200/Archive.java        |  104 +-
 .../harmony/unpack200/AttrDefinitionBands.java     |   14 +-
 .../harmony/unpack200/AttributeLayout.java         |   42 +-
 .../compress/harmony/unpack200/BandSet.java        |  388 +--
 .../compress/harmony/unpack200/BcBands.java        |  192 +-
 .../compress/harmony/unpack200/ClassBands.java     | 1120 ++++----
 .../compress/harmony/unpack200/CpBands.java        |  514 ++--
 .../compress/harmony/unpack200/FileBands.java      |   74 +-
 .../compress/harmony/unpack200/IcBands.java        |  168 +-
 .../compress/harmony/unpack200/IcTuple.java        |  332 +--
 .../harmony/unpack200/MetadataBandGroup.java       |   92 +-
 .../harmony/unpack200/NewAttributeBands.java       | 1226 ++++----
 .../harmony/unpack200/Pack200UnpackerAdapter.java  |   32 +-
 .../compress/harmony/unpack200/Segment.java        |  160 +-
 .../harmony/unpack200/SegmentConstantPool.java     |  236 +-
 .../unpack200/SegmentConstantPoolArrayCache.java   |  142 +-
 .../compress/harmony/unpack200/SegmentHeader.java  |  250 +-
 .../compress/harmony/unpack200/SegmentUtils.java   |   28 +-
 .../bytecode/AnnotationDefaultAttribute.java       |   36 +-
 .../unpack200/bytecode/AnnotationsAttribute.java   |   76 +-
 .../harmony/unpack200/bytecode/Attribute.java      |   10 +-
 .../unpack200/bytecode/BCIRenumberedAttribute.java |   30 +-
 .../harmony/unpack200/bytecode/ByteCode.java       |  246 +-
 .../harmony/unpack200/bytecode/CPClass.java        |   24 +-
 .../harmony/unpack200/bytecode/CPConstant.java     |    8 +-
 .../harmony/unpack200/bytecode/CPDouble.java       |    8 +-
 .../harmony/unpack200/bytecode/CPFieldRef.java     |   82 +-
 .../harmony/unpack200/bytecode/CPFloat.java        |    8 +-
 .../harmony/unpack200/bytecode/CPInteger.java      |    8 +-
 .../unpack200/bytecode/CPInterfaceMethodRef.java   |   28 +-
 .../harmony/unpack200/bytecode/CPLong.java         |    8 +-
 .../harmony/unpack200/bytecode/CPMember.java       |   74 +-
 .../harmony/unpack200/bytecode/CPMethod.java       |   18 +-
 .../harmony/unpack200/bytecode/CPMethodRef.java    |   18 +-
 .../harmony/unpack200/bytecode/CPNameAndType.java  |   86 +-
 .../compress/harmony/unpack200/bytecode/CPRef.java |    4 +-
 .../harmony/unpack200/bytecode/CPString.java       |   46 +-
 .../harmony/unpack200/bytecode/CPUTF8.java         |   30 +-
 .../unpack200/bytecode/ClassConstantPool.java      |   62 +-
 .../harmony/unpack200/bytecode/ClassFileEntry.java |    8 +-
 .../harmony/unpack200/bytecode/CodeAttribute.java  |   52 +-
 .../unpack200/bytecode/ConstantPoolEntry.java      |   20 +-
 .../unpack200/bytecode/ConstantValueAttribute.java |    8 +-
 .../unpack200/bytecode/DeprecatedAttribute.java    |   12 +-
 .../bytecode/EnclosingMethodAttribute.java         |   40 +-
 .../unpack200/bytecode/ExceptionTableEntry.java    |   18 +-
 .../unpack200/bytecode/ExceptionsAttribute.java    |    8 +-
 .../unpack200/bytecode/InnerClassesAttribute.java  |   60 +-
 .../bytecode/LineNumberTableAttribute.java         |   52 +-
 .../bytecode/LocalVariableTableAttribute.java      |   78 +-
 .../bytecode/LocalVariableTypeTableAttribute.java  |   68 +-
 .../harmony/unpack200/bytecode/NewAttribute.java   |  244 +-
 .../harmony/unpack200/bytecode/OperandManager.java |  122 +-
 ...timeVisibleorInvisibleAnnotationsAttribute.java |   30 +-
 ...leorInvisibleParameterAnnotationsAttribute.java |  106 +-
 .../unpack200/bytecode/SignatureAttribute.java     |   22 +-
 .../unpack200/bytecode/SourceFileAttribute.java    |   26 +-
 .../unpack200/bytecode/forms/ByteCodeForm.java     |   68 +-
 .../unpack200/bytecode/forms/ClassRefForm.java     |   20 +-
 .../bytecode/forms/ClassSpecificReferenceForm.java |    4 +-
 .../bytecode/forms/InitMethodReferenceForm.java    |    8 +-
 .../bytecode/forms/NarrowClassRefForm.java         |   10 +-
 .../unpack200/bytecode/forms/ReferenceForm.java    |   22 +-
 .../bytecode/forms/SingleByteReferenceForm.java    |   10 +-
 .../bytecode/forms/SuperFieldRefForm.java          |   10 +-
 .../bytecode/forms/SuperMethodRefForm.java         |   10 +-
 .../unpack200/bytecode/forms/ThisFieldRefForm.java |   10 +-
 .../bytecode/forms/ThisMethodRefForm.java          |   10 +-
 .../bytecode/forms/VariableInstructionForm.java    |   74 +-
 .../FileBasedScatterGatherBackingStore.java        |   20 +-
 .../parallel/ScatterGatherBackingStore.java        |   12 +-
 .../commons/compress/utils/ArchiveUtils.java       |  242 +-
 .../commons/compress/utils/BitInputStream.java     |  134 +-
 .../commons/compress/utils/BoundedInputStream.java |   28 +-
 .../apache/commons/compress/utils/ByteUtils.java   |  172 +-
 .../apache/commons/compress/utils/Charsets.java    |   52 +-
 .../utils/ChecksumCalculatingInputStream.java      |   16 +-
 .../compress/utils/CountingInputStream.java        |   40 +-
 .../compress/utils/CountingOutputStream.java       |   32 +-
 .../apache/commons/compress/utils/ExactMath.java   |    8 +-
 .../utils/FixedLengthBlockOutputStream.java        |  240 +-
 .../org/apache/commons/compress/utils/IOUtils.java |  268 +-
 .../utils/MultiReadOnlySeekableByteChannel.java    |  236 +-
 .../utils/SeekableInMemoryByteChannel.java         |  134 +-
 .../org/apache/commons/compress/utils/Sets.java    |    8 +-
 .../apache/commons/compress/AbstractTestCase.java  |  309 +-
 .../apache/commons/compress/ArchiveReadTest.java   |   18 +-
 .../apache/commons/compress/ArchiveUtilsTest.java  |   98 +-
 .../apache/commons/compress/ChainingTestCase.java  |   12 +-
 .../commons/compress/DetectArchiverTestCase.java   |   56 +-
 .../org/apache/commons/compress/IOMethodsTest.java |  174 +-
 .../org/apache/commons/compress/OsgiITest.java     |   24 +-
 .../commons/compress/archivers/ArTestCase.java     |  246 +-
 .../archivers/ArchiveOutputStreamTest.java         |  182 +-
 .../archivers/ArchiveStreamFactoryTest.java        |  438 +--
 .../commons/compress/archivers/CpioTestCase.java   |   26 +-
 .../commons/compress/archivers/DumpTestCase.java   |   98 +-
 .../compress/archivers/ExceptionMessageTest.java   |   20 +-
 .../commons/compress/archivers/JarTestCase.java    |   52 +-
 .../commons/compress/archivers/LongPathTest.java   |   16 +-
 .../compress/archivers/LongSymLinkTest.java        |   16 +-
 .../commons/compress/archivers/SevenZTestCase.java |  222 +-
 .../commons/compress/archivers/TarTestCase.java    |  519 ++--
 .../commons/compress/archivers/ZipTestCase.java    | 1165 ++++----
 .../archivers/ar/ArArchiveInputStreamTest.java     |   46 +-
 .../archivers/arj/ArjArchiveInputStreamTest.java   |   46 +-
 .../compress/archivers/arj/CoverageTest.java       |   12 +-
 .../cpio/CpioArchiveOutputStreamTest.java          |   22 +-
 .../archivers/dump/DumpArchiveInputStreamTest.java |   58 +-
 .../archivers/dump/DumpArchiveUtilTest.java        |   14 +-
 .../compress/archivers/examples/ExpanderTest.java  |  206 +-
 .../examples/ParameterizedArchiverTest.java        |   86 +-
 .../examples/ParameterizedExpanderTest.java        |   98 +-
 .../archivers/examples/SevenZArchiverTest.java     |   72 +-
 .../compress/archivers/jar/ExpandApkTest.java      |   10 +-
 .../archivers/memory/MemoryArchiveEntry.java       |   10 +-
 .../archivers/memory/MemoryArchiveInputStream.java |    8 +-
 .../compress/archivers/sevenz/CoverageTest.java    |   10 +-
 .../compress/archivers/sevenz/FolderTest.java      |   28 +-
 .../archivers/sevenz/SevenZArchiveEntryTest.java   |   30 +-
 .../compress/archivers/sevenz/SevenZFileTest.java  |  926 +++---
 .../archivers/sevenz/SevenZNativeHeapTest.java     |  216 +-
 .../archivers/sevenz/SevenZOutputFileTest.java     |  572 ++--
 .../commons/compress/archivers/tar/BigFilesIT.java |   58 +-
 .../compress/archivers/tar/FileTimesIT.java        |  406 +--
 .../compress/archivers/tar/SparseFilesTest.java    |  498 ++--
 .../archivers/tar/TarArchiveEntryTest.java         |  486 ++--
 .../archivers/tar/TarArchiveInputStreamTest.java   |  352 ++-
 .../archivers/tar/TarArchiveOutputStreamTest.java  |  834 +++---
 .../compress/archivers/tar/TarFileTest.java        |  238 +-
 .../commons/compress/archivers/tar/TarLister.java  |   48 +-
 .../archivers/tar/TarMemoryFileSystemTest.java     |   60 +-
 .../compress/archivers/tar/TarUtilsTest.java       |  938 +++---
 .../compress/archivers/zip/AsiExtraFieldTest.java  |   56 +-
 .../compress/archivers/zip/BitStreamTest.java      |   42 +-
 .../compress/archivers/zip/CircularBufferTest.java |   36 +-
 .../compress/archivers/zip/DataDescriptorTest.java |  100 +-
 .../archivers/zip/EncryptedArchiveTest.java        |   32 +-
 .../compress/archivers/zip/ExplodeSupportTest.java |   64 +-
 .../archivers/zip/ExtraFieldUtilsTest.java         |  228 +-
 .../archivers/zip/GeneralPurposeBitTest.java       |   72 +-
 .../commons/compress/archivers/zip/Lister.java     |   28 +-
 .../archivers/zip/Maven221MultiVolumeTest.java     |   10 +-
 .../compress/archivers/zip/NioZipEncodingTest.java |   48 +-
 .../zip/ParallelScatterZipCreatorTest.java         |  159 +-
 .../compress/archivers/zip/ScatterSampleTest.java  |   38 +-
 .../archivers/zip/ScatterZipOutputStreamTest.java  |    8 +-
 .../archivers/zip/StreamCompressorTest.java        |   26 +-
 .../compress/archivers/zip/UTF8ZipFilesTest.java   |  476 +--
 .../compress/archivers/zip/X000A_NTFSTest.java     |   32 +-
 .../archivers/zip/X5455_ExtendedTimestampTest.java |  488 ++--
 .../compress/archivers/zip/X7875_NewUnixTest.java  |  214 +-
 .../Zip64ExtendedInformationExtraFieldTest.java    |  204 +-
 .../compress/archivers/zip/Zip64SupportIT.java     | 2125 +++++++-------
 .../archivers/zip/ZipArchiveEntryTest.java         |  358 +--
 .../archivers/zip/ZipArchiveInputStreamTest.java   |  908 +++---
 .../archivers/zip/ZipClassCoverageTest.java        |   26 +-
 .../archivers/zip/ZipEightByteIntegerTest.java     |   66 +-
 .../compress/archivers/zip/ZipEncodingTest.java    |  124 +-
 .../zip/ZipFileIgnoringLocalFileHeaderTest.java    |   62 +-
 .../compress/archivers/zip/ZipFileTest.java        | 1156 ++++----
 .../compress/archivers/zip/ZipLongTest.java        |   80 +-
 .../archivers/zip/ZipMemoryFileSystemTest.java     |  490 ++--
 .../compress/archivers/zip/ZipShortTest.java       |   72 +-
 .../archivers/zip/ZipSplitOutputStreamTest.java    |   34 +-
 .../compress/archivers/zip/ZipUtilTest.java        |  172 +-
 .../compress/changes/ChangeSetTestCase.java        |  828 +++---
 .../compress/compressors/BZip2TestCase.java        |   69 +-
 .../compress/compressors/BZip2UtilsTestCase.java   |   52 +-
 .../compress/compressors/DeflateTestCase.java      |   48 +-
 .../compressors/DetectCompressorTestCase.java      |  208 +-
 .../compress/compressors/FramedSnappyTestCase.java |   71 +-
 .../commons/compress/compressors/GZipTestCase.java |  189 +-
 .../compress/compressors/GzipUtilsTestCase.java    |   76 +-
 .../commons/compress/compressors/LZMATestCase.java |   72 +-
 .../compress/compressors/Pack200TestCase.java      |  168 +-
 .../commons/compress/compressors/XZTestCase.java   |   48 +-
 .../commons/compress/compressors/ZTestCase.java    |   46 +-
 .../brotli/BrotliCompressorInputStreamTest.java    |  128 +-
 .../bzip2/BZip2CompressorInputStreamTest.java      |   72 +-
 .../compress/compressors/bzip2/BlockSortTest.java  |  122 +-
 .../bzip2/PythonTruncatedBzip2Test.java            |   52 +-
 .../deflate/DeflateCompressorInputStreamTest.java  |   20 +-
 .../compressors/deflate/DeflateParametersTest.java |    8 +-
 .../compressors/deflate64/HuffmanDecoderTest.java  |  160 +-
 .../lz4/BlockLZ4CompressorInputStreamTest.java     |   28 +-
 .../lz4/BlockLZ4CompressorOutputStreamTest.java    |  234 +-
 .../lz4/BlockLZ4CompressorRoundtripTest.java       |   48 +-
 .../compress/compressors/lz4/FactoryTest.java      |   20 +-
 .../lz4/FramedLZ4CompressorInputStreamTest.java    |  452 +--
 .../lz4/FramedLZ4CompressorRoundtripTest.java      |   48 +-
 .../AbstractLZ77CompressorInputStreamTest.java     |   22 +-
 .../lz77support/LZ77CompressorTest.java            |  302 +-
 .../compressors/lz77support/ParametersTest.java    |   92 +-
 .../compressors/lzma/LZMAUtilsTestCase.java        |   68 +-
 .../compressors/pack200/Pack200UtilsTest.java      |   37 +-
 .../FramedSnappyCompressorInputStreamTest.java     |  223 +-
 .../compressors/snappy/SnappyRoundtripTest.java    |  122 +-
 .../xz/XZCompressorInputStreamTest.java            |   54 +-
 .../compress/compressors/xz/XZUtilsTestCase.java   |   72 +-
 .../compressors/z/ZCompressorInputStreamTest.java  |   26 +-
 .../zstandard/ZstdCompressorInputStreamTest.java   |  186 +-
 .../compressors/zstandard/ZstdRoundtripTest.java   |   43 +-
 .../compressors/zstandard/ZstdUtilsTest.java       |   24 +-
 .../harmony/pack200/tests/ArchiveTest.java         |  330 +--
 .../harmony/pack200/tests/BHSDCodecTest.java       |   20 +-
 .../harmony/pack200/tests/CodecEncodingTest.java   |  136 +-
 .../compress/harmony/pack200/tests/CodecTest.java  |  160 +-
 .../compress/harmony/pack200/tests/HelloWorld.java |    8 +-
 .../pack200/tests/NewAttributeBandsTest.java       |  320 +--
 .../harmony/pack200/tests/PackingOptionsTest.java  |  568 ++--
 .../harmony/pack200/tests/PopulationCodecTest.java |   46 +-
 .../harmony/pack200/tests/RunCodecTest.java        |   78 +-
 .../unpack200/tests/AbstractBandsTestCase.java     |   50 +-
 .../harmony/unpack200/tests/ArchiveTest.java       |  356 +--
 .../unpack200/tests/AttributeLayoutTest.java       |   78 +-
 .../harmony/unpack200/tests/BcBandsTest.java       |  654 ++---
 .../harmony/unpack200/tests/ClassBandsTest.java    |   50 +-
 .../harmony/unpack200/tests/CodeAttributeTest.java |    8 +-
 .../harmony/unpack200/tests/ICTupleTest.java       |   34 +-
 .../unpack200/tests/NewAttributeBandsTest.java     |  220 +-
 .../tests/SegmentConstantPoolArrayCacheTest.java   |   36 +-
 .../unpack200/tests/SegmentConstantPoolTest.java   |   40 +-
 .../harmony/unpack200/tests/SegmentTest.java       |   36 +-
 .../tests/bytecode/ClassFileEntryTest.java         |   80 +-
 .../unpack200/tests/bytecode/ConstantPoolTest.java |   26 +-
 .../commons/compress/utils/BitInputStreamTest.java |  204 +-
 .../commons/compress/utils/ByteUtilsTest.java      |  124 +-
 .../utils/ChecksumCalculatingInputStreamTest.java  |   58 +-
 .../commons/compress/utils/CountingStreamTest.java |   40 +-
 .../commons/compress/utils/FileNameUtilsTest.java  |   44 +-
 .../utils/FixedLengthBlockOutputStreamTest.java    |  384 +--
 .../apache/commons/compress/utils/IOUtilsTest.java |  170 +-
 .../MultiReadOnlySeekableByteChannelTest.java      |  366 +--
 .../utils/SeekableInMemoryByteChannelTest.java     |  288 +-
 .../compress/utils/ServiceLoaderIteratorTest.java  |   14 +-
 .../utils/SkipShieldingInputStreamTest.java        |   30 +-
 .../commons/compress/utils/TimeUtilsTest.java      |  120 +-
 .../ZipSplitReadOnlySeekableByteChannelTest.java   |  130 +-
 411 files changed, 38849 insertions(+), 39030 deletions(-)


[commons-compress] 02/05: Fix spelling

Posted by gg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit 71579a2074e09aee83c0fcdc9e7fe002d49e6d80
Author: Gary Gregory <ga...@gmail.com>
AuthorDate: Sun Dec 11 01:41:36 2022 -0500

    Fix spelling
---
 .../apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java   | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
index 0a77fa6e..827cabc5 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
@@ -66,7 +66,7 @@ class AES256SHA256Decoder extends AbstractCoder {
     }
 
     /**
-     * Convenience method that encodes Unicode characters into bytes in UTF-16 (ittle-endian byte order) charset
+     * Convenience method that encodes Unicode characters into bytes in UTF-16 (little-endian byte order) charset
      *
      * @param chars characters to encode
      * @return encoded characters
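
The corrected javadoc on AES256SHA256Decoder describes a convenience method that encodes characters into UTF-16 bytes in little-endian byte order. A minimal sketch of that encoding using only the standard library (illustrative; not the decoder's own implementation):

    import java.nio.charset.StandardCharsets;

    public class Utf16LeExample {
        public static void main(final String[] args) {
            // Hypothetical input characters, used only for illustration.
            final char[] chars = "secret".toCharArray();
            // UTF-16LE writes the low-order byte of each code unit first,
            // so 's' (U+0073) becomes the byte pair 0x73 0x00.
            final byte[] bytes = new String(chars).getBytes(StandardCharsets.UTF_16LE);
            System.out.println(bytes.length + " bytes for " + chars.length + " chars"); // 12 bytes for 6 chars
        }
    }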


[commons-compress] 01/05: Sort members

Posted by gg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit f5a37de3a4008cb00a49b9e92162836ff236f65e
Author: Gary Gregory <ga...@gmail.com>
AuthorDate: Sun Dec 11 01:36:45 2022 -0500

    Sort members
---
 .../commons/compress/MemoryLimitException.java     |   16 +-
 .../commons/compress/archivers/ArchiveEntry.java   |   22 +-
 .../compress/archivers/ArchiveInputStream.java     |   88 +-
 .../compress/archivers/ArchiveOutputStream.java    |  118 +-
 .../compress/archivers/ArchiveStreamFactory.java   |  298 +-
 .../apache/commons/compress/archivers/Lister.java  |  100 +-
 .../compress/archivers/ar/ArArchiveEntry.java      |  108 +-
 .../archivers/ar/ArArchiveInputStream.java         |  364 +--
 .../archivers/ar/ArArchiveOutputStream.java        |  154 +-
 .../compress/archivers/arj/ArjArchiveEntry.java    |  140 +-
 .../archivers/arj/ArjArchiveInputStream.java       |  314 +-
 .../compress/archivers/arj/LocalFileHeader.java    |  126 +-
 .../commons/compress/archivers/arj/MainHeader.java |   44 +-
 .../compress/archivers/cpio/CpioArchiveEntry.java  |  390 +--
 .../archivers/cpio/CpioArchiveInputStream.java     |  222 +-
 .../archivers/cpio/CpioArchiveOutputStream.java    |  466 +--
 .../commons/compress/archivers/cpio/CpioUtil.java  |   14 +-
 .../commons/compress/archivers/dump/Dirent.java    |   22 +-
 .../archivers/dump/DumpArchiveConstants.java       |   72 +-
 .../compress/archivers/dump/DumpArchiveEntry.java  |  782 ++---
 .../archivers/dump/DumpArchiveException.java       |    6 +-
 .../archivers/dump/DumpArchiveInputStream.java     |  330 +--
 .../archivers/dump/DumpArchiveSummary.java         |  248 +-
 .../compress/archivers/dump/DumpArchiveUtil.java   |   76 +-
 .../compress/archivers/dump/TapeInputStream.java   |  290 +-
 .../compress/archivers/examples/Archiver.java      |   18 +-
 .../examples/CloseableConsumerAdapter.java         |   10 +-
 .../compress/archivers/jar/JarArchiveEntry.java    |   30 +-
 .../archivers/jar/JarArchiveInputStream.java       |   34 +-
 .../archivers/sevenz/AES256SHA256Decoder.java      |  144 +-
 .../commons/compress/archivers/sevenz/Archive.java |   16 +-
 .../BoundedSeekableByteChannelInputStream.java     |   10 +-
 .../commons/compress/archivers/sevenz/CLI.java     |   46 +-
 .../commons/compress/archivers/sevenz/Coders.java  |  228 +-
 .../compress/archivers/sevenz/DeltaDecoder.java    |   10 +-
 .../commons/compress/archivers/sevenz/Folder.java  |   46 +-
 .../compress/archivers/sevenz/LZMA2Decoder.java    |   46 +-
 .../compress/archivers/sevenz/LZMADecoder.java     |   26 +-
 .../compress/archivers/sevenz/SevenZFile.java      | 3036 ++++++++++----------
 .../archivers/sevenz/SevenZFileOptions.java        |  170 +-
 .../compress/archivers/sevenz/SevenZMethod.java    |   18 +-
 .../sevenz/SevenZMethodConfiguration.java          |   26 +-
 .../archivers/sevenz/SevenZOutputFile.java         |  872 +++---
 .../archivers/tar/TarArchiveInputStream.java       |  766 ++---
 .../archivers/tar/TarArchiveOutputStream.java      |  726 ++---
 .../archivers/tar/TarArchiveSparseEntry.java       |    8 +-
 .../archivers/tar/TarArchiveStructSparse.java      |   16 +-
 .../commons/compress/archivers/tar/TarFile.java    |  766 ++---
 .../commons/compress/archivers/tar/TarUtils.java   | 1004 +++----
 .../archivers/zip/AbstractUnicodeExtraField.java   |  124 +-
 .../compress/archivers/zip/AsiExtraField.java      |  234 +-
 .../commons/compress/archivers/zip/BinaryTree.java |  138 +-
 .../compress/archivers/zip/CircularBuffer.java     |   30 +-
 .../archivers/zip/ExplodingInputStream.java        |  110 +-
 .../compress/archivers/zip/ExtraFieldUtils.java    |  474 +--
 .../compress/archivers/zip/GeneralPurposeBit.java  |  214 +-
 .../zip/InflaterInputStreamWithStatistics.java     |   20 +-
 .../commons/compress/archivers/zip/JarMarker.java  |   64 +-
 .../compress/archivers/zip/NioZipEncoding.java     |  154 +-
 .../compress/archivers/zip/PKWareExtraHeader.java  |  322 +--
 .../archivers/zip/ParallelScatterZipCreator.java   |  120 +-
 .../archivers/zip/ResourceAlignmentExtraField.java |   44 +-
 .../archivers/zip/ScatterZipOutputStream.java      |  180 +-
 .../compress/archivers/zip/StreamCompressor.java   |  328 +--
 .../archivers/zip/UnicodeCommentExtraField.java    |   22 +-
 .../archivers/zip/UnicodePathExtraField.java       |   18 +-
 .../archivers/zip/UnparseableExtraFieldData.java   |   58 +-
 .../archivers/zip/UnrecognizedExtraField.java      |  122 +-
 .../archivers/zip/UnshrinkingInputStream.java      |   30 +-
 .../zip/UnsupportedZipFeatureException.java        |  122 +-
 .../archivers/zip/X0015_CertificateIdForFile.java  |   20 +-
 .../X0016_CertificateIdForCentralDirectory.java    |   20 +-
 .../zip/X0017_StrongEncryptionHeader.java          |   58 +-
 .../archivers/zip/X5455_ExtendedTimestamp.java     |  524 ++--
 .../compress/archivers/zip/X7875_NewUnix.java      |  318 +-
 .../zip/Zip64ExtendedInformationExtraField.java    |  192 +-
 .../archivers/zip/Zip64RequiredException.java      |   14 +-
 .../compress/archivers/zip/ZipArchiveEntry.java    | 1518 +++++-----
 .../archivers/zip/ZipArchiveEntryRequest.java      |   30 +-
 .../archivers/zip/ZipArchiveInputStream.java       | 1834 ++++++------
 .../archivers/zip/ZipArchiveOutputStream.java      | 2538 ++++++++--------
 .../archivers/zip/ZipEightByteInteger.java         |  156 +-
 .../compress/archivers/zip/ZipEncoding.java        |   14 +-
 .../compress/archivers/zip/ZipEncodingHelper.java  |   20 +-
 .../compress/archivers/zip/ZipExtraField.java      |   38 +-
 .../commons/compress/archivers/zip/ZipLong.java    |  154 +-
 .../commons/compress/archivers/zip/ZipMethod.java  |   28 +-
 .../commons/compress/archivers/zip/ZipShort.java   |  122 +-
 .../archivers/zip/ZipSplitOutputStream.java        |  210 +-
 .../zip/ZipSplitReadOnlySeekableByteChannel.java   |  256 +-
 .../commons/compress/archivers/zip/ZipUtil.java    |  364 +--
 .../apache/commons/compress/changes/Change.java    |   48 +-
 .../apache/commons/compress/changes/ChangeSet.java |   40 +-
 .../compress/changes/ChangeSetPerformer.java       |  242 +-
 .../commons/compress/changes/ChangeSetResults.java |   18 +-
 .../compressors/CompressorInputStream.java         |   30 +-
 .../compressors/CompressorStreamFactory.java       |  200 +-
 .../commons/compress/compressors/FileNameUtil.java |   48 +-
 .../brotli/BrotliCompressorInputStream.java        |   32 +-
 .../compress/compressors/brotli/BrotliUtils.java   |   28 +-
 .../bzip2/BZip2CompressorInputStream.java          | 1122 ++++----
 .../bzip2/BZip2CompressorOutputStream.java         |  792 ++---
 .../compress/compressors/bzip2/BZip2Utils.java     |   34 +-
 .../compress/compressors/bzip2/BlockSort.java      |  624 ++--
 .../commons/compress/compressors/bzip2/CRC.java    |   12 +-
 .../deflate/DeflateCompressorInputStream.java      |   88 +-
 .../deflate/DeflateCompressorOutputStream.java     |   36 +-
 .../compressors/deflate/DeflateParameters.java     |   42 +-
 .../deflate64/Deflate64CompressorInputStream.java  |   64 +-
 .../compressors/deflate64/HuffmanDecoder.java      |  728 ++---
 .../gzip/GzipCompressorInputStream.java            |   96 +-
 .../gzip/GzipCompressorOutputStream.java           |  154 +-
 .../compress/compressors/gzip/GzipParameters.java  |  102 +-
 .../compress/compressors/gzip/GzipUtils.java       |   36 +-
 .../lz4/BlockLZ4CompressorInputStream.java         |   94 +-
 .../lz4/BlockLZ4CompressorOutputStream.java        |  432 +--
 .../lz4/FramedLZ4CompressorInputStream.java        |  308 +-
 .../lz4/FramedLZ4CompressorOutputStream.java       |  196 +-
 .../commons/compress/compressors/lz4/XXHash32.java |  104 +-
 .../AbstractLZ77CompressorInputStream.java         |  152 +-
 .../compressors/lz77support/LZ77Compressor.java    |  452 +--
 .../compressors/lz77support/Parameters.java        |  322 +--
 .../lzma/LZMACompressorInputStream.java            |   74 +-
 .../lzma/LZMACompressorOutputStream.java           |   30 +-
 .../compress/compressors/lzma/LZMAUtils.java       |  100 +-
 .../compress/compressors/lzw/LZWInputStream.java   |  284 +-
 .../pack200/Pack200CompressorInputStream.java      |  248 +-
 .../pack200/Pack200CompressorOutputStream.java     |   50 +-
 .../compress/compressors/pack200/Pack200Utils.java |   46 +-
 .../compress/compressors/pack200/StreamBridge.java |    8 +-
 .../snappy/FramedSnappyCompressorInputStream.java  |  224 +-
 .../snappy/FramedSnappyCompressorOutputStream.java |   62 +-
 .../compressors/snappy/PureJavaCrc32C.java         |  134 +-
 .../snappy/SnappyCompressorInputStream.java        |   92 +-
 .../snappy/SnappyCompressorOutputStream.java       |  208 +-
 .../compressors/xz/XZCompressorInputStream.java    |   42 +-
 .../compressors/xz/XZCompressorOutputStream.java   |   28 +-
 .../commons/compress/compressors/xz/XZUtils.java   |  124 +-
 .../compressors/z/ZCompressorInputStream.java      |  100 +-
 .../zstandard/ZstdCompressorInputStream.java       |   32 +-
 .../zstandard/ZstdCompressorOutputStream.java      |   56 +-
 .../compress/compressors/zstandard/ZstdUtils.java  |   56 +-
 .../harmony/archive/internal/nls/Messages.java     |  144 +-
 .../commons/compress/harmony/pack200/Archive.java  |  348 +--
 .../harmony/pack200/AttributeDefinitionBands.java  |  162 +-
 .../compress/harmony/pack200/BHSDCodec.java        |  192 +-
 .../commons/compress/harmony/pack200/BandSet.java  |  742 ++---
 .../commons/compress/harmony/pack200/BcBands.java  |   84 +-
 .../commons/compress/harmony/pack200/CPClass.java  |   10 +-
 .../compress/harmony/pack200/CPMethodOrField.java  |   26 +-
 .../compress/harmony/pack200/CPNameAndType.java    |   18 +-
 .../compress/harmony/pack200/CPSignature.java      |   18 +-
 .../commons/compress/harmony/pack200/CPString.java |    8 +-
 .../commons/compress/harmony/pack200/CPUTF8.java   |    6 +-
 .../compress/harmony/pack200/ClassBands.java       | 2174 +++++++-------
 .../commons/compress/harmony/pack200/Codec.java    |   38 +-
 .../compress/harmony/pack200/CodecEncoding.java    |   12 +-
 .../commons/compress/harmony/pack200/CpBands.java  |  786 ++---
 .../compress/harmony/pack200/FileBands.java        |   30 +-
 .../commons/compress/harmony/pack200/IcBands.java  |  190 +-
 .../commons/compress/harmony/pack200/IntList.java  |   14 +-
 .../harmony/pack200/MetadataBandGroup.java         |  334 +--
 .../compress/harmony/pack200/NewAttribute.java     |  186 +-
 .../harmony/pack200/NewAttributeBands.java         | 1036 +++----
 .../compress/harmony/pack200/Pack200Adapter.java   |   24 +-
 .../harmony/pack200/Pack200ClassReader.java        |   36 +-
 .../harmony/pack200/Pack200PackerAdapter.java      |   62 +-
 .../compress/harmony/pack200/PackingUtils.java     |  104 +-
 .../compress/harmony/pack200/PopulationCodec.java  |   28 +-
 .../commons/compress/harmony/pack200/RunCodec.java |   70 +-
 .../commons/compress/harmony/pack200/Segment.java  |  894 +++---
 .../compress/harmony/pack200/SegmentHeader.java    |  312 +-
 .../compress/harmony/unpack200/Archive.java        |  104 +-
 .../harmony/unpack200/AttrDefinitionBands.java     |   14 +-
 .../harmony/unpack200/AttributeLayout.java         |   42 +-
 .../compress/harmony/unpack200/BandSet.java        |  388 +--
 .../compress/harmony/unpack200/BcBands.java        |  192 +-
 .../compress/harmony/unpack200/ClassBands.java     | 1120 ++++----
 .../compress/harmony/unpack200/CpBands.java        |  514 ++--
 .../compress/harmony/unpack200/FileBands.java      |   74 +-
 .../compress/harmony/unpack200/IcBands.java        |  168 +-
 .../compress/harmony/unpack200/IcTuple.java        |  332 +--
 .../harmony/unpack200/MetadataBandGroup.java       |   92 +-
 .../harmony/unpack200/NewAttributeBands.java       | 1226 ++++----
 .../harmony/unpack200/Pack200UnpackerAdapter.java  |   32 +-
 .../compress/harmony/unpack200/Segment.java        |  160 +-
 .../harmony/unpack200/SegmentConstantPool.java     |  236 +-
 .../unpack200/SegmentConstantPoolArrayCache.java   |  142 +-
 .../compress/harmony/unpack200/SegmentHeader.java  |  250 +-
 .../compress/harmony/unpack200/SegmentUtils.java   |   28 +-
 .../bytecode/AnnotationDefaultAttribute.java       |   36 +-
 .../unpack200/bytecode/AnnotationsAttribute.java   |   76 +-
 .../harmony/unpack200/bytecode/Attribute.java      |   10 +-
 .../unpack200/bytecode/BCIRenumberedAttribute.java |   30 +-
 .../harmony/unpack200/bytecode/ByteCode.java       |  246 +-
 .../harmony/unpack200/bytecode/CPClass.java        |   24 +-
 .../harmony/unpack200/bytecode/CPConstant.java     |    8 +-
 .../harmony/unpack200/bytecode/CPDouble.java       |    8 +-
 .../harmony/unpack200/bytecode/CPFieldRef.java     |   82 +-
 .../harmony/unpack200/bytecode/CPFloat.java        |    8 +-
 .../harmony/unpack200/bytecode/CPInteger.java      |    8 +-
 .../unpack200/bytecode/CPInterfaceMethodRef.java   |   28 +-
 .../harmony/unpack200/bytecode/CPLong.java         |    8 +-
 .../harmony/unpack200/bytecode/CPMember.java       |   74 +-
 .../harmony/unpack200/bytecode/CPMethod.java       |   18 +-
 .../harmony/unpack200/bytecode/CPMethodRef.java    |   18 +-
 .../harmony/unpack200/bytecode/CPNameAndType.java  |   86 +-
 .../compress/harmony/unpack200/bytecode/CPRef.java |    4 +-
 .../harmony/unpack200/bytecode/CPString.java       |   46 +-
 .../harmony/unpack200/bytecode/CPUTF8.java         |   30 +-
 .../unpack200/bytecode/ClassConstantPool.java      |   62 +-
 .../harmony/unpack200/bytecode/ClassFileEntry.java |    8 +-
 .../harmony/unpack200/bytecode/CodeAttribute.java  |   52 +-
 .../unpack200/bytecode/ConstantPoolEntry.java      |   20 +-
 .../unpack200/bytecode/ConstantValueAttribute.java |    8 +-
 .../unpack200/bytecode/DeprecatedAttribute.java    |   12 +-
 .../bytecode/EnclosingMethodAttribute.java         |   40 +-
 .../unpack200/bytecode/ExceptionTableEntry.java    |   18 +-
 .../unpack200/bytecode/ExceptionsAttribute.java    |    8 +-
 .../unpack200/bytecode/InnerClassesAttribute.java  |   60 +-
 .../bytecode/LineNumberTableAttribute.java         |   52 +-
 .../bytecode/LocalVariableTableAttribute.java      |   78 +-
 .../bytecode/LocalVariableTypeTableAttribute.java  |   68 +-
 .../harmony/unpack200/bytecode/NewAttribute.java   |  244 +-
 .../harmony/unpack200/bytecode/OperandManager.java |  122 +-
 ...timeVisibleorInvisibleAnnotationsAttribute.java |   30 +-
 ...leorInvisibleParameterAnnotationsAttribute.java |  106 +-
 .../unpack200/bytecode/SignatureAttribute.java     |   22 +-
 .../unpack200/bytecode/SourceFileAttribute.java    |   26 +-
 .../unpack200/bytecode/forms/ByteCodeForm.java     |   68 +-
 .../unpack200/bytecode/forms/ClassRefForm.java     |   20 +-
 .../bytecode/forms/ClassSpecificReferenceForm.java |    4 +-
 .../bytecode/forms/InitMethodReferenceForm.java    |    8 +-
 .../bytecode/forms/NarrowClassRefForm.java         |   10 +-
 .../unpack200/bytecode/forms/ReferenceForm.java    |   22 +-
 .../bytecode/forms/SingleByteReferenceForm.java    |   10 +-
 .../bytecode/forms/SuperFieldRefForm.java          |   10 +-
 .../bytecode/forms/SuperMethodRefForm.java         |   10 +-
 .../unpack200/bytecode/forms/ThisFieldRefForm.java |   10 +-
 .../bytecode/forms/ThisMethodRefForm.java          |   10 +-
 .../bytecode/forms/VariableInstructionForm.java    |   74 +-
 .../FileBasedScatterGatherBackingStore.java        |   20 +-
 .../parallel/ScatterGatherBackingStore.java        |   12 +-
 .../commons/compress/utils/ArchiveUtils.java       |  242 +-
 .../commons/compress/utils/BitInputStream.java     |  134 +-
 .../commons/compress/utils/BoundedInputStream.java |   28 +-
 .../apache/commons/compress/utils/ByteUtils.java   |  172 +-
 .../apache/commons/compress/utils/Charsets.java    |   52 +-
 .../utils/ChecksumCalculatingInputStream.java      |   16 +-
 .../compress/utils/CountingInputStream.java        |   40 +-
 .../compress/utils/CountingOutputStream.java       |   32 +-
 .../apache/commons/compress/utils/ExactMath.java   |    8 +-
 .../utils/FixedLengthBlockOutputStream.java        |  240 +-
 .../org/apache/commons/compress/utils/IOUtils.java |  268 +-
 .../utils/MultiReadOnlySeekableByteChannel.java    |  236 +-
 .../utils/SeekableInMemoryByteChannel.java         |  134 +-
 .../org/apache/commons/compress/utils/Sets.java    |    8 +-
 .../apache/commons/compress/AbstractTestCase.java  |  312 +-
 .../apache/commons/compress/ArchiveReadTest.java   |   18 +-
 .../apache/commons/compress/ArchiveUtilsTest.java  |   98 +-
 .../apache/commons/compress/ChainingTestCase.java  |   12 +-
 .../commons/compress/DetectArchiverTestCase.java   |   56 +-
 .../org/apache/commons/compress/IOMethodsTest.java |  174 +-
 .../org/apache/commons/compress/OsgiITest.java     |   24 +-
 .../commons/compress/archivers/ArTestCase.java     |  194 +-
 .../archivers/ArchiveOutputStreamTest.java         |  188 +-
 .../archivers/ArchiveStreamFactoryTest.java        |  438 +--
 .../commons/compress/archivers/CpioTestCase.java   |   14 +-
 .../commons/compress/archivers/DumpTestCase.java   |   86 +-
 .../compress/archivers/ExceptionMessageTest.java   |   20 +-
 .../commons/compress/archivers/LongPathTest.java   |   16 +-
 .../compress/archivers/LongSymLinkTest.java        |   16 +-
 .../commons/compress/archivers/SevenZTestCase.java |  222 +-
 .../commons/compress/archivers/TarTestCase.java    |  528 ++--
 .../commons/compress/archivers/ZipTestCase.java    | 1176 ++++----
 .../archivers/ar/ArArchiveInputStreamTest.java     |   46 +-
 .../archivers/arj/ArjArchiveInputStreamTest.java   |   46 +-
 .../compress/archivers/arj/CoverageTest.java       |   12 +-
 .../archivers/dump/DumpArchiveInputStreamTest.java |   58 +-
 .../archivers/dump/DumpArchiveUtilTest.java        |   14 +-
 .../compress/archivers/examples/ExpanderTest.java  |  206 +-
 .../examples/ParameterizedArchiverTest.java        |   86 +-
 .../examples/ParameterizedExpanderTest.java        |   98 +-
 .../archivers/examples/SevenZArchiverTest.java     |   72 +-
 .../compress/archivers/jar/ExpandApkTest.java      |   10 +-
 .../archivers/memory/MemoryArchiveEntry.java       |   10 +-
 .../archivers/memory/MemoryArchiveInputStream.java |    8 +-
 .../compress/archivers/sevenz/CoverageTest.java    |   10 +-
 .../compress/archivers/sevenz/FolderTest.java      |   28 +-
 .../archivers/sevenz/SevenZArchiveEntryTest.java   |   30 +-
 .../compress/archivers/sevenz/SevenZFileTest.java  |  926 +++---
 .../archivers/sevenz/SevenZNativeHeapTest.java     |  216 +-
 .../archivers/sevenz/SevenZOutputFileTest.java     |  572 ++--
 .../commons/compress/archivers/tar/BigFilesIT.java |   58 +-
 .../compress/archivers/tar/FileTimesIT.java        |  406 +--
 .../compress/archivers/tar/SparseFilesTest.java    |  498 ++--
 .../archivers/tar/TarArchiveEntryTest.java         |  486 ++--
 .../archivers/tar/TarArchiveInputStreamTest.java   |  348 +--
 .../archivers/tar/TarArchiveOutputStreamTest.java  |  834 +++---
 .../compress/archivers/tar/TarFileTest.java        |  238 +-
 .../commons/compress/archivers/tar/TarLister.java  |   48 +-
 .../archivers/tar/TarMemoryFileSystemTest.java     |   60 +-
 .../compress/archivers/tar/TarUtilsTest.java       |  938 +++---
 .../compress/archivers/zip/AsiExtraFieldTest.java  |   56 +-
 .../compress/archivers/zip/BitStreamTest.java      |   42 +-
 .../compress/archivers/zip/CircularBufferTest.java |   36 +-
 .../compress/archivers/zip/DataDescriptorTest.java |  100 +-
 .../archivers/zip/EncryptedArchiveTest.java        |   32 +-
 .../compress/archivers/zip/ExplodeSupportTest.java |   42 +-
 .../archivers/zip/ExtraFieldUtilsTest.java         |  228 +-
 .../archivers/zip/GeneralPurposeBitTest.java       |   72 +-
 .../commons/compress/archivers/zip/Lister.java     |   30 +-
 .../archivers/zip/Maven221MultiVolumeTest.java     |   10 +-
 .../compress/archivers/zip/NioZipEncodingTest.java |   48 +-
 .../zip/ParallelScatterZipCreatorTest.java         |  150 +-
 .../compress/archivers/zip/ScatterSampleTest.java  |   38 +-
 .../archivers/zip/ScatterZipOutputStreamTest.java  |    8 +-
 .../archivers/zip/StreamCompressorTest.java        |   26 +-
 .../compress/archivers/zip/UTF8ZipFilesTest.java   |  476 +--
 .../compress/archivers/zip/X000A_NTFSTest.java     |   32 +-
 .../archivers/zip/X5455_ExtendedTimestampTest.java |  488 ++--
 .../compress/archivers/zip/X7875_NewUnixTest.java  |  214 +-
 .../Zip64ExtendedInformationExtraFieldTest.java    |  204 +-
 .../compress/archivers/zip/Zip64SupportIT.java     | 2124 +++++++-------
 .../archivers/zip/ZipArchiveEntryTest.java         |  358 +--
 .../archivers/zip/ZipArchiveInputStreamTest.java   |  908 +++---
 .../archivers/zip/ZipClassCoverageTest.java        |   26 +-
 .../archivers/zip/ZipEightByteIntegerTest.java     |   66 +-
 .../compress/archivers/zip/ZipEncodingTest.java    |  124 +-
 .../zip/ZipFileIgnoringLocalFileHeaderTest.java    |   64 +-
 .../compress/archivers/zip/ZipFileTest.java        | 1156 ++++----
 .../compress/archivers/zip/ZipLongTest.java        |   80 +-
 .../archivers/zip/ZipMemoryFileSystemTest.java     |  490 ++--
 .../compress/archivers/zip/ZipShortTest.java       |   72 +-
 .../archivers/zip/ZipSplitOutputStreamTest.java    |   34 +-
 .../compress/archivers/zip/ZipUtilTest.java        |  172 +-
 .../compress/changes/ChangeSetTestCase.java        |  828 +++---
 .../compress/compressors/BZip2TestCase.java        |   46 +-
 .../compress/compressors/BZip2UtilsTestCase.java   |   52 +-
 .../compress/compressors/DeflateTestCase.java      |   36 +-
 .../compressors/DetectCompressorTestCase.java      |  208 +-
 .../compress/compressors/FramedSnappyTestCase.java |   70 +-
 .../commons/compress/compressors/GZipTestCase.java |  180 +-
 .../compress/compressors/GzipUtilsTestCase.java    |   76 +-
 .../commons/compress/compressors/LZMATestCase.java |   56 +-
 .../compress/compressors/Pack200TestCase.java      |  172 +-
 .../commons/compress/compressors/XZTestCase.java   |   48 +-
 .../commons/compress/compressors/ZTestCase.java    |   34 +-
 .../brotli/BrotliCompressorInputStreamTest.java    |  122 +-
 .../bzip2/BZip2CompressorInputStreamTest.java      |   72 +-
 .../compress/compressors/bzip2/BlockSortTest.java  |  122 +-
 .../bzip2/PythonTruncatedBzip2Test.java            |   52 +-
 .../deflate/DeflateCompressorInputStreamTest.java  |   20 +-
 .../compressors/deflate/DeflateParametersTest.java |    8 +-
 .../compressors/deflate64/HuffmanDecoderTest.java  |  160 +-
 .../lz4/BlockLZ4CompressorInputStreamTest.java     |   28 +-
 .../lz4/BlockLZ4CompressorOutputStreamTest.java    |  234 +-
 .../lz4/BlockLZ4CompressorRoundtripTest.java       |   42 +-
 .../compress/compressors/lz4/FactoryTest.java      |    8 +-
 .../lz4/FramedLZ4CompressorInputStreamTest.java    |  452 +--
 .../lz4/FramedLZ4CompressorRoundtripTest.java      |   40 +-
 .../AbstractLZ77CompressorInputStreamTest.java     |   22 +-
 .../lz77support/LZ77CompressorTest.java            |  302 +-
 .../compressors/lz77support/ParametersTest.java    |   92 +-
 .../compressors/lzma/LZMAUtilsTestCase.java        |   68 +-
 .../FramedSnappyCompressorInputStreamTest.java     |  202 +-
 .../compressors/snappy/SnappyRoundtripTest.java    |  124 +-
 .../xz/XZCompressorInputStreamTest.java            |   54 +-
 .../compress/compressors/xz/XZUtilsTestCase.java   |   72 +-
 .../compressors/z/ZCompressorInputStreamTest.java  |   26 +-
 .../zstandard/ZstdCompressorInputStreamTest.java   |  188 +-
 .../compressors/zstandard/ZstdRoundtripTest.java   |   34 +-
 .../compressors/zstandard/ZstdUtilsTest.java       |   24 +-
 .../harmony/pack200/tests/ArchiveTest.java         |  330 +--
 .../harmony/pack200/tests/BHSDCodecTest.java       |   20 +-
 .../harmony/pack200/tests/CodecEncodingTest.java   |  136 +-
 .../compress/harmony/pack200/tests/CodecTest.java  |  160 +-
 .../compress/harmony/pack200/tests/HelloWorld.java |    8 +-
 .../pack200/tests/NewAttributeBandsTest.java       |  320 +--
 .../harmony/pack200/tests/PackingOptionsTest.java  |  568 ++--
 .../harmony/pack200/tests/PopulationCodecTest.java |   46 +-
 .../harmony/pack200/tests/RunCodecTest.java        |   78 +-
 .../unpack200/tests/AbstractBandsTestCase.java     |   50 +-
 .../harmony/unpack200/tests/ArchiveTest.java       |  356 +--
 .../unpack200/tests/AttributeLayoutTest.java       |   78 +-
 .../harmony/unpack200/tests/BcBandsTest.java       |  654 ++---
 .../harmony/unpack200/tests/ClassBandsTest.java    |   50 +-
 .../harmony/unpack200/tests/CodeAttributeTest.java |    8 +-
 .../harmony/unpack200/tests/ICTupleTest.java       |   34 +-
 .../unpack200/tests/NewAttributeBandsTest.java     |  220 +-
 .../tests/SegmentConstantPoolArrayCacheTest.java   |   36 +-
 .../unpack200/tests/SegmentConstantPoolTest.java   |   40 +-
 .../harmony/unpack200/tests/SegmentTest.java       |   36 +-
 .../tests/bytecode/ClassFileEntryTest.java         |   80 +-
 .../unpack200/tests/bytecode/ConstantPoolTest.java |   26 +-
 .../commons/compress/utils/BitInputStreamTest.java |  204 +-
 .../commons/compress/utils/ByteUtilsTest.java      |  124 +-
 .../utils/ChecksumCalculatingInputStreamTest.java  |   58 +-
 .../commons/compress/utils/CountingStreamTest.java |   40 +-
 .../commons/compress/utils/FileNameUtilsTest.java  |   44 +-
 .../utils/FixedLengthBlockOutputStreamTest.java    |  384 +--
 .../apache/commons/compress/utils/IOUtilsTest.java |  170 +-
 .../MultiReadOnlySeekableByteChannelTest.java      |  366 +--
 .../utils/SeekableInMemoryByteChannelTest.java     |  288 +-
 .../compress/utils/ServiceLoaderIteratorTest.java  |   14 +-
 .../utils/SkipShieldingInputStreamTest.java        |   30 +-
 .../commons/compress/utils/TimeUtilsTest.java      |  120 +-
 .../ZipSplitReadOnlySeekableByteChannelTest.java   |  130 +-
 408 files changed, 38768 insertions(+), 38768 deletions(-)

diff --git a/src/main/java/org/apache/commons/compress/MemoryLimitException.java b/src/main/java/org/apache/commons/compress/MemoryLimitException.java
index 093d5aef..88aef325 100644
--- a/src/main/java/org/apache/commons/compress/MemoryLimitException.java
+++ b/src/main/java/org/apache/commons/compress/MemoryLimitException.java
@@ -32,8 +32,14 @@ public class MemoryLimitException extends IOException {
 
     private static final long serialVersionUID = 1L;
 
+    private static String buildMessage(final long memoryNeededInKb, final int memoryLimitInKb) {
+        return memoryNeededInKb + " kb of memory would be needed; limit was "
+                + memoryLimitInKb + " kb. " +
+                "If the file is not corrupt, consider increasing the memory limit.";
+    }
     /** long instead of int to account for overflow for corrupt files. */
     private final long memoryNeededInKb;
+
     private final int memoryLimitInKb;
 
     public MemoryLimitException(final long memoryNeededInKb, final int memoryLimitInKb) {
@@ -48,17 +54,11 @@ public class MemoryLimitException extends IOException {
         this.memoryLimitInKb = memoryLimitInKb;
     }
 
-    public long getMemoryNeededInKb() {
-        return memoryNeededInKb;
-    }
-
     public int getMemoryLimitInKb() {
         return memoryLimitInKb;
     }
 
-    private static String buildMessage(final long memoryNeededInKb, final int memoryLimitInKb) {
-        return memoryNeededInKb + " kb of memory would be needed; limit was "
-                + memoryLimitInKb + " kb. " +
-                "If the file is not corrupt, consider increasing the memory limit.";
+    public long getMemoryNeededInKb() {
+        return memoryNeededInKb;
     }
 }
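
MemoryLimitException only has its members reordered here; the public API shown in the diff (the two-argument constructor plus getMemoryNeededInKb() and getMemoryLimitInKb()) is unchanged. A small illustrative sketch of how calling code might use that API (the checkMemory guard below is a hypothetical helper, not part of the library):

    import org.apache.commons.compress.MemoryLimitException;

    public class MemoryLimitExample {

        // Hypothetical guard: throws when a decoder would need more memory than allowed.
        static void checkMemory(final long neededKb, final int limitKb) throws MemoryLimitException {
            if (neededKb > limitKb) {
                throw new MemoryLimitException(neededKb, limitKb);
            }
        }

        public static void main(final String[] args) {
            try {
                checkMemory(2048, 1024);
            } catch (final MemoryLimitException e) {
                // Both accessors appear in the class as reordered above.
                System.err.println("Needed " + e.getMemoryNeededInKb() + " kb but the limit was "
                        + e.getMemoryLimitInKb() + " kb: " + e.getMessage());
            }
        }
    }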
diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveEntry.java
index d5fa746a..ee611e17 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveEntry.java
@@ -25,6 +25,17 @@ import java.util.Date;
  */
 public interface ArchiveEntry {
 
+    /** Special value indicating that the size is unknown */
+    long SIZE_UNKNOWN = -1;
+
+    /**
+     * Gets the last modified date of this entry.
+     *
+     * @return the last modified date of this entry.
+     * @since 1.1
+     */
+    Date getLastModifiedDate();
+
     /**
      * Gets the name of the entry in this archive. May refer to a file or directory or other item.
      *
@@ -41,21 +52,10 @@ public interface ArchiveEntry {
      */
     long getSize();
 
-    /** Special value indicating that the size is unknown */
-    long SIZE_UNKNOWN = -1;
-
     /**
      * Returns true if this entry refers to a directory.
      *
      * @return true if this entry refers to a directory.
      */
     boolean isDirectory();
-
-    /**
-     * Gets the last modified date of this entry.
-     *
-     * @return the last modified date of this entry.
-     * @since 1.1
-     */
-    Date getLastModifiedDate();
 }
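
As an illustration only (not part of this change set), a minimal sketch of consuming the interface shown above; EntryPrinter is a made-up helper name.

    import java.util.Date;
    import org.apache.commons.compress.archivers.ArchiveEntry;

    final class EntryPrinter {
        static void print(final ArchiveEntry entry) {
            final long size = entry.getSize();
            // SIZE_UNKNOWN (-1) marks entries whose size is not stored in the archive.
            final String sizeText = size == ArchiveEntry.SIZE_UNKNOWN ? "unknown" : Long.toString(size);
            final Date modified = entry.getLastModifiedDate();
            System.out.println((entry.isDirectory() ? "dir  " : "file ")
                    + entry.getName() + " " + sizeText + " " + modified);
        }
    }
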
diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
index 2f03a45b..1cd9304e 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveInputStream.java
@@ -39,20 +39,28 @@ import java.io.InputStream;
  */
 public abstract class ArchiveInputStream extends InputStream {
 
-    private final byte[] single = new byte[1];
     private static final int BYTE_MASK = 0xFF;
+    private final byte[] single = new byte[1];
 
     /** holds the number of bytes read in this stream */
     private long bytesRead;
 
     /**
-     * Returns the next Archive Entry in this Stream.
+     * Whether this stream is able to read the given entry.
      *
-     * @return the next entry,
-     *         or {@code null} if there are no more entries
-     * @throws IOException if the next entry could not be read
+     * <p>
+     * Some archive formats support variants or details that are not supported (yet).
+     * </p>
+     *
+     * @param archiveEntry
+     *            the entry to test
+     * @return This implementation always returns true.
+     *
+     * @since 1.1
      */
-    public abstract ArchiveEntry getNextEntry() throws IOException;
+    public boolean canReadEntryData(final ArchiveEntry archiveEntry) {
+        return true;
+    }
 
     /*
      * Note that subclasses also implement specific get() methods which
@@ -63,25 +71,6 @@ public abstract class ArchiveInputStream extends InputStream {
      */
     // public abstract XXXArchiveEntry getNextXXXEntry() throws IOException;
 
-    /**
-     * Reads a byte of data. This method will block until enough input is
-     * available.
-     *
-     * Simply calls the {@link #read(byte[], int, int)} method.
-     *
-     * MUST be overridden if the {@link #read(byte[], int, int)} method
-     * is not overridden; may be overridden otherwise.
-     *
-     * @return the byte read, or -1 if end of input is reached
-     * @throws IOException
-     *             if an I/O error has occurred
-     */
-    @Override
-    public int read() throws IOException {
-        final int num = read(single, 0, 1);
-        return num == -1 ? -1 : single[0] & BYTE_MASK;
-    }
-
     /**
      * Increments the counter of already read bytes.
      * Doesn't increment if the EOF has been hit (read == -1)
@@ -106,13 +95,12 @@ public abstract class ArchiveInputStream extends InputStream {
     }
 
     /**
-     * Decrements the counter of already read bytes.
-     *
-     * @param pushedBack the number of bytes pushed back.
+     * Returns the current number of bytes read from this stream.
+     * @return the number of read bytes
      * @since 1.1
      */
-    protected void pushedBackBytes(final long pushedBack) {
-        bytesRead -= pushedBack;
+    public long getBytesRead() {
+        return bytesRead;
     }
 
     /**
@@ -127,29 +115,41 @@ public abstract class ArchiveInputStream extends InputStream {
     }
 
     /**
-     * Returns the current number of bytes read from this stream.
-     * @return the number of read bytes
+     * Returns the next Archive Entry in this Stream.
+     *
+     * @return the next entry,
+     *         or {@code null} if there are no more entries
+     * @throws IOException if the next entry could not be read
+     */
+    public abstract ArchiveEntry getNextEntry() throws IOException;
+
+    /**
+     * Decrements the counter of already read bytes.
+     *
+     * @param pushedBack the number of bytes pushed back.
      * @since 1.1
      */
-    public long getBytesRead() {
-        return bytesRead;
+    protected void pushedBackBytes(final long pushedBack) {
+        bytesRead -= pushedBack;
     }
 
     /**
-     * Whether this stream is able to read the given entry.
+     * Reads a byte of data. This method will block until enough input is
+     * available.
      *
-     * <p>
-     * Some archive formats support variants or details that are not supported (yet).
-     * </p>
+     * Simply calls the {@link #read(byte[], int, int)} method.
      *
-     * @param archiveEntry
-     *            the entry to test
-     * @return This implementation always returns true.
+     * MUST be overridden if the {@link #read(byte[], int, int)} method
+     * is not overridden; may be overridden otherwise.
      *
-     * @since 1.1
+     * @return the byte read, or -1 if end of input is reached
+     * @throws IOException
+     *             if an I/O error has occurred
      */
-    public boolean canReadEntryData(final ArchiveEntry archiveEntry) {
-        return true;
+    @Override
+    public int read() throws IOException {
+        final int num = read(single, 0, 1);
+        return num == -1 ? -1 : single[0] & BYTE_MASK;
     }
 
 }
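
As an illustration only (not part of this change set), a minimal sketch of the usual read loop over the methods shown above; EntryLister is a made-up helper name, and the caller is expected to supply and close the stream.

    import java.io.IOException;
    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.ArchiveInputStream;

    final class EntryLister {
        static void list(final ArchiveInputStream in) throws IOException {
            ArchiveEntry entry;
            while ((entry = in.getNextEntry()) != null) {
                if (!in.canReadEntryData(entry)) {
                    // unsupported format variant; skip this entry
                    continue;
                }
                System.out.println(entry.getName());
            }
            System.out.println("bytes read: " + in.getBytesRead());
        }
    }
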
diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java
index a686c467..2d5fbfa5 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveOutputStream.java
@@ -48,23 +48,28 @@ import java.nio.file.Path;
  */
 public abstract class ArchiveOutputStream extends OutputStream {
 
+    static final int BYTE_MASK = 0xFF;
     /** Temporary buffer used for the {@link #write(int)} method */
     private final byte[] oneByte = new byte[1];
-    static final int BYTE_MASK = 0xFF;
 
     /** holds the number of bytes written to this stream */
     private long bytesWritten;
     // Methods specific to ArchiveOutputStream
 
     /**
-     * Writes the headers for an archive entry to the output stream.
-     * The caller must then write the content to the stream and call
-     * {@link #closeArchiveEntry()} to complete the process.
+     * Whether this stream is able to write the given entry.
      *
-     * @param entry describes the entry
-     * @throws IOException if an I/O error occurs
+     * <p>Some archive formats support variants or details that are
+     * not supported (yet).</p>
+     *
+     * @param archiveEntry
+     *            the entry to test
+     * @return This implementation always returns true.
+     * @since 1.1
      */
-    public abstract void putArchiveEntry(ArchiveEntry entry) throws IOException;
+    public boolean canWriteEntryData(final ArchiveEntry archiveEntry) {
+        return true;
+    }
 
     /**
      * Closes the archive entry, writing any trailer information that may
@@ -74,12 +79,27 @@ public abstract class ArchiveOutputStream extends OutputStream {
     public abstract void closeArchiveEntry() throws IOException;
 
     /**
-     * Finishes the addition of entries to this stream, without closing it.
-     * Additional data can be written, if the format supports it.
+     * Increments the counter of already written bytes.
+     * Doesn't increment if EOF has been hit ({@code written == -1}).
      *
-     * @throws IOException if the user forgets to close the entry.
+     * @param written the number of bytes written
      */
-    public abstract void finish() throws IOException;
+    protected void count(final int written) {
+        count((long) written);
+    }
+
+    /**
+     * Increments the counter of already written bytes.
+     * Doesn't increment if EOF has been hit ({@code written == -1}).
+     *
+     * @param written the number of bytes written
+     * @since 1.1
+     */
+    protected void count(final long written) {
+        if (written != -1) {
+            bytesWritten = bytesWritten + written;
+        }
+    }
 
     /**
      * Create an archive entry using the inputFile and entryName provided.
@@ -92,6 +112,8 @@ public abstract class ArchiveOutputStream extends OutputStream {
      */
     public abstract ArchiveEntry createArchiveEntry(File inputFile, String entryName) throws IOException;
 
+    // Generic implementations of OutputStream methods that may be useful to sub-classes
+
     /**
      * Create an archive entry using the inputPath and entryName provided.
      *
@@ -112,46 +134,21 @@ public abstract class ArchiveOutputStream extends OutputStream {
         return createArchiveEntry(inputPath.toFile(), entryName);
     }
 
-    // Generic implementations of OutputStream methods that may be useful to sub-classes
-
-    /**
-     * Writes a byte to the current archive entry.
-     *
-     * <p>This method simply calls {@code write( byte[], 0, 1 )}.
-     *
-     * <p>MUST be overridden if the {@link #write(byte[], int, int)} method
-     * is not overridden; may be overridden otherwise.
-     *
-     * @param b The byte to be written.
-     * @throws IOException on error
-     */
-    @Override
-    public void write(final int b) throws IOException {
-        oneByte[0] = (byte) (b & BYTE_MASK);
-        write(oneByte, 0, 1);
-    }
-
     /**
-     * Increments the counter of already written bytes.
-     * Doesn't increment if EOF has been hit ({@code written == -1}).
+     * Finishes the addition of entries to this stream, without closing it.
+     * Additional data can be written, if the format supports it.
      *
-     * @param written the number of bytes written
+     * @throws IOException if the user forgets to close the entry.
      */
-    protected void count(final int written) {
-        count((long) written);
-    }
+    public abstract void finish() throws IOException;
 
     /**
-     * Increments the counter of already written bytes.
-     * Doesn't increment if EOF has been hit ({@code written == -1}).
-     *
-     * @param written the number of bytes written
+     * Returns the current number of bytes written to this stream.
+     * @return the number of written bytes
      * @since 1.1
      */
-    protected void count(final long written) {
-        if (written != -1) {
-            bytesWritten = bytesWritten + written;
-        }
+    public long getBytesWritten() {
+        return bytesWritten;
     }
 
     /**
@@ -166,26 +163,29 @@ public abstract class ArchiveOutputStream extends OutputStream {
     }
 
     /**
-     * Returns the current number of bytes written to this stream.
-     * @return the number of written bytes
-     * @since 1.1
+     * Writes the headers for an archive entry to the output stream.
+     * The caller must then write the content to the stream and call
+     * {@link #closeArchiveEntry()} to complete the process.
+     *
+     * @param entry describes the entry
+     * @throws IOException if an I/O error occurs
      */
-    public long getBytesWritten() {
-        return bytesWritten;
-    }
+    public abstract void putArchiveEntry(ArchiveEntry entry) throws IOException;
 
     /**
-     * Whether this stream is able to write the given entry.
+     * Writes a byte to the current archive entry.
      *
-     * <p>Some archive formats support variants or details that are
-     * not supported (yet).</p>
+     * <p>This method simply calls {@code write( byte[], 0, 1 )}.
      *
-     * @param archiveEntry
-     *            the entry to test
-     * @return This implementation always returns true.
-     * @since 1.1
+     * <p>MUST be overridden if the {@link #write(byte[], int, int)} method
+     * is not overridden; may be overridden otherwise.
+     *
+     * @param b The byte to be written.
+     * @throws IOException on error
      */
-    public boolean canWriteEntryData(final ArchiveEntry archiveEntry) {
-        return true;
+    @Override
+    public void write(final int b) throws IOException {
+        oneByte[0] = (byte) (b & BYTE_MASK);
+        write(oneByte, 0, 1);
     }
 }
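
As an illustration only (not part of this change set), a minimal sketch of the putArchiveEntry/closeArchiveEntry life cycle described above; SingleFileArchiver is a made-up helper name, and the caller still has to call finish() and close() on the stream afterwards.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Files;
    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.ArchiveOutputStream;

    final class SingleFileArchiver {
        static void addFile(final ArchiveOutputStream out, final File file, final String entryName) throws IOException {
            final ArchiveEntry entry = out.createArchiveEntry(file, entryName);
            out.putArchiveEntry(entry);     // writes the entry header
            Files.copy(file.toPath(), out); // writes the entry content
            out.closeArchiveEntry();        // writes any trailer information
        }
    }
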
diff --git a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java
index fcc9f9e9..059dbd7f 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ArchiveStreamFactory.java
@@ -187,30 +187,100 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
      */
     public static final String SEVEN_Z = "7z";
 
-    /**
-     * Entry encoding, null for the platform default.
-     */
-    private final String encoding;
+    private static Iterable<ArchiveStreamProvider> archiveStreamProviderIterable() {
+        return ServiceLoader.load(ArchiveStreamProvider.class, ClassLoader.getSystemClassLoader());
+    }
 
     /**
-     * Entry encoding, null for the default.
+     * Try to determine the type of Archiver
+     * @param in input stream
+     * @return type of archiver if found
+     * @throws ArchiveException if an archiver cannot be detected in the stream
+     * @since 1.14
      */
-    private volatile String entryEncoding;
+    public static String detect(final InputStream in) throws ArchiveException {
+        if (in == null) {
+            throw new IllegalArgumentException("Stream must not be null.");
+        }
 
-    private SortedMap<String, ArchiveStreamProvider> archiveInputStreamProviders;
+        if (!in.markSupported()) {
+            throw new IllegalArgumentException("Mark is not supported.");
+        }
 
-    private SortedMap<String, ArchiveStreamProvider> archiveOutputStreamProviders;
+        final byte[] signature = new byte[SIGNATURE_SIZE];
+        in.mark(signature.length);
+        int signatureLength = -1;
+        try {
+            signatureLength = IOUtils.readFully(in, signature);
+            in.reset();
+        } catch (final IOException e) {
+            throw new ArchiveException("IOException while reading signature.", e);
+        }
 
-    static void putAll(final Set<String> names, final ArchiveStreamProvider provider, final TreeMap<String, ArchiveStreamProvider> map) {
-        names.forEach(name -> map.put(toKey(name), provider));
-    }
+        if (ZipArchiveInputStream.matches(signature, signatureLength)) {
+            return ZIP;
+        }
+        if (JarArchiveInputStream.matches(signature, signatureLength)) {
+            return JAR;
+        }
+        if (ArArchiveInputStream.matches(signature, signatureLength)) {
+            return AR;
+        }
+        if (CpioArchiveInputStream.matches(signature, signatureLength)) {
+            return CPIO;
+        }
+        if (ArjArchiveInputStream.matches(signature, signatureLength)) {
+            return ARJ;
+        }
+        if (SevenZFile.matches(signature, signatureLength)) {
+            return SEVEN_Z;
+        }
 
-    private static Iterable<ArchiveStreamProvider> archiveStreamProviderIterable() {
-        return ServiceLoader.load(ArchiveStreamProvider.class, ClassLoader.getSystemClassLoader());
-    }
+        // Dump needs a bigger buffer to check the signature;
+        final byte[] dumpsig = new byte[DUMP_SIGNATURE_SIZE];
+        in.mark(dumpsig.length);
+        try {
+            signatureLength = IOUtils.readFully(in, dumpsig);
+            in.reset();
+        } catch (final IOException e) {
+            throw new ArchiveException("IOException while reading dump signature", e);
+        }
+        if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) {
+            return DUMP;
+        }
 
-    private static String toKey(final String name) {
-        return name.toUpperCase(Locale.ROOT);
+        // Tar needs an even bigger buffer to check the signature; read the first block
+        final byte[] tarHeader = new byte[TAR_HEADER_SIZE];
+        in.mark(tarHeader.length);
+        try {
+            signatureLength = IOUtils.readFully(in, tarHeader);
+            in.reset();
+        } catch (final IOException e) {
+            throw new ArchiveException("IOException while reading tar signature", e);
+        }
+        if (TarArchiveInputStream.matches(tarHeader, signatureLength)) {
+            return TAR;
+        }
+
+        // COMPRESS-117 - improve auto-recognition
+        if (signatureLength >= TAR_HEADER_SIZE) {
+            TarArchiveInputStream tais = null;
+            try {
+                tais = new TarArchiveInputStream(new ByteArrayInputStream(tarHeader));
+                // COMPRESS-191 - verify the header checksum
+                if (tais.getNextTarEntry().isCheckSumOK()) {
+                    return TAR;
+                }
+            } catch (final Exception e) { // NOPMD NOSONAR
+                // can generate IllegalArgumentException as well
+                // as IOException
+                // autodetection, simply not a TAR
+                // ignored
+            } finally {
+                IOUtils.closeQuietly(tais);
+            }
+        }
+        throw new ArchiveException("No Archiver found for the stream signature");
     }
 
     /**
@@ -285,6 +355,28 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
         });
     }
 
+    static void putAll(final Set<String> names, final ArchiveStreamProvider provider, final TreeMap<String, ArchiveStreamProvider> map) {
+        names.forEach(name -> map.put(toKey(name), provider));
+    }
+
+    private static String toKey(final String name) {
+        return name.toUpperCase(Locale.ROOT);
+    }
+
+    /**
+     * Entry encoding, null for the platform default.
+     */
+    private final String encoding;
+
+    /**
+     * Entry encoding, null for the default.
+     */
+    private volatile String entryEncoding;
+
+    private SortedMap<String, ArchiveStreamProvider> archiveInputStreamProviders;
+
+    private SortedMap<String, ArchiveStreamProvider> archiveOutputStreamProviders;
+
     /**
      * Create an instance using the platform default encoding.
      */
@@ -306,32 +398,20 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
     }
 
     /**
-     * Returns the encoding to use for arj, jar, zip, dump, cpio and tar
-     * files, or null for the archiver default.
-     *
-     * @return entry encoding, or null for the archiver default
-     * @since 1.5
-     */
-    public String getEntryEncoding() {
-        return entryEncoding;
-    }
-
-    /**
-     * Sets the encoding to use for arj, jar, zip, dump, cpio and tar files. Use null for the archiver default.
+     * Create an archive input stream from an input stream, autodetecting
+     * the archive type from the first few bytes of the stream. The InputStream
+     * must support marks, like BufferedInputStream.
      *
-     * @param entryEncoding the entry encoding, null uses the archiver default.
-     * @since 1.5
-     * @deprecated 1.10 use {@link #ArchiveStreamFactory(String)} to specify the encoding
-     * @throws IllegalStateException if the constructor {@link #ArchiveStreamFactory(String)}
-     * was used to specify the factory encoding.
+     * @param in the input stream
+     * @return the archive input stream
+     * @throws ArchiveException if the archiver name is not known
+     * @throws StreamingNotSupportedException if the format cannot be
+     * read from a stream
+     * @throws IllegalArgumentException if the stream is null or does not support mark
      */
-    @Deprecated
-    public void setEntryEncoding(final String entryEncoding) {
-        // Note: this does not detect new ArchiveStreamFactory(null) but that does not set the encoding anyway
-        if (encoding != null) {
-            throw new IllegalStateException("Cannot overide encoding set by the constructor");
-        }
-        this.entryEncoding = entryEncoding;
+    public ArchiveInputStream createArchiveInputStream(final InputStream in)
+            throws ArchiveException {
+        return createArchiveInputStream(detect(in), in);
     }
 
     /**
@@ -481,115 +561,6 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
         throw new ArchiveException("Archiver: " + archiverName + " not found.");
     }
 
-    /**
-     * Create an archive input stream from an input stream, autodetecting
-     * the archive type from the first few bytes of the stream. The InputStream
-     * must support marks, like BufferedInputStream.
-     *
-     * @param in the input stream
-     * @return the archive input stream
-     * @throws ArchiveException if the archiver name is not known
-     * @throws StreamingNotSupportedException if the format cannot be
-     * read from a stream
-     * @throws IllegalArgumentException if the stream is null or does not support mark
-     */
-    public ArchiveInputStream createArchiveInputStream(final InputStream in)
-            throws ArchiveException {
-        return createArchiveInputStream(detect(in), in);
-    }
-
-    /**
-     * Try to determine the type of Archiver
-     * @param in input stream
-     * @return type of archiver if found
-     * @throws ArchiveException if an archiver cannot be detected in the stream
-     * @since 1.14
-     */
-    public static String detect(final InputStream in) throws ArchiveException {
-        if (in == null) {
-            throw new IllegalArgumentException("Stream must not be null.");
-        }
-
-        if (!in.markSupported()) {
-            throw new IllegalArgumentException("Mark is not supported.");
-        }
-
-        final byte[] signature = new byte[SIGNATURE_SIZE];
-        in.mark(signature.length);
-        int signatureLength = -1;
-        try {
-            signatureLength = IOUtils.readFully(in, signature);
-            in.reset();
-        } catch (final IOException e) {
-            throw new ArchiveException("IOException while reading signature.", e);
-        }
-
-        if (ZipArchiveInputStream.matches(signature, signatureLength)) {
-            return ZIP;
-        }
-        if (JarArchiveInputStream.matches(signature, signatureLength)) {
-            return JAR;
-        }
-        if (ArArchiveInputStream.matches(signature, signatureLength)) {
-            return AR;
-        }
-        if (CpioArchiveInputStream.matches(signature, signatureLength)) {
-            return CPIO;
-        }
-        if (ArjArchiveInputStream.matches(signature, signatureLength)) {
-            return ARJ;
-        }
-        if (SevenZFile.matches(signature, signatureLength)) {
-            return SEVEN_Z;
-        }
-
-        // Dump needs a bigger buffer to check the signature;
-        final byte[] dumpsig = new byte[DUMP_SIGNATURE_SIZE];
-        in.mark(dumpsig.length);
-        try {
-            signatureLength = IOUtils.readFully(in, dumpsig);
-            in.reset();
-        } catch (final IOException e) {
-            throw new ArchiveException("IOException while reading dump signature", e);
-        }
-        if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) {
-            return DUMP;
-        }
-
-        // Tar needs an even bigger buffer to check the signature; read the first block
-        final byte[] tarHeader = new byte[TAR_HEADER_SIZE];
-        in.mark(tarHeader.length);
-        try {
-            signatureLength = IOUtils.readFully(in, tarHeader);
-            in.reset();
-        } catch (final IOException e) {
-            throw new ArchiveException("IOException while reading tar signature", e);
-        }
-        if (TarArchiveInputStream.matches(tarHeader, signatureLength)) {
-            return TAR;
-        }
-
-        // COMPRESS-117 - improve auto-recognition
-        if (signatureLength >= TAR_HEADER_SIZE) {
-            TarArchiveInputStream tais = null;
-            try {
-                tais = new TarArchiveInputStream(new ByteArrayInputStream(tarHeader));
-                // COMPRESS-191 - verify the header checksum
-                if (tais.getNextTarEntry().isCheckSumOK()) {
-                    return TAR;
-                }
-            } catch (final Exception e) { // NOPMD NOSONAR
-                // can generate IllegalArgumentException as well
-                // as IOException
-                // autodetection, simply not a TAR
-                // ignored
-            } finally {
-                IOUtils.closeQuietly(tais);
-            }
-        }
-        throw new ArchiveException("No Archiver found for the stream signature");
-    }
-
     public SortedMap<String, ArchiveStreamProvider> getArchiveInputStreamProviders() {
         if (archiveInputStreamProviders == null) {
             archiveInputStreamProviders = Collections
@@ -606,6 +577,17 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
         return archiveOutputStreamProviders;
     }
 
+    /**
+     * Returns the encoding to use for arj, jar, zip, dump, cpio and tar
+     * files, or null for the archiver default.
+     *
+     * @return entry encoding, or null for the archiver default
+     * @since 1.5
+     */
+    public String getEntryEncoding() {
+        return entryEncoding;
+    }
+
     @Override
     public Set<String> getInputStreamArchiveNames() {
         return Sets.newHashSet(AR, ARJ, ZIP, TAR, JAR, CPIO, DUMP, SEVEN_Z);
@@ -616,4 +598,22 @@ public class ArchiveStreamFactory implements ArchiveStreamProvider {
         return Sets.newHashSet(AR, ZIP, TAR, JAR, CPIO, SEVEN_Z);
     }
 
+    /**
+     * Sets the encoding to use for arj, jar, zip, dump, cpio and tar files. Use null for the archiver default.
+     *
+     * @param entryEncoding the entry encoding, null uses the archiver default.
+     * @since 1.5
+     * @deprecated 1.10 use {@link #ArchiveStreamFactory(String)} to specify the encoding
+     * @throws IllegalStateException if the constructor {@link #ArchiveStreamFactory(String)}
+     * was used to specify the factory encoding.
+     */
+    @Deprecated
+    public void setEntryEncoding(final String entryEncoding) {
+        // Note: this does not detect new ArchiveStreamFactory(null) but that does not set the encoding anyway
+        if (encoding != null) {
+            throw new IllegalStateException("Cannot override encoding set by the constructor");
+        }
+        this.entryEncoding = entryEncoding;
+    }
+
 }
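
As an illustration only (not part of this change set), a minimal sketch of format autodetection through the factory methods shown above; "example.tar" is a placeholder, and the archive is assumed to be non-empty only for the sake of the output line.

    import java.io.BufferedInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.ArchiveException;
    import org.apache.commons.compress.archivers.ArchiveInputStream;
    import org.apache.commons.compress.archivers.ArchiveStreamFactory;

    public class DetectExample {
        public static void main(final String[] args) throws ArchiveException, IOException {
            final ArchiveStreamFactory factory = new ArchiveStreamFactory();
            // A BufferedInputStream is needed because detection relies on mark()/reset();
            // ArchiveStreamFactory.detect(in) could also be called directly to get the format name.
            try (InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get("example.tar")));
                 ArchiveInputStream ais = factory.createArchiveInputStream(in)) {
                final ArchiveEntry first = ais.getNextEntry();
                System.out.println(first == null ? "empty archive" : "first entry: " + first.getName());
            }
        }
    }
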
diff --git a/src/main/java/org/apache/commons/compress/archivers/Lister.java b/src/main/java/org/apache/commons/compress/archivers/Lister.java
index 55524f54..7c7a07db 100644
--- a/src/main/java/org/apache/commons/compress/archivers/Lister.java
+++ b/src/main/java/org/apache/commons/compress/archivers/Lister.java
@@ -42,52 +42,6 @@ public final class Lister {
 
     private static final ArchiveStreamFactory FACTORY = ArchiveStreamFactory.DEFAULT;
 
-    /**
-     * Runs this class from the command line.
-     * <p>
-     * The name of the archive must be given as a command line argument.
-     * </p>
-     * <p>
-     * The optional second argument defines the archive type, in case the format is not recognized.
-     * </p>
-     *
-     * @param args name of the archive and optional argument archive type.
-     * @throws ArchiveException Archiver related Exception.
-     * @throws IOException an I/O exception.
-     */
-    public static void main(final String[] args) throws ArchiveException, IOException {
-        if (args.length == 0) {
-            usage();
-            return;
-        }
-        System.out.println("Analysing " + args[0]);
-        final File f = new File(args[0]);
-        if (!f.isFile()) {
-            System.err.println(f + " doesn't exist or is a directory");
-        }
-        final String format = args.length > 1 ? args[1] : detectFormat(f);
-        if (ArchiveStreamFactory.SEVEN_Z.equalsIgnoreCase(format)) {
-            list7z(f);
-        } else if ("zipfile".equals(format)) {
-            listZipUsingZipFile(f);
-        } else if ("tarfile".equals(format)) {
-            listZipUsingTarFile(f);
-        } else {
-            listStream(f, args);
-        }
-    }
-
-    private static void listStream(final File f, final String[] args) throws ArchiveException, IOException {
-        try (final InputStream fis = new BufferedInputStream(Files.newInputStream(f.toPath()));
-                final ArchiveInputStream ais = createArchiveInputStream(args, fis)) {
-            System.out.println("Created " + ais.toString());
-            ArchiveEntry ae;
-            while ((ae = ais.getNextEntry()) != null) {
-                System.out.println(ae.getName());
-            }
-        }
-    }
-
     private static ArchiveInputStream createArchiveInputStream(final String[] args, final InputStream fis)
             throws ArchiveException {
         if (args.length > 1) {
@@ -114,6 +68,24 @@ public final class Lister {
         }
     }
 
+    private static void listStream(final File f, final String[] args) throws ArchiveException, IOException {
+        try (final InputStream fis = new BufferedInputStream(Files.newInputStream(f.toPath()));
+                final ArchiveInputStream ais = createArchiveInputStream(args, fis)) {
+            System.out.println("Created " + ais.toString());
+            ArchiveEntry ae;
+            while ((ae = ais.getNextEntry()) != null) {
+                System.out.println(ae.getName());
+            }
+        }
+    }
+
+    private static void listZipUsingTarFile(final File f) throws IOException {
+        try (TarFile t = new TarFile(f)) {
+            System.out.println("Created " + t);
+            t.getEntries().forEach(en -> System.out.println(en.getName()));
+        }
+    }
+
     private static void listZipUsingZipFile(final File f) throws IOException {
         try (ZipFile z = new ZipFile(f)) {
             System.out.println("Created " + z);
@@ -123,10 +95,38 @@ public final class Lister {
         }
     }
 
-    private static void listZipUsingTarFile(final File f) throws IOException {
-        try (TarFile t = new TarFile(f)) {
-            System.out.println("Created " + t);
-            t.getEntries().forEach(en -> System.out.println(en.getName()));
+    /**
+     * Runs this class from the command line.
+     * <p>
+     * The name of the archive must be given as a command line argument.
+     * </p>
+     * <p>
+     * The optional second argument defines the archive type, in case the format is not recognized.
+     * </p>
+     *
+     * @param args name of the archive and optional argument archive type.
+     * @throws ArchiveException Archiver related Exception.
+     * @throws IOException an I/O exception.
+     */
+    public static void main(final String[] args) throws ArchiveException, IOException {
+        if (args.length == 0) {
+            usage();
+            return;
+        }
+        System.out.println("Analysing " + args[0]);
+        final File f = new File(args[0]);
+        if (!f.isFile()) {
+            System.err.println(f + " doesn't exist or is a directory");
+        }
+        final String format = args.length > 1 ? args[1] : detectFormat(f);
+        if (ArchiveStreamFactory.SEVEN_Z.equalsIgnoreCase(format)) {
+            list7z(f);
+        } else if ("zipfile".equals(format)) {
+            listZipUsingZipFile(f);
+        } else if ("tarfile".equals(format)) {
+            listZipUsingTarFile(f);
+        } else {
+            listStream(f, args);
         }
     }
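
As an illustration only (not part of this change set), a minimal sketch of driving the command-line entry point programmatically; "archive.tar" is a placeholder, and a second array element could name the format explicitly, as described in the Javadoc above.

    import java.io.IOException;
    import org.apache.commons.compress.archivers.ArchiveException;
    import org.apache.commons.compress.archivers.Lister;

    public class ListerExample {
        public static void main(final String[] args) throws ArchiveException, IOException {
            // Equivalent to running Lister from the command line with one argument.
            Lister.main(new String[] { "archive.tar" });
        }
    }
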
 
diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java
index e5eeab4b..54017404 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveEntry.java
@@ -64,6 +64,7 @@ public class ArArchiveEntry implements ArchiveEntry {
     /** The trailer for each entry */
     public static final String TRAILER = "`\012";
 
+    private static final int DEFAULT_MODE = 33188; // = (octal) 0100644
     /**
      * SVR4/GNU adds a trailing / to names; BSD does not.
      * They also vary in how names longer than 16 characters are represented.
@@ -73,10 +74,33 @@ public class ArArchiveEntry implements ArchiveEntry {
     private final int userId;
     private final int groupId;
     private final int mode;
-    private static final int DEFAULT_MODE = 33188; // = (octal) 0100644
     private final long lastModified;
     private final long length;
 
+    /**
+     * Creates a new instance using the attributes of the given file
+     * @param inputFile the file to create an entry from
+     * @param entryName the name of the entry
+     */
+    public ArArchiveEntry(final File inputFile, final String entryName) {
+        // TODO sort out mode
+        this(entryName, inputFile.isFile() ? inputFile.length() : 0,
+             0, 0, DEFAULT_MODE, inputFile.lastModified() / 1000);
+    }
+
+    /**
+     * Creates a new instance using the attributes of the given file
+     * @param inputPath the file to create an entry from
+     * @param entryName the name of the entry
+     * @param options options indicating how symbolic links are handled.
+     * @throws IOException if an I/O error occurs.
+     * @since 1.21
+     */
+    public ArArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
+        this(entryName, Files.isRegularFile(inputPath, options) ? Files.size(inputPath) : 0, 0, 0, DEFAULT_MODE,
+            Files.getLastModifiedTime(inputPath, options).toMillis() / 1000);
+    }
+
     /**
      * Create a new instance using a couple of default values.
      *
@@ -114,52 +138,25 @@ public class ArArchiveEntry implements ArchiveEntry {
         this.lastModified = lastModified;
     }
 
-    /**
-     * Creates a new instance using the attributes of the given file
-     * @param inputFile the file to create an entry from
-     * @param entryName the name of the entry
-     */
-    public ArArchiveEntry(final File inputFile, final String entryName) {
-        // TODO sort out mode
-        this(entryName, inputFile.isFile() ? inputFile.length() : 0,
-             0, 0, DEFAULT_MODE, inputFile.lastModified() / 1000);
-    }
-
-    /**
-     * Creates a new instance using the attributes of the given file
-     * @param inputPath the file to create an entry from
-     * @param entryName the name of the entry
-     * @param options options indicating how symbolic links are handled.
-     * @throws IOException if an I/O error occurs.
-     * @since 1.21
-     */
-    public ArArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
-        this(entryName, Files.isRegularFile(inputPath, options) ? Files.size(inputPath) : 0, 0, 0, DEFAULT_MODE,
-            Files.getLastModifiedTime(inputPath, options).toMillis() / 1000);
-    }
-
-    @Override
-    public long getSize() {
-        return this.getLength();
-    }
-
     @Override
-    public String getName() {
-        return name;
-    }
-
-    public int getUserId() {
-        return userId;
+    public boolean equals(final Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        final ArArchiveEntry other = (ArArchiveEntry) obj;
+        if (name == null) {
+            return other.name == null;
+        }
+        return name.equals(other.name);
     }
 
     public int getGroupId() {
         return groupId;
     }
 
-    public int getMode() {
-        return mode;
-    }
-
     /**
      * Last modified time in seconds since the epoch.
      * @return the last modified date
@@ -177,9 +174,22 @@ public class ArArchiveEntry implements ArchiveEntry {
         return length;
     }
 
+    public int getMode() {
+        return mode;
+    }
+
     @Override
-    public boolean isDirectory() {
-        return false;
+    public String getName() {
+        return name;
+    }
+
+    @Override
+    public long getSize() {
+        return this.getLength();
+    }
+
+    public int getUserId() {
+        return userId;
     }
 
     @Override
@@ -188,17 +198,7 @@ public class ArArchiveEntry implements ArchiveEntry {
     }
 
     @Override
-    public boolean equals(final Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        final ArArchiveEntry other = (ArArchiveEntry) obj;
-        if (name == null) {
-            return other.name == null;
-        }
-        return name.equals(other.name);
+    public boolean isDirectory() {
+        return false;
     }
 }
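
As an illustration only (not part of this change set), a minimal sketch of the two reordered file-based constructors; "data.bin" is a placeholder, and the Path-based constructor assumes the file exists because it reads its size and last-modified time.

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.ar.ArArchiveEntry;

    public class ArEntryExample {
        public static void main(final String[] args) throws IOException {
            final ArArchiveEntry fromFile = new ArArchiveEntry(new File("data.bin"), "data.bin");
            final ArArchiveEntry fromPath = new ArArchiveEntry(Paths.get("data.bin"), "data.bin");
            System.out.println(fromFile.getName() + " " + fromFile.getSize() + " mode=" + fromFile.getMode());
            System.out.println(fromPath.getLastModified()); // seconds since the epoch
        }
    }
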
diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
index f30951de..4d234722 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.java
@@ -36,8 +36,101 @@ import org.apache.commons.compress.utils.IOUtils;
  */
 public class ArArchiveInputStream extends ArchiveInputStream {
 
+    // offsets and length of meta data parts
+    private static final int NAME_OFFSET = 0;
+    private static final int NAME_LEN = 16;
+    private static final int LAST_MODIFIED_OFFSET = NAME_LEN;
+
+    private static final int LAST_MODIFIED_LEN = 12;
+
+    private static final int USER_ID_OFFSET = LAST_MODIFIED_OFFSET + LAST_MODIFIED_LEN;
+
+    private static final int USER_ID_LEN = 6;
+
+    private static final int GROUP_ID_OFFSET = USER_ID_OFFSET + USER_ID_LEN;
+    private static final int GROUP_ID_LEN = 6;
+    private static final int FILE_MODE_OFFSET = GROUP_ID_OFFSET + GROUP_ID_LEN;
+    private static final int FILE_MODE_LEN = 8;
+    private static final int LENGTH_OFFSET = FILE_MODE_OFFSET + FILE_MODE_LEN;
+    private static final int LENGTH_LEN = 10;
+    static final String BSD_LONGNAME_PREFIX = "#1/";
+    private static final int BSD_LONGNAME_PREFIX_LEN =
+        BSD_LONGNAME_PREFIX.length();
+    private static final String BSD_LONGNAME_PATTERN =
+        "^" + BSD_LONGNAME_PREFIX + "\\d+";
+    private static final String GNU_STRING_TABLE_NAME = "//";
+    private static final String GNU_LONGNAME_PATTERN = "^/\\d+";
+    /**
+     * Does the name look like it is a long name (or a name containing
+     * spaces) as encoded by BSD ar?
+     *
+     * <p>From the FreeBSD ar(5) man page:</p>
+     * <pre>
+     * BSD   In the BSD variant, names that are shorter than 16
+     *       characters and without embedded spaces are stored
+     *       directly in this field.  If a name has an embedded
+     *       space, or if it is longer than 16 characters, then
+     *       the string "#1/" followed by the decimal represen-
+     *       tation of the length of the file name is placed in
+     *       this field. The actual file name is stored immedi-
+     *       ately after the archive header.  The content of the
+     *       archive member follows the file name.  The ar_size
+     *       field of the header (see below) will then hold the
+     *       sum of the size of the file name and the size of
+     *       the member.
+     * </pre>
+     *
+     * @since 1.3
+     */
+    private static boolean isBSDLongName(final String name) {
+        return name != null && name.matches(BSD_LONGNAME_PATTERN);
+    }
+
+    /**
+     * Is this the name of the "Archive String Table" as used by
+     * SVR4/GNU to store long file names?
+     *
+     * <p>GNU ar stores multiple extended file names in the data section
+     * of a file with the name "//", this record is referred to by
+     * future headers.</p>
+     *
+     * <p>A header references an extended file name by storing a "/"
+     * followed by a decimal offset to the start of the file name in
+     * the extended file name data section.</p>
+     *
+     * <p>The format of the "//" file itself is simply a list of the
+     * long file names, each separated by one or more LF
+     * characters. Note that the decimal offsets are number of
+     * characters, not line or string number within the "//" file.</p>
+     */
+    private static boolean isGNUStringTable(final String name) {
+        return GNU_STRING_TABLE_NAME.equals(name);
+    }
+
+    /**
+     * Checks if the signature matches ASCII "!&lt;arch&gt;" followed by a single LF
+     * control character
+     *
+     * @param signature
+     *            the bytes to check
+     * @param length
+     *            the number of bytes to check
+     * @return true, if this stream is an Ar archive stream, false otherwise
+     */
+    public static boolean matches(final byte[] signature, final int length) {
+        // 3c21 7261 6863 0a3e
+
+        return length >= 8 && signature[0] == 0x21 &&
+                signature[1] == 0x3c && signature[2] == 0x61 &&
+                signature[3] == 0x72 && signature[4] == 0x63 &&
+                signature[5] == 0x68 && signature[6] == 0x3e &&
+                signature[7] == 0x0a;
+    }
+
     private final InputStream input;
+
     private long offset;
+
     private boolean closed;
 
     /*
@@ -55,20 +148,6 @@ public class ArArchiveInputStream extends ArchiveInputStream {
      */
     private long entryOffset = -1;
 
-    // offsets and length of meta data parts
-    private static final int NAME_OFFSET = 0;
-    private static final int NAME_LEN = 16;
-    private static final int LAST_MODIFIED_OFFSET = NAME_LEN;
-    private static final int LAST_MODIFIED_LEN = 12;
-    private static final int USER_ID_OFFSET = LAST_MODIFIED_OFFSET + LAST_MODIFIED_LEN;
-    private static final int USER_ID_LEN = 6;
-    private static final int GROUP_ID_OFFSET = USER_ID_OFFSET + USER_ID_LEN;
-    private static final int GROUP_ID_LEN = 6;
-    private static final int FILE_MODE_OFFSET = GROUP_ID_OFFSET + GROUP_ID_LEN;
-    private static final int FILE_MODE_LEN = 8;
-    private static final int LENGTH_OFFSET = FILE_MODE_OFFSET + FILE_MODE_LEN;
-    private static final int LENGTH_LEN = 10;
-
     // cached buffer for meta data - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
     private final byte[] metaData =
         new byte[NAME_LEN + LAST_MODIFIED_LEN + USER_ID_LEN + GROUP_ID_LEN + FILE_MODE_LEN + LENGTH_LEN];
@@ -84,6 +163,84 @@ public class ArArchiveInputStream extends ArchiveInputStream {
         closed = false;
     }
 
+    private int asInt(final byte[] byteArray, final int offset, final int len) {
+        return asInt(byteArray, offset, len, 10, false);
+    }
+
+    private int asInt(final byte[] byteArray, final int offset, final int len, final boolean treatBlankAsZero) {
+        return asInt(byteArray, offset, len, 10, treatBlankAsZero);
+    }
+
+    private int asInt(final byte[] byteArray, final int offset, final int len, final int base) {
+        return asInt(byteArray, offset, len, base, false);
+    }
+
+    private int asInt(final byte[] byteArray, final int offset, final int len, final int base, final boolean treatBlankAsZero) {
+        final String string = ArchiveUtils.toAsciiString(byteArray, offset, len).trim();
+        if (string.isEmpty() && treatBlankAsZero) {
+            return 0;
+        }
+        return Integer.parseInt(string, base);
+    }
+    private long asLong(final byte[] byteArray, final int offset, final int len) {
+        return Long.parseLong(ArchiveUtils.toAsciiString(byteArray, offset, len).trim());
+    }
+    /*
+     * (non-Javadoc)
+     *
+     * @see java.io.InputStream#close()
+     */
+    @Override
+    public void close() throws IOException {
+        if (!closed) {
+            closed = true;
+            input.close();
+        }
+        currentEntry = null;
+    }
+
+    /**
+     * Reads the real name from the current stream assuming the very
+     * first bytes to be read are the real file name.
+     *
+     * @see #isBSDLongName
+     *
+     * @since 1.3
+     */
+    private String getBSDLongName(final String bsdLongName) throws IOException {
+        final int nameLen =
+            Integer.parseInt(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN));
+        final byte[] name = IOUtils.readRange(input, nameLen);
+        final int read = name.length;
+        trackReadBytes(read);
+        if (read != nameLen) {
+            throw new EOFException();
+        }
+        return ArchiveUtils.toAsciiString(name);
+    }
+
+    /**
+     * Get an extended name from the GNU extended name buffer.
+     *
+     * @param offset pointer to entry within the buffer
+     * @return the extended file name; without trailing "/" if present.
+     * @throws IOException if name not found or buffer not set up
+     */
+    private String getExtendedName(final int offset) throws IOException {
+        if (namebuffer == null) {
+            throw new IOException("Cannot process GNU long filename as no // record was found");
+        }
+        for (int i = offset; i < namebuffer.length; i++) {
+            if (namebuffer[i] == '\012' || namebuffer[i] == 0) {
+                if (namebuffer[i - 1] == '/') {
+                    i--; // drop trailing /
+                }
+                return ArchiveUtils.toAsciiString(namebuffer, offset, i - offset);
+            }
+        }
+        throw new IOException("Failed to read entry: " + offset);
+    }
+
     /**
      * Returns the next AR entry in this stream.
      *
@@ -183,52 +340,6 @@ public class ArArchiveInputStream extends ArchiveInputStream {
         return currentEntry;
     }
 
-    /**
-     * Get an extended name from the GNU extended name buffer.
-     *
-     * @param offset pointer to entry within the buffer
-     * @return the extended file name; without trailing "/" if present.
-     * @throws IOException if name not found or buffer not set up
-     */
-    private String getExtendedName(final int offset) throws IOException {
-        if (namebuffer == null) {
-            throw new IOException("Cannot process GNU long filename as no // record was found");
-        }
-        for (int i = offset; i < namebuffer.length; i++) {
-            if (namebuffer[i] == '\012' || namebuffer[i] == 0) {
-                if (namebuffer[i - 1] == '/') {
-                    i--; // drop trailing /
-                }
-                return ArchiveUtils.toAsciiString(namebuffer, offset, i - offset);
-            }
-        }
-        throw new IOException("Failed to read entry: " + offset);
-    }
-
-    private long asLong(final byte[] byteArray, final int offset, final int len) {
-        return Long.parseLong(ArchiveUtils.toAsciiString(byteArray, offset, len).trim());
-    }
-
-    private int asInt(final byte[] byteArray, final int offset, final int len) {
-        return asInt(byteArray, offset, len, 10, false);
-    }
-
-    private int asInt(final byte[] byteArray, final int offset, final int len, final boolean treatBlankAsZero) {
-        return asInt(byteArray, offset, len, 10, treatBlankAsZero);
-    }
-
-    private int asInt(final byte[] byteArray, final int offset, final int len, final int base) {
-        return asInt(byteArray, offset, len, base, false);
-    }
-
-    private int asInt(final byte[] byteArray, final int offset, final int len, final int base, final boolean treatBlankAsZero) {
-        final String string = ArchiveUtils.toAsciiString(byteArray, offset, len).trim();
-        if (string.isEmpty() && treatBlankAsZero) {
-            return 0;
-        }
-        return Integer.parseInt(string, base);
-    }
-
     /*
      * (non-Javadoc)
      *
@@ -240,18 +351,14 @@ public class ArArchiveInputStream extends ArchiveInputStream {
         return getNextArEntry();
     }
 
-    /*
-     * (non-Javadoc)
+    /**
+     * Does the name look like it is a long name (or a name containing
+     * spaces) as encoded by SVR4/GNU ar?
      *
-     * @see java.io.InputStream#close()
+     * @see #isGNUStringTable
      */
-    @Override
-    public void close() throws IOException {
-        if (!closed) {
-            closed = true;
-            input.close();
-        }
-        currentEntry = null;
+    private boolean isGNULongName(final String name) {
+        return name != null && name.matches(GNU_LONGNAME_PATTERN);
     }
 
     /*
@@ -277,108 +384,6 @@ public class ArArchiveInputStream extends ArchiveInputStream {
         return ret;
     }
 
-    /**
-     * Checks if the signature matches ASCII "!&lt;arch&gt;" followed by a single LF
-     * control character
-     *
-     * @param signature
-     *            the bytes to check
-     * @param length
-     *            the number of bytes to check
-     * @return true, if this stream is an Ar archive stream, false otherwise
-     */
-    public static boolean matches(final byte[] signature, final int length) {
-        // 3c21 7261 6863 0a3e
-
-        return length >= 8 && signature[0] == 0x21 &&
-                signature[1] == 0x3c && signature[2] == 0x61 &&
-                signature[3] == 0x72 && signature[4] == 0x63 &&
-                signature[5] == 0x68 && signature[6] == 0x3e &&
-                signature[7] == 0x0a;
-    }
-
-    static final String BSD_LONGNAME_PREFIX = "#1/";
-    private static final int BSD_LONGNAME_PREFIX_LEN =
-        BSD_LONGNAME_PREFIX.length();
-    private static final String BSD_LONGNAME_PATTERN =
-        "^" + BSD_LONGNAME_PREFIX + "\\d+";
-
-    /**
-     * Does the name look like it is a long name (or a name containing
-     * spaces) as encoded by BSD ar?
-     *
-     * <p>From the FreeBSD ar(5) man page:</p>
-     * <pre>
-     * BSD   In the BSD variant, names that are shorter than 16
-     *       characters and without embedded spaces are stored
-     *       directly in this field.  If a name has an embedded
-     *       space, or if it is longer than 16 characters, then
-     *       the string "#1/" followed by the decimal represen-
-     *       tation of the length of the file name is placed in
-     *       this field. The actual file name is stored immedi-
-     *       ately after the archive header.  The content of the
-     *       archive member follows the file name.  The ar_size
-     *       field of the header (see below) will then hold the
-     *       sum of the size of the file name and the size of
-     *       the member.
-     * </pre>
-     *
-     * @since 1.3
-     */
-    private static boolean isBSDLongName(final String name) {
-        return name != null && name.matches(BSD_LONGNAME_PATTERN);
-    }
-
-    /**
-     * Reads the real name from the current stream assuming the very
-     * first bytes to be read are the real file name.
-     *
-     * @see #isBSDLongName
-     *
-     * @since 1.3
-     */
-    private String getBSDLongName(final String bsdLongName) throws IOException {
-        final int nameLen =
-            Integer.parseInt(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN));
-        final byte[] name = IOUtils.readRange(input, nameLen);
-        final int read = name.length;
-        trackReadBytes(read);
-        if (read != nameLen) {
-            throw new EOFException();
-        }
-        return ArchiveUtils.toAsciiString(name);
-    }
-
-    private static final String GNU_STRING_TABLE_NAME = "//";
-
-    /**
-     * Is this the name of the "Archive String Table" as used by
-     * SVR4/GNU to store long file names?
-     *
-     * <p>GNU ar stores multiple extended file names in the data section
-     * of a file with the name "//", this record is referred to by
-     * future headers.</p>
-     *
-     * <p>A header references an extended file name by storing a "/"
-     * followed by a decimal offset to the start of the file name in
-     * the extended file name data section.</p>
-     *
-     * <p>The format of the "//" file itself is simply a list of the
-     * long file names, each separated by one or more LF
-     * characters. Note that the decimal offsets are number of
-     * characters, not line or string number within the "//" file.</p>
-     */
-    private static boolean isGNUStringTable(final String name) {
-        return GNU_STRING_TABLE_NAME.equals(name);
-    }
-
-    private void trackReadBytes(final long read) {
-        count(read);
-        if (read > 0) {
-            offset += read;
-        }
-    }
-
     /**
      * Reads the GNU archive String Table.
      *
@@ -396,15 +401,10 @@ public class ArArchiveInputStream extends ArchiveInputStream {
         return new ArArchiveEntry(GNU_STRING_TABLE_NAME, bufflen);
     }
 
-    private static final String GNU_LONGNAME_PATTERN = "^/\\d+";
-
-    /**
-     * Does the name look like it is a long name (or a name containing
-     * spaces) as encoded by SVR4/GNU ar?
-     *
-     * @see #isGNUStringTable
-     */
-    private boolean isGNULongName(final String name) {
-        return name != null && name.matches(GNU_LONGNAME_PATTERN);
+    private void trackReadBytes(final long read) {
+        count(read);
+        if (read > 0) {
+            offset += read;
+        }
     }
 }
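
As an illustration only (not part of this change set), a minimal sketch of reading an ar archive with the stream shown above; "archive.a" is a placeholder file name.

    import java.io.BufferedInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.ar.ArArchiveInputStream;

    public class ArReadExample {
        public static void main(final String[] args) throws IOException {
            try (InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get("archive.a")));
                 ArArchiveInputStream ar = new ArArchiveInputStream(in)) {
                ArchiveEntry entry;
                while ((entry = ar.getNextEntry()) != null) {
                    System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
                }
            }
        }
    }
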
diff --git a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java
index 62c07631..bd5bfed3 100644
--- a/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.java
@@ -56,20 +56,18 @@ public class ArArchiveOutputStream extends ArchiveOutputStream {
     }
 
     /**
-     * Set the long file mode.
-     * This can be LONGFILE_ERROR(0) or LONGFILE_BSD(1).
-     * This specifies the treatment of long file names (names &gt;= 16).
-     * Default is LONGFILE_ERROR.
-     * @param longFileMode the mode to use
-     * @since 1.3
+     * Calls finish if necessary, and then closes the OutputStream
      */
-    public void setLongFileMode(final int longFileMode) {
-        this.longFileMode = longFileMode;
-    }
-
-    private void writeArchiveHeader() throws IOException {
-        final byte [] header = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
-        out.write(header);
+    @Override
+    public void close() throws IOException {
+        try {
+            if (!finished) {
+                finish();
+            }
+        } finally {
+            out.close();
+            prevEntry = null;
+        }
     }
 
     @Override
@@ -86,6 +84,51 @@ public class ArArchiveOutputStream extends ArchiveOutputStream {
         haveUnclosedEntry = false;
     }
 
+    @Override
+    public ArchiveEntry createArchiveEntry(final File inputFile, final String entryName)
+        throws IOException {
+        if (finished) {
+            throw new IOException("Stream has already been finished");
+        }
+        return new ArArchiveEntry(inputFile, entryName);
+    }
+
+    /**
+     * {@inheritDoc}
+     *
+     * @since 1.21
+     */
+    @Override
+    public ArchiveEntry createArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
+        if (finished) {
+            throw new IOException("Stream has already been finished");
+        }
+        return new ArArchiveEntry(inputPath, entryName, options);
+    }
+
+    private long fill(final long pOffset, final long pNewOffset, final char pFill) throws IOException {
+        final long diff = pNewOffset - pOffset;
+
+        if (diff > 0) {
+            for (int i = 0; i < diff; i++) {
+                write(pFill);
+            }
+        }
+
+        return pNewOffset;
+    }
+
+    @Override
+    public void finish() throws IOException {
+        if(haveUnclosedEntry) {
+            throw new IOException("This archive contains unclosed entries.");
+        }
+        if(finished) {
+            throw new IOException("This archive has already been finished");
+        }
+        finished = true;
+    }
+
     @Override
     public void putArchiveEntry(final ArchiveEntry pEntry) throws IOException {
         if(finished) {
@@ -113,16 +156,23 @@ public class ArArchiveOutputStream extends ArchiveOutputStream {
         haveUnclosedEntry = true;
     }
 
-    private long fill(final long pOffset, final long pNewOffset, final char pFill) throws IOException {
-        final long diff = pNewOffset - pOffset;
-
-        if (diff > 0) {
-            for (int i = 0; i < diff; i++) {
-                write(pFill);
-            }
-        }
+    /**
+     * Set the long file mode.
+     * This can be LONGFILE_ERROR(0) or LONGFILE_BSD(1).
+     * This specifies the treatment of long file names (names &gt;= 16).
+     * Default is LONGFILE_ERROR.
+     * @param longFileMode the mode to use
+     * @since 1.3
+     */
+    public void setLongFileMode(final int longFileMode) {
+        this.longFileMode = longFileMode;
+    }
 
-        return pNewOffset;
+    @Override
+    public void write(final byte[] b, final int off, final int len) throws IOException {
+        out.write(b, off, len);
+        count(len);
+        entryOffset += len;
     }
 
     private long write(final String data) throws IOException {
@@ -131,6 +181,11 @@ public class ArArchiveOutputStream extends ArchiveOutputStream {
         return bytes.length;
     }
 
+    private void writeArchiveHeader() throws IOException {
+        final byte [] header = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
+        out.write(header);
+    }
+
     private void writeEntryHeader(final ArArchiveEntry pEntry) throws IOException {
 
         long offset = 0;
@@ -195,59 +250,4 @@ public class ArArchiveOutputStream extends ArchiveOutputStream {
         }
 
     }
-
-    @Override
-    public void write(final byte[] b, final int off, final int len) throws IOException {
-        out.write(b, off, len);
-        count(len);
-        entryOffset += len;
-    }
-
-    /**
-     * Calls finish if necessary, and then closes the OutputStream
-     */
-    @Override
-    public void close() throws IOException {
-        try {
-            if (!finished) {
-                finish();
-            }
-        } finally {
-            out.close();
-            prevEntry = null;
-        }
-    }
-
-    @Override
-    public ArchiveEntry createArchiveEntry(final File inputFile, final String entryName)
-        throws IOException {
-        if (finished) {
-            throw new IOException("Stream has already been finished");
-        }
-        return new ArArchiveEntry(inputFile, entryName);
-    }
-
-    /**
-     * {@inheritDoc}
-     *
-     * @since 1.21
-     */
-    @Override
-    public ArchiveEntry createArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
-        if (finished) {
-            throw new IOException("Stream has already been finished");
-        }
-        return new ArArchiveEntry(inputPath, entryName, options);
-    }
-
-    @Override
-    public void finish() throws IOException {
-        if(haveUnclosedEntry) {
-            throw new IOException("This archive contains unclosed entries.");
-        }
-        if(finished) {
-            throw new IOException("This archive has already been finished");
-        }
-        finished = true;
-    }
 }
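
For context on the ar writer members sorted above, here is a minimal usage sketch (not part of this change; the file names are hypothetical) showing the createArchiveEntry / putArchiveEntry / closeArchiveEntry cycle together with setLongFileMode:

    import java.io.IOException;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.ArchiveEntry;
    import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream;

    public class ArWriteSketch {
        public static void main(final String[] args) throws IOException {
            final Path source = Paths.get("notes.txt");   // hypothetical input file
            final Path archive = Paths.get("example.ar"); // hypothetical output archive
            try (OutputStream os = Files.newOutputStream(archive);
                 ArArchiveOutputStream arOut = new ArArchiveOutputStream(os)) {
                // allow entry names of 16 or more characters; the default LONGFILE_ERROR would throw
                arOut.setLongFileMode(ArArchiveOutputStream.LONGFILE_BSD);
                final ArchiveEntry entry = arOut.createArchiveEntry(source, "notes.txt");
                arOut.putArchiveEntry(entry);
                Files.copy(source, arOut);
                arOut.closeArchiveEntry();
                // close() calls finish() for us if it has not been called explicitly
            }
        }
    }
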
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java
index 5c966faf..0ba34a6d 100644
--- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.java
@@ -30,6 +30,24 @@ import org.apache.commons.compress.archivers.zip.ZipUtil;
  * @since 1.6
  */
 public class ArjArchiveEntry implements ArchiveEntry {
+    /**
+     * The known values for HostOs.
+     */
+    public static class HostOs {
+        public static final int DOS = 0;
+        public static final int PRIMOS = 1;
+        public static final int UNIX = 2;
+        public static final int AMIGA = 3;
+        public static final int MAC_OS = 4;
+        public static final int OS_2 = 5;
+        public static final int APPLE_GS = 6;
+        public static final int ATARI_ST = 7;
+        public static final int NEXT = 8;
+        public static final int VAX_VMS = 9;
+        public static final int WIN95 = 10;
+        public static final int WIN32 = 11;
+    }
+
     private final LocalFileHeader localFileHeader;
 
     public ArjArchiveEntry() {
@@ -40,39 +58,25 @@ public class ArjArchiveEntry implements ArchiveEntry {
         this.localFileHeader = localFileHeader;
     }
 
-    /**
-     * Get this entry's name.
-     *
-     * <p>This method returns the raw name as it is stored inside of the archive.</p>
-     *
-     * @return This entry's name.
-     */
     @Override
-    public String getName() {
-        if ((localFileHeader.arjFlags & LocalFileHeader.Flags.PATHSYM) != 0) {
-            return localFileHeader.name.replace("/",
-                    File.separator);
+    public boolean equals(final Object obj) {
+        if (this == obj) {
+            return true;
         }
-        return localFileHeader.name;
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        final ArjArchiveEntry other = (ArjArchiveEntry) obj;
+        return localFileHeader.equals(other.localFileHeader);
     }
 
     /**
-     * Get this entry's file size.
-     *
-     * @return This entry's file size.
-     */
-    @Override
-    public long getSize() {
-        return localFileHeader.originalSize;
-    }
-
-    /** True if the entry refers to a directory.
-     *
-     * @return True if the entry refers to a directory
+     * The operating system the archive has been created on.
+     * @see HostOs
+     * @return the host OS code
      */
-    @Override
-    public boolean isDirectory() {
-        return localFileHeader.fileType == LocalFileHeader.FileTypes.DIRECTORY;
+    public int getHostOs() {
+        return localFileHeader.hostOS;
     }
 
     /**
@@ -97,6 +101,10 @@ public class ArjArchiveEntry implements ArchiveEntry {
         return new Date(ts);
     }
 
+    int getMethod() {
+        return localFileHeader.method;
+    }
+
     /**
      * File mode of this entry.
      *
@@ -109,37 +117,40 @@ public class ArjArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * File mode of this entry as Unix stat value.
+     * Get this entry's name.
      *
-     * <p>Will only be non-zero of the host os was UNIX.
+     * <p>This method returns the raw name as it is stored inside of the archive.</p>
      *
-     * @return the Unix mode
+     * @return This entry's name.
      */
-    public int getUnixMode() {
-        return isHostOsUnix() ? getMode() : 0;
+    @Override
+    public String getName() {
+        if ((localFileHeader.arjFlags & LocalFileHeader.Flags.PATHSYM) != 0) {
+            return localFileHeader.name.replace("/",
+                    File.separator);
+        }
+        return localFileHeader.name;
     }
 
     /**
-     * The operating system the archive has been created on.
-     * @see HostOs
-     * @return the host OS code
+     * Get this entry's file size.
+     *
+     * @return This entry's file size.
      */
-    public int getHostOs() {
-        return localFileHeader.hostOS;
+    @Override
+    public long getSize() {
+        return localFileHeader.originalSize;
     }
 
     /**
-     * Is the operating system the archive has been created on one
-     * that is considered a UNIX OS by arj?
-     * @return whether the operating system the archive has been
-     * created on is considered a UNIX OS by arj
+     * File mode of this entry as Unix stat value.
+     *
+     * <p>Will only be non-zero if the host OS was UNIX.</p>
+     *
+     * @return the Unix mode
      */
-    public boolean isHostOsUnix() {
-        return getHostOs() == HostOs.UNIX || getHostOs() == HostOs.NEXT;
-    }
-
-    int getMethod() {
-        return localFileHeader.method;
+    public int getUnixMode() {
+        return isHostOsUnix() ? getMode() : 0;
     }
 
     @Override
@@ -148,34 +159,23 @@ public class ArjArchiveEntry implements ArchiveEntry {
         return name == null ? 0 : name.hashCode();
     }
 
+    /** True if the entry refers to a directory.
+     *
+     * @return True if the entry refers to a directory
+     */
     @Override
-    public boolean equals(final Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        final ArjArchiveEntry other = (ArjArchiveEntry) obj;
-        return localFileHeader.equals(other.localFileHeader);
+    public boolean isDirectory() {
+        return localFileHeader.fileType == LocalFileHeader.FileTypes.DIRECTORY;
     }
 
     /**
-     * The known values for HostOs.
+     * Is the operating system the archive has been created on one
+     * that is considered a UNIX OS by arj?
+     * @return whether the operating system the archive has been
+     * created on is considered a UNIX OS by arj
      */
-    public static class HostOs {
-        public static final int DOS = 0;
-        public static final int PRIMOS = 1;
-        public static final int UNIX = 2;
-        public static final int AMIGA = 3;
-        public static final int MAC_OS = 4;
-        public static final int OS_2 = 5;
-        public static final int APPLE_GS = 6;
-        public static final int ATARI_ST = 7;
-        public static final int NEXT = 8;
-        public static final int VAX_VMS = 9;
-        public static final int WIN95 = 10;
-        public static final int WIN32 = 11;
+    public boolean isHostOsUnix() {
+        return getHostOs() == HostOs.UNIX || getHostOs() == HostOs.NEXT;
     }
 
 }
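
The HostOs constants and mode accessors regrouped above combine as in the following sketch; the describe helper is hypothetical, and an entry would normally come from ArjArchiveInputStream.getNextEntry():

    import org.apache.commons.compress.archivers.arj.ArjArchiveEntry;

    public class ArjEntrySketch {
        // hypothetical helper: report platform-specific metadata of an entry
        static void describe(final ArjArchiveEntry entry) {
            System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
            if (entry.isHostOsUnix()) {
                // getUnixMode() is only non-zero when the creating host was UNIX or NeXT
                System.out.printf("unix mode: %o%n", entry.getUnixMode());
            } else if (entry.getHostOs() == ArjArchiveEntry.HostOs.DOS) {
                System.out.println("created on DOS");
            }
        }
    }
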
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
index 1c2bdc9b..28462171 100644
--- a/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.java
@@ -46,12 +46,38 @@ import org.apache.commons.compress.utils.IOUtils;
 public class ArjArchiveInputStream extends ArchiveInputStream {
     private static final int ARJ_MAGIC_1 = 0x60;
     private static final int ARJ_MAGIC_2 = 0xEA;
+    /**
+     * Checks if the signature matches what is expected for an arj file.
+     *
+     * @param signature
+     *            the bytes to check
+     * @param length
+     *            the number of bytes to check
+     * @return true, if this stream is an arj archive stream, false otherwise
+     */
+    public static boolean matches(final byte[] signature, final int length) {
+        return length >= 2 &&
+                (0xff & signature[0]) == ARJ_MAGIC_1 &&
+                (0xff & signature[1]) == ARJ_MAGIC_2;
+    }
     private final DataInputStream in;
     private final String charsetName;
     private final MainHeader mainHeader;
     private LocalFileHeader currentLocalFileHeader;
+
     private InputStream currentInputStream;
 
+    /**
+     * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in,
+     * and using the CP437 character encoding.
+     * @param inputStream the underlying stream, whose ownership is taken
+     * @throws ArchiveException if an exception occurs while reading
+     */
+    public ArjArchiveInputStream(final InputStream inputStream)
+            throws ArchiveException {
+        this(inputStream, "CP437");
+    }
+
     /**
      * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in.
      * @param inputStream the underlying stream, whose ownership is taken
@@ -76,15 +102,10 @@ public class ArjArchiveInputStream extends ArchiveInputStream {
         }
     }
 
-    /**
-     * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in,
-     * and using the CP437 character encoding.
-     * @param inputStream the underlying stream, whose ownership is taken
-     * @throws ArchiveException if an exception occurs while reading
-     */
-    public ArjArchiveInputStream(final InputStream inputStream)
-            throws ArchiveException {
-        this(inputStream, "CP437");
+    @Override
+    public boolean canReadEntryData(final ArchiveEntry ae) {
+        return ae instanceof ArjArchiveEntry
+            && ((ArjArchiveEntry) ae).getMethod() == LocalFileHeader.Methods.STORED;
     }
 
     @Override
@@ -92,10 +113,57 @@ public class ArjArchiveInputStream extends ArchiveInputStream {
         in.close();
     }
 
-    private int read8(final DataInputStream dataIn) throws IOException {
-        final int value = dataIn.readUnsignedByte();
-        count(1);
-        return value;
+    /**
+     * Gets the archive's comment.
+     * @return the archive's comment
+     */
+    public String getArchiveComment() {
+        return mainHeader.comment;
+    }
+
+    /**
+     * Gets the archive's recorded name.
+     * @return the archive's name
+     */
+    public String getArchiveName() {
+        return mainHeader.name;
+    }
+
+    @Override
+    public ArjArchiveEntry getNextEntry() throws IOException {
+        if (currentInputStream != null) {
+            // return value ignored as IOUtils.skip ensures the stream is drained completely
+            IOUtils.skip(currentInputStream, Long.MAX_VALUE);
+            currentInputStream.close();
+            currentLocalFileHeader = null;
+            currentInputStream = null;
+        }
+
+        currentLocalFileHeader = readLocalFileHeader();
+        if (currentLocalFileHeader != null) {
+            currentInputStream = new BoundedInputStream(in, currentLocalFileHeader.compressedSize);
+            if (currentLocalFileHeader.method == LocalFileHeader.Methods.STORED) {
+                currentInputStream = new CRC32VerifyingInputStream(currentInputStream,
+                        currentLocalFileHeader.originalSize, currentLocalFileHeader.originalCrc32);
+            }
+            return new ArjArchiveEntry(currentLocalFileHeader);
+        }
+        currentInputStream = null;
+        return null;
+    }
+
+    @Override
+    public int read(final byte[] b, final int off, final int len) throws IOException {
+        if (len == 0) {
+            return 0;
+        }
+        if (currentLocalFileHeader == null) {
+            throw new IllegalStateException("No current arj entry");
+        }
+        if (currentLocalFileHeader.method != LocalFileHeader.Methods.STORED) {
+            throw new IOException("Unsupported compression method " + currentLocalFileHeader.method);
+        }
+        return currentInputStream.read(b, off, len);
     }
 
     private int read16(final DataInputStream dataIn) throws IOException {
@@ -110,24 +178,24 @@ public class ArjArchiveInputStream extends ArchiveInputStream {
         return Integer.reverseBytes(value);
     }
 
-    private String readString(final DataInputStream dataIn) throws IOException {
-        try (final ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
-            int nextByte;
-            while ((nextByte = dataIn.readUnsignedByte()) != 0) {
-                buffer.write(nextByte);
-            }
-            return buffer.toString(Charsets.toCharset(charsetName).name());
-        }
+    private int read8(final DataInputStream dataIn) throws IOException {
+        final int value = dataIn.readUnsignedByte();
+        count(1);
+        return value;
     }
 
-    private byte[] readRange(final InputStream in, final int len)
-        throws IOException {
-        final byte[] b = IOUtils.readRange(in, len);
-        count(b.length);
-        if (b.length < len) {
-            throw new EOFException();
+    private void readExtraData(final int firstHeaderSize, final DataInputStream firstHeader,
+                               final LocalFileHeader localFileHeader) throws IOException {
+        if (firstHeaderSize >= 33) {
+            localFileHeader.extendedFilePosition = read32(firstHeader);
+            if (firstHeaderSize >= 45) {
+                localFileHeader.dateTimeAccessed = read32(firstHeader);
+                localFileHeader.dateTimeCreated = read32(firstHeader);
+                localFileHeader.originalSizeEvenForVolumes = read32(firstHeader);
+                pushedBackBytes(12);
+            }
+            pushedBackBytes(4);
         }
-        return b;
     }
 
     private byte[] readHeader() throws IOException {
@@ -158,63 +226,6 @@ public class ArjArchiveInputStream extends ArchiveInputStream {
         return basicHeaderBytes;
     }
 
-    private MainHeader readMainHeader() throws IOException {
-        final byte[] basicHeaderBytes = readHeader();
-        if (basicHeaderBytes == null) {
-            throw new IOException("Archive ends without any headers");
-        }
-        final DataInputStream basicHeader = new DataInputStream(
-                new ByteArrayInputStream(basicHeaderBytes));
-
-        final int firstHeaderSize = basicHeader.readUnsignedByte();
-        final byte[] firstHeaderBytes = readRange(basicHeader, firstHeaderSize - 1);
-        pushedBackBytes(firstHeaderBytes.length);
-
-        final DataInputStream firstHeader = new DataInputStream(
-                new ByteArrayInputStream(firstHeaderBytes));
-
-        final MainHeader hdr = new MainHeader();
-        hdr.archiverVersionNumber = firstHeader.readUnsignedByte();
-        hdr.minVersionToExtract = firstHeader.readUnsignedByte();
-        hdr.hostOS = firstHeader.readUnsignedByte();
-        hdr.arjFlags = firstHeader.readUnsignedByte();
-        hdr.securityVersion = firstHeader.readUnsignedByte();
-        hdr.fileType = firstHeader.readUnsignedByte();
-        hdr.reserved = firstHeader.readUnsignedByte();
-        hdr.dateTimeCreated = read32(firstHeader);
-        hdr.dateTimeModified = read32(firstHeader);
-        hdr.archiveSize = 0xffffFFFFL & read32(firstHeader);
-        hdr.securityEnvelopeFilePosition = read32(firstHeader);
-        hdr.fileSpecPosition = read16(firstHeader);
-        hdr.securityEnvelopeLength = read16(firstHeader);
-        pushedBackBytes(20); // count has already counted them via readRange
-        hdr.encryptionVersion = firstHeader.readUnsignedByte();
-        hdr.lastChapter = firstHeader.readUnsignedByte();
-
-        if (firstHeaderSize >= 33) {
-            hdr.arjProtectionFactor = firstHeader.readUnsignedByte();
-            hdr.arjFlags2 = firstHeader.readUnsignedByte();
-            firstHeader.readUnsignedByte();
-            firstHeader.readUnsignedByte();
-        }
-
-        hdr.name = readString(basicHeader);
-        hdr.comment = readString(basicHeader);
-
-        final  int extendedHeaderSize = read16(in);
-        if (extendedHeaderSize > 0) {
-            hdr.extendedHeaderBytes = readRange(in, extendedHeaderSize);
-            final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in);
-            final CRC32 crc32 = new CRC32();
-            crc32.update(hdr.extendedHeaderBytes);
-            if (extendedHeaderCrc32 != crc32.getValue()) {
-                throw new IOException("Extended header CRC32 verification failure");
-            }
-        }
-
-        return hdr;
-    }
-
     private LocalFileHeader readLocalFileHeader() throws IOException {
         final byte[] basicHeaderBytes = readHeader();
         if (basicHeaderBytes == null) {
@@ -269,91 +280,80 @@ public class ArjArchiveInputStream extends ArchiveInputStream {
         }
     }
 
-    private void readExtraData(final int firstHeaderSize, final DataInputStream firstHeader,
-                               final LocalFileHeader localFileHeader) throws IOException {
-        if (firstHeaderSize >= 33) {
-            localFileHeader.extendedFilePosition = read32(firstHeader);
-            if (firstHeaderSize >= 45) {
-                localFileHeader.dateTimeAccessed = read32(firstHeader);
-                localFileHeader.dateTimeCreated = read32(firstHeader);
-                localFileHeader.originalSizeEvenForVolumes = read32(firstHeader);
-                pushedBackBytes(12);
-            }
-            pushedBackBytes(4);
+    private MainHeader readMainHeader() throws IOException {
+        final byte[] basicHeaderBytes = readHeader();
+        if (basicHeaderBytes == null) {
+            throw new IOException("Archive ends without any headers");
         }
-    }
+        final DataInputStream basicHeader = new DataInputStream(
+                new ByteArrayInputStream(basicHeaderBytes));
 
-    /**
-     * Checks if the signature matches what is expected for an arj file.
-     *
-     * @param signature
-     *            the bytes to check
-     * @param length
-     *            the number of bytes to check
-     * @return true, if this stream is an arj archive stream, false otherwise
-     */
-    public static boolean matches(final byte[] signature, final int length) {
-        return length >= 2 &&
-                (0xff & signature[0]) == ARJ_MAGIC_1 &&
-                (0xff & signature[1]) == ARJ_MAGIC_2;
-    }
+        final int firstHeaderSize = basicHeader.readUnsignedByte();
+        final byte[] firstHeaderBytes = readRange(basicHeader, firstHeaderSize - 1);
+        pushedBackBytes(firstHeaderBytes.length);
 
-    /**
-     * Gets the archive's recorded name.
-     * @return the archive's name
-     */
-    public String getArchiveName() {
-        return mainHeader.name;
-    }
+        final DataInputStream firstHeader = new DataInputStream(
+                new ByteArrayInputStream(firstHeaderBytes));
 
-    /**
-     * Gets the archive's comment.
-     * @return the archive's comment
-     */
-    public String getArchiveComment() {
-        return mainHeader.comment;
-    }
+        final MainHeader hdr = new MainHeader();
+        hdr.archiverVersionNumber = firstHeader.readUnsignedByte();
+        hdr.minVersionToExtract = firstHeader.readUnsignedByte();
+        hdr.hostOS = firstHeader.readUnsignedByte();
+        hdr.arjFlags = firstHeader.readUnsignedByte();
+        hdr.securityVersion = firstHeader.readUnsignedByte();
+        hdr.fileType = firstHeader.readUnsignedByte();
+        hdr.reserved = firstHeader.readUnsignedByte();
+        hdr.dateTimeCreated = read32(firstHeader);
+        hdr.dateTimeModified = read32(firstHeader);
+        hdr.archiveSize = 0xffffFFFFL & read32(firstHeader);
+        hdr.securityEnvelopeFilePosition = read32(firstHeader);
+        hdr.fileSpecPosition = read16(firstHeader);
+        hdr.securityEnvelopeLength = read16(firstHeader);
+        pushedBackBytes(20); // count has already counted them via readRange
+        hdr.encryptionVersion = firstHeader.readUnsignedByte();
+        hdr.lastChapter = firstHeader.readUnsignedByte();
 
-    @Override
-    public ArjArchiveEntry getNextEntry() throws IOException {
-        if (currentInputStream != null) {
-            // return value ignored as IOUtils.skip ensures the stream is drained completely
-            IOUtils.skip(currentInputStream, Long.MAX_VALUE);
-            currentInputStream.close();
-            currentLocalFileHeader = null;
-            currentInputStream = null;
+        if (firstHeaderSize >= 33) {
+            hdr.arjProtectionFactor = firstHeader.readUnsignedByte();
+            hdr.arjFlags2 = firstHeader.readUnsignedByte();
+            firstHeader.readUnsignedByte();
+            firstHeader.readUnsignedByte();
         }
 
-        currentLocalFileHeader = readLocalFileHeader();
-        if (currentLocalFileHeader != null) {
-            currentInputStream = new BoundedInputStream(in, currentLocalFileHeader.compressedSize);
-            if (currentLocalFileHeader.method == LocalFileHeader.Methods.STORED) {
-                currentInputStream = new CRC32VerifyingInputStream(currentInputStream,
-                        currentLocalFileHeader.originalSize, currentLocalFileHeader.originalCrc32);
+        hdr.name = readString(basicHeader);
+        hdr.comment = readString(basicHeader);
+
+        final int extendedHeaderSize = read16(in);
+        if (extendedHeaderSize > 0) {
+            hdr.extendedHeaderBytes = readRange(in, extendedHeaderSize);
+            final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in);
+            final CRC32 crc32 = new CRC32();
+            crc32.update(hdr.extendedHeaderBytes);
+            if (extendedHeaderCrc32 != crc32.getValue()) {
+                throw new IOException("Extended header CRC32 verification failure");
             }
-            return new ArjArchiveEntry(currentLocalFileHeader);
         }
-        currentInputStream = null;
-        return null;
-    }
 
-    @Override
-    public boolean canReadEntryData(final ArchiveEntry ae) {
-        return ae instanceof ArjArchiveEntry
-            && ((ArjArchiveEntry) ae).getMethod() == LocalFileHeader.Methods.STORED;
+        return hdr;
     }
 
-    @Override
-    public int read(final byte[] b, final int off, final int len) throws IOException {
-        if (len == 0) {
-            return 0;
-        }
-        if (currentLocalFileHeader == null) {
-            throw new IllegalStateException("No current arj entry");
+    private byte[] readRange(final InputStream in, final int len)
+        throws IOException {
+        final byte[] b = IOUtils.readRange(in, len);
+        count(b.length);
+        if (b.length < len) {
+            throw new EOFException();
         }
-        if (currentLocalFileHeader.method != LocalFileHeader.Methods.STORED) {
-            throw new IOException("Unsupported compression method " + currentLocalFileHeader.method);
+        return b;
+    }
+
+    private String readString(final DataInputStream dataIn) throws IOException {
+        try (final ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
+            int nextByte;
+            while ((nextByte = dataIn.readUnsignedByte()) != 0) {
+                buffer.write(nextByte);
+            }
+            return buffer.toString(Charsets.toCharset(charsetName).name());
         }
-        return currentInputStream.read(b, off, len);
     }
 }
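
Reading an ARJ archive goes through the getNextEntry / canReadEntryData / read methods sorted above; a minimal listing sketch (archive path hypothetical, the single-argument constructor defaults to CP437):

    import java.io.BufferedInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.ArchiveException;
    import org.apache.commons.compress.archivers.arj.ArjArchiveEntry;
    import org.apache.commons.compress.archivers.arj.ArjArchiveInputStream;
    import org.apache.commons.compress.utils.IOUtils;

    public class ArjListSketch {
        public static void main(final String[] args) throws IOException, ArchiveException {
            // hypothetical archive; file names are decoded as CP437
            try (InputStream is = new BufferedInputStream(Files.newInputStream(Paths.get("sample.arj")));
                 ArjArchiveInputStream arj = new ArjArchiveInputStream(is)) {
                System.out.println("archive: " + arj.getArchiveName());
                ArjArchiveEntry entry;
                while ((entry = arj.getNextEntry()) != null) {
                    if (!arj.canReadEntryData(entry)) {
                        // only STORED entries can be read back; read() would throw otherwise
                        System.out.println("skipping " + entry.getName());
                        continue;
                    }
                    final byte[] data = IOUtils.toByteArray(arj);
                    System.out.println(entry.getName() + ": " + data.length + " bytes");
                }
            }
        }
    }
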
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/LocalFileHeader.java b/src/main/java/org/apache/commons/compress/archivers/arj/LocalFileHeader.java
index d33bb6c5..c3bba4a2 100644
--- a/src/main/java/org/apache/commons/compress/archivers/arj/LocalFileHeader.java
+++ b/src/main/java/org/apache/commons/compress/archivers/arj/LocalFileHeader.java
@@ -21,6 +21,30 @@ import java.util.Arrays;
 import java.util.Objects;
 
 class LocalFileHeader {
+    static class FileTypes {
+        static final int BINARY = 0;
+        static final int SEVEN_BIT_TEXT = 1;
+        static final int COMMENT_HEADER = 2;
+        static final int DIRECTORY = 3;
+        static final int VOLUME_LABEL = 4;
+        static final int CHAPTER_LABEL = 5;
+    }
+    static class Flags {
+        static final int GARBLED = 0x01;
+        static final int VOLUME = 0x04;
+        static final int EXTFILE = 0x08;
+        static final int PATHSYM = 0x10;
+        static final int BACKUP = 0x20;
+    }
+    static class Methods {
+        static final int STORED = 0;
+        static final int COMPRESSED_MOST = 1;
+        static final int COMPRESSED = 2;
+        static final int COMPRESSED_FASTER = 3;
+        static final int COMPRESSED_FASTEST = 4;
+        static final int NO_DATA_NO_CRC = 8;
+        static final int NO_DATA = 9;
+    }
     int archiverVersionNumber;
     int minVersionToExtract;
     int hostOS;
@@ -33,45 +57,60 @@ class LocalFileHeader {
     long originalSize;
     long originalCrc32;
     int fileSpecPosition;
+
     int fileAccessMode;
     int firstChapter;
     int lastChapter;
-
     int extendedFilePosition;
+
     int dateTimeAccessed;
     int dateTimeCreated;
+
     int originalSizeEvenForVolumes;
 
     String name;
+
     String comment;
 
     byte[][] extendedHeaders;
 
-    static class Flags {
-        static final int GARBLED = 0x01;
-        static final int VOLUME = 0x04;
-        static final int EXTFILE = 0x08;
-        static final int PATHSYM = 0x10;
-        static final int BACKUP = 0x20;
-    }
-
-    static class FileTypes {
-        static final int BINARY = 0;
-        static final int SEVEN_BIT_TEXT = 1;
-        static final int COMMENT_HEADER = 2;
-        static final int DIRECTORY = 3;
-        static final int VOLUME_LABEL = 4;
-        static final int CHAPTER_LABEL = 5;
+    @Override
+    public boolean equals(final Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        final LocalFileHeader other = (LocalFileHeader) obj;
+        return
+            archiverVersionNumber == other.archiverVersionNumber &&
+            minVersionToExtract == other.minVersionToExtract &&
+            hostOS == other.hostOS &&
+            arjFlags == other.arjFlags &&
+            method == other.method &&
+            fileType == other.fileType &&
+            reserved == other.reserved &&
+            dateTimeModified == other.dateTimeModified &&
+            compressedSize == other.compressedSize &&
+            originalSize == other.originalSize &&
+            originalCrc32 == other.originalCrc32 &&
+            fileSpecPosition == other.fileSpecPosition &&
+            fileAccessMode == other.fileAccessMode &&
+            firstChapter == other.firstChapter &&
+            lastChapter == other.lastChapter &&
+            extendedFilePosition == other.extendedFilePosition &&
+            dateTimeAccessed == other.dateTimeAccessed &&
+            dateTimeCreated == other.dateTimeCreated &&
+            originalSizeEvenForVolumes == other.originalSizeEvenForVolumes &&
+            Objects.equals(name, other.name) &&
+            Objects.equals(comment, other.comment) &&
+            Arrays.deepEquals(extendedHeaders, other.extendedHeaders);
     }
 
-    static class Methods {
-        static final int STORED = 0;
-        static final int COMPRESSED_MOST = 1;
-        static final int COMPRESSED = 2;
-        static final int COMPRESSED_FASTER = 3;
-        static final int COMPRESSED_FASTEST = 4;
-        static final int NO_DATA_NO_CRC = 8;
-        static final int NO_DATA = 9;
+    @Override
+    public int hashCode() {
+        return name == null ? 0 : name.hashCode();
     }
 
     @Override
@@ -125,43 +164,4 @@ class LocalFileHeader {
         return builder.toString();
     }
 
-    @Override
-    public int hashCode() {
-        return name == null ? 0 : name.hashCode();
-    }
-
-    @Override
-    public boolean equals(final Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        final LocalFileHeader other = (LocalFileHeader) obj;
-        return
-            archiverVersionNumber == other.archiverVersionNumber &&
-            minVersionToExtract == other.minVersionToExtract &&
-            hostOS == other.hostOS &&
-            arjFlags == other.arjFlags &&
-            method == other.method &&
-            fileType == other.fileType &&
-            reserved == other.reserved &&
-            dateTimeModified == other.dateTimeModified &&
-            compressedSize == other.compressedSize &&
-            originalSize == other.originalSize &&
-            originalCrc32 == other.originalCrc32 &&
-            fileSpecPosition == other.fileSpecPosition &&
-            fileAccessMode == other.fileAccessMode &&
-            firstChapter == other.firstChapter &&
-            lastChapter == other.lastChapter &&
-            extendedFilePosition == other.extendedFilePosition &&
-            dateTimeAccessed == other.dateTimeAccessed &&
-            dateTimeCreated == other.dateTimeCreated &&
-            originalSizeEvenForVolumes == other.originalSizeEvenForVolumes &&
-            Objects.equals(name, other.name) &&
-            Objects.equals(comment, other.comment) &&
-            Arrays.deepEquals(extendedHeaders, other.extendedHeaders);
-    }
-
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/arj/MainHeader.java b/src/main/java/org/apache/commons/compress/archivers/arj/MainHeader.java
index 2dba92ee..af4101cd 100644
--- a/src/main/java/org/apache/commons/compress/archivers/arj/MainHeader.java
+++ b/src/main/java/org/apache/commons/compress/archivers/arj/MainHeader.java
@@ -20,27 +20,6 @@ package org.apache.commons.compress.archivers.arj;
 import java.util.Arrays;
 
 class MainHeader {
-    int archiverVersionNumber;
-    int minVersionToExtract;
-    int hostOS;
-    int arjFlags;
-    int securityVersion;
-    int fileType;
-    int reserved;
-    int dateTimeCreated;
-    int dateTimeModified;
-    long archiveSize;
-    int securityEnvelopeFilePosition;
-    int fileSpecPosition;
-    int securityEnvelopeLength;
-    int encryptionVersion;
-    int lastChapter;
-    int arjProtectionFactor;
-    int arjFlags2;
-    String name;
-    String comment;
-    byte[] extendedHeaderBytes;
-
     static class Flags {
         static final int GARBLED = 0x01;
         static final int OLD_SECURED_NEW_ANSI_PAGE = 0x02;
@@ -51,7 +30,6 @@ class MainHeader {
         static final int SECURED = 0x40;
         static final int ALTNAME = 0x80;
     }
-
     static class HostOS {
         static final int MS_DOS = 0;
         static final int PRIMOS = 1;
@@ -66,6 +44,28 @@ class MainHeader {
         static final int WIN95 = 10;
         static final int WIN32 = 11;
     }
+    int archiverVersionNumber;
+    int minVersionToExtract;
+    int hostOS;
+    int arjFlags;
+    int securityVersion;
+    int fileType;
+    int reserved;
+    int dateTimeCreated;
+    int dateTimeModified;
+    long archiveSize;
+    int securityEnvelopeFilePosition;
+    int fileSpecPosition;
+    int securityEnvelopeLength;
+    int encryptionVersion;
+    int lastChapter;
+    int arjProtectionFactor;
+    int arjFlags2;
+    String name;
+
+    String comment;
+
+    byte[] extendedHeaderBytes;
 
     @Override
     public String toString() {
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java
index 3b61a4e9..0b7f3b1b 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java
@@ -196,6 +196,37 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
 
     private long uid;
 
+    /**
+     * Creates a CpioArchiveEntry with a specified name for a
+     * specified file. The format of this entry will be the new
+     * format.
+     *
+     * @param inputFile
+     *            The file to gather information from.
+     * @param entryName
+     *            The name of this entry.
+     */
+    public CpioArchiveEntry(final File inputFile, final String entryName) {
+        this(FORMAT_NEW, inputFile, entryName);
+    }
+
+    /**
+     * Creates a CpioArchiveEntry with a specified name for a
+     * specified file. The format of this entry will be the new
+     * format.
+     *
+     * @param inputPath
+     *            The file to gather information from.
+     * @param entryName
+     *            The name of this entry.
+     * @param options options indicating how symbolic links are handled.
+     * @throws IOException if an I/O error occurs
+     * @since 1.21
+     */
+    public CpioArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
+        this(FORMAT_NEW, inputPath, entryName, options);
+    }
+
     /**
      * Creates a CpioArchiveEntry with a specified format.
      *
@@ -234,111 +265,6 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         this.fileFormat = format;
     }
 
-    /**
-     * Creates a CpioArchiveEntry with a specified name. The format of
-     * this entry will be the new format.
-     *
-     * @param name
-     *            The name of this entry.
-     */
-    public CpioArchiveEntry(final String name) {
-        this(FORMAT_NEW, name);
-    }
-
-    /**
-     * Creates a CpioArchiveEntry with a specified name.
-     *
-     * @param format
-     *            The cpio format for this entry.
-     * @param name
-     *            The name of this entry.
-     * <p>
-     * Possible format values are:
-     * <pre>
-     * CpioConstants.FORMAT_NEW
-     * CpioConstants.FORMAT_NEW_CRC
-     * CpioConstants.FORMAT_OLD_BINARY
-     * CpioConstants.FORMAT_OLD_ASCII
-     * </pre>
-     *
-     * @since 1.1
-     */
-    public CpioArchiveEntry(final short format, final String name) {
-        this(format);
-        this.name = name;
-    }
-
-    /**
-     * Creates a CpioArchiveEntry with a specified name. The format of
-     * this entry will be the new format.
-     *
-     * @param name
-     *            The name of this entry.
-     * @param size
-     *            The size of this entry
-     */
-    public CpioArchiveEntry(final String name, final long size) {
-        this(name);
-        this.setSize(size);
-    }
-
-    /**
-     * Creates a CpioArchiveEntry with a specified name.
-     *
-     * @param format
-     *            The cpio format for this entry.
-     * @param name
-     *            The name of this entry.
-     * @param size
-     *            The size of this entry
-     * <p>
-     * Possible format values are:
-     * <pre>
-     * CpioConstants.FORMAT_NEW
-     * CpioConstants.FORMAT_NEW_CRC
-     * CpioConstants.FORMAT_OLD_BINARY
-     * CpioConstants.FORMAT_OLD_ASCII
-     * </pre>
-     *
-     * @since 1.1
-     */
-    public CpioArchiveEntry(final short format, final String name,
-                            final long size) {
-        this(format, name);
-        this.setSize(size);
-    }
-
-    /**
-     * Creates a CpioArchiveEntry with a specified name for a
-     * specified file. The format of this entry will be the new
-     * format.
-     *
-     * @param inputFile
-     *            The file to gather information from.
-     * @param entryName
-     *            The name of this entry.
-     */
-    public CpioArchiveEntry(final File inputFile, final String entryName) {
-        this(FORMAT_NEW, inputFile, entryName);
-    }
-
-    /**
-     * Creates a CpioArchiveEntry with a specified name for a
-     * specified file. The format of this entry will be the new
-     * format.
-     *
-     * @param inputPath
-     *            The file to gather information from.
-     * @param entryName
-     *            The name of this entry.
-     * @param options options indicating how symbolic links are handled.
-     * @throws IOException if an I/O error occurs
-     * @since 1.21
-     */
-    public CpioArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options) throws IOException {
-        this(FORMAT_NEW, inputPath, entryName, options);
-    }
-
     /**
      * Creates a CpioArchiveEntry with a specified name for a
      * specified file.
@@ -412,6 +338,80 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         setTime(Files.getLastModifiedTime(inputPath, options));
     }
 
+    /**
+     * Creates a CpioArchiveEntry with a specified name.
+     *
+     * @param format
+     *            The cpio format for this entry.
+     * @param name
+     *            The name of this entry.
+     * <p>
+     * Possible format values are:
+     * <pre>
+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * </pre>
+     *
+     * @since 1.1
+     */
+    public CpioArchiveEntry(final short format, final String name) {
+        this(format);
+        this.name = name;
+    }
+
+    /**
+     * Creates a CpioArchiveEntry with a specified name.
+     *
+     * @param format
+     *            The cpio format for this entry.
+     * @param name
+     *            The name of this entry.
+     * @param size
+     *            The size of this entry
+     * <p>
+     * Possible format values are:
+     * <pre>
+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * </pre>
+     *
+     * @since 1.1
+     */
+    public CpioArchiveEntry(final short format, final String name,
+                            final long size) {
+        this(format, name);
+        this.setSize(size);
+    }
+
+    /**
+     * Creates a CpioArchiveEntry with a specified name. The format of
+     * this entry will be the new format.
+     *
+     * @param name
+     *            The name of this entry.
+     */
+    public CpioArchiveEntry(final String name) {
+        this(FORMAT_NEW, name);
+    }
+
+    /**
+     * Creates a CpioArchiveEntry with a specified name. The format of
+     * this entry will be the new format.
+     *
+     * @param name
+     *            The name of this entry.
+     * @param size
+     *            The size of this entry
+     */
+    public CpioArchiveEntry(final String name, final long size) {
+        this(name);
+        this.setSize(size);
+    }
+
     /**
      * Checks if the method is allowed for the defined format.
      */
@@ -430,6 +430,33 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         }
     }
 
+    /* (non-Javadoc)
+     * @see Object#equals(Object)
+     */
+    @Override
+    public boolean equals(final Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null || getClass() != obj.getClass()) {
+            return false;
+        }
+        final CpioArchiveEntry other = (CpioArchiveEntry) obj;
+        if (name == null) {
+            return other.name == null;
+        }
+        return name.equals(other.name);
+    }
+
+    /**
+     * Gets the alignment boundary for this CPIO format
+     *
+     * @return Returns the alignment boundary (0, 2, 4) in bytes
+     */
+    public int getAlignmentBoundary() {
+        return this.alignmentBoundary;
+    }
+
     /**
      * Gets the checksum.
      * Only supported for the new formats.
@@ -442,6 +469,23 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.chksum & 0xFFFFFFFFL;
     }
 
+    /**
+     * Gets the number of bytes needed to pad the data to the alignment boundary.
+     *
+     * @return the number of bytes needed to pad the data (0,1,2,3)
+     */
+    public int getDataPadCount() {
+        if (this.alignmentBoundary == 0) {
+            return 0;
+        }
+        final long size = this.filesize;
+        final int remain = (int) (size % this.alignmentBoundary);
+        if (remain > 0) {
+            return this.alignmentBoundary - remain;
+        }
+        return 0;
+    }
+
     /**
      * Gets the device id.
      *
@@ -479,17 +523,6 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.min;
     }
 
-    /**
-     * Gets the filesize.
-     *
-     * @return Returns the filesize.
-     * @see org.apache.commons.compress.archivers.ArchiveEntry#getSize()
-     */
-    @Override
-    public long getSize() {
-        return this.filesize;
-    }
-
     /**
      * Gets the format for this entry.
      *
@@ -508,24 +541,6 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.gid;
     }
 
-    /**
-     * Gets the header size for this CPIO format
-     *
-     * @return Returns the header size in bytes.
-     */
-    public int getHeaderSize() {
-        return this.headerSize;
-    }
-
-    /**
-     * Gets the alignment boundary for this CPIO format
-     *
-     * @return Returns the aligment boundary (0, 2, 4) in bytes
-     */
-    public int getAlignmentBoundary() {
-        return this.alignmentBoundary;
-    }
-
     /**
      * Gets the number of bytes needed to pad the header to the alignment boundary.
      *
@@ -583,20 +598,12 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
     }
 
     /**
-     * Gets the number of bytes needed to pad the data to the alignment boundary.
+     * Gets the header size for this CPIO format
      *
-     * @return the number of bytes needed to pad the data (0,1,2,3)
+     * @return Returns the header size in bytes.
      */
-    public int getDataPadCount() {
-        if (this.alignmentBoundary == 0) {
-            return 0;
-        }
-        final long size = this.filesize;
-        final int remain = (int) (size % this.alignmentBoundary);
-        if (remain > 0) {
-            return this.alignmentBoundary - remain;
-        }
-        return 0;
+    public int getHeaderSize() {
+        return this.headerSize;
     }
 
     /**
@@ -608,6 +615,11 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.inode;
     }
 
+    @Override
+    public Date getLastModifiedDate() {
+        return new Date(1000 * getTime());
+    }
+
     /**
      * Gets the mode of this entry (e.g. directory, regular file).
      *
@@ -677,6 +689,17 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.rmin;
     }
 
+    /**
+     * Gets the filesize.
+     *
+     * @return Returns the filesize.
+     * @see org.apache.commons.compress.archivers.ArchiveEntry#getSize()
+     */
+    @Override
+    public long getSize() {
+        return this.filesize;
+    }
+
     /**
      * Gets the time in seconds.
      *
@@ -686,11 +709,6 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.mtime;
     }
 
-    @Override
-    public Date getLastModifiedDate() {
-        return new Date(1000 * getTime());
-    }
-
     /**
      * Gets the user id.
      *
@@ -700,6 +718,14 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         return this.uid;
     }
 
+    /* (non-Javadoc)
+     * @see Object#hashCode()
+     */
+    @Override
+    public int hashCode() {
+        return Objects.hash(name);
+    }
+
     /**
      * Checks if this entry represents a block device.
      *
@@ -821,19 +847,6 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         this.min = min;
     }
 
-    /**
-     * Sets the filesize.
-     *
-     * @param size
-     *            The filesize to set.
-     */
-    public void setSize(final long size) {
-        if (size < 0 || size > 0xFFFFFFFFL) {
-            throw new IllegalArgumentException("Invalid entry size <" + size + ">");
-        }
-        this.filesize = size;
-    }
-
     /**
      * Sets the group id.
      *
@@ -945,13 +958,16 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
     }
 
     /**
-     * Sets the time in seconds.
+     * Sets the filesize.
      *
-     * @param time
-     *            The time to set.
+     * @param size
+     *            The filesize to set.
      */
-    public void setTime(final long time) {
-        this.mtime = time;
+    public void setSize(final long size) {
+        if (size < 0 || size > 0xFFFFFFFFL) {
+            throw new IllegalArgumentException("Invalid entry size <" + size + ">");
+        }
+        this.filesize = size;
     }
 
     /**
@@ -964,6 +980,16 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
         this.mtime = time.to(TimeUnit.SECONDS);
     }
 
+    /**
+     * Sets the time in seconds.
+     *
+     * @param time
+     *            The time to set.
+     */
+    public void setTime(final long time) {
+        this.mtime = time;
+    }
+
     /**
      * Sets the user id.
      *
@@ -973,30 +999,4 @@ public class CpioArchiveEntry implements CpioConstants, ArchiveEntry {
     public void setUID(final long uid) {
         this.uid = uid;
     }
-
-    /* (non-Javadoc)
-     * @see Object#hashCode()
-     */
-    @Override
-    public int hashCode() {
-        return Objects.hash(name);
-    }
-
-    /* (non-Javadoc)
-     * @see Object#equals(Object)
-     */
-    @Override
-    public boolean equals(final Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-        final CpioArchiveEntry other = (CpioArchiveEntry) obj;
-        if (name == null) {
-            return other.name == null;
-        }
-        return name.equals(other.name);
-    }
 }
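
The CpioArchiveEntry constructors and padding accessors grouped above fit together as in this sketch (entry name and size chosen purely for illustration):

    import org.apache.commons.compress.archivers.cpio.CpioArchiveEntry;
    import org.apache.commons.compress.archivers.cpio.CpioConstants;

    public class CpioEntrySketch {
        public static void main(final String[] args) {
            // "new" portable ASCII format; FORMAT_NEW_CRC, FORMAT_OLD_ASCII and
            // FORMAT_OLD_BINARY can be passed in the same way
            final CpioArchiveEntry entry = new CpioArchiveEntry(CpioConstants.FORMAT_NEW, "docs/readme.txt", 10);
            entry.setTime(System.currentTimeMillis() / 1000); // mtime is stored in seconds

            // the new format aligns entry data to 4 bytes, so a 10-byte payload needs 2 pad bytes
            System.out.println("header size: " + entry.getHeaderSize());
            System.out.println("alignment:   " + entry.getAlignmentBoundary());
            System.out.println("data pad:    " + entry.getDataPadCount());
        }
    }
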
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
index 63ad8024..222f3144 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
@@ -67,6 +67,66 @@ import org.apache.commons.compress.utils.IOUtils;
 public class CpioArchiveInputStream extends ArchiveInputStream implements
         CpioConstants {
 
+    /**
+     * Checks if the signature matches one of the following magic values:
+     *
+     * Strings:
+     *
+     * "070701" - MAGIC_NEW
+     * "070702" - MAGIC_NEW_CRC
+     * "070707" - MAGIC_OLD_ASCII
+     *
+     * Octal Binary value:
+     *
+     * 070707 - MAGIC_OLD_BINARY (held as a short) = 0x71C7 or 0xC771
+     * @param signature data to match
+     * @param length length of data
+     * @return whether the buffer seems to contain CPIO data
+     */
+    public static boolean matches(final byte[] signature, final int length) {
+        if (length < 6) {
+            return false;
+        }
+
+        // Check binary values
+        if (signature[0] == 0x71 && (signature[1] & 0xFF) == 0xc7) {
+            return true;
+        }
+        if (signature[1] == 0x71 && (signature[0] & 0xFF) == 0xc7) {
+            return true;
+        }
+
+        // Check Ascii (String) values
+        // 3037 3037 30nn
+        if (signature[0] != 0x30) {
+            return false;
+        }
+        if (signature[1] != 0x37) {
+            return false;
+        }
+        if (signature[2] != 0x30) {
+            return false;
+        }
+        if (signature[3] != 0x37) {
+            return false;
+        }
+        if (signature[4] != 0x30) {
+            return false;
+        }
+        // Check last byte
+        if (signature[5] == 0x31) {
+            return true;
+        }
+        if (signature[5] == 0x32) {
+            return true;
+        }
+        if (signature[5] == 0x37) {
+            return true;
+        }
+
+        return false;
+    }
+
     private boolean closed;
 
     private CpioArchiveEntry entry;
@@ -80,10 +140,10 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
     private long crc;
 
     private final InputStream in;
-
     // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection)
     private final byte[] twoBytesBuf = new byte[2];
     private final byte[] fourBytesBuf = new byte[4];
+
     private final byte[] sixBytesBuf = new byte[6];
 
     private final int blockSize;
@@ -108,21 +168,6 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         this(in, BLOCK_SIZE, CharsetNames.US_ASCII);
     }
 
-    /**
-     * Construct the cpio input stream with a blocksize of {@link
-     * CpioConstants#BLOCK_SIZE BLOCK_SIZE}.
-     *
-     * @param in
-     *            The cpio stream
-     * @param encoding
-     *            The encoding of file names to expect - use null for
-     *            the platform's default.
-     * @since 1.6
-     */
-    public CpioArchiveInputStream(final InputStream in, final String encoding) {
-        this(in, BLOCK_SIZE, encoding);
-    }
-
     /**
      * Construct the cpio input stream with a blocksize of {@link
      * CpioConstants#BLOCK_SIZE BLOCK_SIZE} expecting ASCII file
@@ -161,6 +206,21 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
     }
 
+    /**
+     * Construct the cpio input stream with a blocksize of {@link
+     * CpioConstants#BLOCK_SIZE BLOCK_SIZE}.
+     *
+     * @param in
+     *            The cpio stream
+     * @param encoding
+     *            The encoding of file names to expect - use null for
+     *            the platform's default.
+     * @since 1.6
+     */
+    public CpioArchiveInputStream(final InputStream in, final String encoding) {
+        this(in, BLOCK_SIZE, encoding);
+    }
+
     /**
      * Returns 0 after EOF has reached for the current entry data, otherwise
      * always return 1.
@@ -277,11 +337,9 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         return this.entry;
     }
 
-    private void skip(final int bytes) throws IOException{
-        // bytes cannot be more than 3 bytes
-        if (bytes > 0) {
-            readFully(fourBytesBuf, 0, bytes);
-        }
+    @Override
+    public ArchiveEntry getNextEntry() throws IOException {
+        return getNextCPIOEntry();
     }
 
     /**
@@ -344,24 +402,10 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         return tmpread;
     }
 
-    private final int readFully(final byte[] b, final int off, final int len)
-            throws IOException {
-        final int count = IOUtils.readFully(in, b, off, len);
-        count(count);
-        if (count < len) {
-            throw new EOFException();
-        }
-        return count;
-    }
-
-    private final byte[] readRange(final int len)
+    private long readAsciiLong(final int length, final int radix)
             throws IOException {
-        final byte[] b = IOUtils.readRange(in, len);
-        count(b.length);
-        if (b.length < len) {
-            throw new EOFException();
-        }
-        return b;
+        final byte[] tmpBuffer = readRange(length);
+        return Long.parseLong(ArchiveUtils.toAsciiString(tmpBuffer), radix);
     }
 
     private long readBinaryLong(final int length, final boolean swapHalfWord)
@@ -370,10 +414,23 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         return CpioUtil.byteArray2long(tmp, swapHalfWord);
     }
 
-    private long readAsciiLong(final int length, final int radix)
+    private String readCString(final int length) throws IOException {
+        // don't include trailing NUL in file name to decode
+        final byte[] tmpBuffer = readRange(length - 1);
+        if (this.in.read() == -1) {
+            throw new EOFException();
+        }
+        return zipEncoding.decode(tmpBuffer);
+    }
+
+    private final int readFully(final byte[] b, final int off, final int len)
             throws IOException {
-        final byte[] tmpBuffer = readRange(length);
-        return Long.parseLong(ArchiveUtils.toAsciiString(tmpBuffer), radix);
+        final int count = IOUtils.readFully(in, b, off, len);
+        count(count);
+        if (count < len) {
+            throw new EOFException();
+        }
+        return count;
     }
 
     private CpioArchiveEntry readNewEntry(final boolean hasCrc)
@@ -487,13 +544,21 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         return ret;
     }
 
-    private String readCString(final int length) throws IOException {
-        // don't include trailing NUL in file name to decode
-        final byte[] tmpBuffer = readRange(length - 1);
-        if (this.in.read() == -1) {
+    private final byte[] readRange(final int len)
+            throws IOException {
+        final byte[] b = IOUtils.readRange(in, len);
+        count(b.length);
+        if (b.length < len) {
             throw new EOFException();
         }
-        return zipEncoding.decode(tmpBuffer);
+        return b;
+    }
+
+    private void skip(final int bytes) throws IOException{
+        // bytes cannot be more than 3 bytes
+        if (bytes > 0) {
+            readFully(fourBytesBuf, 0, bytes);
+        }
     }
 
     /**
@@ -531,11 +596,6 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
         return total;
     }
 
-    @Override
-    public ArchiveEntry getNextEntry() throws IOException {
-        return getNextCPIOEntry();
-    }
-
     /**
      * Skips the padding zeros written after the TRAILER!!! entry.
      */
@@ -551,64 +611,4 @@ public class CpioArchiveInputStream extends ArchiveInputStream implements
             remainingBytes -= skipped;
         }
     }
-
-    /**
-     * Checks if the signature matches one of the following magic values:
-     *
-     * Strings:
-     *
-     * "070701" - MAGIC_NEW
-     * "070702" - MAGIC_NEW_CRC
-     * "070707" - MAGIC_OLD_ASCII
-     *
-     * Octal Binary value:
-     *
-     * 070707 - MAGIC_OLD_BINARY (held as a short) = 0x71C7 or 0xC771
-     * @param signature data to match
-     * @param length length of data
-     * @return whether the buffer seems to contain CPIO data
-     */
-    public static boolean matches(final byte[] signature, final int length) {
-        if (length < 6) {
-            return false;
-        }
-
-        // Check binary values
-        if (signature[0] == 0x71 && (signature[1] & 0xFF) == 0xc7) {
-            return true;
-        }
-        if (signature[1] == 0x71 && (signature[0] & 0xFF) == 0xc7) {
-            return true;
-        }
-
-        // Check Ascii (String) values
-        // 3037 3037 30nn
-        if (signature[0] != 0x30) {
-            return false;
-        }
-        if (signature[1] != 0x37) {
-            return false;
-        }
-        if (signature[2] != 0x30) {
-            return false;
-        }
-        if (signature[3] != 0x37) {
-            return false;
-        }
-        if (signature[4] != 0x30) {
-            return false;
-        }
-        // Check last byte
-        if (signature[5] == 0x31) {
-            return true;
-        }
-        if (signature[5] == 0x32) {
-            return true;
-        }
-        if (signature[5] == 0x37) {
-            return true;
-        }
-
-        return false;
-    }
 }
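
A minimal usage sketch of the read path these members implement (illustrative
only, not part of this change; the archive path "sample.cpio" is a placeholder):

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.cpio.CpioArchiveEntry;
    import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream;

    public class CpioReadSketch {
        public static void main(final String[] args) throws Exception {
            // "sample.cpio" is a placeholder path.
            try (InputStream in = Files.newInputStream(Paths.get("sample.cpio"));
                 CpioArchiveInputStream cpio = new CpioArchiveInputStream(in)) {
                CpioArchiveEntry entry;
                while ((entry = cpio.getNextCPIOEntry()) != null) {
                    // readAsciiLong/readCString parse each header internally
                    // before the entry body becomes readable from the stream.
                    System.out.println(entry.getName() + " " + entry.getSize());
                }
            }
        }
    }
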
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java
index 26faaff7..aaa777a2 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java
@@ -100,6 +100,17 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
     // the provided encoding (for unit tests)
     final String encoding;
 
+    /**
+     * Construct the cpio output stream. The format for this CPIO stream is the
+     * "new" format using ASCII encoding for file names
+     *
+     * @param out
+     *            The cpio stream
+     */
+    public CpioArchiveOutputStream(final OutputStream out) {
+        this(out, FORMAT_NEW);
+    }
+
     /**
      * Construct the cpio output stream with a specified format, a
      * blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE} and
@@ -167,17 +178,6 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
         this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
     }
 
-    /**
-     * Construct the cpio output stream. The format for this CPIO stream is the
-     * "new" format using ASCII encoding for file names
-     *
-     * @param out
-     *            The cpio stream
-     */
-    public CpioArchiveOutputStream(final OutputStream out) {
-        this(out, FORMAT_NEW);
-    }
-
     /**
      * Construct the cpio output stream. The format for this CPIO stream is the
      * "new" format.
@@ -193,6 +193,101 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
         this(out, FORMAT_NEW, BLOCK_SIZE, encoding);
     }
 
+    /**
+     * Closes the CPIO output stream as well as the stream being filtered.
+     *
+     * @throws IOException
+     *             if an I/O error has occurred or if a CPIO file error has
+     *             occurred
+     */
+    @Override
+    public void close() throws IOException {
+        try {
+            if (!finished) {
+                finish();
+            }
+        } finally {
+            if (!this.closed) {
+                out.close();
+                this.closed = true;
+            }
+        }
+    }
+
+    /*(non-Javadoc)
+     *
+     * @see
+     * org.apache.commons.compress.archivers.ArchiveOutputStream#closeArchiveEntry
+     * ()
+     */
+    @Override
+    public void closeArchiveEntry() throws IOException {
+        if(finished) {
+            throw new IOException("Stream has already been finished");
+        }
+
+        ensureOpen();
+
+        if (entry == null) {
+            throw new IOException("Trying to close non-existent entry");
+        }
+
+        if (this.entry.getSize() != this.written) {
+            throw new IOException("Invalid entry size (expected "
+                    + this.entry.getSize() + " but got " + this.written
+                    + " bytes)");
+        }
+        pad(this.entry.getDataPadCount());
+        if (this.entry.getFormat() == FORMAT_NEW_CRC
+            && this.crc != this.entry.getChksum()) {
+            throw new IOException("CRC Error");
+        }
+        this.entry = null;
+        this.crc = 0;
+        this.written = 0;
+    }
+
+    /**
+     * Creates a new ArchiveEntry. The entryName must be an ASCII encoded string.
+     *
+     * @see org.apache.commons.compress.archivers.ArchiveOutputStream#createArchiveEntry(java.io.File, String)
+     */
+    @Override
+    public ArchiveEntry createArchiveEntry(final File inputFile, final String entryName)
+            throws IOException {
+        if(finished) {
+            throw new IOException("Stream has already been finished");
+        }
+        return new CpioArchiveEntry(inputFile, entryName);
+    }
+
+    /**
+     * Creates a new ArchiveEntry. The entryName must be an ASCII encoded string.
+     *
+     * @see org.apache.commons.compress.archivers.ArchiveOutputStream#createArchiveEntry(java.io.File, String)
+     */
+    @Override
+    public ArchiveEntry createArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options)
+            throws IOException {
+        if(finished) {
+            throw new IOException("Stream has already been finished");
+        }
+        return new CpioArchiveEntry(inputPath, entryName, options);
+    }
+
+    /**
+     * Encodes the given string using the configured encoding.
+     *
+     * @param str the String to write
+     * @throws IOException if the string couldn't be written
+     * @return result of encoding the string
+     */
+    private byte[] encode(final String str) throws IOException {
+        final ByteBuffer buf = zipEncoding.encode(str);
+        final int len = buf.limit() - buf.position();
+        return Arrays.copyOfRange(buf.array(), buf.arrayOffset(), buf.arrayOffset() + len);
+    }
+
     /**
      * Check to make sure that this stream has not been closed
      *
@@ -205,6 +300,47 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
         }
     }
 
+    /**
+     * Finishes writing the contents of the CPIO output stream without closing
+     * the underlying stream. Use this method when applying multiple filters in
+     * succession to the same output stream.
+     *
+     * @throws IOException
+     *             if an I/O exception has occurred or if a CPIO file error has
+     *             occurred
+     */
+    @Override
+    public void finish() throws IOException {
+        ensureOpen();
+        if (finished) {
+            throw new IOException("This archive has already been finished");
+        }
+
+        if (this.entry != null) {
+            throw new IOException("This archive contains unclosed entries.");
+        }
+        this.entry = new CpioArchiveEntry(this.entryFormat);
+        this.entry.setName(CPIO_TRAILER);
+        this.entry.setNumberOfLinks(1);
+        writeHeader(this.entry);
+        closeArchiveEntry();
+
+        final int lengthOfLastBlock = (int) (getBytesWritten() % blockSize);
+        if (lengthOfLastBlock != 0) {
+            pad(blockSize - lengthOfLastBlock);
+        }
+
+        finished = true;
+    }
+
+    private void pad(final int count) throws IOException{
+        if (count > 0){
+            final byte[] buff = new byte[count];
+            out.write(buff);
+            count(count);
+        }
+    }
+
     /**
      * Begins writing a new CPIO file entry and positions the stream to the
      * start of the entry data. Closes the current entry if still active. The
@@ -248,6 +384,92 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
         this.written = 0;
     }
 
+    /**
+     * Writes an array of bytes to the current CPIO entry data. This method will
+     * block until all the bytes are written.
+     *
+     * @param b
+     *            the data to be written
+     * @param off
+     *            the start offset in the data
+     * @param len
+     *            the number of bytes that are written
+     * @throws IOException
+     *             if an I/O error has occurred or if a CPIO file error has
+     *             occurred
+     */
+    @Override
+    public void write(final byte[] b, final int off, final int len)
+            throws IOException {
+        ensureOpen();
+        if (off < 0 || len < 0 || off > b.length - len) {
+            throw new IndexOutOfBoundsException();
+        }
+        if (len == 0) {
+            return;
+        }
+
+        if (this.entry == null) {
+            throw new IOException("No current CPIO entry");
+        }
+        if (this.written + len > this.entry.getSize()) {
+            throw new IOException("Attempt to write past end of STORED entry");
+        }
+        out.write(b, off, len);
+        this.written += len;
+        if (this.entry.getFormat() == FORMAT_NEW_CRC) {
+            for (int pos = 0; pos < len; pos++) {
+                this.crc += b[pos] & 0xFF;
+                this.crc &= 0xFFFFFFFFL;
+            }
+        }
+        count(len);
+    }
+
+    private void writeAsciiLong(final long number, final int length,
+            final int radix) throws IOException {
+        final StringBuilder tmp = new StringBuilder();
+        final String tmpStr;
+        if (radix == 16) {
+            tmp.append(Long.toHexString(number));
+        } else if (radix == 8) {
+            tmp.append(Long.toOctalString(number));
+        } else {
+            tmp.append(number);
+        }
+
+        if (tmp.length() <= length) {
+            final int insertLength = length - tmp.length();
+            for (int pos = 0; pos < insertLength; pos++) {
+                tmp.insert(0, "0");
+            }
+            tmpStr = tmp.toString();
+        } else {
+            tmpStr = tmp.substring(tmp.length() - length);
+        }
+        final byte[] b = ArchiveUtils.toAsciiBytes(tmpStr);
+        out.write(b);
+        count(b.length);
+    }
+
+    private void writeBinaryLong(final long number, final int length,
+            final boolean swapHalfWord) throws IOException {
+        final byte[] tmp = CpioUtil.long2byteArray(number, length, swapHalfWord);
+        out.write(tmp);
+        count(tmp.length);
+    }
+
+    /**
+     * Writes an encoded string to the stream followed by \0
+     * @param str the String to write
+     * @throws IOException if the string couldn't be written
+     */
+    private void writeCString(final byte[] str) throws IOException {
+        out.write(str);
+        out.write('\0');
+        count(str.length + 1);
+    }
+
     private void writeHeader(final CpioArchiveEntry e) throws IOException {
         switch (e.getFormat()) {
         case FORMAT_NEW:
@@ -366,226 +588,4 @@ public class CpioArchiveOutputStream extends ArchiveOutputStream implements
         pad(entry.getHeaderPadCount(name.length));
     }
 
-    /*(non-Javadoc)
-     *
-     * @see
-     * org.apache.commons.compress.archivers.ArchiveOutputStream#closeArchiveEntry
-     * ()
-     */
-    @Override
-    public void closeArchiveEntry() throws IOException {
-        if(finished) {
-            throw new IOException("Stream has already been finished");
-        }
-
-        ensureOpen();
-
-        if (entry == null) {
-            throw new IOException("Trying to close non-existent entry");
-        }
-
-        if (this.entry.getSize() != this.written) {
-            throw new IOException("Invalid entry size (expected "
-                    + this.entry.getSize() + " but got " + this.written
-                    + " bytes)");
-        }
-        pad(this.entry.getDataPadCount());
-        if (this.entry.getFormat() == FORMAT_NEW_CRC
-            && this.crc != this.entry.getChksum()) {
-            throw new IOException("CRC Error");
-        }
-        this.entry = null;
-        this.crc = 0;
-        this.written = 0;
-    }
-
-    /**
-     * Writes an array of bytes to the current CPIO entry data. This method will
-     * block until all the bytes are written.
-     *
-     * @param b
-     *            the data to be written
-     * @param off
-     *            the start offset in the data
-     * @param len
-     *            the number of bytes that are written
-     * @throws IOException
-     *             if an I/O error has occurred or if a CPIO file error has
-     *             occurred
-     */
-    @Override
-    public void write(final byte[] b, final int off, final int len)
-            throws IOException {
-        ensureOpen();
-        if (off < 0 || len < 0 || off > b.length - len) {
-            throw new IndexOutOfBoundsException();
-        }
-        if (len == 0) {
-            return;
-        }
-
-        if (this.entry == null) {
-            throw new IOException("No current CPIO entry");
-        }
-        if (this.written + len > this.entry.getSize()) {
-            throw new IOException("Attempt to write past end of STORED entry");
-        }
-        out.write(b, off, len);
-        this.written += len;
-        if (this.entry.getFormat() == FORMAT_NEW_CRC) {
-            for (int pos = 0; pos < len; pos++) {
-                this.crc += b[pos] & 0xFF;
-                this.crc &= 0xFFFFFFFFL;
-            }
-        }
-        count(len);
-    }
-
-    /**
-     * Finishes writing the contents of the CPIO output stream without closing
-     * the underlying stream. Use this method when applying multiple filters in
-     * succession to the same output stream.
-     *
-     * @throws IOException
-     *             if an I/O exception has occurred or if a CPIO file error has
-     *             occurred
-     */
-    @Override
-    public void finish() throws IOException {
-        ensureOpen();
-        if (finished) {
-            throw new IOException("This archive has already been finished");
-        }
-
-        if (this.entry != null) {
-            throw new IOException("This archive contains unclosed entries.");
-        }
-        this.entry = new CpioArchiveEntry(this.entryFormat);
-        this.entry.setName(CPIO_TRAILER);
-        this.entry.setNumberOfLinks(1);
-        writeHeader(this.entry);
-        closeArchiveEntry();
-
-        final int lengthOfLastBlock = (int) (getBytesWritten() % blockSize);
-        if (lengthOfLastBlock != 0) {
-            pad(blockSize - lengthOfLastBlock);
-        }
-
-        finished = true;
-    }
-
-    /**
-     * Closes the CPIO output stream as well as the stream being filtered.
-     *
-     * @throws IOException
-     *             if an I/O error has occurred or if a CPIO file error has
-     *             occurred
-     */
-    @Override
-    public void close() throws IOException {
-        try {
-            if (!finished) {
-                finish();
-            }
-        } finally {
-            if (!this.closed) {
-                out.close();
-                this.closed = true;
-            }
-        }
-    }
-
-    private void pad(final int count) throws IOException{
-        if (count > 0){
-            final byte[] buff = new byte[count];
-            out.write(buff);
-            count(count);
-        }
-    }
-
-    private void writeBinaryLong(final long number, final int length,
-            final boolean swapHalfWord) throws IOException {
-        final byte[] tmp = CpioUtil.long2byteArray(number, length, swapHalfWord);
-        out.write(tmp);
-        count(tmp.length);
-    }
-
-    private void writeAsciiLong(final long number, final int length,
-            final int radix) throws IOException {
-        final StringBuilder tmp = new StringBuilder();
-        final String tmpStr;
-        if (radix == 16) {
-            tmp.append(Long.toHexString(number));
-        } else if (radix == 8) {
-            tmp.append(Long.toOctalString(number));
-        } else {
-            tmp.append(number);
-        }
-
-        if (tmp.length() <= length) {
-            final int insertLength = length - tmp.length();
-            for (int pos = 0; pos < insertLength; pos++) {
-                tmp.insert(0, "0");
-            }
-            tmpStr = tmp.toString();
-        } else {
-            tmpStr = tmp.substring(tmp.length() - length);
-        }
-        final byte[] b = ArchiveUtils.toAsciiBytes(tmpStr);
-        out.write(b);
-        count(b.length);
-    }
-
-    /**
-     * Encodes the given string using the configured encoding.
-     *
-     * @param str the String to write
-     * @throws IOException if the string couldn't be written
-     * @return result of encoding the string
-     */
-    private byte[] encode(final String str) throws IOException {
-        final ByteBuffer buf = zipEncoding.encode(str);
-        final int len = buf.limit() - buf.position();
-        return Arrays.copyOfRange(buf.array(), buf.arrayOffset(), buf.arrayOffset() + len);
-    }
-
-    /**
-     * Writes an encoded string to the stream followed by \0
-     * @param str the String to write
-     * @throws IOException if the string couldn't be written
-     */
-    private void writeCString(final byte[] str) throws IOException {
-        out.write(str);
-        out.write('\0');
-        count(str.length + 1);
-    }
-
-    /**
-     * Creates a new ArchiveEntry. The entryName must be an ASCII encoded string.
-     *
-     * @see org.apache.commons.compress.archivers.ArchiveOutputStream#createArchiveEntry(java.io.File, String)
-     */
-    @Override
-    public ArchiveEntry createArchiveEntry(final File inputFile, final String entryName)
-            throws IOException {
-        if(finished) {
-            throw new IOException("Stream has already been finished");
-        }
-        return new CpioArchiveEntry(inputFile, entryName);
-    }
-
-    /**
-     * Creates a new ArchiveEntry. The entryName must be an ASCII encoded string.
-     *
-     * @see org.apache.commons.compress.archivers.ArchiveOutputStream#createArchiveEntry(java.io.File, String)
-     */
-    @Override
-    public ArchiveEntry createArchiveEntry(final Path inputPath, final String entryName, final LinkOption... options)
-            throws IOException {
-        if(finished) {
-            throw new IOException("Stream has already been finished");
-        }
-        return new CpioArchiveEntry(inputPath, entryName, options);
-    }
-
 }
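
The reordered close()/finish()/closeArchiveEntry() above make up the write path;
a minimal sketch of how a caller drives it (illustrative only; "sample.cpio" and
"hello.txt" are placeholders):

    import java.io.OutputStream;
    import java.nio.charset.StandardCharsets;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.cpio.CpioArchiveEntry;
    import org.apache.commons.compress.archivers.cpio.CpioArchiveOutputStream;

    public class CpioWriteSketch {
        public static void main(final String[] args) throws Exception {
            final byte[] data = "hello\n".getBytes(StandardCharsets.US_ASCII);
            try (OutputStream out = Files.newOutputStream(Paths.get("sample.cpio"));
                 CpioArchiveOutputStream cpio = new CpioArchiveOutputStream(out)) {
                final CpioArchiveEntry entry = new CpioArchiveEntry("hello.txt");
                entry.setSize(data.length);   // must match the bytes written below
                cpio.putArchiveEntry(entry);
                cpio.write(data);             // checked against the size in closeArchiveEntry()
                cpio.closeArchiveEntry();
                cpio.finish();                // writes the TRAILER!!! entry and block padding
            }                                 // close() via try-with-resources
        }
    }
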
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioUtil.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioUtil.java
index b79650a3..ca0b3a80 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioUtil.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioUtil.java
@@ -25,13 +25,6 @@ package org.apache.commons.compress.archivers.cpio;
  */
 class CpioUtil {
 
-    /**
-     * Extracts the file type bits from a mode.
-     */
-    static long fileType(final long mode) {
-        return mode & CpioConstants.S_IFMT;
-    }
-
     /**
      * Converts a byte array to a long. Halfwords can be swapped by setting
      * swapHalfWord=true.
@@ -70,6 +63,13 @@ class CpioUtil {
         return ret;
     }
 
+    /**
+     * Extracts the file type bits from a mode.
+     */
+    static long fileType(final long mode) {
+        return mode & CpioConstants.S_IFMT;
+    }
+
     /**
      * Converts a long number to a byte array
      * Halfwords can be swapped by setting swapHalfWord=true.
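
The relocated fileType() helper simply masks the mode with S_IFMT; a standalone
illustration of that masking using the public CpioConstants values (assuming the
usual C_ISREG/S_IFMT constants; CpioUtil itself is package-private and not called
here):

    import org.apache.commons.compress.archivers.cpio.CpioConstants;

    public class FileTypeMaskSketch {
        public static void main(final String[] args) {
            // A mode word combines file type bits with permission bits;
            // masking with S_IFMT isolates the type, as fileType() does.
            final long mode = CpioConstants.C_ISREG | 0644;          // regular file, rw-r--r--
            final long type = mode & CpioConstants.S_IFMT;
            System.out.println(type == CpioConstants.C_ISREG);       // true
        }
    }
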
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java b/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java
index b8fd1ce0..30a522e1 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/Dirent.java
@@ -50,6 +50,17 @@ class Dirent {
         return ino;
     }
 
+    /**
+     * Get name of directory entry.
+     *
+     * <p>This method returns the raw name as it is stored inside of the archive.</p>
+     *
+     * @return the directory name
+     */
+    String getName() {
+        return name;
+    }
+
     /**
      * Get ino of parent directory.
      * @return the parent i-node
@@ -66,17 +77,6 @@ class Dirent {
         return type;
     }
 
-    /**
-     * Get name of directory entry.
-     *
-     * <p>This method returns the raw name as it is stored inside of the archive.</p>
-     *
-     * @return the directory name
-     */
-    String getName() {
-        return name;
-    }
-
     /**
      * @see Object#toString()
      */
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
index 920a6230..2f2fbff4 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java
@@ -22,20 +22,30 @@ package org.apache.commons.compress.archivers.dump;
  * Various constants associated with dump archives.
  */
 public final class DumpArchiveConstants {
-    public static final int TP_SIZE = 1024;
-    public static final int NTREC = 10;
-    public static final int HIGH_DENSITY_NTREC = 32;
-    public static final int OFS_MAGIC = 60011;
-    public static final int NFS_MAGIC = 60012;
-    public static final int FS_UFS2_MAGIC = 0x19540119;
-    public static final int CHECKSUM = 84446;
-    public static final int LBLSIZE = 16;
-    public static final int NAMELEN = 64;
+    /**
+     * The type of compression.
+     */
+    public enum COMPRESSION_TYPE {
+        ZLIB(0),
+        BZLIB(1),
+        LZO(2);
 
-    /* do not instantiate */
-    private DumpArchiveConstants() {
-    }
+        public static COMPRESSION_TYPE find(final int code) {
+            for (final COMPRESSION_TYPE t : values()) {
+                if (t.code == code) {
+                    return t;
+                }
+            }
+
+            return null;
+        }
+
+        final int code;
 
+        COMPRESSION_TYPE(final int code) {
+            this.code = code;
+        }
+    }
     /**
      * The type of tape segment.
      */
@@ -47,12 +57,6 @@ public final class DumpArchiveConstants {
         END(5),
         CLRI(6);
 
-        final int code;
-
-        SEGMENT_TYPE(final int code) {
-            this.code = code;
-        }
-
         public static SEGMENT_TYPE find(final int code) {
             for (final SEGMENT_TYPE t : values()) {
                 if (t.code == code) {
@@ -62,30 +66,26 @@ public final class DumpArchiveConstants {
 
             return null;
         }
-    }
-
-    /**
-     * The type of compression.
-     */
-    public enum COMPRESSION_TYPE {
-        ZLIB(0),
-        BZLIB(1),
-        LZO(2);
 
         final int code;
 
-        COMPRESSION_TYPE(final int code) {
+        SEGMENT_TYPE(final int code) {
             this.code = code;
         }
+    }
+    public static final int TP_SIZE = 1024;
+    public static final int NTREC = 10;
+    public static final int HIGH_DENSITY_NTREC = 32;
+    public static final int OFS_MAGIC = 60011;
+    public static final int NFS_MAGIC = 60012;
+    public static final int FS_UFS2_MAGIC = 0x19540119;
+    public static final int CHECKSUM = 84446;
 
-        public static COMPRESSION_TYPE find(final int code) {
-            for (final COMPRESSION_TYPE t : values()) {
-                if (t.code == code) {
-                    return t;
-                }
-            }
+    public static final int LBLSIZE = 16;
 
-            return null;
-        }
+    public static final int NAMELEN = 64;
+
+    /* do not instantiate */
+    private DumpArchiveConstants() {
     }
 }
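
The find() factories kept with each enum resolve the numeric codes stored in the
archive header; a small illustrative sketch (codes taken from the constants above):

    import org.apache.commons.compress.archivers.dump.DumpArchiveConstants;

    public class DumpEnumLookupSketch {
        public static void main(final String[] args) {
            // find() scans values() and returns the matching constant,
            // or null for an unknown code, so callers must handle null.
            System.out.println(DumpArchiveConstants.COMPRESSION_TYPE.find(1));  // BZLIB
            System.out.println(DumpArchiveConstants.SEGMENT_TYPE.find(5));      // END
            System.out.println(DumpArchiveConstants.SEGMENT_TYPE.find(99));     // null
        }
    }
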
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
index abea9995..c0b1ef19 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java
@@ -180,12 +180,188 @@ import org.apache.commons.compress.archivers.ArchiveEntry;
  * @NotThreadSafe
  */
 public class DumpArchiveEntry implements ArchiveEntry {
+    public enum PERMISSION {
+        SETUID(04000),
+        SETGUI(02000),
+        STICKY(01000),
+        USER_READ(00400),
+        USER_WRITE(00200),
+        USER_EXEC(00100),
+        GROUP_READ(00040),
+        GROUP_WRITE(00020),
+        GROUP_EXEC(00010),
+        WORLD_READ(00004),
+        WORLD_WRITE(00002),
+        WORLD_EXEC(00001);
+
+        public static Set<PERMISSION> find(final int code) {
+            final Set<PERMISSION> set = new HashSet<>();
+
+            for (final PERMISSION p : PERMISSION.values()) {
+                if ((code & p.code) == p.code) {
+                    set.add(p);
+                }
+            }
+
+            if (set.isEmpty()) {
+                return Collections.emptySet();
+            }
+
+            return EnumSet.copyOf(set);
+        }
+
+        private final int code;
+
+        PERMISSION(final int code) {
+            this.code = code;
+        }
+    }
+    /**
+     * Archive entry as stored on tape. There is one TSH for (at most)
+     * every 512k in the file.
+     */
+    static class TapeSegmentHeader {
+        private DumpArchiveConstants.SEGMENT_TYPE type;
+        private int volume;
+        private int ino;
+        private int count;
+        private int holes;
+        private final byte[] cdata = new byte[512]; // map of any 'holes'
+
+        public int getCdata(final int idx) {
+            return cdata[idx];
+        }
+
+        public int getCount() {
+            return count;
+        }
+
+        public int getHoles() {
+            return holes;
+        }
+
+        public int getIno() {
+            return ino;
+        }
+
+        public DumpArchiveConstants.SEGMENT_TYPE getType() {
+            return type;
+        }
+
+        public int getVolume() {
+            return volume;
+        }
+
+        void setIno(final int ino) {
+            this.ino = ino;
+        }
+    }
+    public enum TYPE {
+        WHITEOUT(14),
+        SOCKET(12),
+        LINK(10),
+        FILE(8),
+        BLKDEV(6),
+        DIRECTORY(4),
+        CHRDEV(2),
+        FIFO(1),
+        UNKNOWN(15);
+
+        public static TYPE find(final int code) {
+            TYPE type = UNKNOWN;
+
+            for (final TYPE t : TYPE.values()) {
+                if (code == t.code) {
+                    type = t;
+                }
+            }
+
+            return type;
+        }
+
+        private final int code;
+
+        TYPE(final int code) {
+            this.code = code;
+        }
+    }
+    /**
+     * Populate the dump archive entry and tape segment header with
+     * the contents of the buffer.
+     *
+     * @param buffer buffer to read content from
+     */
+    static DumpArchiveEntry parse(final byte[] buffer) {
+        final DumpArchiveEntry entry = new DumpArchiveEntry();
+        final TapeSegmentHeader header = entry.header;
+
+        header.type = DumpArchiveConstants.SEGMENT_TYPE.find(DumpArchiveUtil.convert32(
+                    buffer, 0));
+
+        //header.dumpDate = new Date(1000L * DumpArchiveUtil.convert32(buffer, 4));
+        //header.previousDumpDate = new Date(1000L * DumpArchiveUtil.convert32(
+        //            buffer, 8));
+        header.volume = DumpArchiveUtil.convert32(buffer, 12);
+        //header.tapea = DumpArchiveUtil.convert32(buffer, 16);
+        entry.ino = header.ino = DumpArchiveUtil.convert32(buffer, 20);
+
+        //header.magic = DumpArchiveUtil.convert32(buffer, 24);
+        //header.checksum = DumpArchiveUtil.convert32(buffer, 28);
+        final int m = DumpArchiveUtil.convert16(buffer, 32);
+
+        // determine the type of the file.
+        entry.setType(TYPE.find((m >> 12) & 0x0F));
+
+        // determine the standard permissions
+        entry.setMode(m);
+
+        entry.nlink = DumpArchiveUtil.convert16(buffer, 34);
+        // inumber, oldids?
+        entry.setSize(DumpArchiveUtil.convert64(buffer, 40));
+
+        long t = (1000L * DumpArchiveUtil.convert32(buffer, 48)) +
+            (DumpArchiveUtil.convert32(buffer, 52) / 1000);
+        entry.setAccessTime(new Date(t));
+        t = (1000L * DumpArchiveUtil.convert32(buffer, 56)) +
+            (DumpArchiveUtil.convert32(buffer, 60) / 1000);
+        entry.setLastModifiedDate(new Date(t));
+        t = (1000L * DumpArchiveUtil.convert32(buffer, 64)) +
+            (DumpArchiveUtil.convert32(buffer, 68) / 1000);
+        entry.ctime = t;
+
+        // db: 72-119 - direct blocks
+        // id: 120-131 - indirect blocks
+        //entry.flags = DumpArchiveUtil.convert32(buffer, 132);
+        //entry.blocks = DumpArchiveUtil.convert32(buffer, 136);
+        entry.generation = DumpArchiveUtil.convert32(buffer, 140);
+        entry.setUserId(DumpArchiveUtil.convert32(buffer, 144));
+        entry.setGroupId(DumpArchiveUtil.convert32(buffer, 148));
+        // two 32-bit spare values.
+        header.count = DumpArchiveUtil.convert32(buffer, 160);
+
+        header.holes = 0;
+
+        for (int i = 0; (i < 512) && (i < header.count); i++) {
+            if (buffer[164 + i] == 0) {
+                header.holes++;
+            }
+        }
+
+        System.arraycopy(buffer, 164, header.cdata, 0, 512);
+
+        entry.volume = header.getVolume();
+
+        //entry.isSummaryOnly = false;
+        return entry;
+    }
     private String name;
     private TYPE type = TYPE.UNKNOWN;
     private int mode;
     private Set<PERMISSION> permissions = Collections.emptySet();
     private long size;
+
     private long atime;
+
     private long mtime;
     private int uid;
     private int gid;
@@ -194,19 +370,21 @@ public class DumpArchiveEntry implements ArchiveEntry {
      * Currently unused
      */
     private final DumpArchiveSummary summary = null;
-
     // this information is available from standard index.
     private final TapeSegmentHeader header = new TapeSegmentHeader();
     private String simpleName;
     private String originalName;
-
     // this information is available from QFA index
     private int volume;
     private long offset;
     private int ino;
+
     private int nlink;
+
     private long ctime;
+
     private int generation;
+
     private boolean isDeleted;
 
     /**
@@ -242,44 +420,36 @@ public class DumpArchiveEntry implements ArchiveEntry {
         this.offset = 0;
     }
 
-    /**
-     * Returns the path of the entry.
-     * @return the path of the entry.
-     */
-    public String getSimpleName() {
-        return simpleName;
-    }
+    @Override
+    public boolean equals(final Object o) {
+        if (o == this) {
+            return true;
+        }
+        if (o == null || !o.getClass().equals(getClass())) {
+            return false;
+        }
 
-    /**
-     * Sets the path of the entry.
-     * @param simpleName the simple name
-     */
-    protected void setSimpleName(final String simpleName) {
-        this.simpleName = simpleName;
-    }
+        final DumpArchiveEntry rhs = (DumpArchiveEntry) o;
 
-    /**
-     * Returns the ino of the entry.
-     * @return the ino
-     */
-    public int getIno() {
-        return header.getIno();
-    }
+        if (ino != rhs.ino) {
+            return false;
+        }
 
-    /**
-     * Return the number of hard links to the entry.
-     * @return the number of hard links
-     */
-    public int getNlink() {
-        return nlink;
+        // summary is always null right now, but this may change some day
+        if ((summary == null && rhs.summary != null) // NOSONAR
+                || (summary != null && !summary.equals(rhs.summary))) { // NOSONAR
+            return false;
+        }
+
+        return true;
     }
 
     /**
-     * Set the number of hard links.
-     * @param nlink the number of hard links
+     * Returns the time the file was last accessed.
+     * @return the access time
      */
-    public void setNlink(final int nlink) {
-        this.nlink = nlink;
+    public Date getAccessTime() {
+        return new Date(atime);
     }
 
     /**
@@ -291,11 +461,10 @@ public class DumpArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * Set the file creation time.
-     * @param ctime the creation time
+     * Returns the size of the entry as read from the archive.
      */
-    public void setCreationTime(final Date ctime) {
-        this.ctime = ctime.getTime();
+    long getEntrySize() {
+        return size;
     }
 
     /**
@@ -307,301 +476,174 @@ public class DumpArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * Set the generation of the file.
-     * @param generation the generation
+     * Return the group id
+     * @return the group id
      */
-    public void setGeneration(final int generation) {
-        this.generation = generation;
+    public int getGroupId() {
+        return gid;
     }
 
     /**
-     * Has this file been deleted? (On valid on incremental dumps.)
-     * @return whether the file has been deleted
+     * Return the number of records in this segment.
+     * @return the number of records
      */
-    public boolean isDeleted() {
-        return isDeleted;
+    public int getHeaderCount() {
+        return header.getCount();
     }
 
     /**
-     * Set whether this file has been deleted.
-     * @param isDeleted whether the file has been deleted
+     * Return the number of sparse records in this segment.
+     * @return the number of sparse records
      */
-    public void setDeleted(final boolean isDeleted) {
-        this.isDeleted = isDeleted;
+    public int getHeaderHoles() {
+        return header.getHoles();
     }
 
     /**
-     * Return the offset within the archive
-     * @return the offset
+     * Return the type of the tape segment header.
+     * @return the segment header
      */
-    public long getOffset() {
-        return offset;
+    public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() {
+        return header.getType();
     }
 
     /**
-     * Set the offset within the archive.
-     * @param offset the offset
+     * Returns the ino of the entry.
+     * @return the ino
      */
-    public void setOffset(final long offset) {
-        this.offset = offset;
+    public int getIno() {
+        return header.getIno();
     }
 
     /**
-     * Return the tape volume where this file is located.
-     * @return the volume
+     * The last modified date.
+     * @return the last modified date
      */
-    public int getVolume() {
-        return volume;
+    @Override
+    public Date getLastModifiedDate() {
+        return new Date(mtime);
     }
 
     /**
-     * Set the tape volume.
-     * @param volume the volume
+     * Return the access permissions on the entry.
+     * @return the access permissions
      */
-    public void setVolume(final int volume) {
-        this.volume = volume;
+    public int getMode() {
+        return mode;
     }
 
     /**
-     * Return the type of the tape segment header.
-     * @return the segment header
+     * Returns the name of the entry.
+     *
+     * <p>This method returns the raw name as it is stored inside of the archive.</p>
+     *
+     * @return the name of the entry.
      */
-    public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() {
-        return header.getType();
+    @Override
+    public String getName() {
+        return name;
     }
 
     /**
-     * Return the number of records in this segment.
-     * @return the number of records
+     * Return the number of hard links to the entry.
+     * @return the number of hard links
      */
-    public int getHeaderCount() {
-        return header.getCount();
+    public int getNlink() {
+        return nlink;
     }
 
     /**
-     * Return the number of sparse records in this segment.
-     * @return the number of sparse records
+     * Return the offset within the archive
+     * @return the offset
      */
-    public int getHeaderHoles() {
-        return header.getHoles();
+    public long getOffset() {
+        return offset;
     }
 
     /**
-     * Is this a sparse record?
-     * @param idx index of the record to check
-     * @return whether this is a sparse record
+     * Returns the unmodified name of the entry.
+     * @return the name of the entry.
      */
-    public boolean isSparseRecord(final int idx) {
-        return (header.getCdata(idx) & 0x01) == 0;
+    String getOriginalName() {
+        return originalName;
     }
 
-    @Override
-    public int hashCode() {
-        return ino;
+    /**
+     * Returns the permissions on the entry.
+     * @return the permissions
+     */
+    public Set<PERMISSION> getPermissions() {
+        return permissions;
     }
 
-    @Override
-    public boolean equals(final Object o) {
-        if (o == this) {
-            return true;
-        }
-        if (o == null || !o.getClass().equals(getClass())) {
-            return false;
-        }
-
-        final DumpArchiveEntry rhs = (DumpArchiveEntry) o;
-
-        if (ino != rhs.ino) {
-            return false;
-        }
-
-        // summary is always null right now, but this may change some day
-        if ((summary == null && rhs.summary != null) // NOSONAR
-                || (summary != null && !summary.equals(rhs.summary))) { // NOSONAR
-            return false;
-        }
-
-        return true;
+    /**
+     * Returns the path of the entry.
+     * @return the path of the entry.
+     */
+    public String getSimpleName() {
+        return simpleName;
     }
 
+    /**
+     * Returns the size of the entry.
+     * @return the size
+     */
     @Override
-    public String toString() {
-        return getName();
+    public long getSize() {
+        return isDirectory() ? SIZE_UNKNOWN : size;
     }
 
     /**
-     * Populate the dump archive entry and tape segment header with
-     * the contents of the buffer.
-     *
-     * @param buffer buffer to read content from
+     * Get the type of the entry.
+     * @return the type
      */
-    static DumpArchiveEntry parse(final byte[] buffer) {
-        final DumpArchiveEntry entry = new DumpArchiveEntry();
-        final TapeSegmentHeader header = entry.header;
-
-        header.type = DumpArchiveConstants.SEGMENT_TYPE.find(DumpArchiveUtil.convert32(
-                    buffer, 0));
-
-        //header.dumpDate = new Date(1000L * DumpArchiveUtil.convert32(buffer, 4));
-        //header.previousDumpDate = new Date(1000L * DumpArchiveUtil.convert32(
-        //            buffer, 8));
-        header.volume = DumpArchiveUtil.convert32(buffer, 12);
-        //header.tapea = DumpArchiveUtil.convert32(buffer, 16);
-        entry.ino = header.ino = DumpArchiveUtil.convert32(buffer, 20);
-
-        //header.magic = DumpArchiveUtil.convert32(buffer, 24);
-        //header.checksum = DumpArchiveUtil.convert32(buffer, 28);
-        final int m = DumpArchiveUtil.convert16(buffer, 32);
-
-        // determine the type of the file.
-        entry.setType(TYPE.find((m >> 12) & 0x0F));
-
-        // determine the standard permissions
-        entry.setMode(m);
-
-        entry.nlink = DumpArchiveUtil.convert16(buffer, 34);
-        // inumber, oldids?
-        entry.setSize(DumpArchiveUtil.convert64(buffer, 40));
-
-        long t = (1000L * DumpArchiveUtil.convert32(buffer, 48)) +
-            (DumpArchiveUtil.convert32(buffer, 52) / 1000);
-        entry.setAccessTime(new Date(t));
-        t = (1000L * DumpArchiveUtil.convert32(buffer, 56)) +
-            (DumpArchiveUtil.convert32(buffer, 60) / 1000);
-        entry.setLastModifiedDate(new Date(t));
-        t = (1000L * DumpArchiveUtil.convert32(buffer, 64)) +
-            (DumpArchiveUtil.convert32(buffer, 68) / 1000);
-        entry.ctime = t;
-
-        // db: 72-119 - direct blocks
-        // id: 120-131 - indirect blocks
-        //entry.flags = DumpArchiveUtil.convert32(buffer, 132);
-        //entry.blocks = DumpArchiveUtil.convert32(buffer, 136);
-        entry.generation = DumpArchiveUtil.convert32(buffer, 140);
-        entry.setUserId(DumpArchiveUtil.convert32(buffer, 144));
-        entry.setGroupId(DumpArchiveUtil.convert32(buffer, 148));
-        // two 32-bit spare values.
-        header.count = DumpArchiveUtil.convert32(buffer, 160);
-
-        header.holes = 0;
-
-        for (int i = 0; (i < 512) && (i < header.count); i++) {
-            if (buffer[164 + i] == 0) {
-                header.holes++;
-            }
-        }
-
-        System.arraycopy(buffer, 164, header.cdata, 0, 512);
-
-        entry.volume = header.getVolume();
-
-        //entry.isSummaryOnly = false;
-        return entry;
+    public TYPE getType() {
+        return type;
     }
 
     /**
-     * Update entry with information from next tape segment header.
+     * Return the user id.
+     * @return the user id
      */
-    void update(final byte[] buffer) {
-        header.volume = DumpArchiveUtil.convert32(buffer, 16);
-        header.count = DumpArchiveUtil.convert32(buffer, 160);
-
-        header.holes = 0;
-
-        for (int i = 0; (i < 512) && (i < header.count); i++) {
-            if (buffer[164 + i] == 0) {
-                header.holes++;
-            }
-        }
-
-        System.arraycopy(buffer, 164, header.cdata, 0, 512);
+    public int getUserId() {
+        return uid;
     }
 
     /**
-     * Archive entry as stored on tape. There is one TSH for (at most)
-     * every 512k in the file.
+     * Return the tape volume where this file is located.
+     * @return the volume
      */
-    static class TapeSegmentHeader {
-        private DumpArchiveConstants.SEGMENT_TYPE type;
-        private int volume;
-        private int ino;
-        private int count;
-        private int holes;
-        private final byte[] cdata = new byte[512]; // map of any 'holes'
-
-        public DumpArchiveConstants.SEGMENT_TYPE getType() {
-            return type;
-        }
-
-        public int getVolume() {
-            return volume;
-        }
-
-        public int getIno() {
-            return ino;
-        }
-
-        void setIno(final int ino) {
-            this.ino = ino;
-        }
-
-        public int getCount() {
-            return count;
-        }
-
-        public int getHoles() {
-            return holes;
-        }
-
-        public int getCdata(final int idx) {
-            return cdata[idx];
-        }
+    public int getVolume() {
+        return volume;
     }
 
-    /**
-     * Returns the name of the entry.
-     *
-     * <p>This method returns the raw name as it is stored inside of the archive.</p>
-     *
-     * @return the name of the entry.
-     */
     @Override
-    public String getName() {
-        return name;
+    public int hashCode() {
+        return ino;
     }
 
     /**
-     * Returns the unmodified name of the entry.
-     * @return the name of the entry.
+     * Is this a block device?
+     * @return whether this is a block device
      */
-    String getOriginalName() {
-        return originalName;
+    public boolean isBlkDev() {
+        return type == TYPE.BLKDEV;
     }
 
     /**
-     * Sets the name of the entry.
-     * @param name the name
+     * Is this a character device?
+     * @return whether this is a character device
      */
-    public final void setName(String name) {
-        this.originalName = name;
-        if (name != null) {
-            if (isDirectory() && !name.endsWith("/")) {
-                name += "/";
-            }
-            if (name.startsWith("./")) {
-                name = name.substring(2);
-            }
-        }
-        this.name = name;
+    public boolean isChrDev() {
+        return type == TYPE.CHRDEV;
     }
 
     /**
-     * The last modified date.
-     * @return the last modified date
+     * Has this file been deleted? (Only valid on incremental dumps.)
+     * @return whether the file has been deleted
      */
-    @Override
-    public Date getLastModifiedDate() {
-        return new Date(mtime);
+    public boolean isDeleted() {
+        return isDeleted;
     }
 
     /**
@@ -613,6 +655,14 @@ public class DumpArchiveEntry implements ArchiveEntry {
         return type == TYPE.DIRECTORY;
     }
 
+    /**
+     * Is this a fifo/pipe?
+     * @return whether this is a fifo
+     */
+    public boolean isFifo() {
+        return type == TYPE.FIFO;
+    }
+
     /**
      * Is this a regular file?
      * @return whether this is a regular file
@@ -630,51 +680,60 @@ public class DumpArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * Is this a character device?
-     * @return whether this is a character device
+     * Is this a sparse record?
+     * @param idx index of the record to check
+     * @return whether this is a sparse record
      */
-    public boolean isChrDev() {
-        return type == TYPE.CHRDEV;
+    public boolean isSparseRecord(final int idx) {
+        return (header.getCdata(idx) & 0x01) == 0;
     }
 
     /**
-     * Is this a block device?
-     * @return whether this is a block device
+     * Set the time the file was last accessed.
+     * @param atime the access time
      */
-    public boolean isBlkDev() {
-        return type == TYPE.BLKDEV;
+    public void setAccessTime(final Date atime) {
+        this.atime = atime.getTime();
     }
 
     /**
-     * Is this a fifo/pipe?
-     * @return whether this is a fifo
+     * Set the file creation time.
+     * @param ctime the creation time
      */
-    public boolean isFifo() {
-        return type == TYPE.FIFO;
+    public void setCreationTime(final Date ctime) {
+        this.ctime = ctime.getTime();
     }
 
     /**
-     * Get the type of the entry.
-     * @return the type
+     * Set whether this file has been deleted.
+     * @param isDeleted whether the file has been deleted
      */
-    public TYPE getType() {
-        return type;
+    public void setDeleted(final boolean isDeleted) {
+        this.isDeleted = isDeleted;
     }
 
     /**
-     * Set the type of the entry.
-     * @param type the type
+     * Set the generation of the file.
+     * @param generation the generation
      */
-    public void setType(final TYPE type) {
-        this.type = type;
+    public void setGeneration(final int generation) {
+        this.generation = generation;
     }
 
     /**
-     * Return the access permissions on the entry.
-     * @return the access permissions
+     * Set the group id.
+     * @param gid the group id
      */
-    public int getMode() {
-        return mode;
+    public void setGroupId(final int gid) {
+        this.gid = gid;
+    }
+
+    /**
+     * Set the time the file was last modified.
+     * @param mtime the last modified time
+     */
+    public void setLastModifiedDate(final Date mtime) {
+        this.mtime = mtime.getTime();
     }
 
     /**
@@ -687,67 +746,60 @@ public class DumpArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * Returns the permissions on the entry.
-     * @return the permissions
-     */
-    public Set<PERMISSION> getPermissions() {
-        return permissions;
-    }
-
-    /**
-     * Returns the size of the entry.
-     * @return the size
-     */
-    @Override
-    public long getSize() {
-        return isDirectory() ? SIZE_UNKNOWN : size;
-    }
-
-    /**
-     * Returns the size of the entry as read from the archive.
+     * Sets the name of the entry.
+     * @param name the name
      */
-    long getEntrySize() {
-        return size;
+    public final void setName(String name) {
+        this.originalName = name;
+        if (name != null) {
+            if (isDirectory() && !name.endsWith("/")) {
+                name += "/";
+            }
+            if (name.startsWith("./")) {
+                name = name.substring(2);
+            }
+        }
+        this.name = name;
     }
 
     /**
-     * Set the size of the entry.
-     * @param size the size
+     * Set the number of hard links.
+     * @param nlink the number of hard links
      */
-    public void setSize(final long size) {
-        this.size = size;
+    public void setNlink(final int nlink) {
+        this.nlink = nlink;
     }
 
     /**
-     * Set the time the file was last modified.
-     * @param mtime the last modified time
+     * Set the offset within the archive.
+     * @param offset the offset
      */
-    public void setLastModifiedDate(final Date mtime) {
-        this.mtime = mtime.getTime();
+    public void setOffset(final long offset) {
+        this.offset = offset;
     }
 
     /**
-     * Returns the time the file was last accessed.
-     * @return the access time
+     * Sets the path of the entry.
+     * @param simpleName the simple name
      */
-    public Date getAccessTime() {
-        return new Date(atime);
+    protected void setSimpleName(final String simpleName) {
+        this.simpleName = simpleName;
     }
 
     /**
-     * Set the time the file was last accessed.
-     * @param atime the access time
+     * Set the size of the entry.
+     * @param size the size
      */
-    public void setAccessTime(final Date atime) {
-        this.atime = atime.getTime();
+    public void setSize(final long size) {
+        this.size = size;
     }
 
     /**
-     * Return the user id.
-     * @return the user id
+     * Set the type of the entry.
+     * @param type the type
      */
-    public int getUserId() {
-        return uid;
+    public void setType(final TYPE type) {
+        this.type = type;
     }
 
     /**
@@ -759,85 +811,33 @@ public class DumpArchiveEntry implements ArchiveEntry {
     }
 
     /**
-     * Return the group id
-     * @return the group id
+     * Set the tape volume.
+     * @param volume the volume
      */
-    public int getGroupId() {
-        return gid;
+    public void setVolume(final int volume) {
+        this.volume = volume;
     }
 
-    /**
-     * Set the group id.
-     * @param gid the group id
-     */
-    public void setGroupId(final int gid) {
-        this.gid = gid;
+    @Override
+    public String toString() {
+        return getName();
     }
 
-    public enum TYPE {
-        WHITEOUT(14),
-        SOCKET(12),
-        LINK(10),
-        FILE(8),
-        BLKDEV(6),
-        DIRECTORY(4),
-        CHRDEV(2),
-        FIFO(1),
-        UNKNOWN(15);
-
-        private final int code;
-
-        TYPE(final int code) {
-            this.code = code;
-        }
+    /**
+     * Update entry with information from next tape segment header.
+     */
+    void update(final byte[] buffer) {
+        header.volume = DumpArchiveUtil.convert32(buffer, 16);
+        header.count = DumpArchiveUtil.convert32(buffer, 160);
 
-        public static TYPE find(final int code) {
-            TYPE type = UNKNOWN;
+        header.holes = 0;
 
-            for (final TYPE t : TYPE.values()) {
-                if (code == t.code) {
-                    type = t;
-                }
+        for (int i = 0; (i < 512) && (i < header.count); i++) {
+            if (buffer[164 + i] == 0) {
+                header.holes++;
             }
-
-            return type;
-        }
-    }
-
-    public enum PERMISSION {
-        SETUID(04000),
-        SETGUI(02000),
-        STICKY(01000),
-        USER_READ(00400),
-        USER_WRITE(00200),
-        USER_EXEC(00100),
-        GROUP_READ(00040),
-        GROUP_WRITE(00020),
-        GROUP_EXEC(00010),
-        WORLD_READ(00004),
-        WORLD_WRITE(00002),
-        WORLD_EXEC(00001);
-
-        private final int code;
-
-        PERMISSION(final int code) {
-            this.code = code;
         }
 
-        public static Set<PERMISSION> find(final int code) {
-            final Set<PERMISSION> set = new HashSet<>();
-
-            for (final PERMISSION p : PERMISSION.values()) {
-                if ((code & p.code) == p.code) {
-                    set.add(p);
-                }
-            }
-
-            if (set.isEmpty()) {
-                return Collections.emptySet();
-            }
-
-            return EnumSet.copyOf(set);
-        }
+        System.arraycopy(buffer, 164, header.cdata, 0, 512);
     }
 }
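
PERMISSION.find(), now at the top of the sorted class, expands a numeric mode into
the flags whose bits are set; an illustrative sketch (0644 is just an example mode):

    import java.util.Set;
    import org.apache.commons.compress.archivers.dump.DumpArchiveEntry;

    public class DumpPermissionSketch {
        public static void main(final String[] args) {
            // 0644 = owner read/write, group read, world read; find() keeps
            // every PERMISSION whose bit pattern is fully present in the mode.
            final Set<DumpArchiveEntry.PERMISSION> perms = DumpArchiveEntry.PERMISSION.find(0644);
            System.out.println(perms);
            // typically [USER_READ, USER_WRITE, GROUP_READ, WORLD_READ]
        }
    }
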
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveException.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveException.java
index 635b1d9f..0252a23f 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveException.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveException.java
@@ -34,12 +34,12 @@ public class DumpArchiveException extends IOException {
         super(msg);
     }
 
-    public DumpArchiveException(final Throwable cause) {
+    public DumpArchiveException(final String msg, final Throwable cause) {
+        super(msg);
         initCause(cause);
     }
 
-    public DumpArchiveException(final String msg, final Throwable cause) {
-        super(msg);
+    public DumpArchiveException(final Throwable cause) {
         initCause(cause);
     }
 }
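
Both reordered constructors preserve the cause via initCause(); a small sketch
(the message and cause below are placeholders):

    import java.io.IOException;
    import org.apache.commons.compress.archivers.dump.DumpArchiveException;

    public class DumpExceptionSketch {
        public static void main(final String[] args) {
            final IOException cause = new IOException("short read");
            final DumpArchiveException wrapped = new DumpArchiveException("bad segment", cause);
            System.out.println(wrapped.getMessage());        // bad segment
            System.out.println(wrapped.getCause() == cause); // true
        }
    }
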
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
index cc5a5b94..d3c69969 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java
@@ -48,6 +48,29 @@ import org.apache.commons.compress.utils.IOUtils;
  * @NotThreadSafe
  */
 public class DumpArchiveInputStream extends ArchiveInputStream {
+    /**
+     * Look at the first few bytes of the file to decide if it's a dump
+     * archive. With 32 bytes we can look at the magic value, with a full
+     * 1k we can verify the checksum.
+     * @param buffer data to match
+     * @param length length of data
+     * @return whether the buffer seems to contain dump data
+     */
+    public static boolean matches(final byte[] buffer, final int length) {
+        // do we have enough of the header?
+        if (length < 32) {
+            return false;
+        }
+
+        // this is the best test
+        if (length >= DumpArchiveConstants.TP_SIZE) {
+            return DumpArchiveUtil.verify(buffer);
+        }
+
+        // this will work in a pinch.
+        return DumpArchiveConstants.NFS_MAGIC == DumpArchiveUtil.convert32(buffer,
+            24);
+    }
     private final DumpArchiveSummary summary;
     private DumpArchiveEntry active;
     private boolean isClosed;
@@ -59,6 +82,7 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
     private byte[] blockBuffer;
     private int recordOffset;
     private long filepos;
+
     protected TapeInputStream raw;
 
     // map of ino -> dirent entry. We can use this to reconstruct full paths.
@@ -145,10 +169,15 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
                 });
     }
 
-    @Deprecated
+    /**
+     * Closes the stream for this entry.
+     */
     @Override
-    public int getCount() {
-        return (int) getBytesRead();
+    public void close() throws IOException {
+        if (!isClosed) {
+            isClosed = true;
+            raw.close();
+        }
     }
 
     @Override
@@ -156,60 +185,10 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
         return raw.getBytesRead();
     }
 
-    /**
-     * Return the archive summary information.
-     * @return the summary
-     */
-    public DumpArchiveSummary getSummary() {
-        return summary;
-    }
-
-    /**
-     * Read CLRI (deleted inode) segment.
-     */
-    private void readCLRI() throws IOException {
-        final byte[] buffer = raw.readRecord();
-
-        if (!DumpArchiveUtil.verify(buffer)) {
-            throw new InvalidFormatException();
-        }
-
-        active = DumpArchiveEntry.parse(buffer);
-
-        if (DumpArchiveConstants.SEGMENT_TYPE.CLRI != active.getHeaderType()) {
-            throw new InvalidFormatException();
-        }
-
-        // we don't do anything with this yet.
-        if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
-            == -1) {
-            throw new EOFException();
-        }
-        readIdx = active.getHeaderCount();
-    }
-
-    /**
-     * Read BITS segment.
-     */
-    private void readBITS() throws IOException {
-        final byte[] buffer = raw.readRecord();
-
-        if (!DumpArchiveUtil.verify(buffer)) {
-            throw new InvalidFormatException();
-        }
-
-        active = DumpArchiveEntry.parse(buffer);
-
-        if (DumpArchiveConstants.SEGMENT_TYPE.BITS != active.getHeaderType()) {
-            throw new InvalidFormatException();
-        }
-
-        // we don't do anything with this yet.
-        if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
-            == -1) {
-            throw new EOFException();
-        }
-        readIdx = active.getHeaderCount();
+    @Deprecated
+    @Override
+    public int getCount() {
+        return (int) getBytesRead();
     }
 
     /**
@@ -314,92 +293,6 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
         return entry;
     }
 
-    /**
-     * Read directory entry.
-     */
-    private void readDirectoryEntry(DumpArchiveEntry entry)
-        throws IOException {
-        long size = entry.getEntrySize();
-        boolean first = true;
-
-        while (first ||
-                DumpArchiveConstants.SEGMENT_TYPE.ADDR == entry.getHeaderType()) {
-            // read the header that we just peeked at.
-            if (!first) {
-                raw.readRecord();
-            }
-
-            if (!names.containsKey(entry.getIno()) &&
-                    DumpArchiveConstants.SEGMENT_TYPE.INODE == entry.getHeaderType()) {
-                pending.put(entry.getIno(), entry);
-            }
-
-            final int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount();
-
-            if (blockBuffer.length < datalen) {
-                blockBuffer = IOUtils.readRange(raw, datalen);
-                if (blockBuffer.length != datalen) {
-                    throw new EOFException();
-                }
-            } else if (raw.read(blockBuffer, 0, datalen) != datalen) {
-                throw new EOFException();
-            }
-
-            int reclen = 0;
-
-            for (int i = 0; i < datalen - 8 && i < size - 8;
-                    i += reclen) {
-                final int ino = DumpArchiveUtil.convert32(blockBuffer, i);
-                reclen = DumpArchiveUtil.convert16(blockBuffer, i + 4);
-
-                final byte type = blockBuffer[i + 6];
-
-                final String name = DumpArchiveUtil.decode(zipEncoding, blockBuffer, i + 8, blockBuffer[i + 7]);
-
-                if (".".equals(name) || "..".equals(name)) {
-                    // do nothing...
-                    continue;
-                }
-
-                final Dirent d = new Dirent(ino, entry.getIno(), type, name);
-
-                /*
-                if ((type == 4) && names.containsKey(ino)) {
-                    System.out.println("we already have ino: " +
-                                       names.get(ino));
-                }
-                */
-
-                names.put(ino, d);
-
-                // check whether this allows us to fill anything in the pending list.
-                pending.forEach((k, v) -> {
-                    final String path = getPath(v);
-
-                    if (path != null) {
-                        v.setName(path);
-                        v.setSimpleName(names.get(k).getName());
-                        queue.add(v);
-                    }
-                });
-
-                // remove anything that we found. (We can't do it earlier
-                // because of concurrent modification exceptions.)
-                queue.forEach(e -> pending.remove(e.getIno()));
-            }
-
-            final byte[] peekBytes = raw.peek();
-
-            if (!DumpArchiveUtil.verify(peekBytes)) {
-                throw new InvalidFormatException();
-            }
-
-            entry = DumpArchiveEntry.parse(peekBytes);
-            first = false;
-            size -= DumpArchiveConstants.TP_SIZE;
-        }
-    }
-
     /**
      * Get full path for specified archive entry, or null if there's a gap.
      *
@@ -444,6 +337,14 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
         return sb.toString();
     }
 
+    /**
+     * Return the archive summary information.
+     * @return the summary
+     */
+    public DumpArchiveSummary getSummary() {
+        return summary;
+    }
+
     /**
      * Reads bytes from the current dump archive entry.
      *
@@ -520,38 +421,137 @@ public class DumpArchiveInputStream extends ArchiveInputStream {
     }
 
     /**
-     * Closes the stream for this entry.
+     * Read BITS segment.
      */
-    @Override
-    public void close() throws IOException {
-        if (!isClosed) {
-            isClosed = true;
-            raw.close();
+    private void readBITS() throws IOException {
+        final byte[] buffer = raw.readRecord();
+
+        if (!DumpArchiveUtil.verify(buffer)) {
+            throw new InvalidFormatException();
+        }
+
+        active = DumpArchiveEntry.parse(buffer);
+
+        if (DumpArchiveConstants.SEGMENT_TYPE.BITS != active.getHeaderType()) {
+            throw new InvalidFormatException();
         }
+
+        // we don't do anything with this yet.
+        if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
+            == -1) {
+            throw new EOFException();
+        }
+        readIdx = active.getHeaderCount();
     }
 
     /**
-     * Look at the first few bytes of the file to decide if it's a dump
-     * archive. With 32 bytes we can look at the magic value, with a full
-     * 1k we can verify the checksum.
-     * @param buffer data to match
-     * @param length length of data
-     * @return whether the buffer seems to contain dump data
+     * Read CLRI (deleted inode) segment.
      */
-    public static boolean matches(final byte[] buffer, final int length) {
-        // do we have enough of the header?
-        if (length < 32) {
-            return false;
+    private void readCLRI() throws IOException {
+        final byte[] buffer = raw.readRecord();
+
+        if (!DumpArchiveUtil.verify(buffer)) {
+            throw new InvalidFormatException();
         }
 
-        // this is the best test
-        if (length >= DumpArchiveConstants.TP_SIZE) {
-            return DumpArchiveUtil.verify(buffer);
+        active = DumpArchiveEntry.parse(buffer);
+
+        if (DumpArchiveConstants.SEGMENT_TYPE.CLRI != active.getHeaderType()) {
+            throw new InvalidFormatException();
         }
 
-        // this will work in a pinch.
-        return DumpArchiveConstants.NFS_MAGIC == DumpArchiveUtil.convert32(buffer,
-            24);
+        // we don't do anything with this yet.
+        if (raw.skip((long) DumpArchiveConstants.TP_SIZE * active.getHeaderCount())
+            == -1) {
+            throw new EOFException();
+        }
+        readIdx = active.getHeaderCount();
+    }
+
+    /**
+     * Read directory entry.
+     */
+    private void readDirectoryEntry(DumpArchiveEntry entry)
+        throws IOException {
+        long size = entry.getEntrySize();
+        boolean first = true;
+
+        while (first ||
+                DumpArchiveConstants.SEGMENT_TYPE.ADDR == entry.getHeaderType()) {
+            // read the header that we just peeked at.
+            if (!first) {
+                raw.readRecord();
+            }
+
+            if (!names.containsKey(entry.getIno()) &&
+                    DumpArchiveConstants.SEGMENT_TYPE.INODE == entry.getHeaderType()) {
+                pending.put(entry.getIno(), entry);
+            }
+
+            final int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount();
+
+            if (blockBuffer.length < datalen) {
+                blockBuffer = IOUtils.readRange(raw, datalen);
+                if (blockBuffer.length != datalen) {
+                    throw new EOFException();
+                }
+            } else if (raw.read(blockBuffer, 0, datalen) != datalen) {
+                throw new EOFException();
+            }
+
+            int reclen = 0;
+
+            for (int i = 0; i < datalen - 8 && i < size - 8;
+                    i += reclen) {
+                final int ino = DumpArchiveUtil.convert32(blockBuffer, i);
+                reclen = DumpArchiveUtil.convert16(blockBuffer, i + 4);
+
+                final byte type = blockBuffer[i + 6];
+
+                final String name = DumpArchiveUtil.decode(zipEncoding, blockBuffer, i + 8, blockBuffer[i + 7]);
+
+                if (".".equals(name) || "..".equals(name)) {
+                    // do nothing...
+                    continue;
+                }
+
+                final Dirent d = new Dirent(ino, entry.getIno(), type, name);
+
+                /*
+                if ((type == 4) && names.containsKey(ino)) {
+                    System.out.println("we already have ino: " +
+                                       names.get(ino));
+                }
+                */
+
+                names.put(ino, d);
+
+                // check whether this allows us to fill anything in the pending list.
+                pending.forEach((k, v) -> {
+                    final String path = getPath(v);
+
+                    if (path != null) {
+                        v.setName(path);
+                        v.setSimpleName(names.get(k).getName());
+                        queue.add(v);
+                    }
+                });
+
+                // remove anything that we found. (We can't do it earlier
+                // because of concurrent modification exceptions.)
+                queue.forEach(e -> pending.remove(e.getIno()));
+            }
+
+            final byte[] peekBytes = raw.peek();
+
+            if (!DumpArchiveUtil.verify(peekBytes)) {
+                throw new InvalidFormatException();
+            }
+
+            entry = DumpArchiveEntry.parse(peekBytes);
+            first = false;
+            size -= DumpArchiveConstants.TP_SIZE;
+        }
     }
 
 }
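
A minimal sketch (not part of this commit) of driving the relocated matches() sniffer; the file name and the single read() call are illustrative assumptions:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream;

    // Probe the first record of a file for the dump magic value / checksum.
    byte[] probe = new byte[1024];              // a full 1k record allows the checksum test
    try (InputStream in = Files.newInputStream(Paths.get("backup.dump"))) {
        int n = in.read(probe);                 // a real caller would loop until the buffer is full
        if (n > 0 && DumpArchiveInputStream.matches(probe, n)) {
            // looks like a dump archive; safe to hand to DumpArchiveInputStream
        }
    }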
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
index b197564c..b9378f5d 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
@@ -62,6 +62,29 @@ public class DumpArchiveSummary {
         //extAttributes = DumpArchiveUtil.convert32(buffer, 900);
     }
 
+    @Override
+    public boolean equals(final Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        DumpArchiveSummary other = (DumpArchiveSummary) obj;
+        return Objects.equals(devname, other.devname) && dumpDate == other.dumpDate && Objects.equals(hostname, other.hostname);
+    }
+
+    /**
+     * Get the device name, e.g., /dev/sda3 or /dev/mapper/vg0-home.
+     * @return device name
+     */
+    public String getDevname() {
+        return devname;
+    }
+
     /**
      * Get the date of this dump.
      * @return the date of this dump.
@@ -71,43 +94,44 @@ public class DumpArchiveSummary {
     }
 
     /**
-     * Set dump date.
-     * @param dumpDate the dump date
+     * Get the last mountpoint, e.g., /home.
+     * @return last mountpoint
      */
-    public void setDumpDate(final Date dumpDate) {
-        this.dumpDate = dumpDate.getTime();
+    public String getFilesystem() {
+        return filesys;
     }
 
     /**
-     * Get the date of the previous dump at this level higher.
-     * @return dumpdate may be null
+     * Get the inode of the first record on this volume.
+     * @return inode of the first record on this volume.
      */
-    public Date getPreviousDumpDate() {
-        return new Date(previousDumpDate);
+    public int getFirstRecord() {
+        return firstrec;
     }
 
     /**
-     * Set previous dump date.
-     * @param previousDumpDate the previous dump dat
+     * Get the miscellaneous flags. See below.
+     * @return flags
      */
-    public void setPreviousDumpDate(final Date previousDumpDate) {
-        this.previousDumpDate = previousDumpDate.getTime();
+    public int getFlags() {
+        return flags;
     }
 
     /**
-     * Get volume (tape) number.
-     * @return volume (tape) number.
+     * Get the hostname of the system where the dump was performed.
+     * @return the host name
      */
-    public int getVolume() {
-        return volume;
+    public String getHostname() {
+        return hostname;
     }
 
     /**
-     * Set volume (tape) number.
-     * @param volume the volume number
+     * Get dump label. This may be autogenerated or it may be specified
+     * by the user.
+     * @return dump label
      */
-    public void setVolume(final int volume) {
-        this.volume = volume;
+    public String getLabel() {
+        return label;
     }
 
     /**
@@ -123,100 +147,101 @@ public class DumpArchiveSummary {
     }
 
     /**
-     * Set level.
-     * @param level the level
+     * Get the number of records per tape block. This is typically
+     * between 10 and 32.
+     * @return the number of records per tape block
      */
-    public void setLevel(final int level) {
-        this.level = level;
+    public int getNTRec() {
+        return ntrec;
     }
 
     /**
-     * Get dump label. This may be autogenerated or it may be specified
-     * bu the user.
-     * @return dump label
+     * Get the date of the previous dump at this level higher.
+     * @return the previous dump date, may be null
      */
-    public String getLabel() {
-        return label;
+    public Date getPreviousDumpDate() {
+        return new Date(previousDumpDate);
     }
 
     /**
-     * Set dump label.
-     * @param label the label
+     * Get volume (tape) number.
+     * @return volume (tape) number.
      */
-    public void setLabel(final String label) {
-        this.label = label;
+    public int getVolume() {
+        return volume;
     }
 
-    /**
-     * Get the last mountpoint, e.g., /home.
-     * @return last mountpoint
-     */
-    public String getFilesystem() {
-        return filesys;
+    @Override
+    public int hashCode() {
+        return Objects.hash(devname, dumpDate, hostname);
     }
 
     /**
-     * Set the last mountpoint.
-     * @param fileSystem the last mountpoint
+     * Is this volume compressed? N.B., individual blocks may or may not be compressed.
+     * The first block is never compressed.
+     * @return true if volume is compressed
      */
-    public void setFilesystem(final String fileSystem) {
-        this.filesys = fileSystem;
+    public boolean isCompressed() {
+        return (flags & 0x0080) == 0x0080;
     }
 
     /**
-     * Get the device name, e.g., /dev/sda3 or /dev/mapper/vg0-home.
-     * @return device name
+     * Does this volume contain extended attributes?
+     * @return true if volume contains extended attributes.
      */
-    public String getDevname() {
-        return devname;
+    public boolean isExtendedAttributes() {
+        return (flags & 0x8000) == 0x8000;
     }
 
     /**
-     * Set the device name.
-     * @param devname the device name
+     * Does this volume only contain metadata?
+     * @return true if volume only contains meta-data
      */
-    public void setDevname(final String devname) {
-        this.devname = devname;
+    public boolean isMetaDataOnly() {
+        return (flags & 0x0100) == 0x0100;
     }
 
     /**
-     * Get the hostname of the system where the dump was performed.
-     * @return hostname the host name
+     * Is this the new header format? (We do not currently support the
+     * old format.)
+     *
+     * @return true if using new header format
      */
-    public String getHostname() {
-        return hostname;
+    public boolean isNewHeader() {
+        return (flags & 0x0001) == 0x0001;
     }
 
     /**
-     * Set the hostname.
-     * @param hostname the host name
+     * Is this the new inode format? (We do not currently support the
+     * old format.)
+     * @return true if using new inode format
      */
-    public void setHostname(final String hostname) {
-        this.hostname = hostname;
+    public boolean isNewInode() {
+        return (flags & 0x0002) == 0x0002;
     }
 
     /**
-     * Get the miscellaneous flags. See below.
-     * @return flags
+     * Set the device name.
+     * @param devname the device name
      */
-    public int getFlags() {
-        return flags;
+    public void setDevname(final String devname) {
+        this.devname = devname;
     }
 
     /**
-     * Set the miscellaneous flags.
-     * @param flags flags
+     * Set dump date.
+     * @param dumpDate the dump date
      */
-    public void setFlags(final int flags) {
-        this.flags = flags;
+    public void setDumpDate(final Date dumpDate) {
+        this.dumpDate = dumpDate.getTime();
     }
 
     /**
-     * Get the inode of the first record on this volume.
-     * @return inode of the first record on this volume.
+     * Set the last mountpoint.
+     * @param fileSystem the last mountpoint
      */
-    public int getFirstRecord() {
-        return firstrec;
+    public void setFilesystem(final String fileSystem) {
+        this.filesys = fileSystem;
     }
 
     /**
@@ -228,83 +253,58 @@ public class DumpArchiveSummary {
     }
 
     /**
-     * Get the number of records per tape block. This is typically
-     * between 10 and 32.
-     * @return the number of records per tape block
+     * Set the miscellaneous flags.
+     * @param flags flags
      */
-    public int getNTRec() {
-        return ntrec;
+    public void setFlags(final int flags) {
+        this.flags = flags;
     }
 
     /**
-     * Set the number of records per tape block.
-     * @param ntrec the number of records per tape block
+     * Set the hostname.
+     * @param hostname the host name
      */
-    public void setNTRec(final int ntrec) {
-        this.ntrec = ntrec;
+    public void setHostname(final String hostname) {
+        this.hostname = hostname;
     }
 
     /**
-     * Is this the new header format? (We do not currently support the
-     * old format.)
-     *
-     * @return true if using new header format
+     * Set dump label.
+     * @param label the label
      */
-    public boolean isNewHeader() {
-        return (flags & 0x0001) == 0x0001;
+    public void setLabel(final String label) {
+        this.label = label;
     }
 
     /**
-     * Is this the new inode format? (We do not currently support the
-     * old format.)
-     * @return true if using new inode format
+     * Set level.
+     * @param level the level
      */
-    public boolean isNewInode() {
-        return (flags & 0x0002) == 0x0002;
+    public void setLevel(final int level) {
+        this.level = level;
     }
 
     /**
-     * Is this volume compressed? N.B., individual blocks may or may not be compressed.
-     * The first block is never compressed.
-     * @return true if volume is compressed
+     * Set the number of records per tape block.
+     * @param ntrec the number of records per tape block
      */
-    public boolean isCompressed() {
-        return (flags & 0x0080) == 0x0080;
+    public void setNTRec(final int ntrec) {
+        this.ntrec = ntrec;
     }
 
     /**
-     * Does this volume only contain metadata?
-     * @return true if volume only contains meta-data
+     * Set previous dump date.
+     * @param previousDumpDate the previous dump date
      */
-    public boolean isMetaDataOnly() {
-        return (flags & 0x0100) == 0x0100;
+    public void setPreviousDumpDate(final Date previousDumpDate) {
+        this.previousDumpDate = previousDumpDate.getTime();
     }
 
     /**
-     * Does this volume contain extended attributes.
-     * @return true if volume contains extended attributes.
+     * Set volume (tape) number.
+     * @param volume the volume number
      */
-    public boolean isExtendedAttributes() {
-        return (flags & 0x8000) == 0x8000;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(devname, dumpDate, hostname);
-    }
-
-    @Override
-    public boolean equals(final Object obj) {
-        if (this == obj) {
-            return true;
-        }
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        DumpArchiveSummary other = (DumpArchiveSummary) obj;
-        return Objects.equals(devname, other.devname) && dumpDate == other.dumpDate && Objects.equals(hostname, other.hostname);
+    public void setVolume(final int volume) {
+        this.volume = volume;
     }
 }
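
The isXxx() accessors sorted above all mask single bits of the same flags word; a short illustrative sketch of querying them (the archive name is an assumption):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream;
    import org.apache.commons.compress.archivers.dump.DumpArchiveSummary;

    try (DumpArchiveInputStream dump =
            new DumpArchiveInputStream(Files.newInputStream(Paths.get("backup.dump")))) {
        final DumpArchiveSummary summary = dump.getSummary();
        // each accessor tests one bit of getFlags(), e.g. 0x0080 = compressed, 0x0100 = metadata only
        System.out.println(summary.getHostname() + ":" + summary.getFilesystem()
            + " compressed=" + summary.isCompressed()
            + " metadataOnly=" + summary.isMetaDataOnly());
    }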
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java
index 516df050..80cd9358 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java
@@ -28,12 +28,6 @@ import org.apache.commons.compress.utils.ByteUtils;
  * Various utilities for dump archives.
  */
 class DumpArchiveUtil {
-    /**
-     * Private constructor to prevent instantiation.
-     */
-    private DumpArchiveUtil() {
-    }
-
     /**
      * Calculate checksum for buffer.
      *
@@ -52,33 +46,25 @@ class DumpArchiveUtil {
     }
 
     /**
-     * Verifies that the buffer contains a tape segment header.
+     * Read 2-byte integer from buffer.
      *
      * @param buffer
-     * @return Whether the buffer contains a tape segment header.
+     * @param offset
+     * @return the 2-byte entry as an int
      */
-    public static final boolean verify(final byte[] buffer) {
-        // verify magic. for now only accept NFS_MAGIC.
-        final int magic = convert32(buffer, 24);
-
-        if (magic != DumpArchiveConstants.NFS_MAGIC) {
-            return false;
-        }
-
-        //verify checksum...
-        final int checksum = convert32(buffer, 28);
-
-        return checksum == calculateChecksum(buffer);
+    public static final int convert16(final byte[] buffer, final int offset) {
+        return (int) ByteUtils.fromLittleEndian(buffer, offset, 2);
     }
 
     /**
-     * Gets the ino associated with this buffer.
+     * Read 4-byte integer from buffer.
      *
      * @param buffer
-     * @return the ino associated with this buffer.
+     * @param offset
+     * @return the 4-byte entry as an int
      */
-    public static final int getIno(final byte[] buffer) {
-        return convert32(buffer, 20);
+    public static final int convert32(final byte[] buffer, final int offset) {
+        return (int) ByteUtils.fromLittleEndian(buffer, offset, 4);
     }
 
     /**
@@ -93,32 +79,46 @@ class DumpArchiveUtil {
     }
 
     /**
-     * Read 4-byte integer from buffer.
+     * Decodes a byte array to a string.
+     */
+    static String decode(final ZipEncoding encoding, final byte[] b, final int offset, final int len)
+        throws IOException {
+        return encoding.decode(Arrays.copyOfRange(b, offset, offset + len));
+    }
+
+    /**
+     * Gets the ino associated with this buffer.
      *
      * @param buffer
-     * @param offset
-     * @return the 4-byte entry as an int
+     * @return the ino associated with this buffer.
      */
-    public static final int convert32(final byte[] buffer, final int offset) {
-        return (int) ByteUtils.fromLittleEndian(buffer, offset, 4);
+    public static final int getIno(final byte[] buffer) {
+        return convert32(buffer, 20);
     }
 
     /**
-     * Read 2-byte integer from buffer.
+     * Verifies that the buffer contains a tape segment header.
      *
      * @param buffer
-     * @param offset
-     * @return the 2-byte entry as an int
+     * @return Whether the buffer contains a tape segment header.
      */
-    public static final int convert16(final byte[] buffer, final int offset) {
-        return (int) ByteUtils.fromLittleEndian(buffer, offset, 2);
+    public static final boolean verify(final byte[] buffer) {
+        // verify magic. for now only accept NFS_MAGIC.
+        final int magic = convert32(buffer, 24);
+
+        if (magic != DumpArchiveConstants.NFS_MAGIC) {
+            return false;
+        }
+
+        //verify checksum...
+        final int checksum = convert32(buffer, 28);
+
+        return checksum == calculateChecksum(buffer);
     }
 
     /**
-     * Decodes a byte array to a string.
+     * Private constructor to prevent instantiation.
      */
-    static String decode(final ZipEncoding encoding, final byte[] b, final int offset, final int len)
-        throws IOException {
-        return encoding.decode(Arrays.copyOfRange(b, offset, offset + len));
+    private DumpArchiveUtil() {
     }
 }
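
convert16/convert32 above are thin wrappers over ByteUtils.fromLittleEndian; for reference, an equivalent JDK-only little-endian read (a sketch, no library assumed):

    // Reads `length` bytes (1..8) at `offset` as an unsigned little-endian value.
    static long fromLittleEndian(final byte[] buffer, final int offset, final int length) {
        long value = 0;
        for (int i = 0; i < length; i++) {
            value |= (buffer[offset + i] & 0xFFL) << (8 * i);
        }
        return value;
    }

    // e.g. the dump magic sits at offset 24 of a tape segment header:
    // final int magic = (int) fromLittleEndian(header, 24, 4);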
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
index 6e00db70..08d23f7f 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/TapeInputStream.java
@@ -35,10 +35,10 @@ import org.apache.commons.compress.utils.IOUtils;
  * @NotThreadSafe
  */
 class TapeInputStream extends FilterInputStream {
+    private static final int RECORD_SIZE = DumpArchiveConstants.TP_SIZE;
     private byte[] blockBuffer = new byte[DumpArchiveConstants.TP_SIZE];
     private int currBlkIdx = -1;
     private int blockSize = DumpArchiveConstants.TP_SIZE;
-    private static final int RECORD_SIZE = DumpArchiveConstants.TP_SIZE;
     private int readOffset = DumpArchiveConstants.TP_SIZE;
     private boolean isCompressed;
     private long bytesRead;
@@ -53,53 +53,61 @@ class TapeInputStream extends FilterInputStream {
     }
 
     /**
-     * Set the DumpArchive Buffer's block size. We need to sync the block size with the
-     * dump archive's actual block size since compression is handled at the
-     * block level.
-     *
-     * @param recsPerBlock
-     *            records per block
-     * @param isCompressed
-     *            true if the archive is compressed
-     * @throws IOException
-     *             more than one block has been read
-     * @throws IOException
-     *             there was an error reading additional blocks.
-     * @throws IOException
-     *             recsPerBlock is smaller than 1
+     * @see java.io.InputStream#available
      */
-    public void resetBlockSize(final int recsPerBlock, final boolean isCompressed)
-        throws IOException {
-        this.isCompressed = isCompressed;
-
-        if (recsPerBlock < 1) {
-            throw new IOException("Block with " + recsPerBlock
-                + " records found, must be at least 1");
+    @Override
+    public int available() throws IOException {
+        if (readOffset < blockSize) {
+            return blockSize - readOffset;
         }
-        blockSize = RECORD_SIZE * recsPerBlock;
 
-        // save first block in case we need it again
-        final byte[] oldBuffer = blockBuffer;
+        return in.available();
+    }
 
-        // read rest of new block
-        blockBuffer = new byte[blockSize];
-        System.arraycopy(oldBuffer, 0, blockBuffer, 0, RECORD_SIZE);
-        readFully(blockBuffer, RECORD_SIZE, blockSize - RECORD_SIZE);
+    /**
+     * Close the input stream.
+     *
+     * @throws IOException on error
+     */
+    @Override
+    public void close() throws IOException {
+        if (in != null && in != System.in) {
+            in.close();
+        }
+    }
 
-        this.currBlkIdx = 0;
-        this.readOffset = RECORD_SIZE;
+    /**
+     * Gets number of bytes read.
+     *
+     * @return number of bytes read.
+     */
+    public long getBytesRead() {
+        return bytesRead;
     }
 
     /**
-     * @see java.io.InputStream#available
+     * Peek at the next record from the input stream and return the data.
+     *
+     * @return The record data.
+     * @throws IOException on error
      */
-    @Override
-    public int available() throws IOException {
-        if (readOffset < blockSize) {
-            return blockSize - readOffset;
+    public byte[] peek() throws IOException {
+        // we need to read from the underlying stream. This
+        // isn't a problem since it would be the first step in
+        // any subsequent read() anyway.
+        if (readOffset == blockSize) {
+            try {
+                readBlock(true);
+            } catch (final ShortFileException sfe) { // NOSONAR
+                return null;
+            }
         }
 
-        return in.available();
+        // copy data, increment counters.
+        final byte[] b = new byte[RECORD_SIZE];
+        System.arraycopy(blockBuffer, readOffset, b, 0, b.length);
+
+        return b;
     }
 
     /**
@@ -165,110 +173,6 @@ class TapeInputStream extends FilterInputStream {
         return bytes;
     }
 
-    /**
-     * Skip bytes. Same as read but without the arraycopy.
-     *
-     * <p>skips the full given length unless EOF is reached.</p>
-     *
-     * @param len length to read, must be a multiple of the stream's
-     * record size
-     */
-    @Override
-    public long skip(final long len) throws IOException {
-        if ((len % RECORD_SIZE) != 0) {
-            throw new IllegalArgumentException(
-                "All reads must be multiple of record size (" + RECORD_SIZE +
-                " bytes.");
-        }
-
-        long bytes = 0;
-
-        while (bytes < len) {
-            // we need to read from the underlying stream.
-            // this will reset readOffset value. We do not perform
-            // any decompression if we won't eventually read the data.
-            // return -1 if there's a problem.
-            if (readOffset == blockSize) {
-                try {
-                    readBlock((len - bytes) < blockSize);
-                } catch (final ShortFileException sfe) { // NOSONAR
-                    return -1;
-                }
-            }
-
-            long n = 0;
-
-            if ((readOffset + (len - bytes)) <= blockSize) {
-                // we can read entirely from the buffer.
-                n = len - bytes;
-            } else {
-                // copy what we can from the buffer.
-                n = (long) blockSize - readOffset;
-            }
-
-            // do not copy data but still increment counters.
-            readOffset = ExactMath.add(readOffset, n);
-            bytes += n;
-        }
-
-        return bytes;
-    }
-
-    /**
-     * Close the input stream.
-     *
-     * @throws IOException on error
-     */
-    @Override
-    public void close() throws IOException {
-        if (in != null && in != System.in) {
-            in.close();
-        }
-    }
-
-    /**
-     * Peek at the next record from the input stream and return the data.
-     *
-     * @return The record data.
-     * @throws IOException on error
-     */
-    public byte[] peek() throws IOException {
-        // we need to read from the underlying stream. This
-        // isn't a problem since it would be the first step in
-        // any subsequent read() anyway.
-        if (readOffset == blockSize) {
-            try {
-                readBlock(true);
-            } catch (final ShortFileException sfe) { // NOSONAR
-                return null;
-            }
-        }
-
-        // copy data, increment counters.
-        final byte[] b = new byte[RECORD_SIZE];
-        System.arraycopy(blockBuffer, readOffset, b, 0, b.length);
-
-        return b;
-    }
-
-    /**
-     * Read a record from the input stream and return the data.
-     *
-     * @return The record data.
-     * @throws IOException on error
-     */
-    public byte[] readRecord() throws IOException {
-        final byte[] result = new byte[RECORD_SIZE];
-
-        // the read implementation will loop internally as long as
-        // input is available
-        if (-1 == read(result, 0, result.length)) {
-            throw new ShortFileException();
-        }
-
-        return result;
-    }
-
     /**
      * Read next block. All decompression is handled here.
      *
@@ -365,11 +269,107 @@ class TapeInputStream extends FilterInputStream {
     }
 
     /**
-     * Gets number of bytes read.
+     * Read a record from the input stream and return the data.
      *
-     * @return number of bytes read.
+     * @return The record data.
+     * @throws IOException on error
      */
-    public long getBytesRead() {
-        return bytesRead;
+    public byte[] readRecord() throws IOException {
+        final byte[] result = new byte[RECORD_SIZE];
+
+        // the read implementation will loop internally as long as
+        // input is available
+        if (-1 == read(result, 0, result.length)) {
+            throw new ShortFileException();
+        }
+
+        return result;
+    }
+
+    /**
+     * Set the DumpArchive Buffer's block size. We need to sync the block size with the
+     * dump archive's actual block size since compression is handled at the
+     * block level.
+     *
+     * @param recsPerBlock
+     *            records per block
+     * @param isCompressed
+     *            true if the archive is compressed
+     * @throws IOException
+     *             more than one block has been read
+     * @throws IOException
+     *             there was an error reading additional blocks.
+     * @throws IOException
+     *             recsPerBlock is smaller than 1
+     */
+    public void resetBlockSize(final int recsPerBlock, final boolean isCompressed)
+        throws IOException {
+        this.isCompressed = isCompressed;
+
+        if (recsPerBlock < 1) {
+            throw new IOException("Block with " + recsPerBlock
+                + " records found, must be at least 1");
+        }
+        blockSize = RECORD_SIZE * recsPerBlock;
+
+        // save first block in case we need it again
+        final byte[] oldBuffer = blockBuffer;
+
+        // read rest of new block
+        blockBuffer = new byte[blockSize];
+        System.arraycopy(oldBuffer, 0, blockBuffer, 0, RECORD_SIZE);
+        readFully(blockBuffer, RECORD_SIZE, blockSize - RECORD_SIZE);
+
+        this.currBlkIdx = 0;
+        this.readOffset = RECORD_SIZE;
+    }
+
+    /**
+     * Skip bytes. Same as read but without the arraycopy.
+     *
+     * <p>skips the full given length unless EOF is reached.</p>
+     *
+     * @param len length to read, must be a multiple of the stream's
+     * record size
+     */
+    @Override
+    public long skip(final long len) throws IOException {
+        if ((len % RECORD_SIZE) != 0) {
+            throw new IllegalArgumentException(
+                "All reads must be multiple of record size (" + RECORD_SIZE +
+                " bytes.");
+        }
+
+        long bytes = 0;
+
+        while (bytes < len) {
+            // we need to read from the underlying stream.
+            // this will reset readOffset value. We do not perform
+            // any decompression if we won't eventually read the data.
+            // return -1 if there's a problem.
+            if (readOffset == blockSize) {
+                try {
+                    readBlock((len - bytes) < blockSize);
+                } catch (final ShortFileException sfe) { // NOSONAR
+                    return -1;
+                }
+            }
+
+            long n = 0;
+
+            if ((readOffset + (len - bytes)) <= blockSize) {
+                // we can read entirely from the buffer.
+                n = len - bytes;
+            } else {
+                // copy what we can from the buffer.
+                n = (long) blockSize - readOffset;
+            }
+
+            // do not copy data but still increment counters.
+            readOffset = ExactMath.add(readOffset, n);
+            bytes += n;
+        }
+
+        return bytes;
     }
 }
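
skip() above only accepts whole records; a tiny hypothetical helper showing how a caller might clamp a byte count to record granularity first (the helper name is an assumption):

    private static final int RECORD_SIZE = 1024;   // DumpArchiveConstants.TP_SIZE

    // Round len down to a whole number of records so TapeInputStream.skip() accepts it.
    static long wholeRecords(final long len) {
        return len - (len % RECORD_SIZE);
    }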
diff --git a/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java b/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java
index cdbe7b71..cd6f137a 100644
--- a/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java
+++ b/src/main/java/org/apache/commons/compress/archivers/examples/Archiver.java
@@ -114,16 +114,11 @@ public class Archiver {
      *
      * @param target the stream to write the new archive to.
      * @param directory the directory that contains the files to archive.
-     * @param fileVisitOptions linkOptions to configure the traversal of the source {@code directory}.
-     * @param linkOptions indicating how symbolic links are handled.
      * @throws IOException if an I/O error occurs or the archive cannot be created for other reasons.
      * @since 1.21
      */
-    public void create(final ArchiveOutputStream target, final Path directory,
-        final EnumSet<FileVisitOption> fileVisitOptions, final LinkOption... linkOptions) throws IOException {
-        Files.walkFileTree(directory, fileVisitOptions, Integer.MAX_VALUE,
-            new ArchiverFileVisitor(target, directory, linkOptions));
-        target.finish();
+    public void create(final ArchiveOutputStream target, final Path directory) throws IOException {
+        create(target, directory, EMPTY_FileVisitOption);
     }
 
     /**
@@ -131,11 +126,16 @@ public class Archiver {
      *
      * @param target the stream to write the new archive to.
      * @param directory the directory that contains the files to archive.
+     * @param fileVisitOptions options to configure the traversal of the source {@code directory}.
+     * @param linkOptions options indicating how symbolic links are handled.
      * @throws IOException if an I/O error occurs or the archive cannot be created for other reasons.
      * @since 1.21
      */
-    public void create(final ArchiveOutputStream target, final Path directory) throws IOException {
-        create(target, directory, EMPTY_FileVisitOption);
+    public void create(final ArchiveOutputStream target, final Path directory,
+        final EnumSet<FileVisitOption> fileVisitOptions, final LinkOption... linkOptions) throws IOException {
+        Files.walkFileTree(directory, fileVisitOptions, Integer.MAX_VALUE,
+            new ArchiverFileVisitor(target, directory, linkOptions));
+        target.finish();
     }
 
     /**
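
A brief usage sketch for the reordered create() overloads; the archive format and paths here are illustrative:

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import org.apache.commons.compress.archivers.ArchiveOutputStream;
    import org.apache.commons.compress.archivers.ArchiveStreamFactory;
    import org.apache.commons.compress.archivers.examples.Archiver;

    try (ArchiveOutputStream out = new ArchiveStreamFactory().createArchiveOutputStream(
            ArchiveStreamFactory.TAR, Files.newOutputStream(Paths.get("site.tar")))) {
        // walks the directory tree and calls finish() on the archive for us
        new Archiver().create(out, Paths.get("target/site"));
    }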
diff --git a/src/main/java/org/apache/commons/compress/archivers/examples/CloseableConsumerAdapter.java b/src/main/java/org/apache/commons/compress/archivers/examples/CloseableConsumerAdapter.java
index 832410e2..7afea60e 100644
--- a/src/main/java/org/apache/commons/compress/archivers/examples/CloseableConsumerAdapter.java
+++ b/src/main/java/org/apache/commons/compress/archivers/examples/CloseableConsumerAdapter.java
@@ -30,15 +30,15 @@ final class CloseableConsumerAdapter implements Closeable {
         this.consumer = Objects.requireNonNull(consumer, "consumer");
     }
 
-    <C extends Closeable> C track(final C closeable) {
-        this.closeable = closeable;
-        return closeable;
-    }
-
     @Override
     public void close() throws IOException {
         if (closeable != null) {
             consumer.accept(closeable);
         }
     }
+
+    <C extends Closeable> C track(final C closeable) {
+        this.closeable = closeable;
+        return closeable;
+    }
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java
index f0c05f04..d89f68d5 100644
--- a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java
@@ -36,8 +36,9 @@ public class JarArchiveEntry extends ZipArchiveEntry {
     private final Attributes manifestAttributes = null;
     private final Certificate[] certificates = null;
 
-    public JarArchiveEntry(final ZipEntry entry) throws ZipException {
+    public JarArchiveEntry(final JarEntry entry) throws ZipException {
         super(entry);
+
     }
 
     public JarArchiveEntry(final String name) {
@@ -48,21 +49,8 @@ public class JarArchiveEntry extends ZipArchiveEntry {
         super(entry);
     }
 
-    public JarArchiveEntry(final JarEntry entry) throws ZipException {
+    public JarArchiveEntry(final ZipEntry entry) throws ZipException {
         super(entry);
-
-    }
-
-    /**
-     * This method is not implemented and won't ever be.
-     * The JVM equivalent has a different name {@link java.util.jar.JarEntry#getAttributes()}
-     *
-     * @deprecated since 1.5, do not use; always returns null
-     * @return Always returns null.
-     */
-    @Deprecated
-    public Attributes getManifestAttributes() {
-        return manifestAttributes;
     }
 
     /**
@@ -87,4 +75,16 @@ public class JarArchiveEntry extends ZipArchiveEntry {
         return null;
     }
 
+    /**
+     * This method is not implemented and won't ever be.
+     * The JVM equivalent has a different name {@link java.util.jar.JarEntry#getAttributes()}
+     *
+     * @deprecated since 1.5, do not use; always returns null
+     * @return Always returns null.
+     */
+    @Deprecated
+    public Attributes getManifestAttributes() {
+        return manifestAttributes;
+    }
+
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java
index 47b1583c..494f03c6 100644
--- a/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java
@@ -32,6 +32,20 @@ import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream;
  */
 public class JarArchiveInputStream extends ZipArchiveInputStream {
 
+    /**
+     * Checks if the signature matches what is expected for a jar file
+     * (in this case it is the same as for a zip file).
+     *
+     * @param signature
+     *            the bytes to check
+     * @param length
+     *            the number of bytes to check
+     * @return true, if this stream is a jar archive stream, false otherwise
+     */
+    public static boolean matches(final byte[] signature, final int length ) {
+        return ZipArchiveInputStream.matches(signature, length);
+    }
+
     /**
      * Creates an instance from the input stream using the default encoding.
      *
@@ -52,27 +66,13 @@ public class JarArchiveInputStream extends ZipArchiveInputStream {
         super(inputStream, encoding);
     }
 
-    public JarArchiveEntry getNextJarEntry() throws IOException {
-        final ZipArchiveEntry entry = getNextZipEntry();
-        return entry == null ? null : new JarArchiveEntry(entry);
-    }
-
     @Override
     public ArchiveEntry getNextEntry() throws IOException {
         return getNextJarEntry();
     }
 
-    /**
-     * Checks if the signature matches what is expected for a jar file
-     * (in this case it is the same as for a zip file).
-     *
-     * @param signature
-     *            the bytes to check
-     * @param length
-     *            the number of bytes to check
-     * @return true, if this stream is a jar archive stream, false otherwise
-     */
-    public static boolean matches(final byte[] signature, final int length ) {
-        return ZipArchiveInputStream.matches(signature, length);
+    public JarArchiveEntry getNextJarEntry() throws IOException {
+        final ZipArchiveEntry entry = getNextZipEntry();
+        return entry == null ? null : new JarArchiveEntry(entry);
     }
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
index 4353bf52..0a77fa6e 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
@@ -39,6 +39,52 @@ import org.apache.commons.compress.PasswordRequiredException;
 
 class AES256SHA256Decoder extends AbstractCoder {
     
+    static byte[] sha256Password(final byte[] password, final int numCyclesPower, final byte[] salt) {
+        final MessageDigest digest;
+        try {
+            digest = MessageDigest.getInstance("SHA-256");
+        } catch (final NoSuchAlgorithmException noSuchAlgorithmException) {
+            throw new IllegalStateException("SHA-256 is unsupported by your Java implementation", noSuchAlgorithmException);
+        }
+        final byte[] extra = new byte[8];
+        for (long j = 0; j < (1L << numCyclesPower); j++) {
+            digest.update(salt);
+            digest.update(password);
+            digest.update(extra);
+            for (int k = 0; k < extra.length; k++) {
+                ++extra[k];
+                if (extra[k] != 0) {
+                    break;
+                }
+            }
+        }
+        return digest.digest();
+    }
+
+    static byte[] sha256Password(final char[] password, final int numCyclesPower, final byte[] salt) {
+        return sha256Password(utf16Decode(password), numCyclesPower, salt);
+    }
+
+    /**
+     * Convenience method that encodes Unicode characters into bytes in the UTF-16 (little-endian byte order) charset.
+     *
+     * @param chars characters to encode
+     * @return encoded characters
+     * @since 1.23
+     */
+    static byte[] utf16Decode(final char[] chars) {
+        if (chars == null) {
+            return null;
+        }
+        final ByteBuffer encoded = UTF_16LE.encode(CharBuffer.wrap(chars));
+        if (encoded.hasArray()) {
+            return encoded.array();
+        }
+        final byte[] e = new byte[encoded.remaining()];
+        encoded.get(e);
+        return e;
+    }
+
     AES256SHA256Decoder() {
         super(AES256Options.class);
     }
@@ -50,6 +96,13 @@ class AES256SHA256Decoder extends AbstractCoder {
             private boolean isInitialized;
             private CipherInputStream cipherInputStream;
 
+            @Override
+            public void close() throws IOException {
+                if (cipherInputStream != null) {
+                    cipherInputStream.close();
+                }
+            }
+
             private CipherInputStream init() throws IOException {
                 if (isInitialized) {
                     return cipherInputStream;
@@ -109,13 +162,6 @@ class AES256SHA256Decoder extends AbstractCoder {
             public int read(final byte[] b, final int off, final int len) throws IOException {
                 return init().read(b, off, len);
             }
-
-            @Override
-            public void close() throws IOException {
-                if (cipherInputStream != null) {
-                    cipherInputStream.close();
-                }
-            }
         };
     }
 
@@ -134,11 +180,22 @@ class AES256SHA256Decoder extends AbstractCoder {
             private int count = 0;
 
             @Override
-            public void write(int b) throws IOException {
-                cipherBlockBuffer[count++] = (byte) b;
-                if (count == cipherBlockSize) {
-                    flushBuffer();
+            public void close() throws IOException {
+                if (count > 0) {
+                    cipherOutputStream.write(cipherBlockBuffer);
                 }
+                cipherOutputStream.close();
+            }
+
+            @Override
+            public void flush() throws IOException {
+                cipherOutputStream.flush();
+            }
+
+            private void flushBuffer() throws IOException {
+                cipherOutputStream.write(cipherBlockBuffer);
+                count = 0;
+                Arrays.fill(cipherBlockBuffer, (byte) 0);
             }
 
             @Override
@@ -161,23 +218,12 @@ class AES256SHA256Decoder extends AbstractCoder {
                 }
             }
 
-            private void flushBuffer() throws IOException {
-                cipherOutputStream.write(cipherBlockBuffer);
-                count = 0;
-                Arrays.fill(cipherBlockBuffer, (byte) 0);
-            }
-
-            @Override
-            public void flush() throws IOException {
-                cipherOutputStream.flush();
-            }
-
             @Override
-            public void close() throws IOException {
-                if (count > 0) {
-                    cipherOutputStream.write(cipherBlockBuffer);
+            public void write(int b) throws IOException {
+                cipherBlockBuffer[count++] = (byte) b;
+                if (count == cipherBlockSize) {
+                    flushBuffer();
                 }
-                cipherOutputStream.close();
             }
         };
     }
@@ -201,50 +247,4 @@ class AES256SHA256Decoder extends AbstractCoder {
 
         return props;
     }
-
-    static byte[] sha256Password(final char[] password, final int numCyclesPower, final byte[] salt) {
-        return sha256Password(utf16Decode(password), numCyclesPower, salt);
-    }
-
-    static byte[] sha256Password(final byte[] password, final int numCyclesPower, final byte[] salt) {
-        final MessageDigest digest;
-        try {
-            digest = MessageDigest.getInstance("SHA-256");
-        } catch (final NoSuchAlgorithmException noSuchAlgorithmException) {
-            throw new IllegalStateException("SHA-256 is unsupported by your Java implementation", noSuchAlgorithmException);
-        }
-        final byte[] extra = new byte[8];
-        for (long j = 0; j < (1L << numCyclesPower); j++) {
-            digest.update(salt);
-            digest.update(password);
-            digest.update(extra);
-            for (int k = 0; k < extra.length; k++) {
-                ++extra[k];
-                if (extra[k] != 0) {
-                    break;
-                }
-            }
-        }
-        return digest.digest();
-    }
-
-    /**
-     * Convenience method that encodes Unicode characters into bytes in UTF-16 (ittle-endian byte order) charset
-     *
-     * @param chars characters to encode
-     * @return encoded characters
-     * @since 1.23
-     */
-    static byte[] utf16Decode(final char[] chars) {
-        if (chars == null) {
-            return null;
-        }
-        final ByteBuffer encoded = UTF_16LE.encode(CharBuffer.wrap(chars));
-        if (encoded.hasArray()) {
-            return encoded.array();
-        }
-        final byte[] e = new byte[encoded.remaining()];
-        encoded.get(e);
-        return e;
-    }
 }
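
For context on the relocated key-stretching code: 7z hashes the UTF-16LE password bytes together with the salt 2^numCyclesPower times (numCyclesPower = 19 means 524288 digest rounds). A tiny JDK-only illustration of the password-encoding quirk (the literal password is an assumption):

    import java.nio.charset.StandardCharsets;

    // 7z feeds the password to SHA-256 as UTF-16LE bytes, low byte first:
    final byte[] pw = "secret".getBytes(StandardCharsets.UTF_16LE);
    // pw.length == 12: each of the six chars contributes two bytes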
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Archive.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Archive.java
index 429ca86c..fec68fe7 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Archive.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Archive.java
@@ -20,6 +20,12 @@ package org.apache.commons.compress.archivers.sevenz;
 import java.util.BitSet;
 
 class Archive {
+    private static String lengthOf(final long[] a) {
+        return a == null ? "(null)" : String.valueOf(a.length);
+    }
+    private static String lengthOf(final Object[] a) {
+        return a == null ? "(null)" : String.valueOf(a.length);
+    }
     /// Offset from beginning of file + SIGNATURE_HEADER_SIZE to packed streams.
     long packPos;
     /// Size of each packed stream.
@@ -32,8 +38,10 @@ class Archive {
     Folder[] folders = Folder.EMPTY_FOLDER_ARRAY;
     /// Temporary properties for non-empty files (subsumed into the files array later).
     SubStreamsInfo subStreamsInfo;
+
     /// The files and directories in the archive.
     SevenZArchiveEntry[] files = SevenZArchiveEntry.EMPTY_SEVEN_Z_ARCHIVE_ENTRY_ARRAY;
+
     /// Mapping between folders, files and streams.
     StreamMap streamMap;
 
@@ -44,12 +52,4 @@ class Archive {
             + " CRCs, " + lengthOf(folders) + " folders, " + lengthOf(files)
             + " files and " + streamMap;
     }
-
-    private static String lengthOf(final long[] a) {
-        return a == null ? "(null)" : String.valueOf(a.length);
-    }
-
-    private static String lengthOf(final Object[] a) {
-        return a == null ? "(null)" : String.valueOf(a.length);
-    }
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java
index ca8b754f..309a3e3a 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/BoundedSeekableByteChannelInputStream.java
@@ -39,6 +39,11 @@ class BoundedSeekableByteChannelInputStream extends InputStream {
         }
     }
 
+    @Override
+    public void close() {
+        // the nested channel is controlled externally
+    }
+
     @Override
     public int read() throws IOException {
         if (bytesRemaining > 0) {
@@ -98,9 +103,4 @@ class BoundedSeekableByteChannelInputStream extends InputStream {
         buffer.flip();
         return read;
     }
-
-    @Override
-    public void close() {
-        // the nested channel is controlled externally
-    }
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java
index dfa1c581..83cdc5e0 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/CLI.java
@@ -25,6 +25,22 @@ public class CLI {
 
     private enum Mode {
         LIST("Analysing") {
+            private String getContentMethods(final SevenZArchiveEntry entry) {
+                final StringBuilder sb = new StringBuilder();
+                boolean first = true;
+                for (final SevenZMethodConfiguration m : entry.getContentMethods()) {
+                    if (!first) {
+                        sb.append(", ");
+                    }
+                    first = false;
+                    sb.append(m.getMethod());
+                    if (m.getOptions() != null) {
+                        sb.append("(").append(m.getOptions()).append(")");
+                    }
+                }
+                return sb.toString();
+            }
+
             @Override
             public void takeAction(final SevenZFile archive, final SevenZArchiveEntry entry) {
                 System.out.print(entry.getName());
@@ -45,22 +61,6 @@ public class CLI {
                     System.out.println();
                 }
             }
-
-            private String getContentMethods(final SevenZArchiveEntry entry) {
-                final StringBuilder sb = new StringBuilder();
-                boolean first = true;
-                for (final SevenZMethodConfiguration m : entry.getContentMethods()) {
-                    if (!first) {
-                        sb.append(", ");
-                    }
-                    first = false;
-                    sb.append(m.getMethod());
-                    if (m.getOptions() != null) {
-                        sb.append("(").append(m.getOptions()).append(")");
-                    }
-                }
-                return sb.toString();
-            }
         };
 
         private final String message;
@@ -74,6 +74,13 @@ public class CLI {
             throws IOException;
     }
 
+    private static Mode grabMode(final String[] args) {
+        if (args.length < 2) {
+            return Mode.LIST;
+        }
+        return Enum.valueOf(Mode.class, args[1].toUpperCase());
+    }
+
     public static void main(final String[] args) throws Exception {
         if (args.length == 0) {
             usage();
@@ -97,11 +104,4 @@ public class CLI {
         System.out.println("Parameters: archive-name [list]");
     }
 
-    private static Mode grabMode(final String[] args) {
-        if (args.length < 2) {
-            return Mode.LIST;
-        }
-        return Enum.valueOf(Mode.class, args[1].toUpperCase());
-    }
-
 }
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java
index 4dbeeae9..4e03caae 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Coders.java
@@ -44,64 +44,6 @@ import org.tukaani.xz.SPARCOptions;
 import org.tukaani.xz.X86Options;
 
 class Coders {
-    private static final Map<SevenZMethod, AbstractCoder> CODER_MAP = new HashMap<SevenZMethod, AbstractCoder>() {
-
-        private static final long serialVersionUID = 1664829131806520867L;
-
-        {
-            put(SevenZMethod.COPY, new CopyDecoder());
-            put(SevenZMethod.LZMA, new LZMADecoder());
-            put(SevenZMethod.LZMA2, new LZMA2Decoder());
-            put(SevenZMethod.DEFLATE, new DeflateDecoder());
-            put(SevenZMethod.DEFLATE64, new Deflate64Decoder());
-            put(SevenZMethod.BZIP2, new BZIP2Decoder());
-            put(SevenZMethod.AES256SHA256, new AES256SHA256Decoder());
-            put(SevenZMethod.BCJ_X86_FILTER, new BCJDecoder(new X86Options()));
-            put(SevenZMethod.BCJ_PPC_FILTER, new BCJDecoder(new PowerPCOptions()));
-            put(SevenZMethod.BCJ_IA64_FILTER, new BCJDecoder(new IA64Options()));
-            put(SevenZMethod.BCJ_ARM_FILTER, new BCJDecoder(new ARMOptions()));
-            put(SevenZMethod.BCJ_ARM_THUMB_FILTER, new BCJDecoder(new ARMThumbOptions()));
-            put(SevenZMethod.BCJ_SPARC_FILTER, new BCJDecoder(new SPARCOptions()));
-            put(SevenZMethod.DELTA_FILTER, new DeltaDecoder());
-        }
-    };
-
-    static AbstractCoder findByMethod(final SevenZMethod method) {
-        return CODER_MAP.get(method);
-    }
-
-    static InputStream addDecoder(final String archiveName, final InputStream is, final long uncompressedLength,
-            final Coder coder, final byte[] password, final int maxMemoryLimitInKb) throws IOException {
-        final AbstractCoder cb = findByMethod(SevenZMethod.byId(coder.decompressionMethodId));
-        if (cb == null) {
-            throw new IOException("Unsupported compression method " +
-                                  Arrays.toString(coder.decompressionMethodId)
-                                  + " used in " + archiveName);
-        }
-        return cb.decode(archiveName, is, uncompressedLength, coder, password, maxMemoryLimitInKb);
-    }
-
-    static OutputStream addEncoder(final OutputStream out, final SevenZMethod method,
-                                   final Object options) throws IOException {
-        final AbstractCoder cb = findByMethod(method);
-        if (cb == null) {
-            throw new IOException("Unsupported compression method " + method);
-        }
-        return cb.encode(out, options);
-    }
-
-    static class CopyDecoder extends AbstractCoder {
-        @Override
-        InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
-                final Coder coder, final byte[] password, final int maxMemoryLimitInKb) throws IOException {
-            return in;
-        }
-        @Override
-        OutputStream encode(final OutputStream out, final Object options) {
-            return out;
-        }
-    }
-
     static class BCJDecoder extends AbstractCoder {
         private final FilterOptions opts;
         BCJDecoder(final FilterOptions opts) {
@@ -128,36 +70,52 @@ class Coders {
         }
     }
 
-    static class DeflateDecoder extends AbstractCoder {
-        private static final byte[] ONE_ZERO_BYTE = new byte[1];
-        DeflateDecoder() {
+    static class BZIP2Decoder extends AbstractCoder {
+        BZIP2Decoder() {
             super(Number.class);
         }
 
         @Override
         InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
                 final Coder coder, final byte[] password, final int maxMemoryLimitInKb)
-            throws IOException {
-            final Inflater inflater = new Inflater(true);
-            // Inflater with nowrap=true has this odd contract for a zero padding
-            // byte following the data stream; this used to be zlib's requirement
-            // and has been fixed a long time ago, but the contract persists so
-            // we comply.
-            // https://docs.oracle.com/javase/7/docs/api/java/util/zip/Inflater.html#Inflater(boolean)
-            final InflaterInputStream inflaterInputStream = new InflaterInputStream(new SequenceInputStream(in,
-                new ByteArrayInputStream(ONE_ZERO_BYTE)), inflater);
-            return new DeflateDecoderInputStream(inflaterInputStream, inflater);
+                throws IOException {
+            return new BZip2CompressorInputStream(in);
+        }
+        @Override
+        OutputStream encode(final OutputStream out, final Object options)
+                throws IOException {
+            final int blockSize = toInt(options, BZip2CompressorOutputStream.MAX_BLOCKSIZE);
+            return new BZip2CompressorOutputStream(out, blockSize);
         }
+    }
 
+    static class CopyDecoder extends AbstractCoder {
+        @Override
+        InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
+                final Coder coder, final byte[] password, final int maxMemoryLimitInKb) throws IOException {
+            return in;
+        }
         @Override
         OutputStream encode(final OutputStream out, final Object options) {
-            final int level = toInt(options, 9);
-            final Deflater deflater = new Deflater(level, true);
-            final DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(out, deflater);
-            return new DeflateDecoderOutputStream(deflaterOutputStream, deflater);
+            return out;
         }
+    }
 
-         static class DeflateDecoderInputStream extends InputStream {
+    static class Deflate64Decoder extends AbstractCoder {
+        Deflate64Decoder() {
+            super(Number.class);
+        }
+
+        @Override
+        InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
+                final Coder coder, final byte[] password, final int maxMemoryLimitInKb)
+            throws IOException {
+            return new Deflate64CompressorInputStream(in);
+        }
+    }
+
+    static class DeflateDecoder extends AbstractCoder {
+        static class DeflateDecoderInputStream extends InputStream {
 
               final InflaterInputStream inflaterInputStream;
               Inflater inflater;
@@ -169,13 +127,17 @@ class Coders {
             }
 
             @Override
-            public int read() throws IOException {
-                return inflaterInputStream.read();
+            public void close() throws IOException {
+                try {
+                    inflaterInputStream.close();
+                } finally {
+                    inflater.end();
+                }
             }
 
             @Override
-            public int read(final byte[] b, final int off, final int len) throws IOException {
-                return inflaterInputStream.read(b, off, len);
+            public int read() throws IOException {
+                return inflaterInputStream.read();
             }
 
             @Override
@@ -184,16 +146,11 @@ class Coders {
             }
 
             @Override
-            public void close() throws IOException {
-                try {
-                    inflaterInputStream.close();
-                } finally {
-                    inflater.end();
-                }
+            public int read(final byte[] b, final int off, final int len) throws IOException {
+                return inflaterInputStream.read(b, off, len);
             }
         }
-
-         static class DeflateDecoderOutputStream extends OutputStream {
+        static class DeflateDecoderOutputStream extends OutputStream {
 
               final DeflaterOutputStream deflaterOutputStream;
               Deflater deflater;
@@ -205,8 +162,12 @@ class Coders {
             }
 
             @Override
-            public void write(final int b) throws IOException {
-                deflaterOutputStream.write(b);
+            public void close() throws IOException {
+                try {
+                    deflaterOutputStream.close();
+                } finally {
+                    deflater.end();
+                }
             }
 
             @Override
@@ -220,46 +181,85 @@ class Coders {
             }
 
             @Override
-            public void close() throws IOException {
-                try {
-                    deflaterOutputStream.close();
-                } finally {
-                    deflater.end();
-                }
+            public void write(final int b) throws IOException {
+                deflaterOutputStream.write(b);
             }
         }
-    }
 
-    static class Deflate64Decoder extends AbstractCoder {
-        Deflate64Decoder() {
+        private static final byte[] ONE_ZERO_BYTE = new byte[1];
+
+        DeflateDecoder() {
             super(Number.class);
         }
 
-        @Override
+         @Override
         InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
                 final Coder coder, final byte[] password, final int maxMemoryLimitInKb)
             throws IOException {
-            return new Deflate64CompressorInputStream(in);
+            final Inflater inflater = new Inflater(true);
+            // Inflater with nowrap=true has this odd contract for a zero padding
+            // byte following the data stream; this used to be zlib's requirement
+            // and has been fixed a long time ago, but the contract persists so
+            // we comply.
+            // https://docs.oracle.com/javase/7/docs/api/java/util/zip/Inflater.html#Inflater(boolean)
+            final InflaterInputStream inflaterInputStream = new InflaterInputStream(new SequenceInputStream(in,
+                new ByteArrayInputStream(ONE_ZERO_BYTE)), inflater);
+            return new DeflateDecoderInputStream(inflaterInputStream, inflater);
+        }
+
+         @Override
+        OutputStream encode(final OutputStream out, final Object options) {
+            final int level = toInt(options, 9);
+            final Deflater deflater = new Deflater(level, true);
+            final DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(out, deflater);
+            return new DeflateDecoderOutputStream(deflaterOutputStream, deflater);
         }
     }
 
-    static class BZIP2Decoder extends AbstractCoder {
-        BZIP2Decoder() {
-            super(Number.class);
+    private static final Map<SevenZMethod, AbstractCoder> CODER_MAP = new HashMap<SevenZMethod, AbstractCoder>() {
+
+        private static final long serialVersionUID = 1664829131806520867L;
+
+        {
+            put(SevenZMethod.COPY, new CopyDecoder());
+            put(SevenZMethod.LZMA, new LZMADecoder());
+            put(SevenZMethod.LZMA2, new LZMA2Decoder());
+            put(SevenZMethod.DEFLATE, new DeflateDecoder());
+            put(SevenZMethod.DEFLATE64, new Deflate64Decoder());
+            put(SevenZMethod.BZIP2, new BZIP2Decoder());
+            put(SevenZMethod.AES256SHA256, new AES256SHA256Decoder());
+            put(SevenZMethod.BCJ_X86_FILTER, new BCJDecoder(new X86Options()));
+            put(SevenZMethod.BCJ_PPC_FILTER, new BCJDecoder(new PowerPCOptions()));
+            put(SevenZMethod.BCJ_IA64_FILTER, new BCJDecoder(new IA64Options()));
+            put(SevenZMethod.BCJ_ARM_FILTER, new BCJDecoder(new ARMOptions()));
+            put(SevenZMethod.BCJ_ARM_THUMB_FILTER, new BCJDecoder(new ARMThumbOptions()));
+            put(SevenZMethod.BCJ_SPARC_FILTER, new BCJDecoder(new SPARCOptions()));
+            put(SevenZMethod.DELTA_FILTER, new DeltaDecoder());
         }
+    };
 
-        @Override
-        InputStream decode(final String archiveName, final InputStream in, final long uncompressedLength,
-                final Coder coder, final byte[] password, final int maxMemoryLimitInKb)
-                throws IOException {
-            return new BZip2CompressorInputStream(in);
+    static InputStream addDecoder(final String archiveName, final InputStream is, final long uncompressedLength,
+            final Coder coder, final byte[] password, final int maxMemoryLimitInKb) throws IOException {
+        final AbstractCoder cb = findByMethod(SevenZMethod.byId(coder.decompressionMethodId));
+        if (cb == null) {
+            throw new IOException("Unsupported compression method " +
+                                  Arrays.toString(coder.decompressionMethodId)
+                                  + " used in " + archiveName);
         }
-        @Override
-        OutputStream encode(final OutputStream out, final Object options)
-                throws IOException {
-            final int blockSize = toInt(options, BZip2CompressorOutputStream.MAX_BLOCKSIZE);
-            return new BZip2CompressorOutputStream(out, blockSize);
+        return cb.decode(archiveName, is, uncompressedLength, coder, password, maxMemoryLimitInKb);
+    }
+
+    static OutputStream addEncoder(final OutputStream out, final SevenZMethod method,
+                                   final Object options) throws IOException {
+        final AbstractCoder cb = findByMethod(method);
+        if (cb == null) {
+            throw new IOException("Unsupported compression method " + method);
         }
+        return cb.encode(out, options);
+    }
+
+    static AbstractCoder findByMethod(final SevenZMethod method) {
+        return CODER_MAP.get(method);
     }
 
 }
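
One detail preserved by the reordered DeflateDecoder is the workaround for java.util.zip.Inflater in nowrap mode, which still expects a dummy trailing byte after a raw deflate stream. A small stand-alone sketch of that round trip with plain JDK classes (class name and sample text are illustrative only):

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.SequenceInputStream;
    import java.nio.charset.StandardCharsets;
    import java.util.zip.Deflater;
    import java.util.zip.DeflaterOutputStream;
    import java.util.zip.Inflater;
    import java.util.zip.InflaterInputStream;

    public class RawDeflateRoundTrip {
        public static void main(final String[] args) throws IOException {
            final byte[] original = "hello deflate".getBytes(StandardCharsets.UTF_8);

            // Compress as a raw (headerless) deflate stream, like the 7z DEFLATE coder does.
            final ByteArrayOutputStream compressed = new ByteArrayOutputStream();
            final Deflater deflater = new Deflater(Deflater.DEFAULT_COMPRESSION, true);
            try (DeflaterOutputStream out = new DeflaterOutputStream(compressed, deflater)) {
                out.write(original);
            } finally {
                deflater.end();
            }

            // Inflater(nowrap=true) wants one extra byte after the raw stream,
            // hence the SequenceInputStream appending a single zero byte.
            final Inflater inflater = new Inflater(true);
            final InputStream padded = new SequenceInputStream(
                    new ByteArrayInputStream(compressed.toByteArray()),
                    new ByteArrayInputStream(new byte[1]));
            final ByteArrayOutputStream decompressed = new ByteArrayOutputStream();
            try (InputStream in = new InflaterInputStream(padded, inflater)) {
                final byte[] buffer = new byte[4096];
                int n;
                while ((n = in.read(buffer)) != -1) {
                    decompressed.write(buffer, 0, n);
                }
            } finally {
                inflater.end();
            }
            System.out.println(new String(decompressed.toByteArray(), StandardCharsets.UTF_8));
        }
    }
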
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java
index 826a6939..f34c4cb0 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/DeltaDecoder.java
@@ -54,15 +54,15 @@ class DeltaDecoder extends AbstractCoder {
         };
     }
 
-    @Override
-    Object getOptionsFromCoder(final Coder coder, final InputStream in) {
-        return getOptionsFromCoder(coder);
-    }
-
     private int getOptionsFromCoder(final Coder coder) {
         if (coder.properties == null || coder.properties.length == 0) {
             return 1;
         }
         return (0xff & coder.properties[0]) + 1;
     }
+
+    @Override
+    Object getOptionsFromCoder(final Coder coder, final InputStream in) {
+        return getOptionsFromCoder(coder);
+    }
 }
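
For readers skimming the moved DeltaDecoder code: a missing or empty properties array means a delta distance of 1, otherwise the stored unsigned byte plus 1 is used. A tiny hypothetical stand-alone version of that mapping:

    public class DeltaDistance {
        // Mirrors the mapping above: no properties byte means distance 1,
        // otherwise the unsigned byte value plus 1.
        static int deltaDistance(final byte[] properties) {
            if (properties == null || properties.length == 0) {
                return 1;
            }
            return (0xff & properties[0]) + 1;
        }

        public static void main(final String[] args) {
            System.out.println(deltaDistance(null));              // 1
            System.out.println(deltaDistance(new byte[] { 3 }));  // 4
        }
    }
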
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
index cfa7e4b3..1124fe71 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
@@ -25,6 +25,7 @@ import java.util.LinkedList;
  * The unit of solid compression.
  */
 class Folder {
+    static final Folder[] EMPTY_FOLDER_ARRAY = {};
     /// List of coders used in this folder, eg. one for compression, one for encryption.
     Coder[] coders;
     /// Total number of input streams across all coders.
@@ -46,7 +47,28 @@ class Folder {
     /// output streams and the number of non-empty files in this
     /// folder.
     int numUnpackSubStreams;
-    static final Folder[] EMPTY_FOLDER_ARRAY = {};
+
+    int findBindPairForInStream(final int index) {
+        if (bindPairs != null) {
+            for (int i = 0; i < bindPairs.length; i++) {
+                if (bindPairs[i].inIndex == index) {
+                    return i;
+                }
+            }
+        }
+        return -1;
+    }
+
+    int findBindPairForOutStream(final int index) {
+        if (bindPairs != null) {
+            for (int i = 0; i < bindPairs.length; i++) {
+                if (bindPairs[i].outIndex == index) {
+                    return i;
+                }
+            }
+        }
+        return -1;
+    }
 
     /**
      * Sorts Coders using bind pairs.
@@ -71,28 +93,6 @@ class Folder {
         return l;
     }
 
-    int findBindPairForInStream(final int index) {
-        if (bindPairs != null) {
-            for (int i = 0; i < bindPairs.length; i++) {
-                if (bindPairs[i].inIndex == index) {
-                    return i;
-                }
-            }
-        }
-        return -1;
-    }
-
-    int findBindPairForOutStream(final int index) {
-        if (bindPairs != null) {
-            for (int i = 0; i < bindPairs.length; i++) {
-                if (bindPairs[i].outIndex == index) {
-                    return i;
-                }
-            }
-        }
-        return -1;
-    }
-
     long getUnpackSize() {
         if (totalOutputStreams == 0) {
             return 0;
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java
index 289e8067..ba84194d 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMA2Decoder.java
@@ -53,29 +53,6 @@ class LZMA2Decoder extends AbstractCoder {
         return getOptions(opts).getOutputStream(new FinishableWrapperOutputStream(out));
     }
 
-    @Override
-    byte[] getOptionsAsProperties(final Object opts) {
-        final int dictSize = getDictSize(opts);
-        final int lead = Integer.numberOfLeadingZeros(dictSize);
-        final int secondBit = (dictSize >>> (30 - lead)) - 2;
-        return new byte[] {
-            (byte) ((19 - lead) * 2 + secondBit)
-        };
-    }
-
-    @Override
-    Object getOptionsFromCoder(final Coder coder, final InputStream in)
-        throws IOException {
-        return getDictionarySize(coder);
-    }
-
-    private int getDictSize(final Object opts) {
-        if (opts instanceof LZMA2Options) {
-            return ((LZMA2Options) opts).getDictSize();
-        }
-        return numberOptionOrDefault(opts);
-    }
-
     private int getDictionarySize(final Coder coder) throws IOException {
         if (coder.properties == null) {
             throw new IOException("Missing LZMA2 properties");
@@ -96,6 +73,13 @@ class LZMA2Decoder extends AbstractCoder {
         return (2 | (dictionarySizeBits & 0x1)) << (dictionarySizeBits / 2 + 11);
     }
 
+    private int getDictSize(final Object opts) {
+        if (opts instanceof LZMA2Options) {
+            return ((LZMA2Options) opts).getDictSize();
+        }
+        return numberOptionOrDefault(opts);
+    }
+
     private LZMA2Options getOptions(final Object opts) throws IOException {
         if (opts instanceof LZMA2Options) {
             return (LZMA2Options) opts;
@@ -105,6 +89,22 @@ class LZMA2Decoder extends AbstractCoder {
         return options;
     }
 
+    @Override
+    byte[] getOptionsAsProperties(final Object opts) {
+        final int dictSize = getDictSize(opts);
+        final int lead = Integer.numberOfLeadingZeros(dictSize);
+        final int secondBit = (dictSize >>> (30 - lead)) - 2;
+        return new byte[] {
+            (byte) ((19 - lead) * 2 + secondBit)
+        };
+    }
+
+    @Override
+    Object getOptionsFromCoder(final Coder coder, final InputStream in)
+        throws IOException {
+        return getDictionarySize(coder);
+    }
+
     private int numberOptionOrDefault(final Object opts) {
         return toInt(opts, LZMA2Options.DICT_SIZE_DEFAULT);
     }
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java
index 82626e8f..9ef390da 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/LZMADecoder.java
@@ -63,6 +63,19 @@ class LZMADecoder extends AbstractCoder {
         return new FlushShieldFilterOutputStream(new LZMAOutputStream(out, getOptions(opts), false));
     }
 
+    private int getDictionarySize(final Coder coder) throws IllegalArgumentException {
+        return (int) ByteUtils.fromLittleEndian(coder.properties, 1, 4);
+    }
+
+    private LZMA2Options getOptions(final Object opts) throws IOException {
+        if (opts instanceof LZMA2Options) {
+            return (LZMA2Options) opts;
+        }
+        final LZMA2Options options = new LZMA2Options();
+        options.setDictSize(numberOptionOrDefault(opts));
+        return options;
+    }
+
     @Override
     byte[] getOptionsAsProperties(final Object opts) throws IOException {
         final LZMA2Options options = getOptions(opts);
@@ -95,19 +108,6 @@ class LZMADecoder extends AbstractCoder {
         return opts;
     }
 
-    private int getDictionarySize(final Coder coder) throws IllegalArgumentException {
-        return (int) ByteUtils.fromLittleEndian(coder.properties, 1, 4);
-    }
-
-    private LZMA2Options getOptions(final Object opts) throws IOException {
-        if (opts instanceof LZMA2Options) {
-            return (LZMA2Options) opts;
-        }
-        final LZMA2Options options = new LZMA2Options();
-        options.setDictSize(numberOptionOrDefault(opts));
-        return options;
-    }
-
     private int numberOptionOrDefault(final Object opts) {
         return toInt(opts, LZMA2Options.DICT_SIZE_DEFAULT);
     }
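
LZMADecoder reads the dictionary size from bytes 1-4 of the coder properties, little-endian, while byte 0 carries the packed lc/lp/pb values (parsed elsewhere in the class). A hedged stand-alone sketch of that 5-byte layout, using only plain Java and an illustrative sample array:

    public class LzmaCoderProperties {
        public static void main(final String[] args) {
            // Sample LZMA properties: 0x5D packs lc=3, lp=0, pb=2; the next four
            // bytes are the dictionary size (1 MiB here) in little-endian order.
            final byte[] properties = { 0x5D, 0x00, 0x00, 0x10, 0x00 };

            final int propsByte = properties[0] & 0xff;
            final int lc = propsByte % 9;
            final int remainder = propsByte / 9;
            final int lp = remainder % 5;
            final int pb = remainder / 5;

            long dictSize = 0;
            for (int i = 0; i < 4; i++) {
                dictSize |= (properties[1 + i] & 0xffL) << (8 * i);
            }
            System.out.println("lc=" + lc + " lp=" + lp + " pb=" + pb + " dictSize=" + dictSize);
        }
    }
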
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
index a7d938d5..4ec6b000 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
@@ -90,28 +90,220 @@ import org.apache.commons.compress.utils.InputStreamStatistics;
  * @since 1.6
  */
 public class SevenZFile implements Closeable {
+    private static class ArchiveStatistics {
+        private int numberOfPackedStreams;
+        private long numberOfCoders;
+        private long numberOfOutStreams;
+        private long numberOfInStreams;
+        private long numberOfUnpackSubStreams;
+        private int numberOfFolders;
+        private BitSet folderHasCrc;
+        private int numberOfEntries;
+        private int numberOfEntriesWithStream;
+
+        void assertValidity(final int maxMemoryLimitInKb) throws IOException {
+            if (numberOfEntriesWithStream > 0 && numberOfFolders == 0) {
+                throw new IOException("archive with entries but no folders");
+            }
+            if (numberOfEntriesWithStream > numberOfUnpackSubStreams) {
+                throw new IOException("archive doesn't contain enough substreams for entries");
+            }
+
+            final long memoryNeededInKb = estimateSize() / 1024;
+            if (maxMemoryLimitInKb < memoryNeededInKb) {
+                throw new MemoryLimitException(memoryNeededInKb, maxMemoryLimitInKb);
+            }
+        }
+
+        private long bindPairSize() {
+            return 16;
+        }
+
+        private long coderSize() {
+            return 2 /* methodId is between 1 and four bytes currently, COPY and LZMA2 are the most common with 1 */
+                + 16
+                + 4 /* properties, guess */
+                ;
+        }
+
+        private long entrySize() {
+            return 100; /* real size depends on name length, everything without name is about 70 bytes */
+        }
+
+        long estimateSize() {
+            final long lowerBound = 16L * numberOfPackedStreams /* packSizes, packCrcs in Archive */
+                + numberOfPackedStreams / 8 /* packCrcsDefined in Archive */
+                + numberOfFolders * folderSize() /* folders in Archive */
+                + numberOfCoders * coderSize() /* coders in Folder */
+                + (numberOfOutStreams - numberOfFolders) * bindPairSize() /* bindPairs in Folder */
+                + 8L * (numberOfInStreams - numberOfOutStreams + numberOfFolders) /* packedStreams in Folder */
+                + 8L * numberOfOutStreams /* unpackSizes in Folder */
+                + numberOfEntries * entrySize() /* files in Archive */
+                + streamMapSize()
+                ;
+            return 2 * lowerBound /* conservative guess */;
+        }
+
+        private long folderSize() {
+            return 30; /* nested arrays are accounted for separately */
+        }
+
+        private long streamMapSize() {
+            return 8 * numberOfFolders /* folderFirstPackStreamIndex, folderFirstFileIndex */
+                + 8 * numberOfPackedStreams /* packStreamOffsets */
+                + 4 * numberOfEntries /* fileFolderIndex */
+                ;
+        }
+
+        @Override
+        public String toString() {
+            return "Archive with " + numberOfEntries + " entries in " + numberOfFolders
+                + " folders. Estimated size " + estimateSize()/ 1024L + " kB.";
+        }
+    }
+
     static final int SIGNATURE_HEADER_SIZE = 32;
 
     private static final String DEFAULT_FILE_NAME = "unknown archive";
+    /** Shared with SevenZOutputFile and tests, neither mutates it. */
+    static final byte[] sevenZSignature = { //NOSONAR
+        (byte)'7', (byte)'z', (byte)0xBC, (byte)0xAF, (byte)0x27, (byte)0x1C
+    };
+    private static int assertFitsIntoNonNegativeInt(final String what, final long value) throws IOException {
+        if (value > Integer.MAX_VALUE || value < 0) {
+            throw new IOException("Cannot handle " + what + " " + value);
+        }
+        return (int) value;
+    }
+    private static void get(final ByteBuffer buf, final byte[] to) throws IOException {
+        if (buf.remaining() < to.length) {
+            throw new EOFException();
+        }
+        buf.get(to);
+    }
+    private static char getChar(final ByteBuffer buf) throws IOException {
+        if (buf.remaining() < 2) {
+            throw new EOFException();
+        }
+        return buf.getChar();
+    }
+    private static int getInt(final ByteBuffer buf) throws IOException {
+        if (buf.remaining() < 4) {
+            throw new EOFException();
+        }
+        return buf.getInt();
+    }
+    private static long getLong(final ByteBuffer buf) throws IOException {
+        if (buf.remaining() < 8) {
+            throw new EOFException();
+        }
+        return buf.getLong();
+    }
+    private static int getUnsignedByte(final ByteBuffer buf) throws IOException {
+        if (!buf.hasRemaining()) {
+            throw new EOFException();
+        }
+        return buf.get() & 0xff;
+    }
+
+    /**
+     * Checks if the signature matches what is expected for a 7z file.
+     *
+     * @param signature
+     *            the bytes to check
+     * @param length
+     *            the number of bytes to check
+     * @return true, if this is the signature of a 7z archive.
+     * @since 1.8
+     */
+    public static boolean matches(final byte[] signature, final int length) {
+        if (length < sevenZSignature.length) {
+            return false;
+        }
+
+        for (int i = 0; i < sevenZSignature.length; i++) {
+            if (signature[i] != sevenZSignature[i]) {
+                return false;
+            }
+        }
+        return true;
+    }
+    private static long readUint64(final ByteBuffer in) throws IOException {
+        // long rather than int as it might get shifted beyond the range of an int
+        final long firstByte = getUnsignedByte(in);
+        int mask = 0x80;
+        long value = 0;
+        for (int i = 0; i < 8; i++) {
+            if ((firstByte & mask) == 0) {
+                return value | (firstByte & mask - 1) << 8 * i;
+            }
+            final long nextByte = getUnsignedByte(in);
+            value |= nextByte << 8 * i;
+            mask >>>= 1;
+        }
+        return value;
+    }
+
+    private static long skipBytesFully(final ByteBuffer input, long bytesToSkip) {
+        if (bytesToSkip < 1) {
+            return 0;
+        }
+        final int current = input.position();
+        final int maxSkip = input.remaining();
+        if (maxSkip < bytesToSkip) {
+            bytesToSkip = maxSkip;
+        }
+        input.position(current + (int) bytesToSkip);
+        return bytesToSkip;
+    }
 
     private final String fileName;
+
     private SeekableByteChannel channel;
+
     private final Archive archive;
+
     private int currentEntryIndex = -1;
+
     private int currentFolderIndex = -1;
+
     private InputStream currentFolderInputStream;
+
     private byte[] password;
+
     private final SevenZFileOptions options;
 
     private long compressedBytesReadFromCurrentEntry;
+
     private long uncompressedBytesReadFromCurrentEntry;
 
     private final ArrayList<InputStream> deferredBlockStreams = new ArrayList<>();
 
-    /** Shared with SevenZOutputFile and tests, neither mutates it. */
-    static final byte[] sevenZSignature = { //NOSONAR
-        (byte)'7', (byte)'z', (byte)0xBC, (byte)0xAF, (byte)0x27, (byte)0x1C
-    };
+    /**
+     * Reads a file as unencrypted 7z archive
+     *
+     * @param fileName the file to read
+     * @throws IOException if reading the archive fails
+     */
+    public SevenZFile(final File fileName) throws IOException {
+        this(fileName, SevenZFileOptions.DEFAULT);
+    }
+
+    /**
+     * Reads a file as 7z archive
+     *
+     * @param fileName the file to read
+     * @param password optional password if the archive is encrypted -
+     * the byte array is supposed to be the UTF16-LE encoded
+     * representation of the password.
+     * @throws IOException if reading the archive fails
+     * @deprecated use the char[]-arg version for the password instead
+     */
+    @Deprecated
+    public SevenZFile(final File fileName, final byte[] password) throws IOException {
+        this(Files.newByteChannel(fileName.toPath(), EnumSet.of(StandardOpenOption.READ)),
+                fileName.getAbsolutePath(), password, true, SevenZFileOptions.DEFAULT);
+    }
 
     /**
      * Reads a file as 7z archive
@@ -140,19 +332,15 @@ public class SevenZFile implements Closeable {
     }
 
     /**
-     * Reads a file as 7z archive
+     * Reads a file as unencrypted 7z archive
      *
      * @param fileName the file to read
-     * @param password optional password if the archive is encrypted -
-     * the byte array is supposed to be the UTF16-LE encoded
-     * representation of the password.
-     * @throws IOException if reading the archive fails
-     * @deprecated use the char[]-arg version for the password instead
+     * @param options the options to apply
+     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
+     * @since 1.19
      */
-    @Deprecated
-    public SevenZFile(final File fileName, final byte[] password) throws IOException {
-        this(Files.newByteChannel(fileName.toPath(), EnumSet.of(StandardOpenOption.READ)),
-                fileName.getAbsolutePath(), password, true, SevenZFileOptions.DEFAULT);
+    public SevenZFile(final File fileName, final SevenZFileOptions options) throws IOException {
+        this(fileName, null, options);
     }
 
     /**
@@ -171,19 +359,24 @@ public class SevenZFile implements Closeable {
     }
 
     /**
-     * Reads a SeekableByteChannel as 7z archive with addtional options.
+     * Reads a SeekableByteChannel as 7z archive
      *
      * <p>{@link
      * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
      * allows you to read from an in-memory archive.</p>
      *
      * @param channel the channel to read
-     * @param options the options to apply
-     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
-     * @since 1.19
+     * @param password optional password if the archive is encrypted -
+     * the byte array is supposed to be the UTF16-LE encoded
+     * representation of the password.
+     * @throws IOException if reading the archive fails
+     * @since 1.13
+     * @deprecated use the char[]-arg version for the password instead
      */
-    public SevenZFile(final SeekableByteChannel channel, final SevenZFileOptions options) throws IOException {
-        this(channel, DEFAULT_FILE_NAME, null, options);
+    @Deprecated
+    public SevenZFile(final SeekableByteChannel channel,
+                      final byte[] password) throws IOException {
+        this(channel, DEFAULT_FILE_NAME, password);
     }
 
     /**
@@ -222,25 +415,23 @@ public class SevenZFile implements Closeable {
     }
 
     /**
-     * Reads a SeekableByteChannel as 7z archive
+     * Reads a SeekableByteChannel as 7z archive with additional options.
      *
      * <p>{@link
      * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
      * allows you to read from an in-memory archive.</p>
      *
      * @param channel the channel to read
-     * @param fileName name of the archive - only used for error reporting
-     * @param password optional password if the archive is encrypted
-     * @throws IOException if reading the archive fails
-     * @since 1.17
+     * @param options the options to apply
+     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
+     * @since 1.19
      */
-    public SevenZFile(final SeekableByteChannel channel, final String fileName,
-                      final char[] password) throws IOException {
-        this(channel, fileName, password, SevenZFileOptions.DEFAULT);
+    public SevenZFile(final SeekableByteChannel channel, final SevenZFileOptions options) throws IOException {
+        this(channel, DEFAULT_FILE_NAME, null, options);
     }
 
     /**
-     * Reads a SeekableByteChannel as 7z archive with addtional options.
+     * Reads a SeekableByteChannel as 7z archive
      *
      * <p>{@link
      * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
@@ -248,72 +439,14 @@ public class SevenZFile implements Closeable {
      *
      * @param channel the channel to read
      * @param fileName name of the archive - only used for error reporting
-     * @param password optional password if the archive is encrypted
-     * @param options the options to apply
-     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
-     * @since 1.19
-     */
-    public SevenZFile(final SeekableByteChannel channel, final String fileName, final char[] password,
-            final SevenZFileOptions options) throws IOException {
-        this(channel, fileName, AES256SHA256Decoder.utf16Decode(password), false, options);
-    }
-
-    /**
-     * Reads a SeekableByteChannel as 7z archive
-     *
-     * <p>{@link
-     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
-     * allows you to read from an in-memory archive.</p>
-     *
-     * @param channel the channel to read
-     * @param fileName name of the archive - only used for error reporting
-     * @throws IOException if reading the archive fails
-     * @since 1.17
+     * @throws IOException if reading the archive fails
+     * @since 1.17
      */
     public SevenZFile(final SeekableByteChannel channel, final String fileName)
         throws IOException {
         this(channel, fileName, SevenZFileOptions.DEFAULT);
     }
 
-    /**
-     * Reads a SeekableByteChannel as 7z archive with additional options.
-     *
-     * <p>{@link
-     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
-     * allows you to read from an in-memory archive.</p>
-     *
-     * @param channel the channel to read
-     * @param fileName name of the archive - only used for error reporting
-     * @param options the options to apply
-     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
-     * @since 1.19
-     */
-    public SevenZFile(final SeekableByteChannel channel, final String fileName, final SevenZFileOptions options)
-            throws IOException {
-        this(channel, fileName, null, false, options);
-    }
-
-    /**
-     * Reads a SeekableByteChannel as 7z archive
-     *
-     * <p>{@link
-     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
-     * allows you to read from an in-memory archive.</p>
-     *
-     * @param channel the channel to read
-     * @param password optional password if the archive is encrypted -
-     * the byte array is supposed to be the UTF16-LE encoded
-     * representation of the password.
-     * @throws IOException if reading the archive fails
-     * @since 1.13
-     * @deprecated use the char[]-arg version for the password instead
-     */
-    @Deprecated
-    public SevenZFile(final SeekableByteChannel channel,
-                      final byte[] password) throws IOException {
-        this(channel, DEFAULT_FILE_NAME, password);
-    }
-
     /**
      * Reads a SeekableByteChannel as 7z archive
      *
@@ -358,819 +491,765 @@ public class SevenZFile implements Closeable {
     }
 
     /**
-     * Reads a file as unencrypted 7z archive
+     * Reads a SeekableByteChannel as 7z archive
      *
-     * @param fileName the file to read
+     * <p>{@link
+     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+     * allows you to read from an in-memory archive.</p>
+     *
+     * @param channel the channel to read
+     * @param fileName name of the archive - only used for error reporting
+     * @param password optional password if the archive is encrypted
      * @throws IOException if reading the archive fails
+     * @since 1.17
      */
-    public SevenZFile(final File fileName) throws IOException {
-        this(fileName, SevenZFileOptions.DEFAULT);
+    public SevenZFile(final SeekableByteChannel channel, final String fileName,
+                      final char[] password) throws IOException {
+        this(channel, fileName, password, SevenZFileOptions.DEFAULT);
     }
 
     /**
-     * Reads a file as unencrypted 7z archive
+     * Reads a SeekableByteChannel as 7z archive with additional options.
      *
-     * @param fileName the file to read
+     * <p>{@link
+     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+     * allows you to read from an in-memory archive.</p>
+     *
+     * @param channel the channel to read
+     * @param fileName name of the archive - only used for error reporting
+     * @param password optional password if the archive is encrypted
      * @param options the options to apply
      * @throws IOException if reading the archive fails or the memory limit (if set) is too small
      * @since 1.19
      */
-    public SevenZFile(final File fileName, final SevenZFileOptions options) throws IOException {
-        this(fileName, null, options);
+    public SevenZFile(final SeekableByteChannel channel, final String fileName, final char[] password,
+            final SevenZFileOptions options) throws IOException {
+        this(channel, fileName, AES256SHA256Decoder.utf16Decode(password), false, options);
     }
 
     /**
-     * Closes the archive.
-     * @throws IOException if closing the file fails
+     * Reads a SeekableByteChannel as 7z archive with additional options.
+     *
+     * <p>{@link
+     * org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
+     * allows you to read from an in-memory archive.</p>
+     *
+     * @param channel the channel to read
+     * @param fileName name of the archive - only used for error reporting
+     * @param options the options to apply
+     * @throws IOException if reading the archive fails or the memory limit (if set) is too small
+     * @since 1.19
      */
-    @Override
-    public void close() throws IOException {
-        if (channel != null) {
-            try {
-                channel.close();
-            } finally {
-                channel = null;
-                if (password != null) {
-                    Arrays.fill(password, (byte) 0);
+    public SevenZFile(final SeekableByteChannel channel, final String fileName, final SevenZFileOptions options)
+            throws IOException {
+        this(channel, fileName, null, false, options);
+    }
+
+    private InputStream buildDecoderStack(final Folder folder, final long folderOffset,
+                final int firstPackStreamIndex, final SevenZArchiveEntry entry) throws IOException {
+        channel.position(folderOffset);
+        InputStream inputStreamStack = new FilterInputStream(new BufferedInputStream(
+              new BoundedSeekableByteChannelInputStream(channel,
+                  archive.packSizes[firstPackStreamIndex]))) {
+            private void count(final int c) {
+                compressedBytesReadFromCurrentEntry += c;
+            }
+            @Override
+            public int read() throws IOException {
+                final int r = in.read();
+                if (r >= 0) {
+                    count(1);
                 }
-                password = null;
+                return r;
+            }
+            @Override
+            public int read(final byte[] b) throws IOException {
+                return read(b, 0, b.length);
+            }
+            @Override
+            public int read(final byte[] b, final int off, final int len) throws IOException {
+                if (len == 0) {
+                    return 0;
+                }
+                final int r = in.read(b, off, len);
+                if (r >= 0) {
+                    count(r);
+                }
+                return r;
             }
+        };
+        final LinkedList<SevenZMethodConfiguration> methods = new LinkedList<>();
+        for (final Coder coder : folder.getOrderedCoders()) {
+            if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
+                throw new IOException("Multi input/output stream coders are not yet supported");
+            }
+            final SevenZMethod method = SevenZMethod.byId(coder.decompressionMethodId);
+            inputStreamStack = Coders.addDecoder(fileName, inputStreamStack,
+                    folder.getUnpackSizeForCoder(coder), coder, password, options.getMaxMemoryLimitInKb());
+            methods.addFirst(new SevenZMethodConfiguration(method,
+                     Coders.findByMethod(method).getOptionsFromCoder(coder, inputStreamStack)));
+        }
+        entry.setContentMethods(methods);
+        if (folder.hasCrc) {
+            return new CRC32VerifyingInputStream(inputStreamStack,
+                    folder.getUnpackSize(), folder.crc);
         }
+        return inputStreamStack;
     }
 
     /**
-     * Returns the next Archive Entry in this archive.
+     * Build the decoding stream for the entry to be read.
+     * This method may be called from random access (getInputStream) or
+     * sequential access (getNextEntry).
+     * If this method is called from random access, some entries may
+     * need to be skipped (we put them into the deferredBlockStreams and
+     * skip them only when actually needed, to improve performance)
      *
-     * @return the next entry,
-     *         or {@code null} if there are no more entries
-     * @throws IOException if the next entry could not be read
+     * @param entryIndex     the index of the entry to be read
+     * @param isRandomAccess is this called in a random access
+     * @throws IOException if there are exceptions when reading the file
      */
-    public SevenZArchiveEntry getNextEntry() throws IOException {
-        if (currentEntryIndex >= archive.files.length - 1) {
-            return null;
+    private void buildDecodingStream(final int entryIndex, final boolean isRandomAccess) throws IOException {
+        if (archive.streamMap == null) {
+            throw new IOException("Archive doesn't contain stream information to read entries");
         }
-        ++currentEntryIndex;
-        final SevenZArchiveEntry entry = archive.files[currentEntryIndex];
-        if (entry.getName() == null && options.getUseDefaultNameForUnnamedEntries()) {
-            entry.setName(getDefaultName());
+        final int folderIndex = archive.streamMap.fileFolderIndex[entryIndex];
+        if (folderIndex < 0) {
+            deferredBlockStreams.clear();
+            // TODO: previously it'd return an empty stream?
+            // new BoundedInputStream(new ByteArrayInputStream(ByteUtils.EMPTY_BYTE_ARRAY), 0);
+            return;
         }
-        buildDecodingStream(currentEntryIndex, false);
-        uncompressedBytesReadFromCurrentEntry = compressedBytesReadFromCurrentEntry = 0;
-        return entry;
-    }
-
-    /**
-     * Returns a copy of meta-data of all archive entries.
-     *
-     * <p>This method only provides meta-data, the entries can not be
-     * used to read the contents, you still need to process all
-     * entries in order using {@link #getNextEntry} for that.</p>
-     *
-     * <p>The content methods are only available for entries that have
-     * already been reached via {@link #getNextEntry}.</p>
-     *
-     * @return a copy of meta-data of all archive entries.
-     * @since 1.11
-     */
-    public Iterable<SevenZArchiveEntry> getEntries() {
-        return new ArrayList<>(Arrays.asList(archive.files));
-    }
+        final SevenZArchiveEntry file = archive.files[entryIndex];
+        boolean isInSameFolder = false;
+        if (currentFolderIndex == folderIndex) {
+            // (COMPRESS-320).
+            // The current entry is within the same (potentially opened) folder. The
+            // previous stream has to be fully decoded before we can start reading
+            // but don't do it eagerly -- if the user skips over the entire folder nothing
+            // is effectively decompressed.
+            if (entryIndex > 0) {
+                file.setContentMethods(archive.files[entryIndex - 1].getContentMethods());
+            }
 
-    private Archive readHeaders(final byte[] password) throws IOException {
-        final ByteBuffer buf = ByteBuffer.allocate(12 /* signature + 2 bytes version + 4 bytes CRC */)
-            .order(ByteOrder.LITTLE_ENDIAN);
-        readFully(buf);
-        final byte[] signature = new byte[6];
-        buf.get(signature);
-        if (!Arrays.equals(signature, sevenZSignature)) {
-            throw new IOException("Bad 7z signature");
+            // if this is called in a random access, then the content methods of previous entry may be null
+            // the content methods should be set to methods of the first entry as it must not be null,
+            // and the content methods would only be set if the content methods was not set
+            if(isRandomAccess && file.getContentMethods() == null) {
+                final int folderFirstFileIndex = archive.streamMap.folderFirstFileIndex[folderIndex];
+                final SevenZArchiveEntry folderFirstFile = archive.files[folderFirstFileIndex];
+                file.setContentMethods(folderFirstFile.getContentMethods());
+            }
+            isInSameFolder = true;
+        } else {
+            currentFolderIndex = folderIndex;
+            // We're opening a new folder. Discard any queued streams/ folder stream.
+            reopenFolderInputStream(folderIndex, file);
         }
-        // 7zFormat.txt has it wrong - it's first major then minor
-        final byte archiveVersionMajor = buf.get();
-        final byte archiveVersionMinor = buf.get();
-        if (archiveVersionMajor != 0) {
-            throw new IOException(String.format("Unsupported 7z version (%d,%d)",
-                    archiveVersionMajor, archiveVersionMinor));
+
+        boolean haveSkippedEntries = false;
+        if (isRandomAccess) {
+            // entries will only need to be skipped if it's a random access
+            haveSkippedEntries = skipEntriesWhenNeeded(entryIndex, isInSameFolder, folderIndex);
         }
 
-        boolean headerLooksValid = false;  // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive"
-        final long startHeaderCrc = 0xffffFFFFL & buf.getInt();
-        if (startHeaderCrc == 0) {
-            // This is an indication of a corrupt header - peek the next 20 bytes
-            final long currentPosition = channel.position();
-            final ByteBuffer peekBuf = ByteBuffer.allocate(20);
-            readFully(peekBuf);
-            channel.position(currentPosition);
-            // Header invalid if all data is 0
-            while (peekBuf.hasRemaining()) {
-                if (peekBuf.get()!=0) {
-                    headerLooksValid = true;
-                    break;
-                }
-            }
-        } else {
-            headerLooksValid = true;
+        if (isRandomAccess && currentEntryIndex == entryIndex && !haveSkippedEntries) {
+            // we don't need to add another entry to the deferredBlockStreams when :
+            // 1. If this method is called in a random access and the entry index
+            // to be read equals to the current entry index, the input stream
+            // has already been put in the deferredBlockStreams
+            // 2. If this entry has not been read(which means no entries are skipped)
+            return;
         }
 
-        if (headerLooksValid) {
-            return initializeArchive(readStartHeader(startHeaderCrc), password, true);
-        }
-        // No valid header found - probably first file of multipart archive was removed too early. Scan for end header.
-        if (options.getTryToRecoverBrokenArchives()) {
-            return tryToLocateEndHeader(password);
+        InputStream fileStream = new BoundedInputStream(currentFolderInputStream, file.getSize());
+        if (file.getHasCrc()) {
+            fileStream = new CRC32VerifyingInputStream(fileStream, file.getSize(), file.getCrcValue());
         }
-        throw new IOException("archive seems to be invalid.\nYou may want to retry and enable the"
-            + " tryToRecoverBrokenArchives if the archive could be a multi volume archive that has been closed"
-            + " prematurely.");
+
+        deferredBlockStreams.add(fileStream);
     }
 
-    private Archive tryToLocateEndHeader(final byte[] password) throws IOException {
-        final ByteBuffer nidBuf = ByteBuffer.allocate(1);
-        final long searchLimit = 1024L * 1024 * 1;
-        // Main header, plus bytes that readStartHeader would read
-        final long previousDataSize = channel.position() + 20;
-        final long minPos;
-        // Determine minimal position - can't start before current position
-        if (channel.position() + searchLimit > channel.size()) {
-            minPos = channel.position();
-        } else {
-            minPos = channel.size() - searchLimit;
+    private void calculateStreamMap(final Archive archive) throws IOException {
+        final StreamMap streamMap = new StreamMap();
+
+        int nextFolderPackStreamIndex = 0;
+        final int numFolders = archive.folders != null ? archive.folders.length : 0;
+        streamMap.folderFirstPackStreamIndex = new int[numFolders];
+        for (int i = 0; i < numFolders; i++) {
+            streamMap.folderFirstPackStreamIndex[i] = nextFolderPackStreamIndex;
+            nextFolderPackStreamIndex += archive.folders[i].packedStreams.length;
         }
-        long pos = channel.size() - 1;
-        // Loop: Try from end of archive
-        while (pos > minPos) {
-            pos--;
-            channel.position(pos);
-            nidBuf.rewind();
-            if (channel.read(nidBuf) < 1) {
-                throw new EOFException();
+
+        long nextPackStreamOffset = 0;
+        final int numPackSizes = archive.packSizes.length;
+        streamMap.packStreamOffsets = new long[numPackSizes];
+        for (int i = 0; i < numPackSizes; i++) {
+            streamMap.packStreamOffsets[i] = nextPackStreamOffset;
+            nextPackStreamOffset += archive.packSizes[i];
+        }
+
+        streamMap.folderFirstFileIndex = new int[numFolders];
+        streamMap.fileFolderIndex = new int[archive.files.length];
+        int nextFolderIndex = 0;
+        int nextFolderUnpackStreamIndex = 0;
+        for (int i = 0; i < archive.files.length; i++) {
+            if (!archive.files[i].hasStream() && nextFolderUnpackStreamIndex == 0) {
+                streamMap.fileFolderIndex[i] = -1;
+                continue;
             }
-            final int nid = nidBuf.array()[0];
-            // First indicator: Byte equals one of these header identifiers
-            if (nid == NID.kEncodedHeader || nid == NID.kHeader) {
-                try {
-                    // Try to initialize Archive structure from here
-                    final StartHeader startHeader = new StartHeader();
-                    startHeader.nextHeaderOffset = pos - previousDataSize;
-                    startHeader.nextHeaderSize = channel.size() - pos;
-                    final Archive result = initializeArchive(startHeader, password, false);
-                    // Sanity check: There must be some data...
-                    if (result.packSizes.length > 0 && result.files.length > 0) {
-                        return result;
+            if (nextFolderUnpackStreamIndex == 0) {
+                for (; nextFolderIndex < archive.folders.length; ++nextFolderIndex) {
+                    streamMap.folderFirstFileIndex[nextFolderIndex] = i;
+                    if (archive.folders[nextFolderIndex].numUnpackSubStreams > 0) {
+                        break;
                     }
-                } catch (final Exception ignore) {
-                    // Wrong guess...
+                }
+                if (nextFolderIndex >= archive.folders.length) {
+                    throw new IOException("Too few folders in archive");
                 }
             }
-        }
-        throw new IOException("Start header corrupt and unable to guess end header");
-    }
-
-    private Archive initializeArchive(final StartHeader startHeader, final byte[] password, final boolean verifyCrc) throws IOException {
-        assertFitsIntoNonNegativeInt("nextHeaderSize", startHeader.nextHeaderSize);
-        final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
-        channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
-        if (verifyCrc) {
-            final long position = channel.position();
-            final CheckedInputStream cis = new CheckedInputStream(Channels.newInputStream(channel), new CRC32());
-            if (cis.skip(nextHeaderSizeInt) != nextHeaderSizeInt) {
-                throw new IOException("Problem computing NextHeader CRC-32");
+            streamMap.fileFolderIndex[i] = nextFolderIndex;
+            if (!archive.files[i].hasStream()) {
+                continue;
             }
-            if (startHeader.nextHeaderCrc != cis.getChecksum().getValue()) {
-                throw new IOException("NextHeader CRC-32 mismatch");
+            ++nextFolderUnpackStreamIndex;
+            if (nextFolderUnpackStreamIndex >= archive.folders[nextFolderIndex].numUnpackSubStreams) {
+                ++nextFolderIndex;
+                nextFolderUnpackStreamIndex = 0;
             }
-            channel.position(position);
-        }
-        Archive archive = new Archive();
-        ByteBuffer buf = ByteBuffer.allocate(nextHeaderSizeInt).order(ByteOrder.LITTLE_ENDIAN);
-        readFully(buf);
-        int nid = getUnsignedByte(buf);
-        if (nid == NID.kEncodedHeader) {
-            buf = readEncodedHeader(buf, archive, password);
-            // Archive gets rebuilt with the new header
-            archive = new Archive();
-            nid = getUnsignedByte(buf);
         }
-        if (nid != NID.kHeader) {
-            throw new IOException("Broken or unsupported archive: no Header");
-        }
-        readHeader(buf, archive);
-        archive.subStreamsInfo = null;
-        return archive;
-    }
-
-    private StartHeader readStartHeader(final long startHeaderCrc) throws IOException {
-        final StartHeader startHeader = new StartHeader();
-        // using Stream rather than ByteBuffer for the benefit of the
-        // built-in CRC check
-        try (DataInputStream dataInputStream = new DataInputStream(new CRC32VerifyingInputStream(
-                new BoundedSeekableByteChannelInputStream(channel, 20), 20, startHeaderCrc))) {
-             startHeader.nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong());
-             if (startHeader.nextHeaderOffset < 0
-                 || startHeader.nextHeaderOffset + SIGNATURE_HEADER_SIZE > channel.size()) {
-                 throw new IOException("nextHeaderOffset is out of bounds");
-             }
-
-             startHeader.nextHeaderSize = Long.reverseBytes(dataInputStream.readLong());
-             final long nextHeaderEnd = startHeader.nextHeaderOffset + startHeader.nextHeaderSize;
-             if (nextHeaderEnd < startHeader.nextHeaderOffset
-                 || nextHeaderEnd + SIGNATURE_HEADER_SIZE > channel.size()) {
-                 throw new IOException("nextHeaderSize is out of bounds");
-             }
-
-             startHeader.nextHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(dataInputStream.readInt());
 
-             return startHeader;
-        }
+        archive.streamMap = streamMap;
     }
 
-    private void readHeader(final ByteBuffer header, final Archive archive) throws IOException {
-        final int pos = header.position();
-        final ArchiveStatistics stats = sanityCheckAndCollectStatistics(header);
-        stats.assertValidity(options.getMaxMemoryLimitInKb());
-        header.position(pos);
-
-        int nid = getUnsignedByte(header);
-
-        if (nid == NID.kArchiveProperties) {
-            readArchiveProperties(header);
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kAdditionalStreamsInfo) {
-            throw new IOException("Additional streams unsupported");
-            //nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kMainStreamsInfo) {
-            readStreamsInfo(header, archive);
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kFilesInfo) {
-            readFilesInfo(header, archive);
-            nid = getUnsignedByte(header);
+    private void checkEntryIsInitialized(final Map<Integer, SevenZArchiveEntry> archiveEntries, final int index) {
+        if (archiveEntries.get(index) == null) {
+            archiveEntries.put(index, new SevenZArchiveEntry());
         }
     }
 
-    private ArchiveStatistics sanityCheckAndCollectStatistics(final ByteBuffer header)
-        throws IOException {
-        final ArchiveStatistics stats = new ArchiveStatistics();
-
-        int nid = getUnsignedByte(header);
-
-        if (nid == NID.kArchiveProperties) {
-            sanityCheckArchiveProperties(header);
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kAdditionalStreamsInfo) {
-            throw new IOException("Additional streams unsupported");
-            //nid = getUnsignedByte(header);
+    /**
+     * Closes the archive.
+     * @throws IOException if closing the file fails
+     */
+    @Override
+    public void close() throws IOException {
+        if (channel != null) {
+            try {
+                channel.close();
+            } finally {
+                channel = null;
+                if (password != null) {
+                    Arrays.fill(password, (byte) 0);
+                }
+                password = null;
+            }
         }
+    }
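
Since SevenZFile is Closeable, the usual way to consume it is try-with-resources, so that
close() always runs and the in-memory password copy is wiped even when an exception is
thrown. A minimal listing sketch (not taken from the diff; the archive name "example.7z"
and the File-based constructor are assumptions):

import java.io.File;
import java.io.IOException;
import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;

public class ListSevenZ {
    public static void main(final String[] args) throws IOException {
        // try-with-resources calls close(), which closes the channel and zeroes the password
        try (SevenZFile sevenZFile = new SevenZFile(new File("example.7z"))) {
            SevenZArchiveEntry entry;
            while ((entry = sevenZFile.getNextEntry()) != null) {
                System.out.println(entry.getName() + " (" + entry.getSize() + " bytes)");
            }
        }
    }
}
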
 
-        if (nid == NID.kMainStreamsInfo) {
-            sanityCheckStreamsInfo(header, stats);
-            nid = getUnsignedByte(header);
+    private InputStream getCurrentStream() throws IOException {
+        if (archive.files[currentEntryIndex].getSize() == 0) {
+            return new ByteArrayInputStream(ByteUtils.EMPTY_BYTE_ARRAY);
         }
-
-        if (nid == NID.kFilesInfo) {
-            sanityCheckFilesInfo(header, stats);
-            nid = getUnsignedByte(header);
+        if (deferredBlockStreams.isEmpty()) {
+            throw new IllegalStateException("No current 7z entry (call getNextEntry() first).");
         }
 
-        if (nid != NID.kEnd) {
-            throw new IOException("Badly terminated header, found " + nid);
+        while (deferredBlockStreams.size() > 1) {
+            // In solid compression mode we need to decompress all of the folder's
+            // leading streams to get access to an entry. We defer this until really
+            // needed so that entire blocks can be skipped without wasting time on
+            // decompression.
+            try (final InputStream stream = deferredBlockStreams.remove(0)) {
+                IOUtils.skip(stream, Long.MAX_VALUE);
+            }
+            compressedBytesReadFromCurrentEntry = 0;
         }
 
-        return stats;
+        return deferredBlockStreams.get(0);
     }
 
-    private void readArchiveProperties(final ByteBuffer input) throws IOException {
-        // FIXME: the reference implementation just throws them away?
-        int nid =  getUnsignedByte(input);
-        while (nid != NID.kEnd) {
-            final long propertySize = readUint64(input);
-            final byte[] property = new byte[(int)propertySize];
-            get(input, property);
-            nid = getUnsignedByte(input);
+    /**
+     * Derives a default file name from the archive name - if known.
+     *
+     * <p>This implements the same heuristics the 7z tools use. In
+     * 7z's case if an archive contains entries without a name -
+     * i.e. {@link SevenZArchiveEntry#getName} returns {@code null} -
+     * then its command line and GUI tools will use this default name
+     * when extracting the entries.</p>
+     *
+     * @return null if the name of the archive is unknown. Otherwise,
+     * if the archive name has an extension, the extension is stripped
+     * and the remainder is returned. If the archive name has no
+     * extension, a {@code ~} character is appended to it.
+     *
+     * @since 1.19
+     */
+    public String getDefaultName() {
+        if (DEFAULT_FILE_NAME.equals(fileName) || fileName == null) {
+            return null;
         }
-    }
 
-    private void sanityCheckArchiveProperties(final ByteBuffer header)
-        throws IOException {
-        int nid =  getUnsignedByte(header);
-        while (nid != NID.kEnd) {
-            final int propertySize =
-                assertFitsIntoNonNegativeInt("propertySize", readUint64(header));
-            if (skipBytesFully(header, propertySize) < propertySize) {
-                throw new IOException("invalid property size");
-            }
-            nid = getUnsignedByte(header);
+        final String lastSegment = new File(fileName).getName();
+        final int dotPos = lastSegment.lastIndexOf(".");
+        if (dotPos > 0) { // if the file starts with a dot then this is not an extension
+            return lastSegment.substring(0, dotPos);
         }
+        return lastSegment + "~";
     }
 
-    private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive archive,
-                                         final byte[] password) throws IOException {
-        final int pos = header.position();
-        final ArchiveStatistics stats = new ArchiveStatistics();
-        sanityCheckStreamsInfo(header, stats);
-        stats.assertValidity(options.getMaxMemoryLimitInKb());
-        header.position(pos);
-
-        readStreamsInfo(header, archive);
+    /**
+     * Returns a copy of meta-data of all archive entries.
+     *
+     * <p>This method only provides meta-data; the entries cannot be
+     * used to read the contents. You still need to process all
+     * entries in order using {@link #getNextEntry} for that.</p>
+     *
+     * <p>The content methods are only available for entries that have
+     * already been reached via {@link #getNextEntry}.</p>
+     *
+     * @return a copy of meta-data of all archive entries.
+     * @since 1.11
+     */
+    public Iterable<SevenZArchiveEntry> getEntries() {
+        return new ArrayList<>(Arrays.asList(archive.files));
+    }
 
-        if (archive.folders == null || archive.folders.length == 0) {
-            throw new IOException("no folders, can't read encoded header");
+    /**
+     * Returns an InputStream for reading the contents of the given entry.
+     *
+     * <p>For archives using solid compression randomly accessing
+     * entries will be significantly slower than reading the archive
+     * sequentially.</p>
+     *
+     * @param entry the entry to get the stream for.
+     * @return a stream to read the entry from.
+     * @throws IOException if unable to create an input stream from the entry
+     * @since 1.20
+     */
+    public InputStream getInputStream(final SevenZArchiveEntry entry) throws IOException {
+        int entryIndex = -1;
+        for (int i = 0; i < this.archive.files.length;i++) {
+            if (entry == this.archive.files[i]) {
+                entryIndex = i;
+                break;
+            }
         }
-        if (archive.packSizes == null || archive.packSizes.length == 0) {
-            throw new IOException("no packed streams, can't read encoded header");
+
+        if (entryIndex < 0) {
+            throw new IllegalArgumentException("Can not find " + entry.getName() + " in " + this.fileName);
         }
 
-        // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage?
-        final Folder folder = archive.folders[0];
-        final int firstPackStreamIndex = 0;
-        final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
-                0;
+        buildDecodingStream(entryIndex, true);
+        currentEntryIndex = entryIndex;
+        currentFolderIndex = archive.streamMap.fileFolderIndex[entryIndex];
+        return getCurrentStream();
+    }
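
A sketch of random access through getEntries() and getInputStream(); as the javadoc above
notes, this is noticeably slower than sequential reading for solid archives. The archive
name and entry name below are placeholders:

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;
import org.apache.commons.compress.utils.IOUtils;

public class RandomAccessSketch {
    public static void main(final String[] args) throws IOException {
        try (SevenZFile sevenZFile = new SevenZFile(new File("example.7z"))) {
            for (final SevenZArchiveEntry entry : sevenZFile.getEntries()) {
                if ("docs/readme.txt".equals(entry.getName())) {
                    try (InputStream in = sevenZFile.getInputStream(entry)) {
                        final byte[] content = IOUtils.toByteArray(in);
                        System.out.println(content.length + " bytes read");
                    }
                    break;
                }
            }
        }
    }
}
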
 
-        channel.position(folderOffset);
-        InputStream inputStreamStack = new BoundedSeekableByteChannelInputStream(channel,
-                archive.packSizes[firstPackStreamIndex]);
-        for (final Coder coder : folder.getOrderedCoders()) {
-            if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
-                throw new IOException("Multi input/output stream coders are not yet supported");
-            }
-            inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, //NOSONAR
-                    folder.getUnpackSizeForCoder(coder), coder, password, options.getMaxMemoryLimitInKb());
-        }
-        if (folder.hasCrc) {
-            inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack,
-                    folder.getUnpackSize(), folder.crc);
+    /**
+     * Returns the next Archive Entry in this archive.
+     *
+     * @return the next entry,
+     *         or {@code null} if there are no more entries
+     * @throws IOException if the next entry could not be read
+     */
+    public SevenZArchiveEntry getNextEntry() throws IOException {
+        if (currentEntryIndex >= archive.files.length - 1) {
+            return null;
         }
-        final int unpackSize = assertFitsIntoNonNegativeInt("unpackSize", folder.getUnpackSize());
-        final byte[] nextHeader = IOUtils.readRange(inputStreamStack, unpackSize);
-        if (nextHeader.length < unpackSize) {
-            throw new IOException("premature end of stream");
+        ++currentEntryIndex;
+        final SevenZArchiveEntry entry = archive.files[currentEntryIndex];
+        if (entry.getName() == null && options.getUseDefaultNameForUnnamedEntries()) {
+            entry.setName(getDefaultName());
         }
-        inputStreamStack.close();
-        return ByteBuffer.wrap(nextHeader).order(ByteOrder.LITTLE_ENDIAN);
+        buildDecodingStream(currentEntryIndex, false);
+        uncompressedBytesReadFromCurrentEntry = compressedBytesReadFromCurrentEntry = 0;
+        return entry;
     }
 
-    private void sanityCheckStreamsInfo(final ByteBuffer header,
-        final ArchiveStatistics stats) throws IOException {
-        int nid = getUnsignedByte(header);
-
-        if (nid == NID.kPackInfo) {
-            sanityCheckPackInfo(header, stats);
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kUnpackInfo) {
-            sanityCheckUnpackInfo(header, stats);
-            nid = getUnsignedByte(header);
-        }
+    /**
+     * Provides statistics for bytes read from the current entry.
+     *
+     * @return statistics for bytes read from the current entry
+     * @since 1.17
+     */
+    public InputStreamStatistics getStatisticsForCurrentEntry() {
+        return new InputStreamStatistics() {
+            @Override
+            public long getCompressedCount() {
+                return compressedBytesReadFromCurrentEntry;
+            }
+            @Override
+            public long getUncompressedCount() {
+                return uncompressedBytesReadFromCurrentEntry;
+            }
+        };
+    }
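
The statistics object above can be polled while an entry is being consumed, for example to
report how much compressed input was needed per entry. A sketch under the same placeholder
archive name as before:

import java.io.File;
import java.io.IOException;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;
import org.apache.commons.compress.utils.InputStreamStatistics;

public class StatisticsSketch {
    public static void main(final String[] args) throws IOException {
        try (SevenZFile sevenZFile = new SevenZFile(new File("example.7z"))) {
            final byte[] buffer = new byte[8192];
            while (sevenZFile.getNextEntry() != null) {
                while (sevenZFile.read(buffer) != -1) {
                    // drain the current entry
                }
                final InputStreamStatistics stats = sevenZFile.getStatisticsForCurrentEntry();
                System.out.println(stats.getCompressedCount() + " compressed bytes -> "
                        + stats.getUncompressedCount() + " uncompressed bytes");
            }
        }
    }
}
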
 
-        if (nid == NID.kSubStreamsInfo) {
-            sanityCheckSubStreamsInfo(header, stats);
-            nid = getUnsignedByte(header);
-        }
+    /**
+     * Finds out whether any data of the current entry has been read.
+     * This is determined by comparing the number of bytes remaining
+     * to read with the size of the file.
+     *
+     * @return true if any data of the current entry has been read
+     * @since 1.21
+     */
+    private boolean hasCurrentEntryBeenRead() {
+        boolean hasCurrentEntryBeenRead = false;
+        if (!deferredBlockStreams.isEmpty()) {
+            final InputStream currentEntryInputStream = deferredBlockStreams.get(deferredBlockStreams.size() - 1);
+            // get the bytes remaining to read, and compare it with the size of
+            // the file to figure out if the file has been read
+            if (currentEntryInputStream instanceof CRC32VerifyingInputStream) {
+                hasCurrentEntryBeenRead = ((CRC32VerifyingInputStream) currentEntryInputStream).getBytesRemaining() != archive.files[currentEntryIndex].getSize();
+            }
 
-        if (nid != NID.kEnd) {
-            throw new IOException("Badly terminated StreamsInfo");
+            if (currentEntryInputStream instanceof BoundedInputStream) {
+                hasCurrentEntryBeenRead = ((BoundedInputStream) currentEntryInputStream).getBytesRemaining() != archive.files[currentEntryIndex].getSize();
+            }
         }
+        return hasCurrentEntryBeenRead;
     }
 
-    private void readStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException {
-        int nid = getUnsignedByte(header);
-
-        if (nid == NID.kPackInfo) {
-            readPackInfo(header, archive);
-            nid = getUnsignedByte(header);
+    private Archive initializeArchive(final StartHeader startHeader, final byte[] password, final boolean verifyCrc) throws IOException {
+        assertFitsIntoNonNegativeInt("nextHeaderSize", startHeader.nextHeaderSize);
+        final int nextHeaderSizeInt = (int) startHeader.nextHeaderSize;
+        channel.position(SIGNATURE_HEADER_SIZE + startHeader.nextHeaderOffset);
+        if (verifyCrc) {
+            final long position = channel.position();
+            final CheckedInputStream cis = new CheckedInputStream(Channels.newInputStream(channel), new CRC32());
+            if (cis.skip(nextHeaderSizeInt) != nextHeaderSizeInt) {
+                throw new IOException("Problem computing NextHeader CRC-32");
+            }
+            if (startHeader.nextHeaderCrc != cis.getChecksum().getValue()) {
+                throw new IOException("NextHeader CRC-32 mismatch");
+            }
+            channel.position(position);
         }
-
-        if (nid == NID.kUnpackInfo) {
-            readUnpackInfo(header, archive);
-            nid = getUnsignedByte(header);
-        } else {
-            // archive without unpack/coders info
-            archive.folders = Folder.EMPTY_FOLDER_ARRAY;
+        Archive archive = new Archive();
+        ByteBuffer buf = ByteBuffer.allocate(nextHeaderSizeInt).order(ByteOrder.LITTLE_ENDIAN);
+        readFully(buf);
+        int nid = getUnsignedByte(buf);
+        if (nid == NID.kEncodedHeader) {
+            buf = readEncodedHeader(buf, archive, password);
+            // Archive gets rebuilt with the new header
+            archive = new Archive();
+            nid = getUnsignedByte(buf);
         }
-
-        if (nid == NID.kSubStreamsInfo) {
-            readSubStreamsInfo(header, archive);
-            nid = getUnsignedByte(header);
+        if (nid != NID.kHeader) {
+            throw new IOException("Broken or unsupported archive: no Header");
         }
+        readHeader(buf, archive);
+        archive.subStreamsInfo = null;
+        return archive;
     }
 
-    private void sanityCheckPackInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException {
-        final long packPos = readUint64(header);
-        if (packPos < 0 || SIGNATURE_HEADER_SIZE + packPos > channel.size()
-            || SIGNATURE_HEADER_SIZE + packPos < 0) {
-            throw new IOException("packPos (" + packPos + ") is out of range");
-        }
-        final long numPackStreams = readUint64(header);
-        stats.numberOfPackedStreams = assertFitsIntoNonNegativeInt("numPackStreams", numPackStreams);
-        int nid = getUnsignedByte(header);
-        if (nid == NID.kSize) {
-            long totalPackSizes = 0;
-            for (int i = 0; i < stats.numberOfPackedStreams; i++) {
-                final long packSize = readUint64(header);
-                totalPackSizes += packSize;
-                final long endOfPackStreams = SIGNATURE_HEADER_SIZE + packPos + totalPackSizes;
-                if (packSize < 0
-                    || endOfPackStreams > channel.size()
-                    || endOfPackStreams < packPos) {
-                    throw new IOException("packSize (" + packSize + ") is out of range");
-                }
-            }
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid == NID.kCRC) {
-            final int crcsDefined = readAllOrBits(header, stats.numberOfPackedStreams)
-                .cardinality();
-            if (skipBytesFully(header, 4 * crcsDefined) < 4 * crcsDefined) {
-                throw new IOException("invalid number of CRCs in PackInfo");
-            }
-            nid = getUnsignedByte(header);
+    /**
+     * Reads a byte of data.
+     *
+     * @return the byte read, or -1 if end of input is reached
+     * @throws IOException
+     *             if an I/O error has occurred
+     */
+    public int read() throws IOException {
+        final int b = getCurrentStream().read();
+        if (b >= 0) {
+            uncompressedBytesReadFromCurrentEntry++;
         }
+        return b;
+    }
 
-        if (nid != NID.kEnd) {
-            throw new IOException("Badly terminated PackInfo (" + nid + ")");
-        }
+    /**
+     * Reads data into an array of bytes.
+     *
+     * @param b the array to write data to
+     * @return the number of bytes read, or -1 if end of input is reached
+     * @throws IOException
+     *             if an I/O error has occurred
+     */
+    public int read(final byte[] b) throws IOException {
+        return read(b, 0, b.length);
     }
 
-    private void readPackInfo(final ByteBuffer header, final Archive archive) throws IOException {
-        archive.packPos = readUint64(header);
-        final int numPackStreamsInt = (int) readUint64(header);
-        int nid = getUnsignedByte(header);
-        if (nid == NID.kSize) {
-            archive.packSizes = new long[numPackStreamsInt];
-            for (int i = 0; i < archive.packSizes.length; i++) {
-                archive.packSizes[i] = readUint64(header);
-            }
-            nid = getUnsignedByte(header);
+    /**
+     * Reads data into an array of bytes.
+     *
+     * @param b the array to write data to
+     * @param off offset into the buffer to start filling at
+     * @param len the maximum number of bytes to read
+     * @return the number of bytes read, or -1 if end of input is reached
+     * @throws IOException
+     *             if an I/O error has occurred
+     */
+    public int read(final byte[] b, final int off, final int len) throws IOException {
+        if (len == 0) {
+            return 0;
         }
-
-        if (nid == NID.kCRC) {
-            archive.packCrcsDefined = readAllOrBits(header, numPackStreamsInt);
-            archive.packCrcs = new long[numPackStreamsInt];
-            for (int i = 0; i < numPackStreamsInt; i++) {
-                if (archive.packCrcsDefined.get(i)) {
-                    archive.packCrcs[i] = 0xffffFFFFL & getInt(header);
-                }
-            }
-
-            nid = getUnsignedByte(header);
+        final int cnt = getCurrentStream().read(b, off, len);
+        if (cnt > 0) {
+            uncompressedBytesReadFromCurrentEntry += cnt;
         }
+        return cnt;
     }
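
The read() overloads above always operate on the current entry. A sketch of extracting all
file entries to disk with read(byte[], int, int); the target directory and archive name are
placeholders, and real code should validate entry names before resolving them:

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.apache.commons.compress.archivers.sevenz.SevenZArchiveEntry;
import org.apache.commons.compress.archivers.sevenz.SevenZFile;

public class ExtractSketch {
    public static void main(final String[] args) throws IOException {
        final Path targetDir = Paths.get("target", "extracted");
        try (SevenZFile sevenZFile = new SevenZFile(new File("example.7z"))) {
            SevenZArchiveEntry entry;
            while ((entry = sevenZFile.getNextEntry()) != null) {
                if (entry.isDirectory()) {
                    continue;
                }
                // NOTE: validate entry.getName() against path traversal in real code
                final Path target = targetDir.resolve(entry.getName());
                Files.createDirectories(target.getParent());
                try (OutputStream out = Files.newOutputStream(target)) {
                    final byte[] buffer = new byte[8192];
                    int n;
                    while ((n = sevenZFile.read(buffer, 0, buffer.length)) != -1) {
                        out.write(buffer, 0, n);
                    }
                }
            }
        }
    }
}
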
 
-    private void sanityCheckUnpackInfo(final ByteBuffer header, final ArchiveStatistics stats)
-        throws IOException {
-        int nid = getUnsignedByte(header);
-        if (nid != NID.kFolder) {
-            throw new IOException("Expected kFolder, got " + nid);
-        }
-        final long numFolders = readUint64(header);
-        stats.numberOfFolders = assertFitsIntoNonNegativeInt("numFolders", numFolders);
-        final int external = getUnsignedByte(header);
-        if (external != 0) {
-            throw new IOException("External unsupported");
+    private BitSet readAllOrBits(final ByteBuffer header, final int size) throws IOException {
+        final int areAllDefined = getUnsignedByte(header);
+        final BitSet bits;
+        if (areAllDefined != 0) {
+            bits = new BitSet(size);
+            for (int i = 0; i < size; i++) {
+                bits.set(i, true);
+            }
+        } else {
+            bits = readBits(header, size);
         }
+        return bits;
+    }
 
-        final List<Integer> numberOfOutputStreamsPerFolder = new LinkedList<>();
-        for (int i = 0; i < stats.numberOfFolders; i++) {
-            numberOfOutputStreamsPerFolder.add(sanityCheckFolder(header, stats));
+    private void readArchiveProperties(final ByteBuffer input) throws IOException {
+        // FIXME: the reference implementation just throws them away?
+        int nid =  getUnsignedByte(input);
+        while (nid != NID.kEnd) {
+            final long propertySize = readUint64(input);
+            final byte[] property = new byte[(int)propertySize];
+            get(input, property);
+            nid = getUnsignedByte(input);
         }
+    }
 
-        final long totalNumberOfBindPairs = stats.numberOfOutStreams - stats.numberOfFolders;
-        final long packedStreamsRequiredByFolders = stats.numberOfInStreams - totalNumberOfBindPairs;
-        if (packedStreamsRequiredByFolders < stats.numberOfPackedStreams) {
-            throw new IOException("archive doesn't contain enough packed streams");
+    private BitSet readBits(final ByteBuffer header, final int size) throws IOException {
+        final BitSet bits = new BitSet(size);
+        int mask = 0;
+        int cache = 0;
+        for (int i = 0; i < size; i++) {
+            if (mask == 0) {
+                mask = 0x80;
+                cache = getUnsignedByte(header);
+            }
+            bits.set(i, (cache & mask) != 0);
+            mask >>>= 1;
         }
+        return bits;
+    }
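
readBits() above decodes the 7z bit-vector encoding: bits are consumed most-significant
first, one byte at a time. A tiny standalone sketch of the same decoding over a plain byte
array (illustration only):

import java.util.BitSet;

public class BitVectorSketch {
    // same MSB-first decoding as readBits() above, but reading from a byte array
    static BitSet readBits(final byte[] data, final int size) {
        final BitSet bits = new BitSet(size);
        int mask = 0;
        int cache = 0;
        int pos = 0;
        for (int i = 0; i < size; i++) {
            if (mask == 0) {
                mask = 0x80;
                cache = data[pos++] & 0xff;
            }
            bits.set(i, (cache & mask) != 0);
            mask >>>= 1;
        }
        return bits;
    }

    public static void main(final String[] args) {
        // 0xA0 = 1010 0000 -> bits 0 and 2 are set when reading four bits
        System.out.println(readBits(new byte[] { (byte) 0xA0 }, 4)); // prints {0, 2}
    }
}
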
 
-        nid = getUnsignedByte(header);
-        if (nid != NID.kCodersUnpackSize) {
-            throw new IOException("Expected kCodersUnpackSize, got " + nid);
-        }
+    private ByteBuffer readEncodedHeader(final ByteBuffer header, final Archive archive,
+                                         final byte[] password) throws IOException {
+        final int pos = header.position();
+        final ArchiveStatistics stats = new ArchiveStatistics();
+        sanityCheckStreamsInfo(header, stats);
+        stats.assertValidity(options.getMaxMemoryLimitInKb());
+        header.position(pos);
 
-        for (final int numberOfOutputStreams : numberOfOutputStreamsPerFolder) {
-            for (int i = 0; i < numberOfOutputStreams; i++) {
-                final long unpackSize = readUint64(header);
-                if (unpackSize < 0) {
-                    throw new IllegalArgumentException("negative unpackSize");
-                }
-            }
-        }
+        readStreamsInfo(header, archive);
 
-        nid = getUnsignedByte(header);
-        if (nid == NID.kCRC) {
-            stats.folderHasCrc = readAllOrBits(header, stats.numberOfFolders);
-            final int crcsDefined = stats.folderHasCrc.cardinality();
-            if (skipBytesFully(header, 4 * crcsDefined) < 4 * crcsDefined) {
-                throw new IOException("invalid number of CRCs in UnpackInfo");
-            }
-            nid = getUnsignedByte(header);
+        if (archive.folders == null || archive.folders.length == 0) {
+            throw new IOException("no folders, can't read encoded header");
         }
-
-        if (nid != NID.kEnd) {
-            throw new IOException("Badly terminated UnpackInfo");
+        if (archive.packSizes == null || archive.packSizes.length == 0) {
+            throw new IOException("no packed streams, can't read encoded header");
         }
-    }
 
-    private void readUnpackInfo(final ByteBuffer header, final Archive archive) throws IOException {
-        int nid = getUnsignedByte(header);
-        final int numFoldersInt = (int) readUint64(header);
-        final Folder[] folders = new Folder[numFoldersInt];
-        archive.folders = folders;
-        /* final int external = */ getUnsignedByte(header);
-        for (int i = 0; i < numFoldersInt; i++) {
-            folders[i] = readFolder(header);
-        }
+        // FIXME: merge with buildDecodingStream()/buildDecoderStack() at some stage?
+        final Folder folder = archive.folders[0];
+        final int firstPackStreamIndex = 0;
+        final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
+                0;
 
-        nid = getUnsignedByte(header);
-        for (final Folder folder : folders) {
-            assertFitsIntoNonNegativeInt("totalOutputStreams", folder.totalOutputStreams);
-            folder.unpackSizes = new long[(int)folder.totalOutputStreams];
-            for (int i = 0; i < folder.totalOutputStreams; i++) {
-                folder.unpackSizes[i] = readUint64(header);
+        channel.position(folderOffset);
+        InputStream inputStreamStack = new BoundedSeekableByteChannelInputStream(channel,
+                archive.packSizes[firstPackStreamIndex]);
+        for (final Coder coder : folder.getOrderedCoders()) {
+            if (coder.numInStreams != 1 || coder.numOutStreams != 1) {
+                throw new IOException("Multi input/output stream coders are not yet supported");
             }
+            inputStreamStack = Coders.addDecoder(fileName, inputStreamStack, //NOSONAR
+                    folder.getUnpackSizeForCoder(coder), coder, password, options.getMaxMemoryLimitInKb());
         }
-
-        nid = getUnsignedByte(header);
-        if (nid == NID.kCRC) {
-            final BitSet crcsDefined = readAllOrBits(header, numFoldersInt);
-            for (int i = 0; i < numFoldersInt; i++) {
-                if (crcsDefined.get(i)) {
-                    folders[i].hasCrc = true;
-                    folders[i].crc = 0xffffFFFFL & getInt(header);
-                } else {
-                    folders[i].hasCrc = false;
-                }
-            }
-
-            nid = getUnsignedByte(header);
+        if (folder.hasCrc) {
+            inputStreamStack = new CRC32VerifyingInputStream(inputStreamStack,
+                    folder.getUnpackSize(), folder.crc);
+        }
+        final int unpackSize = assertFitsIntoNonNegativeInt("unpackSize", folder.getUnpackSize());
+        final byte[] nextHeader = IOUtils.readRange(inputStreamStack, unpackSize);
+        if (nextHeader.length < unpackSize) {
+            throw new IOException("premature end of stream");
         }
+        inputStreamStack.close();
+        return ByteBuffer.wrap(nextHeader).order(ByteOrder.LITTLE_ENDIAN);
     }
 
-    private void sanityCheckSubStreamsInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException {
-
-        int nid = getUnsignedByte(header);
-        final List<Integer> numUnpackSubStreamsPerFolder = new LinkedList<>();
-        if (nid == NID.kNumUnpackStream) {
-            for (int i = 0; i < stats.numberOfFolders; i++) {
-                numUnpackSubStreamsPerFolder.add(assertFitsIntoNonNegativeInt("numStreams", readUint64(header)));
+    private void readFilesInfo(final ByteBuffer header, final Archive archive) throws IOException {
+        final int numFilesInt = (int) readUint64(header);
+        final Map<Integer, SevenZArchiveEntry> fileMap = new LinkedHashMap<>();
+        BitSet isEmptyStream = null;
+        BitSet isEmptyFile = null;
+        BitSet isAnti = null;
+        while (true) {
+            final int propertyType = getUnsignedByte(header);
+            if (propertyType == 0) {
+                break;
             }
-            stats.numberOfUnpackSubStreams = numUnpackSubStreamsPerFolder.stream().mapToLong(Integer::longValue).sum();
-            nid = getUnsignedByte(header);
-        } else {
-            stats.numberOfUnpackSubStreams = stats.numberOfFolders;
-        }
-
-        assertFitsIntoNonNegativeInt("totalUnpackStreams", stats.numberOfUnpackSubStreams);
-
-        if (nid == NID.kSize) {
-            for (final int numUnpackSubStreams : numUnpackSubStreamsPerFolder) {
-                if (numUnpackSubStreams == 0) {
-                    continue;
+            final long size = readUint64(header);
+            switch (propertyType) {
+                case NID.kEmptyStream: {
+                    isEmptyStream = readBits(header, numFilesInt);
+                    break;
                 }
-                for (int i = 0; i < numUnpackSubStreams - 1; i++) {
-                    final long size = readUint64(header);
-                    if (size < 0) {
-                        throw new IOException("negative unpackSize");
+                case NID.kEmptyFile: {
+                    isEmptyFile = readBits(header, isEmptyStream.cardinality());
+                    break;
+                }
+                case NID.kAnti: {
+                    isAnti = readBits(header, isEmptyStream.cardinality());
+                    break;
+                }
+                case NID.kName: {
+                    /* final int external = */ getUnsignedByte(header);
+                    final byte[] names = new byte[(int) (size - 1)];
+                    final int namesLength = names.length;
+                    get(header, names);
+                    int nextFile = 0;
+                    int nextName = 0;
+                    for (int i = 0; i < namesLength; i += 2) {
+                        if (names[i] == 0 && names[i + 1] == 0) {
+                            checkEntryIsInitialized(fileMap, nextFile);
+                            fileMap.get(nextFile).setName(new String(names, nextName, i - nextName, UTF_16LE));
+                            nextName = i + 2;
+                            nextFile++;
+                        }
+                    }
+                    if (nextName != namesLength || nextFile != numFilesInt) {
+                        throw new IOException("Error parsing file names");
                     }
+                    break;
                 }
-            }
-            nid = getUnsignedByte(header);
-        }
-
-        int numDigests = 0;
-        if (numUnpackSubStreamsPerFolder.isEmpty()) {
-            numDigests = stats.folderHasCrc == null ? stats.numberOfFolders
-                : stats.numberOfFolders - stats.folderHasCrc.cardinality();
-        } else {
-            int folderIdx = 0;
-            for (final int numUnpackSubStreams : numUnpackSubStreamsPerFolder) {
-                if (numUnpackSubStreams != 1 || stats.folderHasCrc == null
-                    || !stats.folderHasCrc.get(folderIdx++)) {
-                    numDigests += numUnpackSubStreams;
+                case NID.kCTime: {
+                    final BitSet timesDefined = readAllOrBits(header, numFilesInt);
+                    /* final int external = */ getUnsignedByte(header);
+                    for (int i = 0; i < numFilesInt; i++) {
+                        checkEntryIsInitialized(fileMap, i);
+                        final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
+                        entryAtIndex.setHasCreationDate(timesDefined.get(i));
+                        if (entryAtIndex.getHasCreationDate()) {
+                            entryAtIndex.setCreationDate(getLong(header));
+                        }
+                    }
+                    break;
                 }
-            }
-        }
-
-        if (nid == NID.kCRC) {
-            assertFitsIntoNonNegativeInt("numDigests", numDigests);
-            final int missingCrcs = readAllOrBits(header, numDigests)
-                .cardinality();
-            if (skipBytesFully(header, 4 * missingCrcs) < 4 * missingCrcs) {
-                throw new IOException("invalid number of missing CRCs in SubStreamInfo");
-            }
-            nid = getUnsignedByte(header);
-        }
-
-        if (nid != NID.kEnd) {
-            throw new IOException("Badly terminated SubStreamsInfo");
-        }
-    }
-
-    private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException {
-        for (final Folder folder : archive.folders) {
-            folder.numUnpackSubStreams = 1;
-        }
-        long unpackStreamsCount = archive.folders.length;
-
-        int nid = getUnsignedByte(header);
-        if (nid == NID.kNumUnpackStream) {
-            unpackStreamsCount = 0;
-            for (final Folder folder : archive.folders) {
-                final long numStreams = readUint64(header);
-                folder.numUnpackSubStreams = (int)numStreams;
-                unpackStreamsCount += numStreams;
-            }
-            nid = getUnsignedByte(header);
-        }
+                case NID.kATime: {
+                    final BitSet timesDefined = readAllOrBits(header, numFilesInt);
+                    /* final int external = */ getUnsignedByte(header);
+                    for (int i = 0; i < numFilesInt; i++) {
+                        checkEntryIsInitialized(fileMap, i);
+                        final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
+                        entryAtIndex.setHasAccessDate(timesDefined.get(i));
+                        if (entryAtIndex.getHasAccessDate()) {
+                            entryAtIndex.setAccessDate(getLong(header));
+                        }
+                    }
+                    break;
+                }
+                case NID.kMTime: {
+                    final BitSet timesDefined = readAllOrBits(header, numFilesInt);
+                    /* final int external = */ getUnsignedByte(header);
+                    for (int i = 0; i < numFilesInt; i++) {
+                        checkEntryIsInitialized(fileMap, i);
+                        final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
+                        entryAtIndex.setHasLastModifiedDate(timesDefined.get(i));
+                        if (entryAtIndex.getHasLastModifiedDate()) {
+                            entryAtIndex.setLastModifiedDate(getLong(header));
+                        }
+                    }
+                    break;
+                }
+                case NID.kWinAttributes: {
+                    final BitSet attributesDefined = readAllOrBits(header, numFilesInt);
+                    /* final int external = */ getUnsignedByte(header);
+                    for (int i = 0; i < numFilesInt; i++) {
+                        checkEntryIsInitialized(fileMap, i);
+                        final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
+                        entryAtIndex.setHasWindowsAttributes(attributesDefined.get(i));
+                        if (entryAtIndex.getHasWindowsAttributes()) {
+                            entryAtIndex.setWindowsAttributes(getInt(header));
+                        }
+                    }
+                    break;
+                }
+                case NID.kDummy: {
+                    // 7z 9.20 asserts the content is all zeros and ignores the property
+                    // Compress up to 1.8.1 would throw an exception, now we ignore it (see COMPRESS-287)
 
-        final int totalUnpackStreams = (int) unpackStreamsCount;
-        final SubStreamsInfo subStreamsInfo = new SubStreamsInfo();
-        subStreamsInfo.unpackSizes = new long[totalUnpackStreams];
-        subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams);
-        subStreamsInfo.crcs = new long[totalUnpackStreams];
+                    skipBytesFully(header, size);
+                    break;
+                }
 
-        int nextUnpackStream = 0;
-        for (final Folder folder : archive.folders) {
-            if (folder.numUnpackSubStreams == 0) {
-                continue;
-            }
-            long sum = 0;
-            if (nid == NID.kSize) {
-                for (int i = 0; i < folder.numUnpackSubStreams - 1; i++) {
-                    final long size = readUint64(header);
-                    subStreamsInfo.unpackSizes[nextUnpackStream++] = size;
-                    sum += size;
+                default: {
+                    // Compress up to 1.8.1 would throw an exception, now we ignore it (see COMPRESS-287)
+                    skipBytesFully(header, size);
+                    break;
                 }
             }
-            if (sum > folder.getUnpackSize()) {
-                throw new IOException("sum of unpack sizes of folder exceeds total unpack size");
-            }
-            subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum;
-        }
-        if (nid == NID.kSize) {
-            nid = getUnsignedByte(header);
         }
-
-        int numDigests = 0;
-        for (final Folder folder : archive.folders) {
-            if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) {
-                numDigests += folder.numUnpackSubStreams;
+        int nonEmptyFileCounter = 0;
+        int emptyFileCounter = 0;
+        for (int i = 0; i < numFilesInt; i++) {
+            final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
+            if (entryAtIndex == null) {
+                continue;
             }
-        }
-
-        if (nid == NID.kCRC) {
-            final BitSet hasMissingCrc = readAllOrBits(header, numDigests);
-            final long[] missingCrcs = new long[numDigests];
-            for (int i = 0; i < numDigests; i++) {
-                if (hasMissingCrc.get(i)) {
-                    missingCrcs[i] = 0xffffFFFFL & getInt(header);
+            entryAtIndex.setHasStream(isEmptyStream == null || !isEmptyStream.get(i));
+            if (entryAtIndex.hasStream()) {
+                if (archive.subStreamsInfo == null) {
+                    throw new IOException("Archive contains file with streams but no subStreamsInfo");
                 }
-            }
-            int nextCrc = 0;
-            int nextMissingCrc = 0;
-            for (final Folder folder: archive.folders) {
-                if (folder.numUnpackSubStreams == 1 && folder.hasCrc) {
-                    subStreamsInfo.hasCrc.set(nextCrc, true);
-                    subStreamsInfo.crcs[nextCrc] = folder.crc;
-                    ++nextCrc;
-                } else {
-                    for (int i = 0; i < folder.numUnpackSubStreams; i++) {
-                        subStreamsInfo.hasCrc.set(nextCrc, hasMissingCrc.get(nextMissingCrc));
-                        subStreamsInfo.crcs[nextCrc] = missingCrcs[nextMissingCrc];
-                        ++nextCrc;
-                        ++nextMissingCrc;
-                    }
+                entryAtIndex.setDirectory(false);
+                entryAtIndex.setAntiItem(false);
+                entryAtIndex.setHasCrc(archive.subStreamsInfo.hasCrc.get(nonEmptyFileCounter));
+                entryAtIndex.setCrcValue(archive.subStreamsInfo.crcs[nonEmptyFileCounter]);
+                entryAtIndex.setSize(archive.subStreamsInfo.unpackSizes[nonEmptyFileCounter]);
+                if (entryAtIndex.getSize() < 0) {
+                    throw new IOException("broken archive, entry with negative size");
                 }
+                ++nonEmptyFileCounter;
+            } else {
+                entryAtIndex.setDirectory(isEmptyFile == null || !isEmptyFile.get(emptyFileCounter));
+                entryAtIndex.setAntiItem(isAnti != null && isAnti.get(emptyFileCounter));
+                entryAtIndex.setHasCrc(false);
+                entryAtIndex.setSize(0);
+                ++emptyFileCounter;
             }
-
-            nid = getUnsignedByte(header);
         }
-
-        archive.subStreamsInfo = subStreamsInfo;
+        archive.files = fileMap.values().stream().filter(Objects::nonNull).toArray(SevenZArchiveEntry[]::new);
+        calculateStreamMap(archive);
     }
 
-    private int sanityCheckFolder(final ByteBuffer header, final ArchiveStatistics stats)
-        throws IOException {
-
-        final int numCoders = assertFitsIntoNonNegativeInt("numCoders", readUint64(header));
-        if (numCoders == 0) {
-            throw new IOException("Folder without coders");
-        }
-        stats.numberOfCoders += numCoders;
+    private Folder readFolder(final ByteBuffer header) throws IOException {
+        final Folder folder = new Folder();
 
-        long totalOutStreams = 0;
+        final long numCoders = readUint64(header);
+        final Coder[] coders = new Coder[(int)numCoders];
         long totalInStreams = 0;
-        for (int i = 0; i < numCoders; i++) {
+        long totalOutStreams = 0;
+        for (int i = 0; i < coders.length; i++) {
+            coders[i] = new Coder();
             final int bits = getUnsignedByte(header);
             final int idSize = bits & 0xf;
-            get(header, new byte[idSize]);
-
             final boolean isSimple = (bits & 0x10) == 0;
             final boolean hasAttributes = (bits & 0x20) != 0;
             final boolean moreAlternativeMethods = (bits & 0x80) != 0;
-            if (moreAlternativeMethods) {
-                throw new IOException("Alternative methods are unsupported, please report. " + // NOSONAR
-                    "The reference implementation doesn't support them either.");
-            }
 
+            coders[i].decompressionMethodId = new byte[idSize];
+            get(header, coders[i].decompressionMethodId);
             if (isSimple) {
-                totalInStreams++;
-                totalOutStreams++;
+                coders[i].numInStreams = 1;
+                coders[i].numOutStreams = 1;
             } else {
-                totalInStreams +=
-                    assertFitsIntoNonNegativeInt("numInStreams", readUint64(header));
-                totalOutStreams +=
-                    assertFitsIntoNonNegativeInt("numOutStreams", readUint64(header));
+                coders[i].numInStreams = readUint64(header);
+                coders[i].numOutStreams = readUint64(header);
             }
-
-            if (hasAttributes) {
-                final int propertiesSize =
-                    assertFitsIntoNonNegativeInt("propertiesSize", readUint64(header));
-                if (skipBytesFully(header, propertiesSize) < propertiesSize) {
-                    throw new IOException("invalid propertiesSize in folder");
-                }
-            }
-        }
-        assertFitsIntoNonNegativeInt("totalInStreams", totalInStreams);
-        assertFitsIntoNonNegativeInt("totalOutStreams", totalOutStreams);
-        stats.numberOfOutStreams += totalOutStreams;
-        stats.numberOfInStreams += totalInStreams;
-
-        if (totalOutStreams == 0) {
-            throw new IOException("Total output streams can't be 0");
-        }
-
-        final int numBindPairs =
-            assertFitsIntoNonNegativeInt("numBindPairs", totalOutStreams - 1);
-        if (totalInStreams < numBindPairs) {
-            throw new IOException("Total input streams can't be less than the number of bind pairs");
-        }
-        final BitSet inStreamsBound = new BitSet((int) totalInStreams);
-        for (int i = 0; i < numBindPairs; i++) {
-            final int inIndex = assertFitsIntoNonNegativeInt("inIndex", readUint64(header));
-            if (totalInStreams <= inIndex) {
-                throw new IOException("inIndex is bigger than number of inStreams");
-            }
-            inStreamsBound.set(inIndex);
-            final int outIndex = assertFitsIntoNonNegativeInt("outIndex", readUint64(header));
-            if (totalOutStreams <= outIndex) {
-                throw new IOException("outIndex is bigger than number of outStreams");
-            }
-        }
-
-        final int numPackedStreams =
-            assertFitsIntoNonNegativeInt("numPackedStreams", totalInStreams - numBindPairs);
-
-        if (numPackedStreams == 1) {
-            if (inStreamsBound.nextClearBit(0) == -1) {
-                throw new IOException("Couldn't find stream's bind pair index");
-            }
-        } else {
-            for (int i = 0; i < numPackedStreams; i++) {
-                final int packedStreamIndex =
-                    assertFitsIntoNonNegativeInt("packedStreamIndex", readUint64(header));
-                if (packedStreamIndex >= totalInStreams) {
-                    throw new IOException("packedStreamIndex is bigger than number of totalInStreams");
-                }
-            }
-        }
-
-        return (int) totalOutStreams;
-    }
-
-    private Folder readFolder(final ByteBuffer header) throws IOException {
-        final Folder folder = new Folder();
-
-        final long numCoders = readUint64(header);
-        final Coder[] coders = new Coder[(int)numCoders];
-        long totalInStreams = 0;
-        long totalOutStreams = 0;
-        for (int i = 0; i < coders.length; i++) {
-            coders[i] = new Coder();
-            final int bits = getUnsignedByte(header);
-            final int idSize = bits & 0xf;
-            final boolean isSimple = (bits & 0x10) == 0;
-            final boolean hasAttributes = (bits & 0x20) != 0;
-            final boolean moreAlternativeMethods = (bits & 0x80) != 0;
-
-            coders[i].decompressionMethodId = new byte[idSize];
-            get(header, coders[i].decompressionMethodId);
-            if (isSimple) {
-                coders[i].numInStreams = 1;
-                coders[i].numOutStreams = 1;
-            } else {
-                coders[i].numInStreams = readUint64(header);
-                coders[i].numOutStreams = readUint64(header);
-            }
-            totalInStreams += coders[i].numInStreams;
-            totalOutStreams += coders[i].numOutStreams;
+            totalInStreams += coders[i].numInStreams;
+            totalOutStreams += coders[i].numOutStreams;
             if (hasAttributes) {
                 final long propertiesSize = readUint64(header);
                 coders[i].properties = new byte[(int)propertiesSize];
@@ -1215,78 +1294,389 @@ public class SevenZFile implements Closeable {
         return folder;
     }
 
-    private BitSet readAllOrBits(final ByteBuffer header, final int size) throws IOException {
-        final int areAllDefined = getUnsignedByte(header);
-        final BitSet bits;
-        if (areAllDefined != 0) {
-            bits = new BitSet(size);
-            for (int i = 0; i < size; i++) {
-                bits.set(i, true);
+    private void readFully(final ByteBuffer buf) throws IOException {
+        buf.rewind();
+        IOUtils.readFully(channel, buf);
+        buf.flip();
+    }
+
+    private void readHeader(final ByteBuffer header, final Archive archive) throws IOException {
+        final int pos = header.position();
+        final ArchiveStatistics stats = sanityCheckAndCollectStatistics(header);
+        stats.assertValidity(options.getMaxMemoryLimitInKb());
+        header.position(pos);
+
+        int nid = getUnsignedByte(header);
+
+        if (nid == NID.kArchiveProperties) {
+            readArchiveProperties(header);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kAdditionalStreamsInfo) {
+            throw new IOException("Additional streams unsupported");
+            //nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kMainStreamsInfo) {
+            readStreamsInfo(header, archive);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kFilesInfo) {
+            readFilesInfo(header, archive);
+            nid = getUnsignedByte(header);
+        }
+    }
+
+    private Archive readHeaders(final byte[] password) throws IOException {
+        final ByteBuffer buf = ByteBuffer.allocate(12 /* signature + 2 bytes version + 4 bytes CRC */)
+            .order(ByteOrder.LITTLE_ENDIAN);
+        readFully(buf);
+        final byte[] signature = new byte[6];
+        buf.get(signature);
+        if (!Arrays.equals(signature, sevenZSignature)) {
+            throw new IOException("Bad 7z signature");
+        }
+        // 7zFormat.txt has it wrong - it's first major then minor
+        final byte archiveVersionMajor = buf.get();
+        final byte archiveVersionMinor = buf.get();
+        if (archiveVersionMajor != 0) {
+            throw new IOException(String.format("Unsupported 7z version (%d,%d)",
+                    archiveVersionMajor, archiveVersionMinor));
+        }
+
+        boolean headerLooksValid = false;  // See https://www.7-zip.org/recover.html - "There is no correct End Header at the end of archive"
+        final long startHeaderCrc = 0xffffFFFFL & buf.getInt();
+        if (startHeaderCrc == 0) {
+            // This is an indication of a corrupt header - peek the next 20 bytes
+            final long currentPosition = channel.position();
+            final ByteBuffer peekBuf = ByteBuffer.allocate(20);
+            readFully(peekBuf);
+            channel.position(currentPosition);
+            // Header invalid if all data is 0
+            while (peekBuf.hasRemaining()) {
+                if (peekBuf.get()!=0) {
+                    headerLooksValid = true;
+                    break;
+                }
             }
         } else {
-            bits = readBits(header, size);
+            headerLooksValid = true;
         }
-        return bits;
+
+        if (headerLooksValid) {
+            return initializeArchive(readStartHeader(startHeaderCrc), password, true);
+        }
+        // No valid header found - probably first file of multipart archive was removed too early. Scan for end header.
+        if (options.getTryToRecoverBrokenArchives()) {
+            return tryToLocateEndHeader(password);
+        }
+        throw new IOException("archive seems to be invalid.\nYou may want to retry and enable the"
+            + " tryToRecoverBrokenArchives if the archive could be a multi volume archive that has been closed"
+            + " prematurely.");
     }
 
-    private BitSet readBits(final ByteBuffer header, final int size) throws IOException {
-        final BitSet bits = new BitSet(size);
-        int mask = 0;
-        int cache = 0;
-        for (int i = 0; i < size; i++) {
-            if (mask == 0) {
-                mask = 0x80;
-                cache = getUnsignedByte(header);
+    private void readPackInfo(final ByteBuffer header, final Archive archive) throws IOException {
+        archive.packPos = readUint64(header);
+        final int numPackStreamsInt = (int) readUint64(header);
+        int nid = getUnsignedByte(header);
+        if (nid == NID.kSize) {
+            archive.packSizes = new long[numPackStreamsInt];
+            for (int i = 0; i < archive.packSizes.length; i++) {
+                archive.packSizes[i] = readUint64(header);
             }
-            bits.set(i, (cache & mask) != 0);
-            mask >>>= 1;
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kCRC) {
+            archive.packCrcsDefined = readAllOrBits(header, numPackStreamsInt);
+            archive.packCrcs = new long[numPackStreamsInt];
+            for (int i = 0; i < numPackStreamsInt; i++) {
+                if (archive.packCrcsDefined.get(i)) {
+                    archive.packCrcs[i] = 0xffffFFFFL & getInt(header);
+                }
+            }
+
+            nid = getUnsignedByte(header);
         }
-        return bits;
     }
 
-    private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException {
-        stats.numberOfEntries = assertFitsIntoNonNegativeInt("numFiles", readUint64(header));
+    private StartHeader readStartHeader(final long startHeaderCrc) throws IOException {
+        final StartHeader startHeader = new StartHeader();
+        // using Stream rather than ByteBuffer for the benefit of the
+        // built-in CRC check
+        try (DataInputStream dataInputStream = new DataInputStream(new CRC32VerifyingInputStream(
+                new BoundedSeekableByteChannelInputStream(channel, 20), 20, startHeaderCrc))) {
+             startHeader.nextHeaderOffset = Long.reverseBytes(dataInputStream.readLong());
+             if (startHeader.nextHeaderOffset < 0
+                 || startHeader.nextHeaderOffset + SIGNATURE_HEADER_SIZE > channel.size()) {
+                 throw new IOException("nextHeaderOffset is out of bounds");
+             }
 
-        int emptyStreams = -1;
-        while (true) {
-            final int propertyType = getUnsignedByte(header);
-            if (propertyType == 0) {
-                break;
-            }
-            final long size = readUint64(header);
-            switch (propertyType) {
-                case NID.kEmptyStream: {
-                    emptyStreams = readBits(header, stats.numberOfEntries).cardinality();
-                    break;
-                }
-                case NID.kEmptyFile: {
-                    if (emptyStreams == -1) {
-                        throw new IOException("Header format error: kEmptyStream must appear before kEmptyFile");
-                    }
-                    readBits(header, emptyStreams);
-                    break;
-                }
-                case NID.kAnti: {
-                    if (emptyStreams == -1) {
-                        throw new IOException("Header format error: kEmptyStream must appear before kAnti");
-                    }
-                    readBits(header, emptyStreams);
-                    break;
-                }
-                case NID.kName: {
-                    final int external = getUnsignedByte(header);
-                    if (external != 0) {
-                        throw new IOException("Not implemented");
-                    }
-                    final int namesLength =
-                        assertFitsIntoNonNegativeInt("file names length", size - 1);
-                    if ((namesLength & 1) != 0) {
-                        throw new IOException("File names length invalid");
-                    }
+             startHeader.nextHeaderSize = Long.reverseBytes(dataInputStream.readLong());
+             final long nextHeaderEnd = startHeader.nextHeaderOffset + startHeader.nextHeaderSize;
+             if (nextHeaderEnd < startHeader.nextHeaderOffset
+                 || nextHeaderEnd + SIGNATURE_HEADER_SIZE > channel.size()) {
+                 throw new IOException("nextHeaderSize is out of bounds");
+             }
 
-                    int filesSeen = 0;
-                    for (int i = 0; i < namesLength; i += 2) {
-                        final char c = getChar(header);
+             startHeader.nextHeaderCrc = 0xffffFFFFL & Integer.reverseBytes(dataInputStream.readInt());
+
+             return startHeader;
+        }
+    }
+
+    private void readStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException {
+        int nid = getUnsignedByte(header);
+
+        if (nid == NID.kPackInfo) {
+            readPackInfo(header, archive);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kUnpackInfo) {
+            readUnpackInfo(header, archive);
+            nid = getUnsignedByte(header);
+        } else {
+            // archive without unpack/coders info
+            archive.folders = Folder.EMPTY_FOLDER_ARRAY;
+        }
+
+        if (nid == NID.kSubStreamsInfo) {
+            readSubStreamsInfo(header, archive);
+            nid = getUnsignedByte(header);
+        }
+    }
+
+    private void readSubStreamsInfo(final ByteBuffer header, final Archive archive) throws IOException {
+        for (final Folder folder : archive.folders) {
+            folder.numUnpackSubStreams = 1;
+        }
+        long unpackStreamsCount = archive.folders.length;
+
+        int nid = getUnsignedByte(header);
+        if (nid == NID.kNumUnpackStream) {
+            unpackStreamsCount = 0;
+            for (final Folder folder : archive.folders) {
+                final long numStreams = readUint64(header);
+                folder.numUnpackSubStreams = (int)numStreams;
+                unpackStreamsCount += numStreams;
+            }
+            nid = getUnsignedByte(header);
+        }
+
+        final int totalUnpackStreams = (int) unpackStreamsCount;
+        final SubStreamsInfo subStreamsInfo = new SubStreamsInfo();
+        subStreamsInfo.unpackSizes = new long[totalUnpackStreams];
+        subStreamsInfo.hasCrc = new BitSet(totalUnpackStreams);
+        subStreamsInfo.crcs = new long[totalUnpackStreams];
+
+        int nextUnpackStream = 0;
+        for (final Folder folder : archive.folders) {
+            if (folder.numUnpackSubStreams == 0) {
+                continue;
+            }
+            long sum = 0;
+            if (nid == NID.kSize) {
+                for (int i = 0; i < folder.numUnpackSubStreams - 1; i++) {
+                    final long size = readUint64(header);
+                    subStreamsInfo.unpackSizes[nextUnpackStream++] = size;
+                    sum += size;
+                }
+            }
+            if (sum > folder.getUnpackSize()) {
+                throw new IOException("sum of unpack sizes of folder exceeds total unpack size");
+            }
+            subStreamsInfo.unpackSizes[nextUnpackStream++] = folder.getUnpackSize() - sum;
+        }
+        if (nid == NID.kSize) {
+            nid = getUnsignedByte(header);
+        }
+
+        int numDigests = 0;
+        for (final Folder folder : archive.folders) {
+            if (folder.numUnpackSubStreams != 1 || !folder.hasCrc) {
+                numDigests += folder.numUnpackSubStreams;
+            }
+        }
+
+        if (nid == NID.kCRC) {
+            final BitSet hasMissingCrc = readAllOrBits(header, numDigests);
+            final long[] missingCrcs = new long[numDigests];
+            for (int i = 0; i < numDigests; i++) {
+                if (hasMissingCrc.get(i)) {
+                    missingCrcs[i] = 0xffffFFFFL & getInt(header);
+                }
+            }
+            int nextCrc = 0;
+            int nextMissingCrc = 0;
+            for (final Folder folder: archive.folders) {
+                if (folder.numUnpackSubStreams == 1 && folder.hasCrc) {
+                    subStreamsInfo.hasCrc.set(nextCrc, true);
+                    subStreamsInfo.crcs[nextCrc] = folder.crc;
+                    ++nextCrc;
+                } else {
+                    for (int i = 0; i < folder.numUnpackSubStreams; i++) {
+                        subStreamsInfo.hasCrc.set(nextCrc, hasMissingCrc.get(nextMissingCrc));
+                        subStreamsInfo.crcs[nextCrc] = missingCrcs[nextMissingCrc];
+                        ++nextCrc;
+                        ++nextMissingCrc;
+                    }
+                }
+            }
+
+            nid = getUnsignedByte(header);
+        }
+
+        archive.subStreamsInfo = subStreamsInfo;
+    }
+
+    private void readUnpackInfo(final ByteBuffer header, final Archive archive) throws IOException {
+        int nid = getUnsignedByte(header);
+        final int numFoldersInt = (int) readUint64(header);
+        final Folder[] folders = new Folder[numFoldersInt];
+        archive.folders = folders;
+        /* final int external = */ getUnsignedByte(header);
+        for (int i = 0; i < numFoldersInt; i++) {
+            folders[i] = readFolder(header);
+        }
+
+        nid = getUnsignedByte(header);
+        for (final Folder folder : folders) {
+            assertFitsIntoNonNegativeInt("totalOutputStreams", folder.totalOutputStreams);
+            folder.unpackSizes = new long[(int)folder.totalOutputStreams];
+            for (int i = 0; i < folder.totalOutputStreams; i++) {
+                folder.unpackSizes[i] = readUint64(header);
+            }
+        }
+
+        nid = getUnsignedByte(header);
+        if (nid == NID.kCRC) {
+            final BitSet crcsDefined = readAllOrBits(header, numFoldersInt);
+            for (int i = 0; i < numFoldersInt; i++) {
+                if (crcsDefined.get(i)) {
+                    folders[i].hasCrc = true;
+                    folders[i].crc = 0xffffFFFFL & getInt(header);
+                } else {
+                    folders[i].hasCrc = false;
+                }
+            }
+
+            nid = getUnsignedByte(header);
+        }
+    }
+
+    /**
+     * Discard any queued streams/ folder stream, and reopen the current folder input stream.
+     *
+     * @param folderIndex the index of the folder to reopen
+     * @param file        the 7z entry to read
+     * @throws IOException if exceptions occur when reading the 7z file
+     */
+    private void reopenFolderInputStream(final int folderIndex, final SevenZArchiveEntry file) throws IOException {
+        deferredBlockStreams.clear();
+        if (currentFolderInputStream != null) {
+            currentFolderInputStream.close();
+            currentFolderInputStream = null;
+        }
+        final Folder folder = archive.folders[folderIndex];
+        final int firstPackStreamIndex = archive.streamMap.folderFirstPackStreamIndex[folderIndex];
+        final long folderOffset = SIGNATURE_HEADER_SIZE + archive.packPos +
+                archive.streamMap.packStreamOffsets[firstPackStreamIndex];
+
+        currentFolderInputStream = buildDecoderStack(folder, folderOffset, firstPackStreamIndex, file);
+    }
+
+    private ArchiveStatistics sanityCheckAndCollectStatistics(final ByteBuffer header)
+        throws IOException {
+        final ArchiveStatistics stats = new ArchiveStatistics();
+
+        int nid = getUnsignedByte(header);
+
+        if (nid == NID.kArchiveProperties) {
+            sanityCheckArchiveProperties(header);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kAdditionalStreamsInfo) {
+            throw new IOException("Additional streams unsupported");
+            //nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kMainStreamsInfo) {
+            sanityCheckStreamsInfo(header, stats);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid == NID.kFilesInfo) {
+            sanityCheckFilesInfo(header, stats);
+            nid = getUnsignedByte(header);
+        }
+
+        if (nid != NID.kEnd) {
+            throw new IOException("Badly terminated header, found " + nid);
+        }
+
+        return stats;
+    }
+
+    private void sanityCheckArchiveProperties(final ByteBuffer header)
+        throws IOException {
+        int nid =  getUnsignedByte(header);
+        while (nid != NID.kEnd) {
+            final int propertySize =
+                assertFitsIntoNonNegativeInt("propertySize", readUint64(header));
+            if (skipBytesFully(header, propertySize) < propertySize) {
+                throw new IOException("invalid property size");
+            }
+            nid = getUnsignedByte(header);
+        }
+    }
+
+    private void sanityCheckFilesInfo(final ByteBuffer header, final ArchiveStatistics stats) throws IOException {
+        stats.numberOfEntries = assertFitsIntoNonNegativeInt("numFiles", readUint64(header));
+
+        int emptyStreams = -1;
+        while (true) {
+            final int propertyType = getUnsignedByte(header);
+            if (propertyType == 0) {
+                break;
+            }
+            final long size = readUint64(header);
+            switch (propertyType) {
+                case NID.kEmptyStream: {
+                    emptyStreams = readBits(header, stats.numberOfEntries).cardinality();
+                    break;
+                }
+                case NID.kEmptyFile: {
+                    if (emptyStreams == -1) {
+                        throw new IOException("Header format error: kEmptyStream must appear before kEmptyFile");
+                    }
+                    readBits(header, emptyStreams);
+                    break;
+                }
+                case NID.kAnti: {
+                    if (emptyStreams == -1) {
+                        throw new IOException("Header format error: kEmptyStream must appear before kAnti");
+                    }
+                    readBits(header, emptyStreams);
+                    break;
+                }
+                case NID.kName: {
+                    final int external = getUnsignedByte(header);
+                    if (external != 0) {
+                        throw new IOException("Not implemented");
+                    }
+                    final int namesLength =
+                        assertFitsIntoNonNegativeInt("file names length", size - 1);
+                    if ((namesLength & 1) != 0) {
+                        throw new IOException("File names length invalid");
+                    }
+
+                    int filesSeen = 0;
+                    for (int i = 0; i < namesLength; i += 2) {
+                        final char c = getChar(header);
                         if (c == 0) {
                             filesSeen++;
                         }
@@ -1370,767 +1760,377 @@ public class SevenZFile implements Closeable {
         stats.numberOfEntriesWithStream = stats.numberOfEntries - Math.max(emptyStreams, 0);
     }
 
-    private void readFilesInfo(final ByteBuffer header, final Archive archive) throws IOException {
-        final int numFilesInt = (int) readUint64(header);
-        final Map<Integer, SevenZArchiveEntry> fileMap = new LinkedHashMap<>();
-        BitSet isEmptyStream = null;
-        BitSet isEmptyFile = null;
-        BitSet isAnti = null;
-        while (true) {
-            final int propertyType = getUnsignedByte(header);
-            if (propertyType == 0) {
-                break;
-            }
-            final long size = readUint64(header);
-            switch (propertyType) {
-                case NID.kEmptyStream: {
-                    isEmptyStream = readBits(header, numFilesInt);
-                    break;
-                }
-                case NID.kEmptyFile: {
-                    isEmptyFile = readBits(header, isEmptyStream.cardinality());
-                    break;
-                }
-                case NID.kAnti: {
-                    isAnti = readBits(header, isEmptyStream.cardinality());
-                    break;
-                }
-                case NID.kName: {
-                    /* final int external = */ getUnsignedByte(header);
-                    final byte[] names = new byte[(int) (size - 1)];
-                    final int namesLength = names.length;
-                    get(header, names);
-                    int nextFile = 0;
-                    int nextName = 0;
-                    for (int i = 0; i < namesLength; i += 2) {
-                        if (names[i] == 0 && names[i + 1] == 0) {
-                            checkEntryIsInitialized(fileMap, nextFile);
-                            fileMap.get(nextFile).setName(new String(names, nextName, i - nextName, UTF_16LE));
-                            nextName = i + 2;
-                            nextFile++;
-                        }
-                    }
-                    if (nextName != namesLength || nextFile != numFilesInt) {
-                        throw new IOException("Error parsing file names");
-                    }
-                    break;
-                }
-                case NID.kCTime: {
-                    final BitSet timesDefined = readAllOrBits(header, numFilesInt);
-                    /* final int external = */ getUnsignedByte(header);
-                    for (int i = 0; i < numFilesInt; i++) {
-                        checkEntryIsInitialized(fileMap, i);
-                        final SevenZArchiveEntry entryAtIndex = fileMap.get(i);
-                        entryAtIndex.setHasCreationDate(timesDefined.get(i));
-                        if (entryAtIndex.getHasCreationDate()) {
-                            entryAtIndex.setCreationDate(getLong(header));
-                        }
-                    }
-                    break;
-                }
-                case NID.kATime: {
-                    final BitSet timesDefined = readAllOrBits(header, numFilesInt);
-                    /* final int external = */ getUnsignedByte(header);
... 89000 lines suppressed ...


[commons-compress] 05/05: Use Files APIs and try-with-resources

Posted by gg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit 8e112661e3b4494b5e6e4371817b9a6e56e5080a
Author: Gary Gregory <ga...@gmail.com>
AuthorDate: Sun Dec 11 03:02:36 2022 -0500

    Use Files APIs and try-with-resources
---
 .../apache/commons/compress/AbstractTestCase.java  | 15 ++--
 .../commons/compress/archivers/ArTestCase.java     | 88 ++++++++++------------
 .../archivers/ArchiveOutputStreamTest.java         | 12 +--
 .../commons/compress/archivers/CpioTestCase.java   | 12 ++-
 .../commons/compress/archivers/DumpTestCase.java   | 12 +--
 .../commons/compress/archivers/JarTestCase.java    | 52 ++++++-------
 .../commons/compress/archivers/TarTestCase.java    | 31 +++-----
 .../commons/compress/archivers/ZipTestCase.java    | 20 ++---
 .../cpio/CpioArchiveOutputStreamTest.java          | 22 +-----
 .../archivers/tar/TarArchiveInputStreamTest.java   |  4 +-
 .../compress/archivers/zip/ExplodeSupportTest.java | 22 +++---
 .../commons/compress/archivers/zip/Lister.java     |  4 +-
 .../zip/ParallelScatterZipCreatorTest.java         | 19 ++---
 .../compress/archivers/zip/Zip64SupportIT.java     |  3 +-
 .../zip/ZipFileIgnoringLocalFileHeaderTest.java    |  6 +-
 .../compress/compressors/BZip2TestCase.java        | 35 ++++-----
 .../compress/compressors/DeflateTestCase.java      | 22 +++---
 .../compress/compressors/FramedSnappyTestCase.java | 21 +++---
 .../commons/compress/compressors/GZipTestCase.java | 41 ++++------
 .../commons/compress/compressors/LZMATestCase.java | 54 +++++--------
 .../compress/compressors/Pack200TestCase.java      |  8 +-
 .../commons/compress/compressors/ZTestCase.java    | 20 ++---
 .../brotli/BrotliCompressorInputStreamTest.java    |  6 +-
 .../lz4/BlockLZ4CompressorRoundtripTest.java       | 10 +--
 .../compress/compressors/lz4/FactoryTest.java      | 12 ++-
 .../lz4/FramedLZ4CompressorRoundtripTest.java      |  8 +-
 .../compressors/pack200/Pack200UtilsTest.java      | 37 +++------
 .../FramedSnappyCompressorInputStreamTest.java     | 47 +++---------
 .../compressors/snappy/SnappyRoundtripTest.java    | 14 ++--
 .../zstandard/ZstdCompressorInputStreamTest.java   |  6 +-
 .../compressors/zstandard/ZstdRoundtripTest.java   | 19 ++---
 31 files changed, 250 insertions(+), 432 deletions(-)
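
The change applied across these tests follows one pattern: drop IOUtils.copy() over manually opened and manually closed streams in favour of java.nio.file.Files.copy(), and let try-with-resources close the archive streams. Below is a minimal sketch of that pattern, assuming Commons Compress is on the classpath; the entry name and paths are placeholders for illustration and are not taken from any one test in the patch.

    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.Paths;

    import org.apache.commons.compress.archivers.ArchiveOutputStream;
    import org.apache.commons.compress.archivers.ArchiveStreamFactory;
    import org.apache.commons.compress.archivers.ar.ArArchiveEntry;

    public class FilesApiSketch {

        // After the change: both streams are closed by try-with-resources even if
        // putArchiveEntry or the copy throws, and Files.copy(Path, OutputStream)
        // replaces IOUtils.copy(Files.newInputStream(input), os) plus manual close().
        static void addEntry(final Path archive, final Path input) throws Exception {
            try (OutputStream out = Files.newOutputStream(archive);
                    ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out)) {
                os.putArchiveEntry(new ArArchiveEntry("test1.xml", Files.size(input)));
                Files.copy(input, os);
                os.closeArchiveEntry();
            }
        }

        public static void main(final String[] args) throws Exception {
            // Placeholder paths, chosen only to exercise the method above.
            addEntry(Paths.get("bla.ar"), Paths.get("test1.xml"));
        }
    }

The same shape appears in the extraction direction: Files.copy(InputStream, Path) replaces the open-output-stream / IOUtils.copy / close sequence when writing an entry's contents to disk.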

diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java
index 9b4d5f9b..c4fcdc6d 100644
--- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java
+++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java
@@ -141,7 +141,7 @@ public abstract class AbstractTestCase {
             throws IOException, FileNotFoundException {
         final ArchiveEntry entry = out.createArchiveEntry(infile, filename);
         out.putArchiveEntry(entry);
-        IOUtils.copy(infile, out);
+        Files.copy(infile.toPath(), out);
         out.closeArchiveEntry();
         archiveList.add(filename);
     }
@@ -175,20 +175,17 @@ public abstract class AbstractTestCase {
         try {
             ArchiveEntry entry = null;
             while ((entry = in.getNextEntry()) != null) {
-                final File outfile = new File(result.getCanonicalPath() + "/result/"
-                        + entry.getName());
-                long copied=0;
-                if (entry.isDirectory()){
+                final File outfile = new File(result.getCanonicalPath() + "/result/" + entry.getName());
+                long copied = 0;
+                if (entry.isDirectory()) {
                     outfile.mkdirs();
                 } else {
                     outfile.getParentFile().mkdirs();
-                    try (OutputStream out = Files.newOutputStream(outfile.toPath())) {
-                        copied = IOUtils.copy(in, out);
-                    }
+                    copied = Files.copy(in, outfile.toPath());
                 }
                 final long size = entry.getSize();
                 if (size != ArchiveEntry.SIZE_UNKNOWN) {
-                    assertEquals("Entry.size should equal bytes read.",size, copied);
+                    assertEquals("Entry.size should equal bytes read.", size, copied);
                 }
 
                 if (!outfile.exists()) {
diff --git a/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java
index dd1b548c..51641aec 100644
--- a/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/ArTestCase.java
@@ -47,17 +47,16 @@ public final class ArTestCase extends AbstractTestCase {
         final File file1 = getFile("test1.xml");
         final File file2 = getFile("test2.xml");
 
-        final OutputStream out = Files.newOutputStream(output.toPath());
-        final ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out);
-        os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
-        IOUtils.copy(Files.newInputStream(file1.toPath()), os);
-        os.closeArchiveEntry();
-
-        os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
-        IOUtils.copy(Files.newInputStream(file2.toPath()), os);
-        os.closeArchiveEntry();
+        try (final OutputStream out = Files.newOutputStream(output.toPath());
+                ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out)) {
+            os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
+            Files.copy(file1.toPath(), os);
+            os.closeArchiveEntry();
 
-        os.close();
+            os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
+            Files.copy(file2.toPath(), os);
+            os.closeArchiveEntry();
+        }
     }
 
     @Test
@@ -68,18 +67,16 @@ public final class ArTestCase extends AbstractTestCase {
         final File file2 = getFile("test2.xml");
         {
             // create
-
-            final OutputStream out = Files.newOutputStream(output.toPath());
-            final ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out);
-            os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
-            IOUtils.copy(Files.newInputStream(file1.toPath()), os);
-            os.closeArchiveEntry();
-
-            os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
-            IOUtils.copy(Files.newInputStream(file2.toPath()), os);
-            os.closeArchiveEntry();
-            os.close();
-            out.close();
+            try (OutputStream out = Files.newOutputStream(output.toPath());
+                    ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out)) {
+                os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
+                Files.copy(file1.toPath(), os);
+                os.closeArchiveEntry();
+
+                os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
+                Files.copy(file2.toPath(), os);
+                os.closeArchiveEntry();
+            }
         }
 
         assertEquals(8 + 60 + file1.length() + (file1.length() % 2) + 60 + file2.length() + (file2.length() % 2), output.length());
@@ -122,26 +119,24 @@ public final class ArTestCase extends AbstractTestCase {
         long sum = 0;
 
         {
-            final InputStream is = Files.newInputStream(output2.toPath());
-            final ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(new BufferedInputStream(is));
-            while (true) {
-                final ArArchiveEntry entry = (ArArchiveEntry) ais.getNextEntry();
-                if (entry == null) {
-                    break;
-                }
+            try (InputStream is = Files.newInputStream(output2.toPath());
+                    ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(new BufferedInputStream(is))) {
+                while (true) {
+                    final ArArchiveEntry entry = (ArArchiveEntry) ais.getNextEntry();
+                    if (entry == null) {
+                        break;
+                    }
 
-                IOUtils.copy(ais, new ByteArrayOutputStream());
+                    IOUtils.copy(ais, new ByteArrayOutputStream());
 
-                sum += entry.getLength();
-                files++;
+                    sum += entry.getLength();
+                    files++;
+                }
             }
-            ais.close();
-            is.close();
         }
 
         assertEquals(1, files);
         assertEquals(file1.length(), sum);
-
     }
 
     @Test
@@ -151,17 +146,16 @@ public final class ArTestCase extends AbstractTestCase {
             final File file1 = getFile("test1.xml");
             final File file2 = getFile("test2.xml");
 
-            final OutputStream out = Files.newOutputStream(output.toPath());
-            final ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out);
-            os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
-            IOUtils.copy(Files.newInputStream(file1.toPath()), os);
-            os.closeArchiveEntry();
+            try (OutputStream out = Files.newOutputStream(output.toPath());
+                    ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("ar", out)) {
+                os.putArchiveEntry(new ArArchiveEntry("test1.xml", file1.length()));
+                Files.copy(file1.toPath(), os);
+                os.closeArchiveEntry();
 
-            os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
-            IOUtils.copy(Files.newInputStream(file2.toPath()), os);
-            os.closeArchiveEntry();
-            os.close();
-            out.close();
+                os.putArchiveEntry(new ArArchiveEntry("test2.xml", file2.length()));
+                Files.copy(file2.toPath(), os);
+                os.closeArchiveEntry();
+            }
         }
 
         // UnArArchive Operation
@@ -171,9 +165,7 @@ public final class ArTestCase extends AbstractTestCase {
             final ArArchiveEntry entry = (ArArchiveEntry) in.getNextEntry();
 
             final File target = new File(dir, entry.getName());
-            try (final OutputStream out = Files.newOutputStream(target.toPath())) {
-                IOUtils.copy(in, out);
-            }
+            Files.copy(in, target.toPath());
         }
     }
 
diff --git a/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java
index e2f4ef9e..95d6b675 100644
--- a/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/ArchiveOutputStreamTest.java
@@ -42,9 +42,7 @@ public class ArchiveOutputStreamTest extends AbstractTestCase {
         throws Exception {
         final ArchiveOutputStream aos1 = factory.createArchiveOutputStream(archiveType, out1);
         aos1.putArchiveEntry(aos1.createArchiveEntry(dummy, "dummy"));
-        try (InputStream is = Files.newInputStream(dummy.toPath())) {
-            IOUtils.copy(is, aos1);
-        }
+        Files.copy(dummy.toPath(), aos1);
         return aos1;
     }
 
@@ -55,9 +53,7 @@ public class ArchiveOutputStreamTest extends AbstractTestCase {
         ArchiveOutputStream aos1;
         aos1 = factory.createArchiveOutputStream(archiveType, out1);
         aos1.putArchiveEntry(aos1.createArchiveEntry(dummy, "dummy"));
-        try (InputStream is = Files.newInputStream(dummy.toPath())) {
-            IOUtils.copy(is, aos1);
-        }
+        Files.copy(dummy.toPath(), aos1);
         aos1.closeArchiveEntry();
         aos1.close(); // omitted finish
 
@@ -71,9 +67,7 @@ public class ArchiveOutputStreamTest extends AbstractTestCase {
         }
 
         aos1.putArchiveEntry(aos1.createArchiveEntry(dummy, "dummy"));
-        try (InputStream is = Files.newInputStream(dummy.toPath())) {
-            IOUtils.copy(is, aos1);
-        }
+        Files.copy(dummy.toPath(), aos1);
 
         // TODO check if second putArchiveEntry() can follow without closeAE?
 
diff --git a/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java b/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java
index 4c9db4e9..510b67bb 100644
--- a/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/CpioTestCase.java
@@ -50,11 +50,11 @@ public final class CpioTestCase extends AbstractTestCase {
         final OutputStream out = Files.newOutputStream(output.toPath());
         final ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("cpio", out);
         os.putArchiveEntry(new CpioArchiveEntry("test1.xml", file1.length()));
-        IOUtils.copy(Files.newInputStream(file1.toPath()), os);
+        Files.copy(file1.toPath(), os);
         os.closeArchiveEntry();
 
         os.putArchiveEntry(new CpioArchiveEntry("test2.xml", file2.length()));
-        IOUtils.copy(Files.newInputStream(file2.toPath()), os);
+        Files.copy(file2.toPath(), os);
         os.closeArchiveEntry();
 
         os.close();
@@ -75,13 +75,13 @@ public final class CpioTestCase extends AbstractTestCase {
             CpioArchiveEntry entry = new CpioArchiveEntry("test1.xml", file1Length);
             entry.setMode(CpioConstants.C_ISREG);
             os.putArchiveEntry(entry);
-            IOUtils.copy(Files.newInputStream(file1.toPath()), os);
+            Files.copy(file1.toPath(), os);
             os.closeArchiveEntry();
 
             entry = new CpioArchiveEntry("test2.xml", file2Length);
             entry.setMode(CpioConstants.C_ISREG);
             os.putArchiveEntry(entry);
-            IOUtils.copy(Files.newInputStream(file2.toPath()), os);
+            Files.copy(file2.toPath(), os);
             os.closeArchiveEntry();
             os.finish();
             os.close();
@@ -97,9 +97,7 @@ public final class CpioTestCase extends AbstractTestCase {
         ArchiveEntry entry = null;
         while ((entry = in.getNextEntry()) != null) {
             final File cpioget = new File(dir, entry.getName());
-            final OutputStream out = Files.newOutputStream(cpioget.toPath());
-            IOUtils.copy(in, out);
-            out.close();
+            Files.copy(in, cpioget.toPath());
             result.put(entry.getName(), cpioget);
         }
         in.close();
diff --git a/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java b/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java
index 5e47b3c4..c25f5fa8 100644
--- a/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/DumpTestCase.java
@@ -88,10 +88,8 @@ public final class DumpTestCase extends AbstractTestCase {
     private void unarchiveAll(final File input) throws Exception {
         final InputStream is = Files.newInputStream(input.toPath());
         ArchiveInputStream in = null;
-        OutputStream out = null;
         try {
-            in = ArchiveStreamFactory.DEFAULT
-                .createArchiveInputStream("dump", is);
+            in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("dump", is);
 
             ArchiveEntry entry = in.getNextEntry();
             while (entry != null) {
@@ -102,16 +100,10 @@ public final class DumpTestCase extends AbstractTestCase {
                     entry = in.getNextEntry();
                     continue;
                 }
-                out = Files.newOutputStream(archiveEntry.toPath());
-                IOUtils.copy(in, out);
-                out.close();
-                out = null;
+                Files.copy(in, archiveEntry.toPath());
                 entry = in.getNextEntry();
             }
         } finally {
-            if (out != null) {
-                out.close();
-            }
             if (in != null) {
                 in.close();
             }
diff --git a/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java b/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java
index b19e2b49..8fbc967b 100644
--- a/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/JarTestCase.java
@@ -42,11 +42,11 @@ public final class JarTestCase extends AbstractTestCase {
         final ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("jar", out);
 
         os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
-        IOUtils.copy(Files.newInputStream(file1.toPath()), os);
+        Files.copy(file1.toPath(), os);
         os.closeArchiveEntry();
 
         os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
-        IOUtils.copy(Files.newInputStream(file2.toPath()), os);
+        Files.copy(file2.toPath(), os);
         os.closeArchiveEntry();
 
         os.close();
@@ -56,32 +56,24 @@ public final class JarTestCase extends AbstractTestCase {
     @Test
     public void testJarUnarchive() throws Exception {
         final File input = getFile("bla.jar");
-        final InputStream is = Files.newInputStream(input.toPath());
-        final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("jar", is);
-
-        ZipArchiveEntry entry = (ZipArchiveEntry)in.getNextEntry();
-        File o = new File(dir, entry.getName());
-        o.getParentFile().mkdirs();
-        OutputStream out = Files.newOutputStream(o.toPath());
-        IOUtils.copy(in, out);
-        out.close();
-
-        entry = (ZipArchiveEntry)in.getNextEntry();
-        o = new File(dir, entry.getName());
-        o.getParentFile().mkdirs();
-        out = Files.newOutputStream(o.toPath());
-        IOUtils.copy(in, out);
-        out.close();
-
-        entry = (ZipArchiveEntry)in.getNextEntry();
-        o = new File(dir, entry.getName());
-        o.getParentFile().mkdirs();
-        out = Files.newOutputStream(o.toPath());
-        IOUtils.copy(in, out);
-        out.close();
-
-        in.close();
-        is.close();
+        try (InputStream is = Files.newInputStream(input.toPath());
+                ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("jar", is)) {
+
+            ZipArchiveEntry entry = (ZipArchiveEntry) in.getNextEntry();
+            File o = new File(dir, entry.getName());
+            o.getParentFile().mkdirs();
+            Files.copy(in, o.toPath());
+
+            entry = (ZipArchiveEntry) in.getNextEntry();
+            o = new File(dir, entry.getName());
+            o.getParentFile().mkdirs();
+            Files.copy(in, o.toPath());
+
+            entry = (ZipArchiveEntry) in.getNextEntry();
+            o = new File(dir, entry.getName());
+            o.getParentFile().mkdirs();
+            Files.copy(in, o.toPath());
+        }
     }
 
     @Test
@@ -99,9 +91,7 @@ public final class JarTestCase extends AbstractTestCase {
                 entry = in.getNextEntry();
                 continue;
             }
-            final OutputStream out = Files.newOutputStream(archiveEntry.toPath());
-            IOUtils.copy(in, out);
-            out.close();
+            Files.copy(in, archiveEntry.toPath());
             entry = in.getNextEntry();
         }
 
diff --git a/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java b/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java
index b3536f35..16ffc264 100644
--- a/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/TarTestCase.java
@@ -314,8 +314,8 @@ public final class TarTestCase extends AbstractTestCase {
     public void testTarArchiveCreation() throws Exception {
         final File output = new File(dir, "bla.tar");
         final File file1 = getFile("test1.xml");
-        final OutputStream out = Files.newOutputStream(output.toPath());
-        try (ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("tar", out)) {
+        try (OutputStream out = Files.newOutputStream(output.toPath());
+                ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("tar", out)) {
             final TarArchiveEntry entry = new TarArchiveEntry("testdata/test1.xml");
             entry.setModTime(0);
             entry.setSize(file1.length());
@@ -325,9 +325,7 @@ public final class TarTestCase extends AbstractTestCase {
             entry.setGroupName("excalibur");
             entry.setMode(0100000);
             os.putArchiveEntry(entry);
-            try (final InputStream iInputStream = Files.newInputStream(file1.toPath())) {
-                IOUtils.copy(iInputStream, os);
-            }
+            Files.copy(file1.toPath(), os);
             os.closeArchiveEntry();
         }
     }
@@ -351,13 +349,10 @@ public final class TarTestCase extends AbstractTestCase {
         entry.setGroupName("excalibur");
         entry.setMode(0100000);
         os.putArchiveEntry(entry);
-        try (final InputStream in = Files.newInputStream(file1.toPath())) {
-            IOUtils.copy(in, os);
-            os.closeArchiveEntry();
-            os.close();
-            out.close();
-        }
-
+        Files.copy(file1.toPath(), os);
+        os.closeArchiveEntry();
+        os.close();
+        out.close();
 
         ArchiveOutputStream os2 = null;
         try {
@@ -374,7 +369,7 @@ public final class TarTestCase extends AbstractTestCase {
             entry2.setGroupName("excalibur");
             entry2.setMode(0100000);
             os2.putArchiveEntry(entry);
-            IOUtils.copy(Files.newInputStream(file1.toPath()), os2);
+            Files.copy(file1.toPath(), os2);
             os2.closeArchiveEntry();
         } catch(final IOException e) {
             assertTrue(true);
@@ -570,9 +565,7 @@ public final class TarTestCase extends AbstractTestCase {
         final File file = getFile("bla.tar");
         try (final TarFile tarFile = new TarFile(file)) {
             final TarArchiveEntry entry = tarFile.getEntries().get(0);
-            try (final OutputStream out = Files.newOutputStream(new File(dir, entry.getName()).toPath())) {
-                IOUtils.copy(tarFile.getInputStream(entry), out);
-            }
+            Files.copy(tarFile.getInputStream(entry), new File(dir, entry.getName()).toPath());
         }
     }
 
@@ -580,11 +573,9 @@ public final class TarTestCase extends AbstractTestCase {
     public void testTarUnarchive() throws Exception {
         final File input = getFile("bla.tar");
         try (final InputStream is = Files.newInputStream(input.toPath());
-             final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("tar", is)) {
+                final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("tar", is)) {
             final TarArchiveEntry entry = (TarArchiveEntry) in.getNextEntry();
-            try (final OutputStream out = Files.newOutputStream(new File(dir, entry.getName()).toPath())) {
-                IOUtils.copy(in, out);
-            }
+            Files.copy(in, new File(dir, entry.getName()).toPath());
         }
     }
 }
diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
index 209d3884..cd837f64 100644
--- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
@@ -79,8 +79,8 @@ public final class ZipTestCase extends AbstractTestCase {
             zipArchiveEntry.setMethod(ZipEntry.DEFLATED);
 
             zipArchiveOutputStream.putArchiveEntry(zipArchiveEntry);
-            try (final InputStream input = Files.newInputStream(fileToAdd.toPath())) {
-                IOUtils.copy(input, zipArchiveOutputStream);
+            try {
+                Files.copy(fileToAdd.toPath(), zipArchiveOutputStream);
             } finally {
                 zipArchiveOutputStream.closeArchiveEntry();
             }
@@ -725,15 +725,11 @@ public final class ZipTestCase extends AbstractTestCase {
         try (final OutputStream out = Files.newOutputStream(output.toPath())) {
             try (ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("zip", out)) {
                 os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
-                try (final InputStream input = Files.newInputStream(file1.toPath())) {
-                    IOUtils.copy(input, os);
-                }
+                Files.copy(file1.toPath(), os);
                 os.closeArchiveEntry();
 
                 os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
-                try (final InputStream input = Files.newInputStream(file2.toPath())) {
-                    IOUtils.copy(input, os);
-                }
+                Files.copy(file2.toPath(), os);
                 os.closeArchiveEntry();
             }
         }
@@ -748,9 +744,7 @@ public final class ZipTestCase extends AbstractTestCase {
                 while ((entry = (ZipArchiveEntry) archiveInputStream.getNextEntry()) != null) {
                     final File outfile = new File(resultDir.getCanonicalPath() + "/result/" + entry.getName());
                     outfile.getParentFile().mkdirs();
-                    try (OutputStream o = Files.newOutputStream(outfile.toPath())) {
-                        IOUtils.copy(archiveInputStream, o);
-                    }
+                    Files.copy(archiveInputStream, outfile.toPath());
                     results.add(outfile);
                 }
             }
@@ -865,9 +859,7 @@ public final class ZipTestCase extends AbstractTestCase {
         try (final InputStream is = Files.newInputStream(input.toPath());
                 final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("zip", is)) {
             final ZipArchiveEntry entry = (ZipArchiveEntry) in.getNextEntry();
-            try (final OutputStream out = Files.newOutputStream(new File(dir, entry.getName()).toPath())) {
-                IOUtils.copy(in, out);
-            }
+            Files.copy(in, new File(dir, entry.getName()).toPath());
         }
     }
 }
diff --git a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java
index 08be214a..b45a6729 100644
--- a/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStreamTest.java
@@ -36,32 +36,18 @@ public class CpioArchiveOutputStreamTest extends AbstractTestCase {
     public void testWriteOldBinary() throws Exception {
         final File f = getFile("test1.xml");
         final File output = new File(dir, "test.cpio");
-        final OutputStream out = Files.newOutputStream(output.toPath());
-        InputStream in = null;
-        try {
-            final CpioArchiveOutputStream os = new CpioArchiveOutputStream(out, CpioConstants.FORMAT_OLD_BINARY);
+        try (final OutputStream out = Files.newOutputStream(output.toPath());
+                CpioArchiveOutputStream os = new CpioArchiveOutputStream(out, CpioConstants.FORMAT_OLD_BINARY)) {
             os.putArchiveEntry(new CpioArchiveEntry(CpioConstants.FORMAT_OLD_BINARY, f, "test1.xml"));
-            IOUtils.copy(in = Files.newInputStream(f.toPath()), os);
-            in.close();
-            in = null;
+            Files.copy(f.toPath(), os);
             os.closeArchiveEntry();
-            os.close();
-        } finally {
-            if (in != null) {
-                in.close();
-            }
-            out.close();
         }
 
-        try {
-            in = new CpioArchiveInputStream(Files.newInputStream(output.toPath()));
+        try (CpioArchiveInputStream in = new CpioArchiveInputStream(Files.newInputStream(output.toPath()))) {
             final CpioArchiveEntry e = ((CpioArchiveInputStream) in).getNextCPIOEntry();
             assertEquals("test1.xml", e.getName());
             assertNull(((CpioArchiveInputStream) in).getNextEntry());
         } finally {
-            if (in != null) {
-                in.close();
-            }
         }
     }
 }
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
index ab9b1cbe..e7f0fadd 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
@@ -183,9 +183,7 @@ public class TarArchiveInputStreamTest extends AbstractTestCase {
                 TarArchiveEntry entry = is.getNextTarEntry();
                 int count = 0;
                 while (entry != null) {
-                    try (OutputStream out = Files.newOutputStream(new File(dir, String.valueOf(count)).toPath())) {
-                        IOUtils.copy(is, out);
-                    }
+                    Files.copy(is, new File(dir, String.valueOf(count)).toPath());
                     count++;
                     entry = is.getNextTarEntry();
                 }
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java
index db9b13b2..3a653733 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/ExplodeSupportTest.java
@@ -39,20 +39,20 @@ import org.junit.jupiter.api.Test;
 public class ExplodeSupportTest {
 
     private void testArchiveWithImplodeCompression(final String filename, final String entryName) throws IOException {
-        final ZipFile zip = new ZipFile(new File(filename));
-        final ZipArchiveEntry entry = zip.getEntries().nextElement();
-        assertEquals("entry name", entryName, entry.getName());
-        assertTrue("entry can't be read", zip.canReadEntryData(entry));
-        assertEquals("method", ZipMethod.IMPLODING.getCode(), entry.getMethod());
+        try (ZipFile zip = new ZipFile(new File(filename))) {
+            final ZipArchiveEntry entry = zip.getEntries().nextElement();
+            assertEquals("entry name", entryName, entry.getName());
+            assertTrue("entry can't be read", zip.canReadEntryData(entry));
+            assertEquals("method", ZipMethod.IMPLODING.getCode(), entry.getMethod());
 
-        final ByteArrayOutputStream bout = new ByteArrayOutputStream();
-        final CheckedOutputStream out = new CheckedOutputStream(bout, new CRC32());
-        IOUtils.copy(zip.getInputStream(entry), out);
+            final ByteArrayOutputStream bout = new ByteArrayOutputStream();
+            final CheckedOutputStream out = new CheckedOutputStream(bout, new CRC32());
+            IOUtils.copy(zip.getInputStream(entry), out);
 
-        out.flush();
+            out.flush();
 
-        assertEquals("CRC32", entry.getCrc(), out.getChecksum().getValue());
-        zip.close();
+            assertEquals("CRC32", entry.getCrc(), out.getChecksum().getValue());
+        }
     }
 
     @Test
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java b/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java
index 3aa4eba5..826d3616 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/Lister.java
@@ -52,9 +52,7 @@ public final class Lister {
         if (!f.getParentFile().exists()) {
             f.getParentFile().mkdirs();
         }
-        try (OutputStream fos = Files.newOutputStream(f.toPath())) {
-            IOUtils.copy(is, fos);
-        }
+        Files.copy(is, f.toPath());
     }
 
     private static void list(final ZipArchiveEntry entry) {
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
index bf78ffb1..6edbe8a0 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
@@ -68,17 +68,18 @@ public class ParallelScatterZipCreatorTest {
     }
 
     private void callableApi(final CallableConsumerSupplier consumerSupplier, final int compressionLevel) throws Exception {
-        final ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result);
-        zos.setEncoding("UTF-8");
-        final ExecutorService es = Executors.newFixedThreadPool(1);
+        final Map<String, byte[]> entries;
+        final ParallelScatterZipCreator zipCreator;
+        try (ZipArchiveOutputStream zos = new ZipArchiveOutputStream(result)) {
+            zos.setEncoding("UTF-8");
+            final ExecutorService es = Executors.newFixedThreadPool(1);
 
-        final ScatterGatherBackingStoreSupplier supp = () -> new FileBasedScatterGatherBackingStore(tmp = File.createTempFile("parallelscatter", "n1"));
-
-        final ParallelScatterZipCreator zipCreator = new ParallelScatterZipCreator(es, supp, compressionLevel);
-        final Map<String, byte[]> entries = writeEntriesAsCallable(zipCreator, consumerSupplier.apply(zipCreator));
-        zipCreator.writeTo(zos);
-        zos.close();
+            final ScatterGatherBackingStoreSupplier supp = () -> new FileBasedScatterGatherBackingStore(tmp = File.createTempFile("parallelscatter", "n1"));
 
+            zipCreator = new ParallelScatterZipCreator(es, supp, compressionLevel);
+            entries = writeEntriesAsCallable(zipCreator, consumerSupplier.apply(zipCreator));
+            zipCreator.writeTo(zos);
+        }
 
         removeEntriesFoundInZipFile(result, entries);
         assertTrue(entries.isEmpty());
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
index 54add5b3..f427c51b 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
@@ -2055,8 +2055,7 @@ public class Zip64SupportIT {
 
             zipArchiveOutputStream.putArchiveEntry(new ZipArchiveEntry("input.bin"));
 
-            final InputStream inputStream = new FileInputStream(inputFile);
-            IOUtils.copy(inputStream, zipArchiveOutputStream);
+            Files.copy(inputFile.toPath(), zipArchiveOutputStream);
 
             zipArchiveOutputStream.closeArchiveEntry();
         }
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileIgnoringLocalFileHeaderTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileIgnoringLocalFileHeaderTest.java
index 85437b2c..bb6ff2ef 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileIgnoringLocalFileHeaderTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipFileIgnoringLocalFileHeaderTest.java
@@ -96,11 +96,9 @@ public class ZipFileIgnoringLocalFileHeaderTest {
     @Test
     public void testZipUnarchive() throws Exception {
         try (final ZipFile zf = openZipWithoutLFH("bla.zip")) {
-            for (final Enumeration<ZipArchiveEntry> e = zf.getEntries(); e.hasMoreElements(); ) {
+            for (final Enumeration<ZipArchiveEntry> e = zf.getEntries(); e.hasMoreElements();) {
                 final ZipArchiveEntry entry = e.nextElement();
-                try (final OutputStream out = Files.newOutputStream(new File(dir, entry.getName()).toPath())) {
-                    IOUtils.copy(zf.getInputStream(entry), out);
-                }
+                Files.copy(zf.getInputStream(entry), new File(dir, entry.getName()).toPath());
             }
         }
     }
diff --git a/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java b/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java
index edb56d99..eb1fd3ff 100644
--- a/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/BZip2TestCase.java
@@ -36,40 +36,33 @@ public final class BZip2TestCase extends AbstractTestCase {
     public void testBzip2Unarchive() throws Exception {
         final File input = getFile("bla.txt.bz2");
         final File output = new File(dir, "bla.txt");
-        final InputStream is = Files.newInputStream(input.toPath());
-        final CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("bzip2", is);
-        final OutputStream os = Files.newOutputStream(output.toPath());
-        IOUtils.copy(in, os);
-        is.close();
-        os.close();
+        try (final InputStream is = Files.newInputStream(input.toPath());
+                CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("bzip2", is)) {
+            Files.copy(in, output.toPath());
+        }
     }
 
     @Test
-    public void testBzipCreation()  throws Exception {
+    public void testBzipCreation() throws Exception {
         File output = null;
         final File input = getFile("test.txt");
         {
             output = new File(dir, "test.txt.bz2");
-            final OutputStream out = Files.newOutputStream(output.toPath());
-            final CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream("bzip2", out);
-            final InputStream in = Files.newInputStream(input.toPath());
-            IOUtils.copy(in, cos);
-            cos.close();
-            in.close();
+            try (OutputStream out = Files.newOutputStream(output.toPath());
+                    final CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream("bzip2", out)) {
+                Files.copy(input.toPath(), cos);
+            }
         }
 
         final File decompressed = new File(dir, "decompressed.txt");
         {
-            final InputStream is = Files.newInputStream(output.toPath());
-            final CompressorInputStream in =
-                new CompressorStreamFactory().createCompressorInputStream("bzip2", is);
-            final OutputStream os = Files.newOutputStream(decompressed.toPath());
-            IOUtils.copy(in, os);
-            is.close();
-            os.close();
+            try (InputStream is = Files.newInputStream(output.toPath());
+                    CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("bzip2", is)) {
+                Files.copy(in, decompressed.toPath());
+            }
         }
 
-        assertEquals(input.length(),decompressed.length());
+        assertEquals(input.length(), decompressed.length());
     }
 
     @Test
diff --git a/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java b/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java
index c472d0c7..095eb4a8 100644
--- a/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/DeflateTestCase.java
@@ -38,13 +38,12 @@ public final class DeflateTestCase extends AbstractTestCase {
      * @throws Exception
      */
     @Test
-    public void testDeflateCreation()  throws Exception {
+    public void testDeflateCreation() throws Exception {
         final File input = getFile("test1.xml");
         final File output = new File(dir, "test1.xml.deflatez");
         try (OutputStream out = Files.newOutputStream(output.toPath())) {
-            try (CompressorOutputStream cos = new CompressorStreamFactory()
-                    .createCompressorOutputStream("deflate", out)) {
-                IOUtils.copy(Files.newInputStream(input.toPath()), cos);
+            try (CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream("deflate", out)) {
+                Files.copy(input.toPath(), cos);
             }
         }
     }
@@ -59,11 +58,9 @@ public final class DeflateTestCase extends AbstractTestCase {
         final File input = getFile("bla.tar.deflatez");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-             // zlib header is expected by default
-            try (CompressorInputStream in = new CompressorStreamFactory()
-                    .createCompressorInputStream("deflate", is);
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            // zlib header is expected by default
+            try (CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("deflate", is)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
@@ -81,7 +78,7 @@ public final class DeflateTestCase extends AbstractTestCase {
             final DeflateParameters params = new DeflateParameters();
             params.setWithZlibHeader(false);
             try (CompressorOutputStream cos = new DeflateCompressorOutputStream(out, params)) {
-                IOUtils.copy(Files.newInputStream(input.toPath()), cos);
+                Files.copy(input.toPath(), cos);
             }
         }
     }
@@ -98,9 +95,8 @@ public final class DeflateTestCase extends AbstractTestCase {
         try (InputStream is = Files.newInputStream(input.toPath())) {
             final DeflateParameters params = new DeflateParameters();
             params.setWithZlibHeader(false);
-            try (CompressorInputStream in = new DeflateCompressorInputStream(is, params);
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (CompressorInputStream in = new DeflateCompressorInputStream(is, params)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
diff --git a/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java b/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java
index 764a40df..adf08790 100644
--- a/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/FramedSnappyTestCase.java
@@ -60,20 +60,18 @@ public final class FramedSnappyTestCase
         testRoundtrip(getFile("COMPRESS-256.7z"));
     }
 
-    private void testRoundtrip(final File input)  throws Exception {
+    private void testRoundtrip(final File input) throws Exception {
         final long start = System.currentTimeMillis();
         final File outputSz = new File(dir, input.getName() + ".sz");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             OutputStream os = Files.newOutputStream(outputSz.toPath());
-             CompressorOutputStream sos = new CompressorStreamFactory()
-                 .createCompressorOutputStream("snappy-framed", os)) {
-            IOUtils.copy(is, sos);
+        try (OutputStream os = Files.newOutputStream(outputSz.toPath());
+                CompressorOutputStream sos = new CompressorStreamFactory().createCompressorOutputStream("snappy-framed", os)) {
+            Files.copy(input.toPath(), sos);
         }
         // System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
-        //    + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
+        // + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
         try (InputStream is = Files.newInputStream(input.toPath());
-             CompressorInputStream sis = new CompressorStreamFactory()
-                 .createCompressorInputStream("snappy-framed", Files.newInputStream(outputSz.toPath()))) {
+                CompressorInputStream sis = new CompressorStreamFactory().createCompressorInputStream("snappy-framed",
+                        Files.newInputStream(outputSz.toPath()))) {
             final byte[] expected = IOUtils.toByteArray(is);
             final byte[] actual = IOUtils.toByteArray(sis);
             assertArrayEquals(expected, actual);
@@ -116,9 +114,8 @@ public final class FramedSnappyTestCase
         try (InputStream is = Files.newInputStream(input.toPath())) {
             // the intermediate BufferedInputStream is there for mark
             // support in the autodetection test
-            try (CompressorInputStream in = wrapper.wrap(new BufferedInputStream(is));
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (CompressorInputStream in = wrapper.wrap(new BufferedInputStream(is))) {
+                Files.copy(in, output.toPath());
                 assertEquals(995, in.getBytesRead());
             }
         }
diff --git a/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java b/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java
index 9945401d..26df7816 100644
--- a/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/GZipTestCase.java
@@ -102,16 +102,13 @@ public final class GZipTestCase extends AbstractTestCase {
         OutputStream out = null;
         CompressorInputStream cin = null;
         try {
-            in = Files.newInputStream(getFile("bla.tgz").toPath());
             out = new ByteArrayOutputStream();
-            IOUtils.copy(in, out);
-            in.close();
+            Files.copy(getFile("bla.tgz").toPath(), out);
             out.close();
 
             final byte[] data = ((ByteArrayOutputStream) out).toByteArray();
             in = new ByteArrayInputStream(data, 0, data.length - 1);
-            cin = new CompressorStreamFactory()
-                .createCompressorInputStream("gz", in);
+            cin = new CompressorStreamFactory().createCompressorInputStream("gz", in);
             out = new ByteArrayOutputStream();
 
             try {
@@ -135,20 +132,17 @@ public final class GZipTestCase extends AbstractTestCase {
     }
 
     private void testExtraFlags(final int compressionLevel, final int flag, final int bufferSize) throws Exception {
-        final byte[] content;
-        try (InputStream fis = Files.newInputStream(getFile("test3.xml").toPath())) {
-            content = IOUtils.toByteArray(fis);
-        }
+        final byte[] content = Files.readAllBytes(getFile("test3.xml").toPath());
 
         final ByteArrayOutputStream bout = new ByteArrayOutputStream();
 
         final GzipParameters parameters = new GzipParameters();
         parameters.setCompressionLevel(compressionLevel);
         parameters.setBufferSize(bufferSize);
-        final GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters);
-        IOUtils.copy(new ByteArrayInputStream(content), out);
-        out.flush();
-        out.close();
+        try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters)) {
+            IOUtils.copy(new ByteArrayInputStream(content), out);
+            out.flush();
+        }
 
         assertEquals("extra flags (XFL)", flag, bout.toByteArray()[8]);
     }
@@ -169,13 +163,12 @@ public final class GZipTestCase extends AbstractTestCase {
     }
 
     @Test
-    public void testGzipCreation()  throws Exception {
+    public void testGzipCreation() throws Exception {
         final File input = getFile("test1.xml");
         final File output = new File(dir, "test1.xml.gz");
         try (OutputStream out = Files.newOutputStream(output.toPath())) {
-            try (CompressorOutputStream cos = new CompressorStreamFactory()
-                    .createCompressorOutputStream("gz", out)) {
-                IOUtils.copy(Files.newInputStream(input.toPath()), cos);
+            try (CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream("gz", out)) {
+                Files.copy(input.toPath(), cos);
             }
         }
     }
@@ -185,10 +178,8 @@ public final class GZipTestCase extends AbstractTestCase {
         final File input = getFile("bla.tgz");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            try (CompressorInputStream in = new CompressorStreamFactory()
-                    .createCompressorInputStream("gz", is);
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("gz", is)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
@@ -291,13 +282,11 @@ public final class GZipTestCase extends AbstractTestCase {
         parameters.setOperatingSystem(13);
         parameters.setFilename("test3.xml");
         parameters.setComment("Umlaute möglich?");
-        try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters); InputStream fis = Files.newInputStream(getFile("test3" +
-                ".xml").toPath())) {
-            IOUtils.copy(fis, out);
+        try (GzipCompressorOutputStream out = new GzipCompressorOutputStream(bout, parameters)) {
+            Files.copy(getFile("test3.xml").toPath(), out);
         }
 
-        final GzipCompressorInputStream input =
-            new GzipCompressorInputStream(new ByteArrayInputStream(bout.toByteArray()));
+        final GzipCompressorInputStream input = new GzipCompressorInputStream(new ByteArrayInputStream(bout.toByteArray()));
         input.close();
         final GzipParameters readParams = input.getMetaData();
         assertEquals(Deflater.BEST_COMPRESSION, readParams.getCompressionLevel());
diff --git a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java
index b5d3fa23..2045ae1e 100644
--- a/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/LZMATestCase.java
@@ -33,31 +33,18 @@ import org.junit.jupiter.api.Test;
 
 public final class LZMATestCase extends AbstractTestCase {
 
-    private void copy(final InputStream in, final File output) throws IOException {
-        try (OutputStream out = Files.newOutputStream(output.toPath())) {
-            IOUtils.copy(in, out);
-        } finally {
-            in.close();
-        }
-    }
-
     @Test
     public void lzmaRoundtrip() throws Exception {
         final File input = getFile("test1.xml");
         final File compressed = new File(dir, "test1.xml.xz");
         try (OutputStream out = Files.newOutputStream(compressed.toPath())) {
-            try (CompressorOutputStream cos = new CompressorStreamFactory()
-                    .createCompressorOutputStream("lzma", out)) {
-                IOUtils.copy(Files.newInputStream(input.toPath()), cos);
+            try (CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream("lzma", out)) {
+                Files.copy(input.toPath(), cos);
             }
         }
-        byte[] orig;
-        try (InputStream is = Files.newInputStream(input.toPath())) {
-            orig = IOUtils.toByteArray(is);
-        }
+        final byte[] orig = Files.readAllBytes(input.toPath());
         final byte[] uncompressed;
-        try (InputStream is = Files.newInputStream(compressed.toPath());
-             CompressorInputStream in = new LZMACompressorInputStream(is)) {
+        try (InputStream is = Files.newInputStream(compressed.toPath()); CompressorInputStream in = new LZMACompressorInputStream(is)) {
             uncompressed = IOUtils.toByteArray(in);
         }
         Assert.assertArrayEquals(orig, uncompressed);
@@ -68,12 +55,11 @@ public final class LZMATestCase extends AbstractTestCase {
         final File input = getFile("bla.tar.lzma");
         final byte[] buf = new byte[2];
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            final LZMACompressorInputStream in =
-                    new LZMACompressorInputStream(is);
-            IOUtils.toByteArray(in);
-            Assert.assertEquals(-1, in.read(buf));
-            Assert.assertEquals(-1, in.read(buf));
-            in.close();
+            try (LZMACompressorInputStream in = new LZMACompressorInputStream(is)) {
+                IOUtils.toByteArray(in);
+                Assert.assertEquals(-1, in.read(buf));
+                Assert.assertEquals(-1, in.read(buf));
+            }
         }
     }
 
@@ -81,12 +67,11 @@ public final class LZMATestCase extends AbstractTestCase {
     public void singleByteReadConsistentlyReturnsMinusOneAtEof() throws IOException {
         final File input = getFile("bla.tar.lzma");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            final LZMACompressorInputStream in =
-                    new LZMACompressorInputStream(is);
-            IOUtils.toByteArray(in);
-            Assert.assertEquals(-1, in.read());
-            Assert.assertEquals(-1, in.read());
-            in.close();
+            try (LZMACompressorInputStream in = new LZMACompressorInputStream(is)) {
+                IOUtils.toByteArray(in);
+                Assert.assertEquals(-1, in.read());
+                Assert.assertEquals(-1, in.read());
+            }
         }
     }
 
@@ -95,8 +80,9 @@ public final class LZMATestCase extends AbstractTestCase {
         final File input = getFile("bla.tar.lzma");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            final CompressorInputStream in = new LZMACompressorInputStream(is);
-            copy(in, output);
+            try (final CompressorInputStream in = new LZMACompressorInputStream(is)) {
+                Files.copy(in, output.toPath());
+            }
         }
     }
 
@@ -105,9 +91,9 @@ public final class LZMATestCase extends AbstractTestCase {
         final File input = getFile("bla.tar.lzma");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = new BufferedInputStream(Files.newInputStream(input.toPath()))) {
-            final CompressorInputStream in = new CompressorStreamFactory()
-                    .createCompressorInputStream(is);
-            copy(in, output);
+            try (final CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream(is)) {
+                Files.copy(in, output.toPath());
+            }
         }
     }
 }
diff --git a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
index 120b6a61..44d5a738 100644
--- a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
@@ -55,15 +55,11 @@ public final class Pack200TestCase extends AbstractTestCase {
              ArchiveOutputStream os = ArchiveStreamFactory.DEFAULT.createArchiveOutputStream("jar", out)) {
 
             os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
-            try (InputStream inputStream = Files.newInputStream(file1.toPath())) {
-                IOUtils.copy(inputStream, os);
-            }
+            Files.copy(file1.toPath(), os);
             os.closeArchiveEntry();
 
             os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
-            try (InputStream inputStream = Files.newInputStream(file2.toPath())) {
-                IOUtils.copy(inputStream, os);
-            }
+            Files.copy(file2.toPath(), os);
             os.closeArchiveEntry();
         }
 
diff --git a/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java b/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java
index 1a29b12a..0b2f9eaf 100644
--- a/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/ZTestCase.java
@@ -38,21 +38,17 @@ public final class ZTestCase extends AbstractTestCase {
     public void testMatches() {
         assertFalse(ZCompressorInputStream.matches(new byte[] { 1, 2, 3, 4 }, 4));
         assertFalse(ZCompressorInputStream.matches(new byte[] { 0x1f, 2, 3, 4 }, 4));
-        assertFalse(ZCompressorInputStream.matches(new byte[] { 1, (byte)0x9d, 3, 4 },
-                                                   4));
-        assertFalse(ZCompressorInputStream.matches(new byte[] { 0x1f, (byte) 0x9d, 3, 4 },
-                                                   3));
-        assertTrue(ZCompressorInputStream.matches(new byte[] { 0x1f, (byte) 0x9d, 3, 4 },
-                                                  4));
+        assertFalse(ZCompressorInputStream.matches(new byte[] { 1, (byte) 0x9d, 3, 4 }, 4));
+        assertFalse(ZCompressorInputStream.matches(new byte[] { 0x1f, (byte) 0x9d, 3, 4 }, 3));
+        assertTrue(ZCompressorInputStream.matches(new byte[] { 0x1f, (byte) 0x9d, 3, 4 }, 4));
     }
 
     private void testUnarchive(final StreamWrapper<CompressorInputStream> wrapper) throws Exception {
         final File input = getFile("bla.tar.Z");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            try (InputStream in = wrapper.wrap(is);
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (InputStream in = wrapper.wrap(is)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
@@ -64,14 +60,12 @@ public final class ZTestCase extends AbstractTestCase {
 
     @Test
     public void testZUnarchiveViaAutoDetection() throws Exception {
-        testUnarchive(is -> new CompressorStreamFactory()
-            .createCompressorInputStream(new BufferedInputStream(is)));
+        testUnarchive(is -> new CompressorStreamFactory().createCompressorInputStream(new BufferedInputStream(is)));
     }
 
     @Test
     public void testZUnarchiveViaFactory() throws Exception {
-        testUnarchive(is -> new CompressorStreamFactory()
-            .createCompressorInputStream(CompressorStreamFactory.Z, is));
+        testUnarchive(is -> new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.Z, is));
     }
 
 }
diff --git a/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java
index 08bbbe7a..db9d1981 100644
--- a/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/brotli/BrotliCompressorInputStreamTest.java
@@ -124,10 +124,8 @@ public class BrotliCompressorInputStreamTest extends AbstractTestCase {
         final File input = getFile("bla.tar.br");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            try (CompressorInputStream in = new CompressorStreamFactory()
-                    .createCompressorInputStream("br", is);
-                    OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("br", is)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
diff --git a/src/test/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorRoundtripTest.java b/src/test/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorRoundtripTest.java
index 90f5a8ad..ed440efa 100644
--- a/src/test/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorRoundtripTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/lz4/BlockLZ4CompressorRoundtripTest.java
@@ -67,17 +67,15 @@ public final class BlockLZ4CompressorRoundtripTest extends AbstractTestCase {
         final File input = getFile(testFile);
         long start = System.currentTimeMillis();
         final File outputSz = new File(dir, input.getName() + ".block.lz4");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             final OutputStream os = Files.newOutputStream(outputSz.toPath());
-             BlockLZ4CompressorOutputStream los = new BlockLZ4CompressorOutputStream(os, params)) {
-            IOUtils.copy(is, los);
+        try (OutputStream os = Files.newOutputStream(outputSz.toPath()); BlockLZ4CompressorOutputStream los = new BlockLZ4CompressorOutputStream(os, params)) {
+            Files.copy(input.toPath(), los);
         }
         // System.err.println("Configuration: " + config);
         // System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
-        //    + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
+        // + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
         start = System.currentTimeMillis();
         try (InputStream is = Files.newInputStream(input.toPath());
-             BlockLZ4CompressorInputStream sis = new BlockLZ4CompressorInputStream(Files.newInputStream(outputSz.toPath()))) {
+                BlockLZ4CompressorInputStream sis = new BlockLZ4CompressorInputStream(Files.newInputStream(outputSz.toPath()))) {
             final byte[] expected = IOUtils.toByteArray(is);
             final byte[] actual = IOUtils.toByteArray(sis);
             Assert.assertArrayEquals(expected, actual);
diff --git a/src/test/java/org/apache/commons/compress/compressors/lz4/FactoryTest.java b/src/test/java/org/apache/commons/compress/compressors/lz4/FactoryTest.java
index 273b018c..057d02e1 100644
--- a/src/test/java/org/apache/commons/compress/compressors/lz4/FactoryTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/lz4/FactoryTest.java
@@ -45,17 +45,15 @@ public class FactoryTest extends AbstractTestCase {
         final File input = getFile("bla.tar");
         long start = System.currentTimeMillis();
         final File outputSz = new File(dir, input.getName() + "." + format + ".lz4");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             OutputStream os = Files.newOutputStream(outputSz.toPath());
-             OutputStream los = new CompressorStreamFactory().createCompressorOutputStream(format, os)) {
-            IOUtils.copy(is, los);
+        try (OutputStream os = Files.newOutputStream(outputSz.toPath());
+                OutputStream los = new CompressorStreamFactory().createCompressorOutputStream(format, os)) {
+            Files.copy(input.toPath(), los);
         }
         // System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
-        //    + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
+        // + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
         start = System.currentTimeMillis();
         try (InputStream is = Files.newInputStream(input.toPath());
-             InputStream sis = new CompressorStreamFactory()
-                 .createCompressorInputStream(format, Files.newInputStream(outputSz.toPath()))) {
+                InputStream sis = new CompressorStreamFactory().createCompressorInputStream(format, Files.newInputStream(outputSz.toPath()))) {
             final byte[] expected = IOUtils.toByteArray(is);
             final byte[] actual = IOUtils.toByteArray(sis);
             Assert.assertArrayEquals(expected, actual);
diff --git a/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorRoundtripTest.java b/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorRoundtripTest.java
index f39cd739..6ec30675 100644
--- a/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorRoundtripTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorRoundtripTest.java
@@ -85,15 +85,13 @@ public final class FramedLZ4CompressorRoundtripTest extends AbstractTestCase {
             expected = IOUtils.toByteArray(is);
         }
         final ByteArrayOutputStream bos = new ByteArrayOutputStream();
-        try (FramedLZ4CompressorOutputStream los = new FramedLZ4CompressorOutputStream(bos,
-            params)) {
+        try (FramedLZ4CompressorOutputStream los = new FramedLZ4CompressorOutputStream(bos, params)) {
             IOUtils.copy(new ByteArrayInputStream(expected), los);
         }
         // System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
-        //    + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
+        // + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
         start = System.currentTimeMillis();
-        try (FramedLZ4CompressorInputStream sis = new FramedLZ4CompressorInputStream(
-            new ByteArrayInputStream(bos.toByteArray()))) {
+        try (FramedLZ4CompressorInputStream sis = new FramedLZ4CompressorInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
             final byte[] actual = IOUtils.toByteArray(sis);
             Assert.assertArrayEquals(expected, actual);
         }
diff --git a/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java b/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java
index 6e38f6eb..b505d031 100644
--- a/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/pack200/Pack200UtilsTest.java
@@ -22,6 +22,7 @@ import java.io.File;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.nio.file.Files;
+import java.nio.file.StandardCopyOption;
 import java.util.HashMap;
 
 import org.apache.commons.compress.AbstractTestCase;
@@ -39,9 +40,8 @@ public final class Pack200UtilsTest extends AbstractTestCase {
         final File[] output = createTempDirAndFile();
         try {
             Pack200Utils.normalize(input, output[1], new HashMap<>());
-            try (InputStream is = Files.newInputStream(output[1].toPath())) {
-                final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT
-                        .createArchiveInputStream("jar", is);
+            try (InputStream is = Files.newInputStream(output[1].toPath());
+                    ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("jar", is)) {
 
                 ArchiveEntry entry = in.getNextEntry();
                 while (entry != null) {
@@ -52,9 +52,7 @@ public final class Pack200UtilsTest extends AbstractTestCase {
                         entry = in.getNextEntry();
                         continue;
                     }
-                    final OutputStream out = Files.newOutputStream(archiveEntry.toPath());
-                    IOUtils.copy(in, out);
-                    out.close();
+                    Files.copy(in, archiveEntry.toPath());
                     entry = in.getNextEntry();
                 }
 
@@ -71,23 +69,14 @@ public final class Pack200UtilsTest extends AbstractTestCase {
         final File input = getFile("bla.jar");
         final File[] output = createTempDirAndFile();
         try {
-            InputStream is = Files.newInputStream(input.toPath());
-            OutputStream os = null;
-            try {
-                os = Files.newOutputStream(output[1].toPath());
-                IOUtils.copy(is, os);
-            } finally {
-                is.close();
-                if (os != null) {
-                    os.close();
-                }
+            try (InputStream is = Files.newInputStream(input.toPath())) {
+                Files.copy(is, output[1].toPath(), StandardCopyOption.REPLACE_EXISTING);
             }
 
             Pack200Utils.normalize(output[1]);
-            is = Files.newInputStream(output[1].toPath());
-            try {
-                final ArchiveInputStream in = ArchiveStreamFactory.DEFAULT
-                    .createArchiveInputStream("jar", is);
+
+            try (InputStream is = Files.newInputStream(output[1].toPath());
+                    ArchiveInputStream in = ArchiveStreamFactory.DEFAULT.createArchiveInputStream("jar", is)) {
 
                 ArchiveEntry entry = in.getNextEntry();
                 while (entry != null) {
@@ -98,15 +87,9 @@ public final class Pack200UtilsTest extends AbstractTestCase {
                         entry = in.getNextEntry();
                         continue;
                     }
-                    final OutputStream out = Files.newOutputStream(archiveEntry.toPath());
-                    IOUtils.copy(in, out);
-                    out.close();
+                    Files.copy(in, archiveEntry.toPath());
                     entry = in.getNextEntry();
                 }
-
-                in.close();
-            } finally {
-                is.close();
             }
         } finally {
             output[1].delete();
diff --git a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java
index db7422de..342430df 100644
--- a/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStreamTest.java
@@ -65,10 +65,8 @@ public final class FramedSnappyCompressorInputStreamTest
     public void readIWAFile() throws Exception {
         try (ZipFile zip = new ZipFile(getFile("testNumbersNew.numbers"))) {
             try (InputStream is = zip.getInputStream(zip.getEntry("Index/Document.iwa"))) {
-                try (FramedSnappyCompressorInputStream in =
-                        new FramedSnappyCompressorInputStream(is, FramedSnappyDialect.IWORK_ARCHIVE);
-                        OutputStream out = Files.newOutputStream(new File(dir, "snappyIWATest.raw").toPath())) {
-                    IOUtils.copy(in, out);
+                try (FramedSnappyCompressorInputStream in = new FramedSnappyCompressorInputStream(is, FramedSnappyDialect.IWORK_ARCHIVE)) {
+                    Files.copy(in, new File(dir, "snappyIWATest.raw").toPath());
                 }
             }
         }
@@ -81,13 +79,10 @@ public final class FramedSnappyCompressorInputStreamTest
     public void readIWAFileWithBiggerOffset() throws Exception {
         final File o = new File(dir, "COMPRESS-358.raw");
         try (InputStream is = Files.newInputStream(getFile("COMPRESS-358.iwa").toPath());
-             FramedSnappyCompressorInputStream in =
-                 new FramedSnappyCompressorInputStream(is, 1<<16, FramedSnappyDialect.IWORK_ARCHIVE);
-            OutputStream out = Files.newOutputStream(o.toPath())) {
-            IOUtils.copy(in, out);
+                FramedSnappyCompressorInputStream in = new FramedSnappyCompressorInputStream(is, 1 << 16, FramedSnappyDialect.IWORK_ARCHIVE)) {
+            Files.copy(in, o.toPath());
         }
-        try (InputStream a = Files.newInputStream(o.toPath());
-             InputStream e = Files.newInputStream(getFile("COMPRESS-358.uncompressed").toPath())) {
+        try (InputStream a = Files.newInputStream(o.toPath()); InputStream e = Files.newInputStream(getFile("COMPRESS-358.uncompressed").toPath())) {
             final byte[] expected = IOUtils.toByteArray(e);
             final byte[] actual = IOUtils.toByteArray(a);
             assertArrayEquals(expected, actual);
@@ -143,37 +138,15 @@ public final class FramedSnappyCompressorInputStreamTest
         final File outputSz = new File(dir, "lorem-ipsum.1");
         final File outputGz = new File(dir, "lorem-ipsum.2");
         try (InputStream isSz = Files.newInputStream(getFile("lorem-ipsum.txt.sz").toPath())) {
-            InputStream in = new FramedSnappyCompressorInputStream(isSz);
-            OutputStream out = null;
-            try {
-                out = Files.newOutputStream(outputSz.toPath());
-                IOUtils.copy(in, out);
-            } finally {
-                if (out != null) {
-                    out.close();
-                }
-                in.close();
+            try (InputStream in = new FramedSnappyCompressorInputStream(isSz)) {
+                Files.copy(in, outputSz.toPath());
             }
-            try (InputStream isGz = Files.newInputStream(getFile("lorem-ipsum.txt.gz").toPath())) {
-                in = new GzipCompressorInputStream(isGz);
-                try {
-                    out = Files.newOutputStream(outputGz.toPath());
-                    IOUtils.copy(in, out);
-                } finally {
-                    if (out != null) {
-                        out.close();
-                    }
-                    in.close();
-                }
+            try (InputStream isGz = Files.newInputStream(getFile("lorem-ipsum.txt.gz").toPath()); InputStream in = new GzipCompressorInputStream(isGz)) {
+                Files.copy(in, outputGz.toPath());
             }
         }
 
-        try (InputStream sz = Files.newInputStream(outputSz.toPath())) {
-            try (InputStream gz = Files.newInputStream(outputGz.toPath())) {
-                assertArrayEquals(IOUtils.toByteArray(sz),
-                        IOUtils.toByteArray(gz));
-            }
-        }
+        assertArrayEquals(Files.readAllBytes(outputSz.toPath()), Files.readAllBytes(outputGz.toPath()));
     }
 
     @Test
diff --git a/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java b/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
index 96972bb5..651c78b6 100644
--- a/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
@@ -104,22 +104,20 @@ public final class SnappyRoundtripTest extends AbstractTestCase {
     private void roundTripTest(final File input, final Parameters params) throws IOException {
         long start = System.currentTimeMillis();
         final File outputSz = new File(dir, input.getName() + ".raw.sz");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             OutputStream os = Files.newOutputStream(outputSz.toPath());
-             SnappyCompressorOutputStream sos = new SnappyCompressorOutputStream(os, input.length(), params)) {
-            IOUtils.copy(is, sos);
+        try (OutputStream os = Files.newOutputStream(outputSz.toPath());
+                SnappyCompressorOutputStream sos = new SnappyCompressorOutputStream(os, input.length(), params)) {
+            Files.copy(input.toPath(), sos);
         }
         // System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
-        //    + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
+        // + ", compressed bytes: " + outputSz.length() + " after " + (System.currentTimeMillis() - start) + "ms");
         start = System.currentTimeMillis();
         try (InputStream is = Files.newInputStream(input.toPath());
-             SnappyCompressorInputStream sis = new SnappyCompressorInputStream(Files.newInputStream(outputSz.toPath()),
-                 params.getWindowSize())) {
+                SnappyCompressorInputStream sis = new SnappyCompressorInputStream(Files.newInputStream(outputSz.toPath()), params.getWindowSize())) {
             final byte[] expected = IOUtils.toByteArray(is);
             final byte[] actual = IOUtils.toByteArray(sis);
             Assert.assertArrayEquals(expected, actual);
         }
-        //System.err.println(outputSz.getName() + " read after " + (System.currentTimeMillis() - start) + "ms");
+        // System.err.println(outputSz.getName() + " read after " + (System.currentTimeMillis() - start) + "ms");
     }
 
     private void roundTripTest(final String testFile) throws IOException {
diff --git a/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java b/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java
index c94c2963..60a7fad3 100644
--- a/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdCompressorInputStreamTest.java
@@ -134,10 +134,8 @@ public class ZstdCompressorInputStreamTest extends AbstractTestCase {
         final File input = getFile("bla.tar.zst");
         final File output = new File(dir, "bla.tar");
         try (InputStream is = Files.newInputStream(input.toPath())) {
-            try (CompressorInputStream in = new CompressorStreamFactory()
-                    .createCompressorInputStream("zstd", is);
-                OutputStream out = Files.newOutputStream(output.toPath())) {
-                IOUtils.copy(in, out);
+            try (CompressorInputStream in = new CompressorStreamFactory().createCompressorInputStream("zstd", is)) {
+                Files.copy(in, output.toPath());
             }
         }
     }
diff --git a/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdRoundtripTest.java b/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdRoundtripTest.java
index beb422a2..e9a18e10 100644
--- a/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdRoundtripTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/zstandard/ZstdRoundtripTest.java
@@ -48,16 +48,14 @@ public class ZstdRoundtripTest extends AbstractTestCase {
         final File input = getFile("bla.tar");
         long start = System.currentTimeMillis();
         final File output = new File(dir, input.getName() + ".zstd");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             OutputStream os = Files.newOutputStream(output.toPath());
-             CompressorOutputStream zos = new CompressorStreamFactory().createCompressorOutputStream("zstd", os)) {
-            IOUtils.copy(is, zos);
+        try (OutputStream os = Files.newOutputStream(output.toPath());
+                CompressorOutputStream zos = new CompressorStreamFactory().createCompressorOutputStream("zstd", os)) {
+            Files.copy(input.toPath(), zos);
         }
         start = System.currentTimeMillis();
-        try (InputStream is = Files.newInputStream(input.toPath());
-             CompressorInputStream zis = new CompressorStreamFactory()
-             .createCompressorInputStream("zstd", Files.newInputStream(output.toPath()))) {
-            final byte[] expected = IOUtils.toByteArray(is);
+        try (final InputStream inputStream = Files.newInputStream(output.toPath());
+                CompressorInputStream zis = new CompressorStreamFactory().createCompressorInputStream("zstd", inputStream)) {
+            final byte[] expected = Files.readAllBytes(input.toPath());
             final byte[] actual = IOUtils.toByteArray(zis);
             Assert.assertArrayEquals(expected, actual);
         }
@@ -67,10 +65,9 @@ public class ZstdRoundtripTest extends AbstractTestCase {
         final File input = getFile("bla.tar");
         long start = System.currentTimeMillis();
         final File output = new File(dir, input.getName() + ".zstd");
-        try (InputStream is = Files.newInputStream(input.toPath());
-             FileOutputStream os = new FileOutputStream(output);
+        try (FileOutputStream os = new FileOutputStream(output);
              ZstdCompressorOutputStream zos = oc.wrap(os)) {
-            IOUtils.copy(is, zos);
+            Files.copy(input.toPath(), zos);
         }
         //System.err.println(input.getName() + " written, uncompressed bytes: " + input.length()
         //    + ", compressed bytes: " + output.length() + " after " + (System.currentTimeMillis() - start) + "ms");


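
As an aside (not part of the patch): the recurring change in the diff above replaces hand-rolled stream copying via IOUtils.copy with the JDK's java.nio.file.Files.copy overloads, which open and close the file-side stream themselves, so only the compressor stream still needs a try-with-resources block. A minimal sketch of the two directions, assuming an already open compressor stream (class and parameter names are illustrative only):

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.nio.file.StandardCopyOption;

    class FilesCopySketch {
        // File -> stream: Files.copy(Path, OutputStream) reads the input file itself,
        // so no separate Files.newInputStream(...) resource is needed.
        static void compress(Path input, OutputStream compressorOut) throws IOException {
            Files.copy(input, compressorOut);
        }

        // Stream -> file: Files.copy(InputStream, Path) creates the target file;
        // REPLACE_EXISTING is only needed when the target may already exist
        // (as in the Pack200UtilsTest change above).
        static void decompress(InputStream compressorIn, Path output) throws IOException {
            Files.copy(compressorIn, output, StandardCopyOption.REPLACE_EXISTING);
        }
    }
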
[commons-compress] 03/05: Replace hack with standard code

Posted by gg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit 32add381d9da3d5c738a1694887f48429e480f7b
Author: Gary Gregory <ga...@gmail.com>
AuthorDate: Sun Dec 11 01:51:18 2022 -0500

    Replace hack with standard code
    
    Fixes serialVersionUID warnings
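
For context, the hack being removed below is double-brace initialization: each expected map was built as an anonymous HashMap subclass populated in an instance initializer. Because HashMap is Serializable, every such anonymous subclass draws a missing-serialVersionUID warning; a plain HashMap filled with ordinary put calls does not. A minimal before/after sketch (names are illustrative only):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    class DoubleBraceSketch {
        // Before: double-brace initialization creates an anonymous, Serializable
        // HashMap subclass that declares no serialVersionUID -> compiler warning.
        static final Map<String, List<Long>> HACK = new HashMap<String, List<Long>>() {{
            put("lots-of-as", Arrays.asList(42L, 39L));
        }};

        // After: a plain HashMap populated with explicit put calls.
        static final Map<String, List<Long>> PLAIN = new HashMap<>();
        static {
            PLAIN.put("lots-of-as", Arrays.asList(42L, 39L));
        }
    }
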
---
 .../commons/compress/archivers/ZipTestCase.java    | 35 +++++++++-------------
 1 file changed, 14 insertions(+), 21 deletions(-)

diff --git a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
index b053d04f..209d3884 100644
--- a/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
+++ b/src/test/java/org/apache/commons/compress/archivers/ZipTestCase.java
@@ -269,53 +269,46 @@ public final class ZipTestCase extends AbstractTestCase {
 
     @Test
     public void inputStreamStatisticsForBzip2Entry() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("lots-of-as", Arrays.asList(42L, 39L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("lots-of-as", Arrays.asList(42L, 39L));
         testInputStreamStatistics("bzip2-zip.zip", expected);
     }
 
     @Test
     public void inputStreamStatisticsForDeflate64Entry() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("input2", Arrays.asList(3072L, 2111L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("input2", Arrays.asList(3072L, 2111L));
         testInputStreamStatistics("COMPRESS-380/COMPRESS-380.zip", expected);
     }
 
 
     @Test
     public void inputStreamStatisticsForImplodedEntry() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("LICENSE.TXT", Arrays.asList(11560L, 4131L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("LICENSE.TXT", Arrays.asList(11560L, 4131L));
         testInputStreamStatistics("imploding-8Kdict-3trees.zip", expected);
     }
 
-
     @Test
     public void inputStreamStatisticsForShrunkEntry() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("TEST1.XML", Arrays.asList(76L, 66L));
-            put("TEST2.XML", Arrays.asList(81L, 76L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("TEST1.XML", Arrays.asList(76L, 66L));
+        expected.put("TEST2.XML", Arrays.asList(81L, 76L));
         testInputStreamStatistics("SHRUNK.ZIP", expected);
     }
 
     @Test
     public void inputStreamStatisticsForStoredEntry() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("test.txt", Arrays.asList(5L, 5L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("test.txt", Arrays.asList(5L, 5L));
         testInputStreamStatistics("COMPRESS-264.zip", expected);
     }
 
     @Test
     public void inputStreamStatisticsOfZipBombExcel() throws IOException, ArchiveException {
-        final Map<String, List<Long>> expected = new HashMap<String, List<Long>>() {{
-            put("[Content_Types].xml", Arrays.asList(8390036L, 8600L));
-            put("xl/worksheets/sheet1.xml", Arrays.asList(1348L, 508L));
-        }};
+        final Map<String, List<Long>> expected = new HashMap<>();
+        expected.put("[Content_Types].xml", Arrays.asList(8390036L, 8600L));
+        expected.put("xl/worksheets/sheet1.xml", Arrays.asList(1348L, 508L));
         testInputStreamStatistics("zipbomb.xlsx", expected);
     }
 


[commons-compress] 04/05: Normalize formatting

Posted by gg...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git

commit da55b9247936c9bb4dbbbb40255443affe7e71ec
Author: Gary Gregory <ga...@gmail.com>
AuthorDate: Sun Dec 11 01:51:44 2022 -0500

    Normalize formatting
---
 .../archivers/zip/X5455_ExtendedTimestamp.java     | 56 ++++++++++++----------
 1 file changed, 32 insertions(+), 24 deletions(-)

diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java b/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
index d4a5d36c..b8964682 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
@@ -113,8 +113,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      * @return ZipLong
      */
     private static ZipLong dateToZipLong(final Date d) {
-        if (d == null) { return null; }
-
+        if (d == null) {
+            return null;
+        }
         return unixTimeToZipLong(d.getTime() / 1000);
     }
 
@@ -124,9 +125,11 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
         }
         return new ZipLong(l);
     }
+
     private static Date zipLongToDate(final ZipLong unixTime) {
         return unixTime != null ? new Date(unixTime.getIntValue() * 1000L) : null;
     }
+
     // The 3 boolean fields (below) come from this flags byte.  The remaining 5 bits
     // are ignored according to the current version of the spec (December 2012).
     private byte flags;
@@ -189,7 +192,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @return access time (seconds since epoch) or null.
      */
-    public ZipLong getAccessTime() { return accessTime; }
+    public ZipLong getAccessTime() {
+        return accessTime;
+    }
 
     /**
      * The actual data to put into central directory data - without Header-ID
@@ -216,9 +221,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      */
     @Override
     public ZipShort getCentralDirectoryLength() {
-        return new ZipShort(1 +
-                (bit0_modifyTimePresent ? 4 : 0)
-        );
+        return new ZipShort(1 + (bit0_modifyTimePresent ? 4 : 0));
     }
 
     /**
@@ -252,7 +255,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @return create time (seconds since epoch) or null.
      */
-    public ZipLong getCreateTime() { return createTime; }
+    public ZipLong getCreateTime() {
+        return createTime;
+    }
 
     /**
      * Gets flags byte.  The flags byte tells us which of the
@@ -369,7 +374,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @return true if bit0 of the flags byte is set.
      */
-    public boolean isBit0_modifyTimePresent() { return bit0_modifyTimePresent; }
+    public boolean isBit0_modifyTimePresent() {
+        return bit0_modifyTimePresent;
+    }
 
     /**
      * Returns whether bit1 of the flags byte is set or not,
@@ -378,7 +385,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @return true if bit1 of the flags byte is set.
      */
-    public boolean isBit1_accessTimePresent() { return bit1_accessTimePresent; }
+    public boolean isBit1_accessTimePresent() {
+        return bit1_accessTimePresent;
+    }
 
     /**
      * Returns whether bit2 of the flags byte is set or not,
@@ -387,16 +396,16 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @return true if bit2 of the flags byte is set.
      */
-    public boolean isBit2_createTimePresent() { return bit2_createTimePresent; }
+    public boolean isBit2_createTimePresent() {
+        return bit2_createTimePresent;
+    }
 
     /**
      * Doesn't do anything special since this class always uses the
      * same parsing logic for both central directory and local file data.
      */
     @Override
-    public void parseFromCentralDirectoryData(
-            final byte[] buffer, final int offset, final int length
-    ) throws ZipException {
+    public void parseFromCentralDirectoryData(final byte[] buffer, final int offset, final int length) throws ZipException {
         reset();
         parseFromLocalFileData(buffer, offset, length);
     }
@@ -410,9 +419,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      * @throws java.util.zip.ZipException on error
      */
     @Override
-    public void parseFromLocalFileData(
-            final byte[] data, int offset, final int length
-    ) throws ZipException {
+    public void parseFromLocalFileData(final byte[] data, int offset, final int length) throws ZipException {
         reset();
         if (length < 1) {
             throw new ZipException("X5455_ExtendedTimestamp too short, only " + length + " bytes");
@@ -463,7 +470,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @param d access time as java.util.Date
      */
-    public void setAccessJavaTime(final Date d) { setAccessTime(dateToZipLong(d)); }
+    public void setAccessJavaTime(final Date d) {
+        setAccessTime(dateToZipLong(d));
+    }
 
     /**
      * <p>
@@ -479,8 +488,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      */
     public void setAccessTime(final ZipLong l) {
         bit1_accessTimePresent = l != null;
-        flags = (byte) (l != null ? (flags | ACCESS_TIME_BIT)
-                        : (flags & ~ACCESS_TIME_BIT));
+        flags = (byte) (l != null ? (flags | ACCESS_TIME_BIT) : (flags & ~ACCESS_TIME_BIT));
         this.accessTime = l;
     }
 
@@ -513,8 +521,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      */
     public void setCreateTime(final ZipLong l) {
         bit2_createTimePresent = l != null;
-        flags = (byte) (l != null ? (flags | CREATE_TIME_BIT)
-                        : (flags & ~CREATE_TIME_BIT));
+        flags = (byte) (l != null ? (flags | CREATE_TIME_BIT) : (flags & ~CREATE_TIME_BIT));
         this.createTime = l;
     }
 
@@ -552,7 +559,9 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      *
      * @param d modify time as java.util.Date
      */
-    public void setModifyJavaTime(final Date d) { setModifyTime(dateToZipLong(d)); }
+    public void setModifyJavaTime(final Date d) {
+        setModifyTime(dateToZipLong(d));
+    }
 
     /**
      * <p>
@@ -568,8 +577,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
      */
     public void setModifyTime(final ZipLong l) {
         bit0_modifyTimePresent = l != null;
-        flags = (byte) (l != null ? (flags | MODIFY_TIME_BIT)
-                        : (flags & ~MODIFY_TIME_BIT));
+        flags = (byte) (l != null ? (flags | MODIFY_TIME_BIT) : (flags & ~MODIFY_TIME_BIT));
         this.modifyTime = l;
     }