You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@commons.apache.org by gg...@apache.org on 2023/06/16 18:04:18 UTC
[commons-compress] branch master updated: Sanitize grammar issues and typos (#393)
This is an automated email from the ASF dual-hosted git repository.
ggregory pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/commons-compress.git
The following commit(s) were added to refs/heads/master by this push:
new 9d3d5425 Sanitize grammar issues and typos (#393)
9d3d5425 is described below
commit 9d3d54253d797b416bc20d07593af4a12b2d2c5f
Author: Martin Wiesner <ma...@users.noreply.github.com>
AuthorDate: Fri Jun 16 20:04:13 2023 +0200
Sanitize grammar issues and typos (#393)
* sanitizes grammar issues and typos
* No need to abbreviate in doc.
* Grammar
---------
Co-authored-by: Gary Gregory <ga...@users.noreply.github.com>
---
.../archivers/cpio/CpioArchiveInputStream.java | 2 +-
.../compress/archivers/dump/DumpArchiveSummary.java | 4 ++--
.../compress/archivers/sevenz/AES256SHA256Decoder.java | 2 +-
.../commons/compress/archivers/sevenz/Folder.java | 2 +-
.../commons/compress/archivers/sevenz/SevenZFile.java | 10 +++++-----
.../compress/archivers/tar/TarArchiveEntry.java | 4 ++--
.../compress/archivers/tar/TarArchiveInputStream.java | 4 ++--
.../compress/archivers/tar/TarArchiveOutputStream.java | 2 +-
.../commons/compress/archivers/tar/TarConstants.java | 4 ++--
.../apache/commons/compress/archivers/tar/TarFile.java | 4 ++--
.../commons/compress/archivers/tar/TarUtils.java | 12 ++++++------
.../compress/archivers/zip/ExtraFieldUtils.java | 2 +-
.../commons/compress/archivers/zip/NioZipEncoding.java | 2 +-
.../archivers/zip/ParallelScatterZipCreator.java | 4 ++--
.../compress/archivers/zip/ScatterZipOutputStream.java | 6 +++---
.../compress/archivers/zip/StreamCompressor.java | 2 +-
.../archivers/zip/X5455_ExtendedTimestamp.java | 2 +-
.../commons/compress/archivers/zip/X7875_NewUnix.java | 4 ++--
.../compress/archivers/zip/ZipArchiveEntryRequest.java | 2 +-
.../compress/archivers/zip/ZipArchiveInputStream.java | 18 +++++++++---------
.../compress/archivers/zip/ZipArchiveOutputStream.java | 6 +++---
.../commons/compress/archivers/zip/ZipEncoding.java | 2 +-
.../apache/commons/compress/archivers/zip/ZipFile.java | 14 +++++++-------
.../zip/ZipSplitReadOnlySeekableByteChannel.java | 8 ++++----
.../commons/compress/changes/ChangeSetPerformer.java | 10 +++++-----
.../commons/compress/changes/ChangeSetResults.java | 4 ++--
.../compress/compressors/CompressorStreamFactory.java | 4 ++--
.../compress/compressors/CompressorStreamProvider.java | 2 +-
.../lz4/FramedLZ4CompressorInputStream.java | 2 +-
.../compressors/lz77support/LZ77Compressor.java | 6 +++---
.../compress/compressors/pack200/Pack200Utils.java | 8 ++++----
.../commons/compress/harmony/pack200/NewAttribute.java | 4 ++--
.../compress/harmony/pack200/NewAttributeBands.java | 4 ++--
.../compress/harmony/pack200/SegmentHeader.java | 2 +-
.../compress/harmony/unpack200/NewAttributeBands.java | 9 +++++----
.../commons/compress/harmony/unpack200/Segment.java | 4 ++--
.../unpack200/SegmentConstantPoolArrayCache.java | 2 +-
.../harmony/unpack200/bytecode/ClassConstantPool.java | 2 +-
.../bytecode/forms/SingleByteReferenceForm.java | 2 +-
.../apache/commons/compress/java/util/jar/Pack200.java | 6 +++---
.../apache/commons/compress/utils/ArchiveUtils.java | 2 +-
.../compress/utils/FixedLengthBlockOutputStream.java | 2 +-
.../org/apache/commons/compress/AbstractTestCase.java | 2 +-
.../archivers/examples/SevenZArchiverTest.java | 2 +-
.../archivers/tar/TarArchiveOutputStreamTest.java | 12 ++++++------
.../archivers/zip/ParallelScatterZipCreatorTest.java | 2 +-
.../commons/compress/archivers/zip/X000A_NTFSTest.java | 6 +++---
.../archivers/zip/X5455_ExtendedTimestampTest.java | 2 +-
.../compress/archivers/zip/X7875_NewUnixTest.java | 2 +-
.../commons/compress/archivers/zip/Zip64SupportIT.java | 8 ++++----
.../archivers/zip/ZipSplitOutputStreamTest.java | 2 +-
.../commons/compress/changes/ChangeSetTestCase.java | 10 +++++-----
.../commons/compress/compressors/Pack200TestCase.java | 2 +-
.../compressors/snappy/SnappyRoundtripTest.java | 2 +-
.../harmony/pack200/tests/NewAttributeBandsTest.java | 2 +-
55 files changed, 126 insertions(+), 125 deletions(-)
diff --git a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
index 222f3144..862f8ef1 100644
--- a/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java
@@ -37,7 +37,7 @@ import org.apache.commons.compress.utils.IOUtils;
*
* <p>
* The stream can be read by extracting a cpio entry (containing all
- * informations about a entry) and afterwards reading from the stream the file
+ * information about an entry) and afterwards reading from the stream the file
* specified by the entry.
* </p>
* <pre>
diff --git a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
index 57e2341d..519104a2 100644
--- a/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
+++ b/src/main/java/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java
@@ -126,8 +126,8 @@ public class DumpArchiveSummary {
}
/**
- * Get dump label. This may be autogenerated or it may be specified
- * bu the user.
+ * Get dump label. This may be autogenerated, or it may be specified
+ * by the user.
* @return dump label
*/
public String getLabel() {
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
index 179393ee..197b8f11 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/AES256SHA256Decoder.java
@@ -209,7 +209,7 @@ class AES256SHA256Decoder extends AbstractCoder {
flushBuffer();
if (len - gap >= cipherBlockSize) {
- // skip buffer to encrypt data chunks big enought to fit cipher block size
+ // skip buffer to encrypt data chunks big enough to fit cipher block size
final int multipleCipherBlockSizeLen = (len - gap) / cipherBlockSize * cipherBlockSize;
cipherOutputStream.write(b, off + gap, multipleCipherBlockSizeLen);
gap += multipleCipherBlockSizeLen;
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
index 85c901a5..e2020153 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/Folder.java
@@ -25,7 +25,7 @@ import java.util.LinkedList;
*/
class Folder {
static final Folder[] EMPTY_FOLDER_ARRAY = {};
- /// List of coders used in this folder, eg. one for compression, one for encryption.
+ /// List of coders used in this folder, e.g. one for compression, one for encryption.
Coder[] coders;
/// Total number of input streams across all coders.
/// this field is currently unused but technically part of the 7z API
diff --git a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
index 2b405cf8..ddcb0d1c 100644
--- a/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
+++ b/src/main/java/org/apache/commons/compress/archivers/sevenz/SevenZFile.java
@@ -417,7 +417,7 @@ public class SevenZFile implements Closeable {
}
/**
- * Reads a SeekableByteChannel as 7z archive with addtional options.
+ * Reads a SeekableByteChannel as 7z archive with additional options.
*
* <p>{@link
* org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
@@ -511,7 +511,7 @@ public class SevenZFile implements Closeable {
}
/**
- * Reads a SeekableByteChannel as 7z archive with addtional options.
+ * Reads a SeekableByteChannel as 7z archive with additional options.
*
* <p>{@link
* org.apache.commons.compress.utils.SeekableInMemoryByteChannel}
@@ -780,10 +780,10 @@ public class SevenZFile implements Closeable {
* then its command line and GUI tools will use this default name
* when extracting the entries.</p>
*
- * @return null if the name of the archive is unknown. Otherwise
+ * @return null if the name of the archive is unknown. Otherwise,
* if the name of the archive has got any extension, it is
- * stripped and the remainder returned. Finally if the name of the
- * archive hasn't got any extension then a {@code ~} character is
+ * stripped and the remainder returned. Finally, if the name of the
+ * archive hasn't got any extension, then a {@code ~} character is
* appended to the archive name.
*
* @since 1.19
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
index 8d897831..774bf9b7 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
@@ -1093,7 +1093,7 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants, EntryStreamO
/**
* Get this entry's file size.
*
- * <p>This is the size the entry's data uses inside of the archive. Usually this is the same as {@link
+ * <p>This is the size the entry's data uses inside the archive. Usually this is the same as {@link
* #getRealSize}, but it doesn't take the "holes" into account when the entry represents a sparse file.
*
* @return This entry's file size.
@@ -1290,7 +1290,7 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants, EntryStreamO
}
private boolean isInvalidPrefix(final byte[] header) {
- // prefix[130] is is guaranteed to be '\0' with XSTAR/XUSTAR
+ // prefix[130] is guaranteed to be '\0' with XSTAR/XUSTAR
if (header[XSTAR_PREFIX_OFFSET + 130] != 0) {
// except when typeflag is 'M'
if (header[LF_OFFSET] != LF_MULTIVOLUME) {
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
index ce6eb643..20a10b38 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
@@ -290,7 +290,7 @@ public class TarArchiveInputStream extends ArchiveInputStream {
for (final TarArchiveStructSparse sparseHeader : sparseHeaders) {
final long zeroBlockSize = sparseHeader.getOffset() - offset;
if (zeroBlockSize < 0) {
- // sparse header says to move backwards inside of the extracted entry
+ // sparse header says to move backwards inside the extracted entry
throw new IOException("Corrupted struct sparse detected");
}
@@ -909,7 +909,7 @@ public class TarArchiveInputStream extends ArchiveInputStream {
}
/**
- * Tries to read the next record rewinding the stream if it is not a EOF record.
+ * Tries to read the next record rewinding the stream if it is not an EOF record.
*
* <p>This is meant to protect against cases where a tar
* implementation has written only one EOF record when two are
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
index bc98332e..5e71093c 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
@@ -386,7 +386,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
// or where UTF-8 encoding isn't a single octet
// per character.
// Must be in loop as size may go from 99 to 100 in
- // first pass so we'd need a second.
+ // first pass, so we'd need a second.
len = actualLength;
line = len + " " + k + "=" + v + "\n";
actualLength = line.getBytes(UTF_8).length;
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarConstants.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarConstants.java
index cc429da8..34055afc 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarConstants.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarConstants.java
@@ -334,12 +334,12 @@ public interface TarConstants {
String MAGIC_GNU = "ustar ";
/**
- * One of two two possible GNU versions
+ * One of two possible GNU versions
*/
String VERSION_GNU_SPACE = " \0";
/**
- * One of two two possible GNU versions
+ * One of two possible GNU versions
*/
String VERSION_GNU_ZERO = "0\0";
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
index ba418a41..5ca05ded 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarFile.java
@@ -349,7 +349,7 @@ public class TarFile implements Closeable {
for (final TarArchiveStructSparse sparseHeader : sparseHeaders) {
final long zeroBlockSize = sparseHeader.getOffset() - offset;
if (zeroBlockSize < 0) {
- // sparse header says to move backwards inside of the extracted entry
+ // sparse header says to move backwards inside the extracted entry
throw new IOException("Corrupted struct sparse detected");
}
@@ -730,7 +730,7 @@ public class TarFile implements Closeable {
/**
* Tries to read the next record resetting the position in the
- * archive if it is not a EOF record.
+ * archive if it is not an EOF record.
*
* <p>This is meant to protect against cases where a tar
* implementation has written only one EOF record when two are
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
index 323f47eb..3f8957c0 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarUtils.java
@@ -208,7 +208,7 @@ public class TarUtils {
}
/**
- * Write an long integer into a buffer as an octal string if this
+ * Write a long integer into a buffer as an octal string if this
* will fit, or as a binary number otherwise.
*
* Uses {@link #formatUnsignedOctalString} to format
@@ -323,7 +323,7 @@ public class TarUtils {
* @param buf The buffer to receive the output
* @param offset The starting offset into the buffer
* @param length The size of the output buffer
- * @return The updated offset, i.e offset+length
+ * @return The updated offset, i.e. offset+length
* @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer
*/
public static int formatOctalBytes(final long value, final byte[] buf, final int offset, final int length) {
@@ -545,7 +545,7 @@ public class TarUtils {
* @param offset The offset into the buffer from which to parse.
* @param length The maximum number of bytes to parse - must be at least 2 bytes.
* @return The long value of the octal string.
- * @throws IllegalArgumentException if the trailing space/NUL is missing or if a invalid byte is detected.
+ * @throws IllegalArgumentException if the trailing space/NUL is missing or if an invalid byte is detected.
*/
public static long parseOctal(final byte[] buffer, final int offset, final int length) {
long result = 0;
@@ -710,7 +710,7 @@ public class TarUtils {
* @param sparseHeaders used in PAX Format 0.0 & 0.1, as it may appear multiple times,
* the sparse headers need to be stored in an array, not a map
* @param globalPaxHeaders global PAX headers of the tar archive
- * @return map of PAX headers values found inside of the current (local or global) PAX headers tar entry.
+ * @return map of PAX headers values found inside the current (local or global) PAX headers tar entry.
* @throws IOException if an I/O error occurs.
* @deprecated use the four-arg version instead
*/
@@ -741,7 +741,7 @@ public class TarUtils {
* the sparse headers need to be stored in an array, not a map
* @param globalPaxHeaders global PAX headers of the tar archive
* @param headerSize total size of the PAX header, will be ignored if negative
- * @return map of PAX headers values found inside of the current (local or global) PAX headers tar entry.
+ * @return map of PAX headers values found inside the current (local or global) PAX headers tar entry.
* @throws IOException if an I/O error occurs.
* @since 1.21
*/
@@ -802,7 +802,7 @@ public class TarUtils {
// for 0.0 PAX Headers
if (keyword.equals(TarGnuSparseKeys.OFFSET)) {
if (offset != null) {
- // previous GNU.sparse.offset header but but no numBytes
+ // previous GNU.sparse.offset header but no numBytes
sparseHeaders.add(new TarArchiveStructSparse(offset, 0));
}
try {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java
index a1a9fb68..c2fc05fc 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java
@@ -25,7 +25,7 @@ import java.util.zip.ZipException;
/**
* ZipExtraField related methods
- * @NotThreadSafe because the HashMap is not synch.
+ * @NotThreadSafe because the HashMap is not synchronized.
*/
// CheckStyle:HideUtilityClassConstructorCheck OFF (bc)
public class ExtraFieldUtils {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/NioZipEncoding.java b/src/main/java/org/apache/commons/compress/archivers/zip/NioZipEncoding.java
index fb32bc19..5f5695f4 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/NioZipEncoding.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/NioZipEncoding.java
@@ -151,7 +151,7 @@ class NioZipEncoding implements ZipEncoding, CharsetAccessor {
final int spaceForSurrogate = estimateIncrementalEncodingSize(enc, 6 * res.length());
if (spaceForSurrogate > out.remaining()) {
- // if the destination buffer isn't over sized, assume that the presence of one
+ // if the destination buffer isn't oversized, assume that the presence of one
// unmappable character makes it likely that there will be more. Find all the
// un-encoded characters and allocate space based on those estimates.
int charCount = 0;
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java
index cc555547..90fbaf5c 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreator.java
@@ -178,7 +178,7 @@ public class ParallelScatterZipCreator {
*
* @param zipArchiveEntry The entry to add.
* @param source The source input stream supplier
- * @return A callable that should subsequently passed to #submitStreamAwareCallable, possibly in a wrapped/adapted from. The
+ * @return A callable that should subsequently be passed to #submitStreamAwareCallable, possibly in a wrapped/adapted form. The
* value of this callable is not used, but any exceptions happening inside the compression
* will be propagated through the callable.
*/
@@ -208,7 +208,7 @@ public class ParallelScatterZipCreator {
* @see #createCallable(ZipArchiveEntry, InputStreamSupplier)
*
* @param zipArchiveEntryRequestSupplier Should supply the entry to be added.
- * @return A callable that should subsequently passed to #submitStreamAwareCallable, possibly in a wrapped/adapted from. The
+ * @return A callable that should subsequently be passed to #submitStreamAwareCallable, possibly in a wrapped/adapted form. The
* value of this callable is not used, but any exceptions happening inside the compression
* will be propagated through the callable.
* @since 1.13
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java
index 9b174c91..db431427 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ScatterZipOutputStream.java
@@ -64,9 +64,9 @@ public class ScatterZipOutputStream implements Closeable {
}
/**
- * Updates the original {@link ZipArchiveEntry} with sizes/crc
- * Do not use this methods from threads that did not create the instance itself !
- * @return the zipArchiveEntry that is basis for this request
+ * Updates the original {@link ZipArchiveEntry} with sizes/crc.
+ * Do not use this method from threads that did not create the instance itself!
+ * @return the zipArchiveEntry that is the basis for this request.
*/
public ZipArchiveEntry transferToArchiveEntry() {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java
index 440193d2..4f21b452 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/StreamCompressor.java
@@ -100,7 +100,7 @@ public abstract class StreamCompressor implements Closeable {
}
/*
* Apparently Deflater.setInput gets slowed down a lot on Sun JVMs
- * when it gets handed a really big buffer. See
+ * when it gets handed a huge buffer. See
* https://issues.apache.org/bugzilla/show_bug.cgi?id=45396
*
* Using a buffer size of 8 kB proved to be a good compromise
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java b/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
index def84e35..655092a0 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java
@@ -144,7 +144,7 @@ public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serial
private static FileTime unixTimeToFileTime(final ZipLong unixTime) {
return unixTime != null ? TimeUtils.unixTimeToFileTime(unixTime.getIntValue()) : null;
}
- // The 3 boolean fields (below) come from this flags byte. The remaining 5 bits
+ // The 3 boolean fields (below) come from this flag's byte. The remaining 5 bits
// are ignored according to the current version of the spec (December 2012).
private static ZipLong unixTimeToZipLong(final long unixTime) {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java b/src/main/java/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java
index 00b96df7..b02416d8 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java
@@ -118,7 +118,7 @@ public class X7875_NewUnix implements ZipExtraField, Cloneable, Serializable {
exactly like InfoZip (requires changes to unit tests, though).
And I am sorry that the time you spent reading this comment is now
- gone and you can never have it back.
+ gone, and you can never have it back.
*/
final int MIN_LENGTH = 1;
@@ -281,7 +281,7 @@ public class X7875_NewUnix implements ZipExtraField, Cloneable, Serializable {
@Override
public int hashCode() {
int hc = -1234567 * version;
- // Since most UID's and GID's are below 65,536, this is (hopefully!)
+ // Since most UIDs and GIDs are below 65,536, this is (hopefully!)
// a nice way to make sure typical UID and GID values impact the hash
// as much as possible.
hc ^= Integer.rotateLeft(uid.hashCode(), 16);
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java
index 37a0a28f..de2d6bf4 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveEntryRequest.java
@@ -72,7 +72,7 @@ public class ZipArchiveEntryRequest {
/**
- * Gets the underlying entry. Do not use this methods from threads that did not create the instance itself !
+ * Gets the underlying entry. Do not use this method from threads that did not create the instance itself!
* @return the zipArchiveEntry that is basis for this request
*/
ZipArchiveEntry getZipArchiveEntry() {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
index 499bb6e5..cb7eedca 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java
@@ -329,8 +329,8 @@ public class ZipArchiveInputStream extends ArchiveInputStream implements InputSt
/**
* Whether the stream will try to read STORED entries that use a data descriptor.
- * Setting it to true means we will not stop reading a entry with the compressed
- * size, instead we will stoping reading a entry when a data descriptor is met(by
+ * Setting it to true means we will not stop reading an entry with the compressed
+ * size, instead we will stop reading an entry when a data descriptor is met (by
* finding the Data Descriptor Signature). This will completely break down in some
* cases - like JARs in WARs.
* <p>
@@ -455,7 +455,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream implements InputSt
if (i >= expectedDDLen &&
(buf.array()[i + 2] == LFH[2] && buf.array()[i + 3] == LFH[3])
|| (buf.array()[i + 2] == CFH[2] && buf.array()[i + 3] == CFH[3])) {
- // found a LFH or CFH:
+ // found an LFH or CFH:
expectDDPos = i - expectedDDLen;
done = true;
}
@@ -486,12 +486,12 @@ public class ZipArchiveInputStream extends ArchiveInputStream implements InputSt
* <p>Data descriptor plus incomplete signature (3 bytes in the
* worst case) can be 20 bytes max.</p>
*/
- private int cacheBytesRead(final ByteArrayOutputStream bos, int offset, final int lastRead, final int expecteDDLen) {
- final int cacheable = offset + lastRead - expecteDDLen - 3;
+ private int cacheBytesRead(final ByteArrayOutputStream bos, int offset, final int lastRead, final int expectedDDLen) {
+ final int cacheable = offset + lastRead - expectedDDLen - 3;
if (cacheable > 0) {
bos.write(buf.array(), 0, cacheable);
- System.arraycopy(buf.array(), cacheable, buf.array(), 0, expecteDDLen + 3);
- offset = expecteDDLen + 3;
+ System.arraycopy(buf.array(), cacheable, buf.array(), 0, expectedDDLen + 3);
+ offset = expectedDDLen + 3;
} else {
offset += lastRead;
}
@@ -886,7 +886,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream implements InputSt
* @param suspectLocalFileHeader the bytes read from the underlying stream in the expectation that they would hold
* the local file header of the next entry.
*
- * @return true if this looks like a APK signing block
+ * @return true if this looks like an APK signing block
*
* @see <a href="https://source.android.com/security/apksigning/v2">https://source.android.com/security/apksigning/v2</a>
*/
@@ -1358,7 +1358,7 @@ public class ZipArchiveInputStream extends ArchiveInputStream implements InputSt
private void skipRemainderOfArchive() throws IOException {
// skip over central directory. One LFH has been read too much
// already. The calculation discounts file names and extra
- // data so it will be too short.
+ // data, so it will be too short.
if (entriesRead > 0) {
realSkip((long) entriesRead * CFH_LEN - LFH_LEN);
final boolean foundEocd = findEocdRecord();
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java
index d0f2128a..4a76a057 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java
@@ -1395,7 +1395,7 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream {
channel.position(entry.localDataStart - 5 * ZipConstants.SHORT);
writeOut(ZipShort.getBytes(versionNeededToExtract(entry.entry.getMethod(), false, false)));
- // * remove ZIP64 extra so it doesn't get written
+ // * remove ZIP64 extra, so it doesn't get written
// to the central directory
entry.entry.removeExtraField(Zip64ExtendedInformationExtraField
.HEADER_ID);
@@ -1895,7 +1895,7 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Write preamble data. For most of time, this is used to
+ * Write preamble data. For most of the time, this is used to
* make self-extracting zips.
*
* @param preamble data to write
@@ -1907,7 +1907,7 @@ public class ZipArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Write preamble data. For most of time, this is used to
+ * Write preamble data. For most of the time, this is used to
* make self-extracting zips.
*
* @param preamble data to write
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java
index 88936188..ebc868cd 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipEncoding.java
@@ -47,7 +47,7 @@ public interface ZipEncoding {
* encoding.
*
* @param name A file name or ZIP comment.
- * @return Whether the given name may be encoded with out any losses.
+ * @return Whether the given name may be encoded without any losses.
*/
boolean canEncode(String name);
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java
index 19f08f16..47861753 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipFile.java
@@ -356,7 +356,7 @@ public class ZipFile implements Closeable {
.thenComparingLong(ZipArchiveEntry::getLocalHeaderOffset);
/**
- * Closes a ZIP file quietly; throwing no IOException, dooes nothing
+ * Closes a ZIP file quietly; throwing no IOException, does nothing
* on null input.
* @param zipFile file to close, can be null
*/
@@ -792,7 +792,7 @@ public class ZipFile implements Closeable {
private void fillNameMap() {
entries.forEach(ze -> {
- // entries is filled in populateFromCentralDirectory and
+ // entries are filled in populateFromCentralDirectory and
// never modified
final String name = ze.getName();
final LinkedList<ZipArchiveEntry> entriesOfThatName = nameMap.computeIfAbsent(name, k -> new LinkedList<>());
@@ -820,7 +820,7 @@ public class ZipFile implements Closeable {
* Gets an InputStream for reading the content before the first local file header.
*
* @return null if there is no content before the first local file header.
- * Otherwise returns a stream to read the content before the first local file header.
+ * Otherwise, returns a stream to read the content before the first local file header.
* @since 1.23
*/
public InputStream getContentBeforeFirstLocalFileHeader() {
@@ -1063,7 +1063,7 @@ public class ZipFile implements Closeable {
* the central directory alone, but not the data that requires the
* local file header or additional data to be read.</p>
*
- * @return a map of zipentries that didn't have the language
+ * @return a map of zip entries that didn't have the language
* encoding flag set when read.
*/
private Map<ZipArchiveEntry, NameAndComment> populateFromCentralDirectory()
@@ -1238,7 +1238,7 @@ public class ZipFile implements Closeable {
}
/**
- * Reads an individual entry of the central directory, creats an
+ * Reads an individual entry of the central directory, creates a
* ZipArchiveEntry from it and adds it to the global maps.
*
* @param noUTF8Flag map used to collect entries that don't have
@@ -1376,7 +1376,7 @@ public class ZipFile implements Closeable {
entriesWithoutUTF8Flag)
throws IOException {
for (final ZipArchiveEntry zipArchiveEntry : entries) {
- // entries is filled in populateFromCentralDirectory and
+ // entries are filled in populateFromCentralDirectory and
// never modified
final Entry ze = (Entry) zipArchiveEntry;
final int[] lens = setDataOffset(ze);
@@ -1524,7 +1524,7 @@ public class ZipFile implements Closeable {
}
/**
- * Checks whether the archive starts with a LFH. If it doesn't,
+ * Checks whether the archive starts with an LFH. If it doesn't,
* it may be an empty archive.
*/
private boolean startsWithLocalFileHeader() throws IOException {
diff --git a/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitReadOnlySeekableByteChannel.java b/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitReadOnlySeekableByteChannel.java
index 14d41581..47b06e59 100644
--- a/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitReadOnlySeekableByteChannel.java
+++ b/src/main/java/org/apache/commons/compress/archivers/zip/ZipSplitReadOnlySeekableByteChannel.java
@@ -123,7 +123,7 @@ public class ZipSplitReadOnlySeekableByteChannel extends MultiReadOnlySeekableBy
* Concatenates the given files.
*
* @param files the files to concatenate, note that the LAST FILE of files should be the LAST SEGMENT(.zip)
- * and theses files should be added in correct order (e.g. .z01, .z02... .z99, .zip)
+ * and these files should be added in correct order (e.g. .z01, .z02... .z99, .zip)
* @return SeekableByteChannel that concatenates all provided files
* @throws NullPointerException if files is null
* @throws IOException if opening a channel for one of the files fails
@@ -144,7 +144,7 @@ public class ZipSplitReadOnlySeekableByteChannel extends MultiReadOnlySeekableBy
*
* @param lastSegmentFile the last segment of split ZIP segments, its extension should be .zip
* @param files the files to concatenate except for the last segment,
- * note theses files should be added in correct order (e.g. .z01, .z02... .z99)
+ * note these files should be added in correct order (e.g. .z01, .z02... .z99)
* @return SeekableByteChannel that concatenates all provided files
* @throws IOException if the first channel doesn't seem to hold
* the beginning of a split archive
@@ -164,7 +164,7 @@ public class ZipSplitReadOnlySeekableByteChannel extends MultiReadOnlySeekableBy
* Concatenates the given channels.
*
* @param channels the channels to concatenate, note that the LAST CHANNEL of channels should be the LAST SEGMENT(.zip)
- * and theses channels should be added in correct order (e.g. .z01, .z02... .z99, .zip)
+ * and these channels should be added in correct order (e.g. .z01, .z02... .z99, .zip)
* @return SeekableByteChannel that concatenates all provided channels
* @throws NullPointerException if channels is null
* @throws IOException if reading channels fails
@@ -181,7 +181,7 @@ public class ZipSplitReadOnlySeekableByteChannel extends MultiReadOnlySeekableBy
*
* @param lastSegmentChannel channel of the last segment of split ZIP segments, its extension should be .zip
* @param channels the channels to concatenate except for the last segment,
- * note theses channels should be added in correct order (e.g. .z01, .z02... .z99)
+ * note these channels should be added in correct order (e.g. .z01, .z02... .z99)
* @return SeekableByteChannel that concatenates all provided channels
* @throws NullPointerException if lastSegmentChannel or channels is null
* @throws IOException if the first channel doesn't seem to hold
diff --git a/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java b/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java
index 25402923..05cd38eb 100644
--- a/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java
+++ b/src/main/java/org/apache/commons/compress/changes/ChangeSetPerformer.java
@@ -133,12 +133,12 @@ public class ChangeSetPerformer {
/**
* Checks if an ArchiveEntry is deleted later in the ChangeSet. This is
- * necessary if an file is added with this ChangeSet, but later became
+ * necessary if a file is added with this ChangeSet, but later became
* deleted in the same set.
*
* @param entry
* the entry to check
- * @return true, if this entry has an deletion change later, false otherwise
+ * @return true, if this entry has a deletion change later, false otherwise
*/
private boolean isDeletedLater(final Set<Change> workingSet, final ArchiveEntry entry) {
final String source = entry.getName();
@@ -171,7 +171,7 @@ public class ChangeSetPerformer {
* @param out
* the resulting OutputStream with all modifications
* @throws IOException
- * if an read/write error occurs
+ * if a read/write error occurs
* @return the results of this operation
*/
private ChangeSetResults perform(final ArchiveEntryIterator entryIterator,
@@ -253,7 +253,7 @@ public class ChangeSetPerformer {
* @param out
* the resulting OutputStream with all modifications
* @throws IOException
- * if an read/write error occurs
+ * if a read/write error occurs
* @return the results of this operation
*/
public ChangeSetResults perform(final ArchiveInputStream in, final ArchiveOutputStream out)
@@ -273,7 +273,7 @@ public class ChangeSetPerformer {
* @param out
* the resulting OutputStream with all modifications
* @throws IOException
- * if an read/write error occurs
+ * if a read/write error occurs
* @return the results of this operation
* @since 1.5
*/
diff --git a/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java b/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java
index af61f5f7..c63de74a 100644
--- a/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java
+++ b/src/main/java/org/apache/commons/compress/changes/ChangeSetResults.java
@@ -22,7 +22,7 @@ import java.util.ArrayList;
import java.util.List;
/**
- * Stores the results of an performed ChangeSet operation.
+ * Stores the results of a performed ChangeSet operation.
*/
public class ChangeSetResults {
private final List<String> addedFromChangeSet = new ArrayList<>();
@@ -80,7 +80,7 @@ public class ChangeSetResults {
}
/**
- * Checks if an file name already has been added to the result list
+ * Checks if a file name already has been added to the result list
* @param fileName the file name to check
* @return true, if this file name already has been added
*/
diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java
index 84fee104..302f6fc8 100644
--- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java
+++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamFactory.java
@@ -505,7 +505,7 @@ public class CompressorStreamFactory implements CompressorStreamProvider {
this.memoryLimitInKb = memoryLimitInKb;
}
/**
- * Create an compressor input stream from an input stream, autodetecting the
+ * Create a compressor input stream from an input stream, auto-detecting the
* compressor type from the first few bytes of the stream. The InputStream
* must support marks, like BufferedInputStream.
*
@@ -637,7 +637,7 @@ public class CompressorStreamFactory implements CompressorStreamProvider {
}
/**
- * Creates an compressor output stream from an compressor name and an output
+ * Creates a compressor output stream from a compressor name and an output
* stream.
*
* @param name
diff --git a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java
index b0c84312..8555d53c 100644
--- a/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java
+++ b/src/main/java/org/apache/commons/compress/compressors/CompressorStreamProvider.java
@@ -64,7 +64,7 @@ public interface CompressorStreamProvider {
final boolean decompressUntilEOF) throws CompressorException;
/**
- * Creates a compressor output stream from an compressor name and an output
+ * Creates a compressor output stream from a compressor name and an output
* stream.
*
* @param name
diff --git a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java
index 52ba760e..7bd1961d 100644
--- a/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java
+++ b/src/main/java/org/apache/commons/compress/compressors/lz4/FramedLZ4CompressorInputStream.java
@@ -288,7 +288,7 @@ public class FramedLZ4CompressorInputStream extends CompressorInputStream
throw new IOException("Premature end of stream while reading frame BD byte");
}
contentHash.update(bdByte);
- if (expectContentSize) { // for now we don't care, contains the uncompressed size
+ if (expectContentSize) { // for now, we don't care, contains the uncompressed size
final byte[] contentSize = new byte[8];
final int skipped = IOUtils.readFully(inputStream, contentSize);
count(skipped);
diff --git a/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java b/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java
index ee50cd5a..448885df 100644
--- a/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java
+++ b/src/main/java/org/apache/commons/compress/compressors/lz77support/LZ77Compressor.java
@@ -244,8 +244,8 @@ public class LZ77Compressor {
// the hash of the three bytes stating at the current position
private int insertHash;
- // the position inside of the window where the current literal
- // block starts (in case we are inside of a literal block).
+ // the position inside the window where the current literal
+ // block starts (in case we are inside a literal block).
private int blockStart;
// position of the current match
@@ -303,7 +303,7 @@ public class LZ77Compressor {
}
if (matchLength >= minMatch) {
if (blockStart != currentPosition) {
- // emit preceeding literal block
+ // emit preceding literal block
flushLiteralBlock();
blockStart = NO_MATCH;
}
diff --git a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java
index 5c36f6ac..8bb0733e 100644
--- a/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java
+++ b/src/main/java/org/apache/commons/compress/compressors/pack200/Pack200Utils.java
@@ -39,7 +39,7 @@ import org.apache.commons.compress.java.util.jar.Pack200;
*/
public class Pack200Utils {
/**
- * Normalizes a JAR archive in-place so it can be safely signed
+ * Normalizes a JAR archive in-place, so it can be safely signed
* and packed.
*
* <p>As stated in <a
@@ -61,7 +61,7 @@ public class Pack200Utils {
}
/**
- * Normalizes a JAR archive so it can be safely signed and packed.
+ * Normalizes a JAR archive, so it can be safely signed and packed.
*
* <p>As stated in <a
* href="https://download.oracle.com/javase/1.5.0/docs/api/java/util/jar/Pack200.Packer.html">Pack200.Packer's</a>
@@ -86,7 +86,7 @@ public class Pack200Utils {
}
/**
- * Normalizes a JAR archive so it can be safely signed and packed.
+ * Normalizes a JAR archive, so it can be safely signed and packed.
*
* <p>As stated in <a
* href="https://download.oracle.com/javase/1.5.0/docs/api/java/util/jar/Pack200.Packer.html">Pack200.Packer's</a>
@@ -128,7 +128,7 @@ public class Pack200Utils {
}
/**
- * Normalizes a JAR archive in-place so it can be safely signed
+ * Normalizes a JAR archive in-place, so it can be safely signed
* and packed.
*
* <p>As stated in <a
diff --git a/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttribute.java b/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttribute.java
index 00ba383e..d5c30fc8 100644
--- a/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttribute.java
+++ b/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttribute.java
@@ -45,7 +45,7 @@ public class NewAttribute extends Attribute {
}
/**
- * PassAttribute extends {@code NewAttribute} and manages attributes encountered by ASM that have had an pass
+ * PassAttribute extends {@code NewAttribute} and manages attributes encountered by ASM that have had a pass
* action specified to pack200 (e.g. via one of the -C, -M, -F or -D command line options such as
* -Cattribute-name=pass)
*/
@@ -63,7 +63,7 @@ public class NewAttribute extends Attribute {
}
/**
- * StripAttribute extends {@code NewAttribute} and manages attributes encountered by ASM that have had an strip
+ * StripAttribute extends {@code NewAttribute} and manages attributes encountered by ASM that have had a strip
* action specified to pack200 (e.g. via one of the -C, -M, -F or -D command line options such as
* -Cattribute-name=strip)
*/
diff --git a/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttributeBands.java b/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttributeBands.java
index 6f7aec08..68a72626 100644
--- a/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttributeBands.java
+++ b/src/main/java/org/apache/commons/compress/harmony/pack200/NewAttributeBands.java
@@ -570,8 +570,8 @@ public class NewAttributeBands extends BandSet {
}
/**
- * Utility method to get the contents of the given stream, up to the next ']', (ignoring pairs of brackets '[' and
- * ']')
+ * Utility method to get the contents of the given stream, up to the next {@code ]},
+ * (ignoring pairs of brackets {@code [} and {@code ]})
*
* @param reader
* @return
diff --git a/src/main/java/org/apache/commons/compress/harmony/pack200/SegmentHeader.java b/src/main/java/org/apache/commons/compress/harmony/pack200/SegmentHeader.java
index ceff201b..6d5ffec8 100644
--- a/src/main/java/org/apache/commons/compress/harmony/pack200/SegmentHeader.java
+++ b/src/main/java/org/apache/commons/compress/harmony/pack200/SegmentHeader.java
@@ -26,7 +26,7 @@ import java.io.OutputStream;
public class SegmentHeader extends BandSet {
/**
- * Counter for major/minor class file numbers so we can work out the default
+ * Counter for major/minor class file numbers, so we can work out the default
*/
private static class Counter {
diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/NewAttributeBands.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/NewAttributeBands.java
index d913a21b..5e632cdf 100644
--- a/src/main/java/org/apache/commons/compress/harmony/unpack200/NewAttributeBands.java
+++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/NewAttributeBands.java
@@ -138,7 +138,7 @@ public class NewAttributeBands extends BandSet {
}
/**
- * Used by calls when adding band contents to attributes so they don't have to keep track of the internal index
+ * Used by calls when adding band contents to attributes, so they don't have to keep track of the internal index
* of the callable.
*
* @param attribute TODO
@@ -636,8 +636,8 @@ public class NewAttributeBands extends BandSet {
}
/**
- * Utility method to get the contents of the given stream, up to the next ']', (ignoring pairs of brackets '[' and
- * ']')
+ * Utility method to get the contents of the given stream, up to the next {@code ]},
+ * (ignoring pairs of brackets {@code [} and {@code ]})
*
* @param stream
* @return
@@ -892,7 +892,8 @@ public class NewAttributeBands extends BandSet {
}
/**
- * Gets the contents of the given stream, up to the next ']', (ignoring pairs of brackets '[' and ']')
+ * Gets the contents of the given stream, up to the next {@code ]},
+ * (ignoring pairs of brackets {@code [} and {@code ]})
*
* @param stream input stream.
* @return the contents of the given stream.
diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/Segment.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/Segment.java
index 44ecb04d..925d2488 100644
--- a/src/main/java/org/apache/commons/compress/harmony/unpack200/Segment.java
+++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/Segment.java
@@ -53,7 +53,7 @@ import org.apache.commons.compress.harmony.unpack200.bytecode.SourceFileAttribut
/**
* A Pack200 archive consists of one or more segments. Each segment is stand-alone, in the sense that every segment has
* the magic number header; thus, every segment is also a valid archive. However, it is possible to combine
- * (non-GZipped) archives into a single large archive by concatenation alone. Thus all the hard work in unpacking an
+ * (non-GZipped) archives into a single large archive by concatenation alone. Thus, all the hard work in unpacking an
* archive falls to understanding a segment.
*
* The first component of a segment is the header; this contains (amongst other things) the expected counts of constant
@@ -254,7 +254,7 @@ public class Segment {
innerClassesAttribute.addInnerClassesEntry(innerClass, outerClass, innerName, flags);
addInnerClassesAttr = true;
}
- // If ic_local is sent and it's empty, don't add
+ // If ic_local is sent, and it's empty, don't add
// the inner classes attribute.
if (icLocalSent && (icLocal.length == 0)) {
addInnerClassesAttr = false;
diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentConstantPoolArrayCache.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentConstantPoolArrayCache.java
index f89f2513..48dd5030 100644
--- a/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentConstantPoolArrayCache.java
+++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/SegmentConstantPoolArrayCache.java
@@ -134,7 +134,7 @@ public class SegmentConstantPoolArrayCache {
// If the search is one we've just done, don't even
// bother looking and return the last indices. This
// is a second cache within the cache. This is
- // efficient because we usually are looking for
+ // efficient because we are usually looking for
// several secondary elements with the same primary
// key.
if ((lastArray == array) && (lastKey == key)) {
diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/ClassConstantPool.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/ClassConstantPool.java
index 0f292959..53321e22 100644
--- a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/ClassConstantPool.java
+++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/ClassConstantPool.java
@@ -135,7 +135,7 @@ public class ClassConstantPool {
throw new IllegalStateException("Index cache is not initialized!");
}
final Integer entryIndex = (indexCache.get(entry));
- // If the entry isn't found, answer -1. Otherwise answer the entry.
+ // If the entry isn't found, answer -1, otherwise answer the entry.
if (entryIndex != null) {
return entryIndex.intValue() + 1;
}
diff --git a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/SingleByteReferenceForm.java b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/SingleByteReferenceForm.java
index e1bcb8de..06d96345 100644
--- a/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/SingleByteReferenceForm.java
+++ b/src/main/java/org/apache/commons/compress/harmony/unpack200/bytecode/forms/SingleByteReferenceForm.java
@@ -21,7 +21,7 @@ import org.apache.commons.compress.harmony.unpack200.bytecode.ByteCode;
import org.apache.commons.compress.harmony.unpack200.bytecode.OperandManager;
/**
- * Some bytecodes (such as (a)ldc, fldc and ildc) have single- byte references to the class pool. This class is the
+ * Some bytecodes (such as (a)ldc, fldc and ildc) have single-byte references to the class pool. This class is the
* abstract superclass of those classes.
*/
public abstract class SingleByteReferenceForm extends ReferenceForm {
diff --git a/src/main/java/org/apache/commons/compress/java/util/jar/Pack200.java b/src/main/java/org/apache/commons/compress/java/util/jar/Pack200.java
index daf994f3..429f6c07 100644
--- a/src/main/java/org/apache/commons/compress/java/util/jar/Pack200.java
+++ b/src/main/java/org/apache/commons/compress/java/util/jar/Pack200.java
@@ -299,14 +299,14 @@ public abstract class Pack200 {
}
/**
- * Returns a new instance of a unpacker engine.
+ * Returns a new instance of an unpacker engine.
* <p>
* The implementation of the unpacker engine is defined by the system
- * property {@code 'java.util.jar.Pack200.Unpacker'}. If this system
+ * property {@link Pack200.Unpacker}. If this system
* property is defined an instance of the specified class is returned,
* otherwise the system's default implementation is returned.
*
- * @return a instance of {@code Unpacker}.
+ * @return an instance of {@link Pack200.Unpacker}.
*/
public static Pack200.Unpacker newUnpacker() {
return (Unpacker) newInstance(SYSTEM_PROPERTY_UNPACKER, "org.apache.commons.compress.harmony.unpack200.Pack200UnpackerAdapter"); //$NON-NLS-1$
diff --git a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java
index 83aa511b..ed9a0693 100644
--- a/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java
+++ b/src/main/java/org/apache/commons/compress/utils/ArchiveUtils.java
@@ -152,7 +152,7 @@ public class ArchiveUtils {
/**
* Check if buffer contents matches Ascii String.
*
- * @param expected the expected strin
+ * @param expected the expected string
* @param buffer the buffer
* @return {@code true} if buffer is the same as the expected string
*/
diff --git a/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java b/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
index b704fb59..218a6c23 100644
--- a/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/utils/FixedLengthBlockOutputStream.java
@@ -217,7 +217,7 @@ public class FixedLengthBlockOutputStream extends OutputStream implements Writab
final int srcRemaining = src.remaining();
if (srcRemaining < buffer.remaining()) {
- // if don't have enough bytes in src to fill up a block we must buffer
+ // if we don't have enough bytes in src to fill up a block we must buffer
buffer.put(src);
} else {
int srcLeft = srcRemaining;
diff --git a/src/test/java/org/apache/commons/compress/AbstractTestCase.java b/src/test/java/org/apache/commons/compress/AbstractTestCase.java
index 33e1c4dc..f400776d 100644
--- a/src/test/java/org/apache/commons/compress/AbstractTestCase.java
+++ b/src/test/java/org/apache/commons/compress/AbstractTestCase.java
@@ -254,7 +254,7 @@ public abstract class AbstractTestCase {
}
/**
- * Creates an archive of textbased files in several directories. The
+ * Creates an archive of text-based files in several directories. The
* archivename is the factory identifier for the archiver, for example zip,
* tar, cpio, jar, ar. The archive is created as a temp file.
*
diff --git a/src/test/java/org/apache/commons/compress/archivers/examples/SevenZArchiverTest.java b/src/test/java/org/apache/commons/compress/archivers/examples/SevenZArchiverTest.java
index 2e93a6f2..41cfbd89 100644
--- a/src/test/java/org/apache/commons/compress/archivers/examples/SevenZArchiverTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/examples/SevenZArchiverTest.java
@@ -100,7 +100,7 @@ public class SevenZArchiverTest extends AbstractTestCase {
target = new File(resultDir, "test.7z");
}
- // not really a 7z test but I didn't feel like adding a new test just for this
+ // not really a 7z test, but I didn't feel like adding a new test just for this
@Test
public void unknownFormat() throws IOException {
try (SeekableByteChannel c = FileChannel.open(target.toPath(), StandardOpenOption.WRITE, StandardOpenOption.CREATE,
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
index c348dabb..1b2b15d2 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
@@ -111,7 +111,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
assertEquals(0100000000000L, e.getSize());
}
// generates IOE because of unclosed entries.
- // However we don't really want to create such large entries.
+ // However, we don't really want to create such large entries.
closeQuietly(tos);
}
@@ -132,7 +132,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
assertEquals(0100000000000L, e.getSize());
}
// generates IOE because of unclosed entries.
- // However we don't really want to create such large entries.
+ // However, we don't really want to create such large entries.
closeQuietly(tos);
}
@@ -187,7 +187,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
/**
* When using long file names the longLinkEntry included the current timestamp as the Entry
- * modification date. This was never exposed to the client but it caused identical archives to
+ * modification date. This was never exposed to the client, but it caused identical archives to
* have different MD5 hashes.
*/
@Test
@@ -209,7 +209,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
assertArrayEquals(digest1, digest2);
// do I still have the correct modification date?
- // let a second elapse so we don't get the current time
+ // let a second elapse, so we don't get the current time
Thread.sleep(1000);
try (TarArchiveInputStream tarIn = new TarArchiveInputStream(new ByteArrayInputStream(archive2))) {
final ArchiveEntry nextEntry = tarIn.getNextEntry();
@@ -270,7 +270,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
assertEquals(cal.getTime(), e.getLastModifiedDate());
}
// generates IOE because of unclosed entries.
- // However we don't really want to create such large entries.
+ // However, we don't really want to create such large entries.
closeQuietly(tos);
}
@@ -300,7 +300,7 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
assertEquals(cal.getTime(), e.getLastModifiedDate());
}
// generates IOE because of unclosed entries.
- // However we don't really want to create such large entries.
+ // However, we don't really want to create such large entries.
closeQuietly(tos);
}
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
index d4de91fc..aa4c3690 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/ParallelScatterZipCreatorTest.java
@@ -217,7 +217,7 @@ public class ParallelScatterZipCreatorTest {
final byte[] expected = entries.remove(zipArchiveEntry.getName());
assertArrayEquals(expected, actual, "For " + zipArchiveEntry.getName());
}
- // check order of ZIP entries vs order of order of addition to the parallel ZIP creator
+ // check order of ZIP entries vs order of addition to the parallel ZIP creator
assertEquals("file" + i++, zipArchiveEntry.getName(), "For " + zipArchiveEntry.getName());
}
}
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/X000A_NTFSTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/X000A_NTFSTest.java
index f2bfd9f4..954c931d 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/X000A_NTFSTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/X000A_NTFSTest.java
@@ -28,7 +28,7 @@ import org.junit.jupiter.api.Test;
public class X000A_NTFSTest {
@Test
- public void simpleRountrip() throws Exception {
+ public void simpleRoundtrip() throws Exception {
final X000A_NTFS xf = new X000A_NTFS();
xf.setModifyJavaTime(new Date(0));
// one second past midnight
@@ -44,7 +44,7 @@ public class X000A_NTFSTest {
}
@Test
- public void simpleRountripWithHighPrecisionDatesWithBigValues() throws Exception {
+ public void simpleRoundtripWithHighPrecisionDatesWithBigValues() throws Exception {
final X000A_NTFS xf = new X000A_NTFS();
xf.setModifyFileTime(FileTime.from(Instant.ofEpochSecond(123456789101L, 123456700)));
// one second past midnight
@@ -64,7 +64,7 @@ public class X000A_NTFSTest {
}
@Test
- public void simpleRountripWithHighPrecisionDatesWithSmallValues() throws Exception {
+ public void simpleRoundtripWithHighPrecisionDatesWithSmallValues() throws Exception {
final X000A_NTFS xf = new X000A_NTFS();
// The last 2 digits should not be written due to the 100ns precision
xf.setModifyFileTime(FileTime.from(Instant.ofEpochSecond(0, 1234)));
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java
index e6d52d98..1bf6278f 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestampTest.java
@@ -65,7 +65,7 @@ public class X5455_ExtendedTimestampTest {
* was in local time.
*
* The archive read in {@link #testSampleFile} has been created
- * with GMT-8 so we need to adjust for the difference.
+ * with GMT-8, so we need to adjust for the difference.
*/
private static Date adjustFromGMTToExpectedOffset(final Date from) {
final Calendar cal = Calendar.getInstance();
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java
index 4ecfdfad..f58d03ef 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/X7875_NewUnixTest.java
@@ -139,7 +139,7 @@ public class X7875_NewUnixTest {
final byte[] LENGTH_5 = {1, 5, 0, 0, 0, 0, 1, 5, 1, 0, 0, 0, 1};
// Version=1, Len=8, 2^63 - 2, Len=8, 2^63 - 1
- // Esoteric test: can we handle 64 bit numbers?
+ // Esoteric test: can we handle 64-bit numbers?
final byte[] LENGTH_8 = {1, 8, -2, -1, -1, -1, -1, -1, -1, 127, 8, -1, -1, -1, -1, -1, -1, -1, 127};
final long TWO_TO_32 = 0x100000000L;
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
index 2aab31c8..81f8a07e 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/Zip64SupportIT.java
@@ -440,7 +440,7 @@ public class Zip64SupportIT {
);
// grab third entry, verify offset is
- // 0xFFFFFFFF and it has a ZIP64 extended
+ // 0xFFFFFFFF, and it has a ZIP64 extended
// information extra field
final byte[] header = new byte[12];
a.readFully(header);
@@ -517,7 +517,7 @@ public class Zip64SupportIT {
// read offset of LFH
final byte[] offset = new byte[8];
a.readFully(offset);
- // verify there is a LFH where the CD claims it
+ // verify there is an LFH where the CD claims it
a.seek(ZipEightByteInteger.getLongValue(offset));
final byte[] sig = new byte[4];
a.readFully(sig);
@@ -1325,7 +1325,7 @@ public class Zip64SupportIT {
getLengthAndPositionAtCentralDirectory(a);
// grab first CD entry, verify sizes are not
- // 0xFFFFFFFF and it has a an empty ZIP64 extended
+ // 0xFFFFFFFF, and it has an empty ZIP64 extended
// information extra field
byte[] header = new byte[12];
a.readFully(header);
@@ -1913,7 +1913,7 @@ public class Zip64SupportIT {
try (RandomAccessFile a = new RandomAccessFile(f, "r")) {
getLengthAndPositionAtCentralDirectory(a);
- // grab first CF entry, verify sizes are 1e6 and it
+ // grab first CF entry, verify sizes are 1e6, and it
// has an empty ZIP64 extended information extra field
byte[] header = new byte[12];
a.readFully(header);
diff --git a/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java
index e3715b9c..e7fc569a 100644
--- a/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/zip/ZipSplitOutputStreamTest.java
@@ -45,7 +45,7 @@ public class ZipSplitOutputStreamTest extends AbstractTestCase {
@Test
public void testCreateSplittedFiles() throws IOException {
final File testOutputFile = new File(dir, "testCreateSplittedFiles.zip");
- final int splitSize = 100 * 1024; /* 100KB */
+ final int splitSize = 100 * 1024; /* 100 KB */
final ZipSplitOutputStream zipSplitOutputStream = new ZipSplitOutputStream(testOutputFile, splitSize);
final File fileToTest = getFile("COMPRESS-477/split_zip_created_by_zip/zip_to_compare_created_by_zip.zip");
diff --git a/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java b/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java
index f81efa49..51913afa 100644
--- a/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java
+++ b/src/test/java/org/apache/commons/compress/changes/ChangeSetTestCase.java
@@ -65,7 +65,7 @@ public final class ChangeSetTestCase extends AbstractTestCase {
* @throws Exception
*/
@Test
- public void testAddAllreadyExistingWithReplaceFalse() throws Exception {
+ public void testAddAlreadyExistingWithReplaceFalse() throws Exception {
final String archivename = "zip";
final Path input = createArchive(archivename);
@@ -99,7 +99,7 @@ public final class ChangeSetTestCase extends AbstractTestCase {
* @throws Exception
*/
@Test
- public void testAddAllreadyExistingWithReplaceTrue() throws Exception {
+ public void testAddAlreadyExistingWithReplaceTrue() throws Exception {
final String archivename = "zip";
final Path input = createArchive(archivename);
@@ -244,7 +244,7 @@ public final class ChangeSetTestCase extends AbstractTestCase {
*
* add dir1/bla.txt + mv dir1/test.text dir2/test.txt + delete dir1
*
- * Add dir1/bla.txt should be surpressed. All other dir1 files will be
+ * Add dir1/bla.txt should be suppressed. All other dir1 files will be
* deleted, except dir1/test.text, which will be moved
*
* @throws Exception
@@ -631,7 +631,7 @@ public final class ChangeSetTestCase extends AbstractTestCase {
}
/**
- * Adds a file to a ZIP archive. Deletes an other file.
+ * Adds a file to a ZIP archive. Deletes another file.
*
* @throws Exception
*/
@@ -662,7 +662,7 @@ public final class ChangeSetTestCase extends AbstractTestCase {
}
/**
- * Adds a file to a ZIP archive. Deletes an other file.
+ * Adds a file to a ZIP archive. Deletes another file.
*
* @throws Exception
*/
diff --git a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
index f5f695c7..31b11078 100644
--- a/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
+++ b/src/test/java/org/apache/commons/compress/compressors/Pack200TestCase.java
@@ -158,7 +158,7 @@ public final class Pack200TestCase extends AbstractTestCase {
m.put("foo", "bar");
try (InputStream is = new Pack200CompressorInputStream(newInputStream("bla.jar"),
m)) {
- // packed file is a jar, which is a ZIP so it starts with
+ // packed file is a jar, which is a ZIP, so it starts with
// a local file header
assertTrue(is.markSupported());
is.mark(5);
diff --git a/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java b/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
index a4bdbdb3..fef98b80 100644
--- a/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
+++ b/src/test/java/org/apache/commons/compress/compressors/snappy/SnappyRoundtripTest.java
@@ -134,7 +134,7 @@ public final class SnappyRoundtripTest extends AbstractTestCase {
// as the block size is only 32k. This means we never execute
// the code for four-byte length copies in either stream class
// using real-world Snappy files.
- // This is an artifical stream using a bigger block size that
+ // This is an artificial stream using a bigger block size that
// may not even be expandable by other Snappy implementations.
// Start with the four-byte sequence 0000; after that, add > 64k
// of random noise that doesn't contain any 0000 at all, then
diff --git a/src/test/java/org/apache/commons/compress/harmony/pack200/tests/NewAttributeBandsTest.java b/src/test/java/org/apache/commons/compress/harmony/pack200/tests/NewAttributeBandsTest.java
index 92795212..47a1a3b6 100644
--- a/src/test/java/org/apache/commons/compress/harmony/pack200/tests/NewAttributeBandsTest.java
+++ b/src/test/java/org/apache/commons/compress/harmony/pack200/tests/NewAttributeBandsTest.java
@@ -80,7 +80,7 @@ public class NewAttributeBandsTest {
"B", new byte[] { 3 }, null, 0, null));
final ByteArrayOutputStream out = new ByteArrayOutputStream();
newAttributeBands.pack(out);
- // BYTE1 is used for B layouts so we don't need to unpack to test the
+ // BYTE1 is used for B layouts, so we don't need to unpack to test the
// results
final byte[] bytes = out.toByteArray();
assertEquals(3, bytes.length);