You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@commons.apache.org by bo...@apache.org on 2017/06/26 13:23:22 UTC
[1/3] commons-compress git commit: COMPRESS-400 : Squash commit of
COMPRESS-400-REDUX.
Repository: commons-compress
Updated Branches:
refs/heads/master 19e1b02f7 -> 53b450710
COMPRESS-400 : Squash commit of COMPRESS-400-REDUX.
Add support for extra PAX headers (local and global).
Signed-off-by: Simon Spero <se...@gmail.com>
Project: http://git-wip-us.apache.org/repos/asf/commons-compress/repo
Commit: http://git-wip-us.apache.org/repos/asf/commons-compress/commit/9bcdc3e9
Tree: http://git-wip-us.apache.org/repos/asf/commons-compress/tree/9bcdc3e9
Diff: http://git-wip-us.apache.org/repos/asf/commons-compress/diff/9bcdc3e9
Branch: refs/heads/master
Commit: 9bcdc3e9f77f101a0d3031c341c9b4ea554d34de
Parents: 19e1b02
Author: Simon Spero <se...@gmail.com>
Authored: Sun Jun 25 18:28:07 2017 -0400
Committer: Stefan Bodewig <bo...@apache.org>
Committed: Mon Jun 26 15:19:31 2017 +0200
----------------------------------------------------------------------
.../compress/archivers/tar/TarArchiveEntry.java | 150 +++++++++++++-
.../archivers/tar/TarArchiveInputStream.java | 62 +-----
.../archivers/tar/TarArchiveOutputStream.java | 200 +++++++++++--------
.../archivers/tar/TarArchiveEntryTest.java | 55 ++++-
.../tar/TarArchiveInputStreamTest.java | 24 +++
.../tar/TarArchiveOutputStreamTest.java | 50 ++++-
6 files changed, 395 insertions(+), 146 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
index 849532c..8595252 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java
@@ -20,10 +20,11 @@ package org.apache.commons.compress.archivers.tar;
import java.io.File;
import java.io.IOException;
+import java.util.Collections;
import java.util.Date;
+import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
-
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipEncoding;
import org.apache.commons.compress.utils.ArchiveUtils;
@@ -133,7 +134,7 @@ import org.apache.commons.compress.utils.ArchiveUtils;
* char prefix[131]; // offset 345
* char atime[12]; // offset 476
* char ctime[12]; // offset 488
- * char mfill[8]; // offset 500
+ * char mfill[8]; // offset 500
* char xmagic[4]; // offset 508 "tar"
* };
* </pre>
@@ -207,6 +208,9 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants {
/** The entry's file reference */
private final File file;
+ /** Extra, user supplied pax headers */
+ private final Map<String,String> extraPaxHeaders = new HashMap<>();
+
/** Maximum length of a user's name in the tar file */
public static final int MAX_NAMELEN = 31;
@@ -219,6 +223,7 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants {
/** Convert millis to seconds */
public static final int MILLIS_PER_SECOND = 1000;
+
/**
* Construct an empty entry and prepares the header values.
*/
@@ -943,6 +948,147 @@ public class TarArchiveEntry implements ArchiveEntry, TarConstants {
}
/**
+ * get extra PAX Headers
+ * @return read-only map containing any extra PAX Headers
+ * @since 1.15
+ */
+ public Map<String, String> getExtraPaxHeaders() {
+ return Collections.unmodifiableMap(extraPaxHeaders);
+ }
+
+ /**
+ * clear all extra PAX headers.
+ * @since 1.15
+ */
+ public void clearExtraPaxHeaders() {
+ extraPaxHeaders.clear();
+ }
+
+ /**
+ * add a PAX header to this entry. If the header corresponds to an existing field in the entry,
+ * that field will be set; otherwise the header will be added to the extraPaxHeaders Map
+ * @param name The full name of the header to set.
+ * @param value value of header.
+ * @since 1.15
+ */
+ public void addPaxHeader(String name,String value) {
+ processPaxHeader(name,value);
+ }
+
+ /**
+ * get named extra PAX header
+ * @param name The full name of an extended PAX header to retrieve
+ * @return The value of the header, if any.
+ * @since 1.15
+ */
+ public String getExtraPaxHeader(String name) {
+ return extraPaxHeaders.get(name);
+ }
+
+ /**
+ * Update the entry using a map of pax headers.
+ * @param headers map of PAX headers used to update this entry
+ * @since 1.15
+ */
+ void updateEntryFromPaxHeaders(Map<String, String> headers) {
+ for (final Map.Entry<String, String> ent : headers.entrySet()) {
+ final String key = ent.getKey();
+ final String val = ent.getValue();
+ processPaxHeader(key, val, headers);
+ }
+ }
+
+ /**
+ * process one pax header, using the entry's extraPaxHeaders map as source for extra headers
+ * used when handling entries for sparse files.
+ * @param key the header name.
+ * @param val the header value.
+ * @since 1.15
+ */
+ private void processPaxHeader(String key, String val) {
+ processPaxHeader(key,val,extraPaxHeaders);
+ }
+
+ /**
+ * Process one pax header, using the supplied map as source for extra headers to be used when handling
+ * entries for sparse files
+ *
+ * @param key the header name.
+ * @param val the header value.
+ * @param headers map of headers used for dealing with sparse file.
+ * @since 1.15
+ */
+ private void processPaxHeader(String key, String val, Map<String, String> headers) {
+ /*
+ * The following headers are defined for Pax.
+ * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields
+ * mtime
+ * comment
+ * gid, gname
+ * linkpath
+ * size
+ * uid,uname
+ * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those
+ *
+ * GNU sparse files use additional members, we use
+ * GNU.sparse.size to detect the 0.0 and 0.1 versions and
+ * GNU.sparse.realsize for 1.0.
+ *
+ * star files use additional members of which we use
+ * SCHILY.filetype in order to detect star sparse files.
+ *
+ * If called from addPaxHeader, these additional headers must already be present.
+ */
+ switch (key) {
+ case "path":
+ setName(val);
+ break;
+ case "linkpath":
+ setLinkName(val);
+ break;
+ case "gid":
+ setGroupId(Long.parseLong(val));
+ break;
+ case "gname":
+ setGroupName(val);
+ break;
+ case "uid":
+ setUserId(Long.parseLong(val));
+ break;
+ case "uname":
+ setUserName(val);
+ break;
+ case "size":
+ setSize(Long.parseLong(val));
+ break;
+ case "mtime":
+ setModTime((long) (Double.parseDouble(val) * 1000));
+ break;
+ case "SCHILY.devminor":
+ setDevMinor(Integer.parseInt(val));
+ break;
+ case "SCHILY.devmajor":
+ setDevMajor(Integer.parseInt(val));
+ break;
+ case "GNU.sparse.size":
+ fillGNUSparse0xData(headers);
+ break;
+ case "GNU.sparse.realsize":
+ fillGNUSparse1xData(headers);
+ break;
+ case "SCHILY.filetype":
+ if ("sparse".equals(val)) {
+ fillStarSparseData(headers);
+ }
+ break;
+ default:
+ extraPaxHeaders.put(key,val);
+ }
+ }
+
+
+
+ /**
* If this entry represents a file, and the file is a directory, return
* an array of TarEntries for this entry's children.
*
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
index 281ad5b..4f090ec 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java
@@ -28,7 +28,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import java.util.Map.Entry;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
@@ -260,7 +259,7 @@ public class TarArchiveInputStream extends ArchiveInputStream {
* @throws IOException on error
*/
public TarArchiveEntry getNextTarEntry() throws IOException {
- if (hasHitEOF) {
+ if (isAtEOF()) {
return null;
}
@@ -396,8 +395,8 @@ public class TarArchiveInputStream extends ArchiveInputStream {
*/
private byte[] getRecord() throws IOException {
byte[] headerBuf = readRecord();
- hasHitEOF = isEOFRecord(headerBuf);
- if (hasHitEOF && headerBuf != null) {
+ setAtEOF(isEOFRecord(headerBuf));
+ if (isAtEOF() && headerBuf != null) {
tryToConsumeSecondEOFRecord();
consumeRemainderOfLastBlock();
headerBuf = null;
@@ -504,55 +503,8 @@ public class TarArchiveInputStream extends ArchiveInputStream {
}
private void applyPaxHeadersToCurrentEntry(final Map<String, String> headers) {
- /*
- * The following headers are defined for Pax.
- * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields
- * mtime
- * comment
- * gid, gname
- * linkpath
- * size
- * uid,uname
- * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those
- *
- * GNU sparse files use additional members, we use
- * GNU.sparse.size to detect the 0.0 and 0.1 versions and
- * GNU.sparse.realsize for 1.0.
- *
- * star files use additional members of which we use
- * SCHILY.filetype in order to detect star sparse files.
- */
- for (final Entry<String, String> ent : headers.entrySet()){
- final String key = ent.getKey();
- final String val = ent.getValue();
- if ("path".equals(key)){
- currEntry.setName(val);
- } else if ("linkpath".equals(key)){
- currEntry.setLinkName(val);
- } else if ("gid".equals(key)){
- currEntry.setGroupId(Long.parseLong(val));
- } else if ("gname".equals(key)){
- currEntry.setGroupName(val);
- } else if ("uid".equals(key)){
- currEntry.setUserId(Long.parseLong(val));
- } else if ("uname".equals(key)){
- currEntry.setUserName(val);
- } else if ("size".equals(key)){
- currEntry.setSize(Long.parseLong(val));
- } else if ("mtime".equals(key)){
- currEntry.setModTime((long) (Double.parseDouble(val) * 1000));
- } else if ("SCHILY.devminor".equals(key)){
- currEntry.setDevMinor(Integer.parseInt(val));
- } else if ("SCHILY.devmajor".equals(key)){
- currEntry.setDevMajor(Integer.parseInt(val));
- } else if ("GNU.sparse.size".equals(key)) {
- currEntry.fillGNUSparse0xData(headers);
- } else if ("GNU.sparse.realsize".equals(key)) {
- currEntry.fillGNUSparse1xData(headers);
- } else if ("SCHILY.filetype".equals(key) && "sparse".equals(val)) {
- currEntry.fillStarSparseData(headers);
- }
- }
+ currEntry.updateEntryFromPaxHeaders(headers);
+
}
/**
@@ -643,7 +595,7 @@ public class TarArchiveInputStream extends ArchiveInputStream {
public int read(final byte[] buf, final int offset, int numToRead) throws IOException {
int totalRead = 0;
- if (hasHitEOF || isDirectory() || entryOffset >= entrySize) {
+ if (isAtEOF() || isDirectory() || entryOffset >= entrySize) {
return -1;
}
@@ -659,7 +611,7 @@ public class TarArchiveInputStream extends ArchiveInputStream {
if (numToRead > 0) {
throw new IOException("Truncated TAR archive");
}
- hasHitEOF = true;
+ setAtEOF(true);
} else {
count(totalRead);
entryOffset += totalRead;
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
index 6b31705..340e35c 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
@@ -22,6 +22,7 @@ import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Date;
@@ -35,31 +36,46 @@ import org.apache.commons.compress.utils.CharsetNames;
import org.apache.commons.compress.utils.CountingOutputStream;
/**
- * The TarOutputStream writes a UNIX tar archive as an OutputStream.
- * Methods are provided to put entries, and then write their contents
- * by writing to this stream using write().
+ * The TarOutputStream writes a UNIX tar archive as an OutputStream. Methods are provided to put
+ * entries, and then write their contents by writing to this stream using write().
+ *
* @NotThreadSafe
*/
public class TarArchiveOutputStream extends ArchiveOutputStream {
- /** Fail if a long file name is required in the archive. */
+
+ /**
+ * Fail if a long file name is required in the archive.
+ */
public static final int LONGFILE_ERROR = 0;
- /** Long paths will be truncated in the archive. */
+ /**
+ * Long paths will be truncated in the archive.
+ */
public static final int LONGFILE_TRUNCATE = 1;
- /** GNU tar extensions are used to store long file names in the archive. */
+ /**
+ * GNU tar extensions are used to store long file names in the archive.
+ */
public static final int LONGFILE_GNU = 2;
- /** POSIX/PAX extensions are used to store long file names in the archive. */
+ /**
+ * POSIX/PAX extensions are used to store long file names in the archive.
+ */
public static final int LONGFILE_POSIX = 3;
- /** Fail if a big number (e.g. size > 8GiB) is required in the archive. */
+ /**
+ * Fail if a big number (e.g. size > 8GiB) is required in the archive.
+ */
public static final int BIGNUMBER_ERROR = 0;
- /** star/GNU tar/BSD tar extensions are used to store big number in the archive. */
+ /**
+ * star/GNU tar/BSD tar extensions are used to store big number in the archive.
+ */
public static final int BIGNUMBER_STAR = 1;
- /** POSIX/PAX extensions are used to store big numbers in the archive. */
+ /**
+ * POSIX/PAX extensions are used to store big numbers in the archive.
+ */
public static final int BIGNUMBER_POSIX = 2;
private static final int RECORD_SIZE = 512;
@@ -76,10 +92,14 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
private boolean closed = false;
- /** Indicates if putArchiveEntry has been called without closeArchiveEntry */
+ /**
+ * Indicates if putArchiveEntry has been called without closeArchiveEntry
+ */
private boolean haveUnclosedEntry = false;
- /** indicates if this archive is finished */
+ /**
+ * indicates if this archive is finished
+ */
private boolean finished = false;
private final OutputStream out;
@@ -97,6 +117,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
*/
public TarArchiveOutputStream(final OutputStream os) {
@@ -105,6 +126,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param encoding name of the encoding to use for file names
* @since 1.4
@@ -115,6 +137,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use. Must be a multiple of 512 bytes.
*/
@@ -125,6 +148,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use
* @param recordSize the record size to use. Must be 512 bytes.
@@ -139,6 +163,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Constructor for TarInputStream.
+ *
* @param os the output stream to use
* @param blockSize the block size to use . Must be a multiple of 512 bytes.
* @param recordSize the record size to use. Must be 512 bytes.
@@ -189,10 +214,10 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Set the long file mode.
- * This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1) or LONGFILE_GNU(2).
- * This specifies the treatment of long file names (names >= TarConstants.NAMELEN).
- * Default is LONGFILE_ERROR.
+ * Set the long file mode. This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1) or
+ * LONGFILE_GNU(2). This specifies the treatment of long file names (names >=
+ * TarConstants.NAMELEN). Default is LONGFILE_ERROR.
+ *
* @param longFileMode the mode to use
*/
public void setLongFileMode(final int longFileMode) {
@@ -200,10 +225,11 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Set the big number mode.
- * This can be BIGNUMBER_ERROR(0), BIGNUMBER_POSIX(1) or BIGNUMBER_STAR(2).
- * This specifies the treatment of big files (sizes > TarConstants.MAXSIZE) and other numeric values to big to fit into a traditional tar header.
+ * Set the big number mode. This can be BIGNUMBER_ERROR(0), BIGNUMBER_POSIX(1) or
+ * BIGNUMBER_STAR(2). This specifies the treatment of big files (sizes >
+ * TarConstants.MAXSIZE) and other numeric values too big to fit into a traditional tar header.
* Default is BIGNUMBER_ERROR.
+ *
* @param bigNumberMode the mode to use
* @since 1.4
*/
@@ -213,8 +239,9 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Whether to add a PAX extension header for non-ASCII file names.
- * @since 1.4
+ *
* @param b whether to add a PAX extension header for non-ASCII file names.
+ * @since 1.4
*/
public void setAddPaxHeadersForNonAsciiNames(final boolean b) {
addPaxHeadersForNonAsciiNames = b;
@@ -258,6 +285,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Closes the underlying OutputStream.
+ *
* @throws IOException on error
*/
@Override
@@ -284,13 +312,11 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Put an entry on the output stream. This writes the entry's
- * header record and positions the output stream for writing
- * the contents of the entry. Once this method is called, the
- * stream is ready for calls to write() to write the entry's
- * contents. Once the contents are written, closeArchiveEntry()
- * <B>MUST</B> be called to ensure that all buffered data
- * is completely written to the output stream.
+ * Put an entry on the output stream. This writes the entry's header record and positions the
+ * output stream for writing the contents of the entry. Once this method is called, the stream
+ * is ready for calls to write() to write the entry's contents. Once the contents are written,
+ * closeArchiveEntry() <B>MUST</B> be called to ensure that all buffered data is completely
+ * written to the output stream.
*
* @param archiveEntry The TarEntry to be written to the archive.
* @throws IOException on error
@@ -302,7 +328,17 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
throw new IOException("Stream has already been finished");
}
final TarArchiveEntry entry = (TarArchiveEntry) archiveEntry;
- final Map<String, String> paxHeaders = new HashMap<>();
+ if (entry.isGlobalPaxHeader()) {
+ final byte[] data = encodeExtendedPaxHeadersContents(entry.getExtraPaxHeaders());
+ entry.setSize(data.length);
+ entry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR);
+ writeRecord(recordBuf);
+ currSize= entry.getSize();
+ currBytes = 0;
+ this.haveUnclosedEntry = true;
+ write(data);
+ closeArchiveEntry();
+ } else {final Map<String, String> paxHeaders = new HashMap<>();
final String entryName = entry.getName();
final boolean paxHeaderContainsPath = handleLongName(entry, entryName, paxHeaders, "path",
TarConstants.LF_GNUTYPE_LONGNAME, "file name");
@@ -312,50 +348,50 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
&& handleLongName(entry, linkName, paxHeaders, "linkpath",
TarConstants.LF_GNUTYPE_LONGLINK, "link name");
- if (bigNumberMode == BIGNUMBER_POSIX) {
- addPaxHeadersForBigNumbers(paxHeaders, entry);
- } else if (bigNumberMode != BIGNUMBER_STAR) {
- failForBigNumbers(entry);
- }
+ if (bigNumberMode == BIGNUMBER_POSIX) {
+ addPaxHeadersForBigNumbers(paxHeaders, entry);
+ } else if (bigNumberMode != BIGNUMBER_STAR) {
+ failForBigNumbers(entry);
+ }
- if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath
- && !ASCII.canEncode(entryName)) {
- paxHeaders.put("path", entryName);
- }
+ if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath
+ && !ASCII.canEncode(entryName)) {
+ paxHeaders.put("path", entryName);
+ }
- if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath
- && (entry.isLink() || entry.isSymbolicLink())
- && !ASCII.canEncode(linkName)) {
- paxHeaders.put("linkpath", linkName);
- }
+ if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath
+ && (entry.isLink() || entry.isSymbolicLink())
+ && !ASCII.canEncode(linkName)) {
+ paxHeaders.put("linkpath", linkName);
+ }
+ paxHeaders.putAll(entry.getExtraPaxHeaders());
- if (paxHeaders.size() > 0) {
- writePaxHeaders(entry, entryName, paxHeaders);
- }
+ if (paxHeaders.size() > 0) {
+ writePaxHeaders(entry, entryName, paxHeaders);
+ }
entry.writeEntryHeader(recordBuf, zipEncoding,
bigNumberMode == BIGNUMBER_STAR);
writeRecord(recordBuf);
- currBytes = 0;
+ currBytes = 0;
- if (entry.isDirectory()) {
- currSize = 0;
- } else {
- currSize = entry.getSize();
+ if (entry.isDirectory()) {
+ currSize = 0;
+ } else {
+ currSize = entry.getSize();
+ }
+ currName = entryName;
+ haveUnclosedEntry = true;
}
- currName = entryName;
- haveUnclosedEntry = true;
}
/**
- * Close an entry. This method MUST be called for all file
- * entries that contain data. The reason is that we must
- * buffer data written to the stream in order to satisfy
- * the buffer's record based writes. Thus, there may be
- * data fragments still being assembled that must be written
- * to the output stream before this entry is closed and the
- * next entry written.
+ * Close an entry. This method MUST be called for all file entries that contain data. The reason
+ * is that we must buffer data written to the stream in order to satisfy the buffer's record
+ * based writes. Thus, there may be data fragments still being assembled that must be written to
+ * the output stream before this entry is closed and the next entry written.
+ *
* @throws IOException on error
*/
@Override
@@ -387,12 +423,10 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Writes bytes to the current tar archive entry. This method
- * is aware of the current entry and will throw an exception if
- * you attempt to write bytes past the length specified for the
- * current entry. The method is also (painfully) aware of the
- * record buffering required by TarBuffer, and manages buffers
- * that are not a multiple of recordsize in length, including
+ * Writes bytes to the current tar archive entry. This method is aware of the current entry and
+ * will throw an exception if you attempt to write bytes past the length specified for the
+ * current entry. The method is also (painfully) aware of the record buffering required by
+ * TarBuffer, and manages buffers that are not a multiple of recordsize in length, including
* assembling records from small buffers.
*
* @param wBuf The buffer to write to the archive.
@@ -471,6 +505,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Writes a PAX extended header with the given map as contents.
+ *
* @since 1.4
*/
void writePaxHeaders(final TarArchiveEntry entry,
@@ -484,6 +519,15 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
TarConstants.LF_PAX_EXTENDED_HEADER_LC);
transferModTime(entry, pex);
+ final byte[] data = encodeExtendedPaxHeadersContents(headers);
+ pex.setSize(data.length);
+ putArchiveEntry(pex);
+ write(data);
+ closeArchiveEntry();
+ }
+
+ private byte[] encodeExtendedPaxHeadersContents(Map<String, String> headers)
+ throws UnsupportedEncodingException {
final StringWriter w = new StringWriter();
for (final Map.Entry<String, String> h : headers.entrySet()) {
final String key = h.getKey();
@@ -505,11 +549,7 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
w.write(line);
}
- final byte[] data = w.toString().getBytes(CharsetNames.UTF_8);
- pex.setSize(data.length);
- putArchiveEntry(pex);
- write(data);
- closeArchiveEntry();
+ return w.toString().getBytes(CharsetNames.UTF_8);
}
private String stripTo7Bits(final String name) {
@@ -578,9 +618,8 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
}
/**
- * Write an archive record to the archive, where the record may be
- * inside of a larger array buffer. The buffer must be "offset plus
- * record size" long.
+ * Write an archive record to the archive, where the record may be inside of a larger array
+ * buffer. The buffer must be "offset plus record size" long.
*
* @param buf The buffer containing the record data to write.
* @param offset The offset of the record data within buf.
@@ -672,16 +711,11 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
/**
* Handles long file or link names according to the longFileMode setting.
*
- * <p>I.e. if the given name is too long to be written to a plain
- * tar header then
- * <ul>
- * <li>it creates a pax header who's name is given by the
- * paxHeaderName parameter if longFileMode is POSIX</li>
- * <li>it creates a GNU longlink entry who's type is given by
- * the linkType parameter if longFileMode is GNU</li>
- * <li>it throws an exception if longFileMode is ERROR</li>
- * <li>it truncates the name if longFileMode is TRUNCATE</li>
- * </ul></p>
+ * <p>I.e. if the given name is too long to be written to a plain tar header then <ul> <li>it
+ * creates a pax header whose name is given by the paxHeaderName parameter if longFileMode is
+ * POSIX</li> <li>it creates a GNU longlink entry whose type is given by the linkType parameter
+ * if longFileMode is GNU</li> <li>it throws an exception if longFileMode is ERROR</li> <li>it
+ * truncates the name if longFileMode is TRUNCATE</li> </ul></p>
*
* @param entry entry the name belongs to
* @param name the name to write
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
index ca0b4d9..5e35d33 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveEntryTest.java
@@ -18,16 +18,24 @@
package org.apache.commons.compress.archivers.tar;
-import static org.junit.Assert.*;
-import org.junit.Test;
-
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
import java.util.Locale;
-
import org.apache.commons.compress.AbstractTestCase;
+import org.junit.Test;
public class TarArchiveEntryTest implements TarConstants {
@@ -121,6 +129,45 @@ public class TarArchiveEntryTest implements TarConstants {
t.setSize(0100000000000L);
}
+ @Test public void testExtraPaxHeaders() throws IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ TarArchiveOutputStream tos = new TarArchiveOutputStream(bos);
+
+ TarArchiveEntry entry = new TarArchiveEntry("./weasels");
+ entry.addPaxHeader("APACHE.mustelida","true");
+ entry.addPaxHeader("SCHILY.xattr.user.org.apache.weasels","maximum weasels");
+ entry.addPaxHeader("size","1");
+ assertEquals("extra header count",2,entry.getExtraPaxHeaders().size());
+ assertEquals("APACHE.mustelida","true",
+ entry.getExtraPaxHeader("APACHE.mustelida"));
+ assertEquals("SCHILY.xattr.user.org.apache.weasels","maximum weasels",
+ entry.getExtraPaxHeader("SCHILY.xattr.user.org.apache.weasels"));
+ assertEquals("size",entry.getSize(),1);
+
+ tos.putArchiveEntry(entry);
+ tos.write('W');
+ tos.closeArchiveEntry();
+ tos.close();
+ assertNotEquals("should have extra headers before clear",0,entry.getExtraPaxHeaders().size());
+ entry.clearExtraPaxHeaders();
+ assertEquals("extra headers should be empty after clear",0,entry.getExtraPaxHeaders().size());
+ TarArchiveInputStream tis = new TarArchiveInputStream(new ByteArrayInputStream(bos.toByteArray()));
+ entry = tis.getNextTarEntry();
+ assertNotNull("couldn't get entry",entry);
+
+ assertEquals("extra header count",2,entry.getExtraPaxHeaders().size());
+ assertEquals("APACHE.mustelida","true",
+ entry.getExtraPaxHeader("APACHE.mustelida"));
+ assertEquals("user.org.apache.weasels","maximum weasels",
+ entry.getExtraPaxHeader("SCHILY.xattr.user.org.apache.weasels"));
+
+ assertEquals('W',tis.read());
+ assertTrue("should be at end of entry",tis.read() <0);
+
+ assertNull("should be at end of file",tis.getNextTarEntry());
+ tis.close();
+ }
+
@Test
public void testLinkFlagConstructor() {
final TarArchiveEntry t = new TarArchiveEntry("/foo", LF_GNUTYPE_LONGNAME);
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
index 8e0c7a5..e73982d 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveInputStreamTest.java
@@ -23,6 +23,7 @@ import static org.apache.commons.compress.AbstractTestCase.mkdir;
import static org.apache.commons.compress.AbstractTestCase.rmdir;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
@@ -312,6 +313,29 @@ public class TarArchiveInputStreamTest {
assertNull(is.getNextTarEntry());
}
}
+ @Test
+ public void testGetAndSetOfPaxEntry() throws Exception {
+ try (TarArchiveInputStream is = getTestStream("/COMPRESS-356.tar")) {
+ final TarArchiveEntry entry = is.getNextTarEntry();
+ assertEquals("package/package.json", entry.getName());
+ assertEquals(is.getCurrentEntry(),entry);
+ TarArchiveEntry weaselEntry = new TarArchiveEntry(entry.getName());
+ weaselEntry.setSize(entry.getSize());
+ is.setCurrentEntry(weaselEntry);
+ assertEquals(entry,is.getCurrentEntry());
+ assertFalse(entry == is.getCurrentEntry());
+ assertTrue(weaselEntry == is.getCurrentEntry());
+ try {
+ is.setCurrentEntry(null);
+ is.read();
+ fail("should abort because current entry is nulled");
+ } catch(IllegalStateException e) {
+ // expected
+ }
+ is.setCurrentEntry(entry);
+ is.read();
+ }
+ }
private TarArchiveInputStream getTestStream(final String name) {
return new TarArchiveInputStream(
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/9bcdc3e9/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
----------------------------------------------------------------------
diff --git a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
index 497a764..b8d213b 100644
--- a/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
+++ b/src/test/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStreamTest.java
@@ -18,7 +18,12 @@
package org.apache.commons.compress.archivers.tar;
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
@@ -27,13 +32,14 @@ import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.Reader;
import java.security.MessageDigest;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.TimeZone;
-
import org.apache.commons.compress.AbstractTestCase;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
@@ -689,6 +695,46 @@ public class TarArchiveOutputStreamTest extends AbstractTestCase {
}
return bos.toByteArray();
}
+ @Test public void testPutGlobalPaxHeaderEntry() throws IOException {
+ ByteArrayOutputStream bos = new ByteArrayOutputStream();
+ TarArchiveOutputStream tos = new TarArchiveOutputStream(bos);
+ int pid = 73;
+ int globCount = 1;
+ byte lfPaxGlobalExtendedHeader = TarConstants.LF_PAX_GLOBAL_EXTENDED_HEADER;
+ TarArchiveEntry globalHeader = new TarArchiveEntry("/tmp/GlobalHead." + pid + "." + globCount,
+ lfPaxGlobalExtendedHeader);
+ globalHeader.addPaxHeader("SCHILLY.xattr.user.org.apache.weasels","global-weasels");
+ tos.putArchiveEntry(globalHeader);
+ TarArchiveEntry entry = new TarArchiveEntry("message");
+ String x = "If at first you don't succeed, give up";
+ entry.setSize(x.length());
+ tos.putArchiveEntry(entry);
+ tos.write(x.getBytes());
+ tos.closeArchiveEntry();
+ entry = new TarArchiveEntry("counter-message");
+ String y = "Nothing succeeds like excess";
+ entry.setSize(y.length());
+ entry.addPaxHeader("SCHILLY.xattr.user.org.apache.weasels.species","unknown");
+ tos.putArchiveEntry(entry);
+ tos.write(y.getBytes());
+ tos.closeArchiveEntry();
+ tos.close();
+ TarArchiveInputStream in = new TarArchiveInputStream(new ByteArrayInputStream(bos.toByteArray()));
+ TarArchiveEntry entryIn = in.getNextTarEntry();
+ assertNotNull(entryIn);
+ assertEquals("message",entryIn.getName());
+ assertEquals("global-weasels",entryIn.getExtraPaxHeader("SCHILLY.xattr.user.org.apache.weasels"));
+ Reader reader = new InputStreamReader(in);
+ for(int i=0;i<x.length();i++) {
+ assertEquals(x.charAt(i),reader.read());
+ }
+ assertEquals(-1,reader.read());
+ entryIn = in.getNextTarEntry();
+ assertEquals("counter-message",entryIn.getName());
+ assertEquals("global-weasels",entryIn.getExtraPaxHeader("SCHILLY.xattr.user.org.apache.weasels"));
+ assertEquals("unknown",entryIn.getExtraPaxHeader("SCHILLY.xattr.user.org.apache.weasels.species"));
+ assertNull(in.getNextTarEntry());
+ }
/**
* When using long file names the longLinkEntry included the current timestamp as the Entry
[3/3] commons-compress git commit: COMPRESS-400 record changes
Posted by bo...@apache.org.
COMPRESS-400 record changes
closes #46
Project: http://git-wip-us.apache.org/repos/asf/commons-compress/repo
Commit: http://git-wip-us.apache.org/repos/asf/commons-compress/commit/53b45071
Tree: http://git-wip-us.apache.org/repos/asf/commons-compress/tree/53b45071
Diff: http://git-wip-us.apache.org/repos/asf/commons-compress/diff/53b45071
Branch: refs/heads/master
Commit: 53b45071004a33b703f3d8b0bebf28dda62f0f11
Parents: dddfb60
Author: Stefan Bodewig <bo...@apache.org>
Authored: Mon Jun 26 15:22:21 2017 +0200
Committer: Stefan Bodewig <bo...@apache.org>
Committed: Mon Jun 26 15:22:21 2017 +0200
----------------------------------------------------------------------
src/changes/changes.xml | 5 +++++
1 file changed, 5 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/53b45071/src/changes/changes.xml
----------------------------------------------------------------------
diff --git a/src/changes/changes.xml b/src/changes/changes.xml
index b9dafea..de5ca43 100644
--- a/src/changes/changes.xml
+++ b/src/changes/changes.xml
@@ -90,6 +90,11 @@ wanted to create such files.">
specification. In particular 512 is the only record size
accepted and the block size must be a multiple of 512.
</action>
+ <action issue="COMPRESS-400" type="add" date="2017-06-26"
+ due-to="Simon Spero">
+ It is now possible to specify/read custom PAX headers when
+ writing/reading tar archives.
+ </action>
</release>
<release version="1.14" date="2017-05-14"
description="Release 1.14">
[2/3] commons-compress git commit: COMPRESS-400 whitespace
Posted by bo...@apache.org.
COMPRESS-400 whitespace
Project: http://git-wip-us.apache.org/repos/asf/commons-compress/repo
Commit: http://git-wip-us.apache.org/repos/asf/commons-compress/commit/dddfb600
Tree: http://git-wip-us.apache.org/repos/asf/commons-compress/tree/dddfb600
Diff: http://git-wip-us.apache.org/repos/asf/commons-compress/diff/dddfb600
Branch: refs/heads/master
Commit: dddfb600beb0511a4a2aa17f266b4cda264d6ba1
Parents: 9bcdc3e
Author: Stefan Bodewig <bo...@apache.org>
Authored: Mon Jun 26 15:21:15 2017 +0200
Committer: Stefan Bodewig <bo...@apache.org>
Committed: Mon Jun 26 15:21:15 2017 +0200
----------------------------------------------------------------------
.../archivers/tar/TarArchiveOutputStream.java | 22 ++++++++++----------
1 file changed, 11 insertions(+), 11 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/commons-compress/blob/dddfb600/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
----------------------------------------------------------------------
diff --git a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
index 340e35c..067f64e 100644
--- a/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
+++ b/src/main/java/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java
@@ -338,15 +338,16 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
this.haveUnclosedEntry = true;
write(data);
closeArchiveEntry();
- } else {final Map<String, String> paxHeaders = new HashMap<>();
- final String entryName = entry.getName();
- final boolean paxHeaderContainsPath = handleLongName(entry, entryName, paxHeaders, "path",
- TarConstants.LF_GNUTYPE_LONGNAME, "file name");
+ } else {
+ final Map<String, String> paxHeaders = new HashMap<>();
+ final String entryName = entry.getName();
+ final boolean paxHeaderContainsPath = handleLongName(entry, entryName, paxHeaders, "path",
+ TarConstants.LF_GNUTYPE_LONGNAME, "file name");
- final String linkName = entry.getLinkName();
- final boolean paxHeaderContainsLinkPath = linkName != null && linkName.length() > 0
- && handleLongName(entry, linkName, paxHeaders, "linkpath",
- TarConstants.LF_GNUTYPE_LONGLINK, "link name");
+ final String linkName = entry.getLinkName();
+ final boolean paxHeaderContainsLinkPath = linkName != null && linkName.length() > 0
+ && handleLongName(entry, linkName, paxHeaders, "linkpath",
+ TarConstants.LF_GNUTYPE_LONGLINK, "link name");
if (bigNumberMode == BIGNUMBER_POSIX) {
addPaxHeadersForBigNumbers(paxHeaders, entry);
@@ -370,9 +371,8 @@ public class TarArchiveOutputStream extends ArchiveOutputStream {
writePaxHeaders(entry, entryName, paxHeaders);
}
- entry.writeEntryHeader(recordBuf, zipEncoding,
- bigNumberMode == BIGNUMBER_STAR);
- writeRecord(recordBuf);
+ entry.writeEntryHeader(recordBuf, zipEncoding, bigNumberMode == BIGNUMBER_STAR);
+ writeRecord(recordBuf);
currBytes = 0;