You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@cordova.apache.org by za...@apache.org on 2015/02/20 20:38:12 UTC
[18/20] cordova-firefoxos git commit: CB-7567 Don't use adm-zip
because it creates invalid zip files
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/test/assets/attributes_test/asd/New Text Document.txt
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/test/assets/attributes_test/asd/New Text Document.txt b/node_modules/adm-zip/test/assets/attributes_test/asd/New Text Document.txt
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/test/assets/attributes_test/blank file.txt
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/test/assets/attributes_test/blank file.txt b/node_modules/adm-zip/test/assets/attributes_test/blank file.txt
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/test/index.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/test/index.js b/node_modules/adm-zip/test/index.js
deleted file mode 100644
index c0d7822..0000000
--- a/node_modules/adm-zip/test/index.js
+++ /dev/null
@@ -1,5 +0,0 @@
-var Attr = require("../util").FileAttr,
- Zip = require("../adm-zip"),
- fs = require("fs");
-
-//zip.addLocalFile("./test/readonly.txt");
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/util/constants.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/util/constants.js b/node_modules/adm-zip/util/constants.js
deleted file mode 100644
index 61a96af..0000000
--- a/node_modules/adm-zip/util/constants.js
+++ /dev/null
@@ -1,84 +0,0 @@
-module.exports = {
- /* The local file header */
- LOCHDR : 30, // LOC header size
- LOCSIG : 0x04034b50, // "PK\003\004"
- LOCVER : 4, // version needed to extract
- LOCFLG : 6, // general purpose bit flag
- LOCHOW : 8, // compression method
- LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
- LOCCRC : 14, // uncompressed file crc-32 value
- LOCSIZ : 18, // compressed size
- LOCLEN : 22, // uncompressed size
- LOCNAM : 26, // filename length
- LOCEXT : 28, // extra field length
-
- /* The Data descriptor */
- EXTSIG : 0x08074b50, // "PK\007\008"
- EXTHDR : 16, // EXT header size
- EXTCRC : 4, // uncompressed file crc-32 value
- EXTSIZ : 8, // compressed size
- EXTLEN : 12, // uncompressed size
-
- /* The central directory file header */
- CENHDR : 46, // CEN header size
- CENSIG : 0x02014b50, // "PK\001\002"
- CENVEM : 4, // version made by
- CENVER : 6, // version needed to extract
- CENFLG : 8, // encrypt, decrypt flags
- CENHOW : 10, // compression method
- CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
- CENCRC : 16, // uncompressed file crc-32 value
- CENSIZ : 20, // compressed size
- CENLEN : 24, // uncompressed size
- CENNAM : 28, // filename length
- CENEXT : 30, // extra field length
- CENCOM : 32, // file comment length
- CENDSK : 34, // volume number start
- CENATT : 36, // internal file attributes
- CENATX : 38, // external file attributes (host system dependent)
- CENOFF : 42, // LOC header offset
-
- /* The entries in the end of central directory */
- ENDHDR : 22, // END header size
- ENDSIG : 0x06054b50, // "PK\005\006"
- ENDSUB : 8, // number of entries on this disk
- ENDTOT : 10, // total number of entries
- ENDSIZ : 12, // central directory size in bytes
- ENDOFF : 16, // offset of first CEN header
- ENDCOM : 20, // zip file comment length
-
- /* Compression methods */
- STORED : 0, // no compression
- SHRUNK : 1, // shrunk
- REDUCED1 : 2, // reduced with compression factor 1
- REDUCED2 : 3, // reduced with compression factor 2
- REDUCED3 : 4, // reduced with compression factor 3
- REDUCED4 : 5, // reduced with compression factor 4
- IMPLODED : 6, // imploded
- // 7 reserved
- DEFLATED : 8, // deflated
- ENHANCED_DEFLATED: 9, // enhanced deflated
- PKWARE : 10,// PKWare DCL imploded
- // 11 reserved
- BZIP2 : 12, // compressed using BZIP2
- // 13 reserved
- LZMA : 14, // LZMA
- // 15-17 reserved
- IBM_TERSE : 18, // compressed using IBM TERSE
- IBM_LZ77 : 19, //IBM LZ77 z
-
- /* General purpose bit flag */
- FLG_ENC : 0, // encripted file
- FLG_COMP1 : 1, // compression option
- FLG_COMP2 : 2, // compression option
- FLG_DESC : 4, // data descriptor
- FLG_ENH : 8, // enhanced deflation
- FLG_STR : 16, // strong encryption
- FLG_LNG : 1024, // language encoding
- FLG_MSK : 4096, // mask header values
-
- /* Load type */
- FILE : 0,
- BUFFER : 1,
- NONE : 2
-};
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/util/errors.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/util/errors.js b/node_modules/adm-zip/util/errors.js
deleted file mode 100644
index 50931c3..0000000
--- a/node_modules/adm-zip/util/errors.js
+++ /dev/null
@@ -1,35 +0,0 @@
-module.exports = {
- /* Header error messages */
- "INVALID_LOC" : "Invalid LOC header (bad signature)",
- "INVALID_CEN" : "Invalid CEN header (bad signature)",
- "INVALID_END" : "Invalid END header (bad signature)",
-
- /* ZipEntry error messages*/
- "NO_DATA" : "Nothing to decompress",
- "BAD_CRC" : "CRC32 checksum failed",
- "FILE_IN_THE_WAY" : "There is a file in the way: %s",
- "UNKNOWN_METHOD" : "Invalid/unsupported compression method",
-
- /* Inflater error messages */
- "AVAIL_DATA" : "inflate::Available inflate data did not terminate",
- "INVALID_DISTANCE" : "inflate::Invalid literal/length or distance code in fixed or dynamic block",
- "TO_MANY_CODES" : "inflate::Dynamic block code description: too many length or distance codes",
- "INVALID_REPEAT_LEN" : "inflate::Dynamic block code description: repeat more than specified lengths",
- "INVALID_REPEAT_FIRST" : "inflate::Dynamic block code description: repeat lengths with no first length",
- "INCOMPLETE_CODES" : "inflate::Dynamic block code description: code lengths codes incomplete",
- "INVALID_DYN_DISTANCE": "inflate::Dynamic block code description: invalid distance code lengths",
- "INVALID_CODES_LEN": "inflate::Dynamic block code description: invalid literal/length code lengths",
- "INVALID_STORE_BLOCK" : "inflate::Stored block length did not match one's complement",
- "INVALID_BLOCK_TYPE" : "inflate::Invalid block type (type == 3)",
-
- /* ADM-ZIP error messages */
- "CANT_EXTRACT_FILE" : "Could not extract the file",
- "CANT_OVERRIDE" : "Target file already exists",
- "NO_ZIP" : "No zip file was loaded",
- "NO_ENTRY" : "Entry doesn't exist",
- "DIRECTORY_CONTENT_ERROR" : "A directory cannot have content",
- "FILE_NOT_FOUND" : "File not found: %s",
- "NOT_IMPLEMENTED" : "Not implemented",
- "INVALID_FILENAME" : "Invalid filename",
- "INVALID_FORMAT" : "Invalid or unsupported zip format. No END header found"
-};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/util/fattr.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/util/fattr.js b/node_modules/adm-zip/util/fattr.js
deleted file mode 100644
index 4f247ea..0000000
--- a/node_modules/adm-zip/util/fattr.js
+++ /dev/null
@@ -1,84 +0,0 @@
-var fs = require("fs"),
- pth = require("path");
-
-fs.existsSync = fs.existsSync || pth.existsSync;
-
-module.exports = function(/*String*/path) {
-
- var _path = path || "",
- _permissions = 0,
- _obj = newAttr(),
- _stat = null;
-
- function newAttr() {
- return {
- directory : false,
- readonly : false,
- hidden : false,
- executable : false,
- mtime : 0,
- atime : 0
- }
- }
-
- if (_path && fs.existsSync(_path)) {
- _stat = fs.statSync(_path);
- _obj.directory = _stat.isDirectory();
- _obj.mtime = _stat.mtime;
- _obj.atime = _stat.atime;
- _obj.executable = !!(1 & parseInt ((_stat.mode & parseInt ("777", 8)).toString (8)[0]));
- _obj.readonly = !!(2 & parseInt ((_stat.mode & parseInt ("777", 8)).toString (8)[0]));
- _obj.hidden = pth.basename(_path)[0] === ".";
- } else {
- console.warn("Invalid path: " + _path)
- }
-
- return {
-
- get directory () {
- return _obj.directory;
- },
-
- get readOnly () {
- return _obj.readonly;
- },
-
- get hidden () {
- return _obj.hidden;
- },
-
- get mtime () {
- return _obj.mtime;
- },
-
- get atime () {
- return _obj.atime;
- },
-
-
- get executable () {
- return _obj.executable;
- },
-
- decodeAttributes : function(val) {
-
- },
-
- encodeAttributes : function (val) {
-
- },
-
- toString : function() {
- return '{\n' +
- '\t"path" : "' + _path + ",\n" +
- '\t"isDirectory" : ' + _obj.directory + ",\n" +
- '\t"isReadOnly" : ' + _obj.readonly + ",\n" +
- '\t"isHidden" : ' + _obj.hidden + ",\n" +
- '\t"isExecutable" : ' + _obj.executable + ",\n" +
- '\t"mTime" : ' + _obj.mtime + "\n" +
- '\t"aTime" : ' + _obj.atime + "\n" +
- '}';
- }
- }
-
-};
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/util/index.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/util/index.js b/node_modules/adm-zip/util/index.js
deleted file mode 100644
index d77b980..0000000
--- a/node_modules/adm-zip/util/index.js
+++ /dev/null
@@ -1,4 +0,0 @@
-module.exports = require("./utils");
-module.exports.Constants = require("./constants");
-module.exports.Errors = require("./errors");
-module.exports.FileAttr = require("./fattr");
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/util/utils.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/util/utils.js b/node_modules/adm-zip/util/utils.js
deleted file mode 100644
index ef42999..0000000
--- a/node_modules/adm-zip/util/utils.js
+++ /dev/null
@@ -1,145 +0,0 @@
-var fs = require("fs"),
- pth = require('path');
-
-fs.existsSync = fs.existsSync || pth.existsSync;
-
-module.exports = (function() {
-
- var crcTable = [],
- Constants = require('./constants'),
- Errors = require('./errors'),
-
- PATH_SEPARATOR = pth.normalize("/");
-
-
- function mkdirSync(/*String*/path) {
- var resolvedPath = path.split(PATH_SEPARATOR)[0];
- path.split(PATH_SEPARATOR).forEach(function(name) {
- if (!name || name.substr(-1,1) == ":") return;
- resolvedPath += PATH_SEPARATOR + name;
- var stat;
- try {
- stat = fs.statSync(resolvedPath);
- } catch (e) {
- fs.mkdirSync(resolvedPath);
- }
- if (stat && stat.isFile())
- throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
- });
- }
-
- function findSync(/*String*/root, /*RegExp*/pattern, /*Boolean*/recoursive) {
- if (typeof pattern === 'boolean') {
- recoursive = pattern;
- pattern = undefined;
- }
- var files = [];
- fs.readdirSync(root).forEach(function(file) {
- var path = pth.join(root, file);
-
- if (fs.statSync(path).isDirectory() && recoursive)
- files = files.concat(findSync(path, pattern, recoursive));
-
- if (!pattern || pattern.test(path)) {
- files.push(pth.normalize(path) + (fs.statSync(path).isDirectory() ? PATH_SEPARATOR : ""));
- }
-
- });
- return files;
- }
-
- return {
- makeDir : function(/*String*/path) {
- mkdirSync(path);
- },
-
- crc32 : function(buf) {
- var b = new Buffer(4);
- if (!crcTable.length) {
- for (var n = 0; n < 256; n++) {
- var c = n;
- for (var k = 8; --k >= 0;) //
- if ((c & 1) != 0) { c = 0xedb88320 ^ (c >>> 1); } else { c = c >>> 1; }
- if (c < 0) {
- b.writeInt32LE(c, 0);
- c = b.readUInt32LE(0);
- }
- crcTable[n] = c;
- }
- }
- var crc = 0, off = 0, len = buf.length, c1 = ~crc;
- while(--len >= 0) c1 = crcTable[(c1 ^ buf[off++]) & 0xff] ^ (c1 >>> 8);
- crc = ~c1;
- b.writeInt32LE(crc & 0xffffffff, 0);
- return b.readUInt32LE(0);
- },
-
- methodToString : function(/*Number*/method) {
- switch (method) {
- case Constants.STORED:
- return 'STORED (' + method + ')';
- case Constants.DEFLATED:
- return 'DEFLATED (' + method + ')';
- default:
- return 'UNSUPPORTED (' + method + ')'
- }
-
- },
-
- writeFileTo : function(/*String*/path, /*Buffer*/content, /*Boolean*/overwrite, /*Number*/attr) {
- if (fs.existsSync(path)) {
- if (!overwrite)
- return false; // cannot overwite
-
- var stat = fs.statSync(path);
- if (stat.isDirectory()) {
- return false;
- }
- }
- var folder = pth.dirname(path);
- if (!fs.existsSync(folder)) {
- mkdirSync(folder);
- }
-
- var fd;
- try {
- fd = fs.openSync(path, 'w', 438); // 0666
- } catch(e) {
- fs.chmodSync(path, 438);
- fd = fs.openSync(path, 'w', 438);
- }
- if (fd) {
- fs.writeSync(fd, content, 0, content.length, 0);
- fs.closeSync(fd);
- }
- fs.chmodSync(path, attr || 438);
- return true;
- },
-
- findFiles : function(/*String*/path) {
- return findSync(path, true);
- },
-
- getAttributes : function(/*String*/path) {
-
- },
-
- setAttributes : function(/*String*/path) {
-
- },
-
- toBuffer : function(input) {
- if (Buffer.isBuffer(input)) {
- return input;
- } else {
- if (input.length == 0) {
- return new Buffer(0)
- }
- return new Buffer(input, 'utf8');
- }
- },
-
- Constants : Constants,
- Errors : Errors
- }
-})();
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/zipEntry.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/zipEntry.js b/node_modules/adm-zip/zipEntry.js
deleted file mode 100644
index 3da38f1..0000000
--- a/node_modules/adm-zip/zipEntry.js
+++ /dev/null
@@ -1,224 +0,0 @@
-var Utils = require("./util"),
- Headers = require("./headers"),
- Constants = Utils.Constants,
- Methods = require("./methods");
-
-module.exports = function (/*Buffer*/input) {
-
- var _entryHeader = new Headers.EntryHeader(),
- _entryName = new Buffer(0),
- _comment = new Buffer(0),
- _isDirectory = false,
- uncompressedData = null,
- _extra = new Buffer(0);
-
- function getCompressedDataFromZip() {
- if (!input || !Buffer.isBuffer(input)) {
- return new Buffer(0);
- }
- _entryHeader.loadDataHeaderFromBinary(input);
- return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize)
- }
-
- function crc32OK(data) {
- // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
- if (_entryHeader.flags & 0x8 != 0x8) {
- if (Utils.crc32(data) != _entryHeader.crc) {
- return false;
- }
- } else {
- // @TODO: load and check data descriptor header
- // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
- // (optionally preceded by a 4-byte signature) immediately after the compressed data:
- }
- return true;
- }
-
- function decompress(/*Boolean*/async, /*Function*/callback) {
- if (_isDirectory) {
- if (async && callback) {
- callback(new Buffer(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
- }
- return new Buffer(0);
- }
-
- var compressedData = getCompressedDataFromZip();
- if (compressedData.length == 0) {
- if (async && callback) callback(compressedData, Utils.Errors.NO_DATA);//si added error.
- return compressedData;
- }
-
- var data = new Buffer(_entryHeader.size);
- data.fill(0);
-
- switch (_entryHeader.method) {
- case Utils.Constants.STORED:
- compressedData.copy(data);
- if (!crc32OK(data)) {
- if (async && callback) callback(data, Utils.Errors.BAD_CRC);//si added error
- return Utils.Errors.BAD_CRC;
- } else {//si added otherwise did not seem to return data.
- if (async && callback) callback(data);
- return data;
- }
- break;
- case Utils.Constants.DEFLATED:
- var inflater = new Methods.Inflater(compressedData);
- if (!async) {
- inflater.inflate(data);
- if (!crc32OK(data)) {
- console.warn(Utils.Errors.BAD_CRC + " " + _entryName.toString())
- }
- return data;
- } else {
- inflater.inflateAsync(function(result) {
- result.copy(data, 0);
- if (crc32OK(data)) {
- if (callback) callback(data, Utils.Errors.BAD_CRC); //si added error
- } else { //si added otherwise did not seem to return data.
- if (callback) callback(data);
- }
- })
- }
- break;
- default:
- if (async && callback) callback(new Buffer(0), Utils.Errors.UNKNOWN_METHOD);
- return Utils.Errors.UNKNOWN_METHOD;
- }
- }
-
- function compress(/*Boolean*/async, /*Function*/callback) {
- if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
- // no data set or the data wasn't changed to require recompression
- if (async && callback) callback(getCompressedDataFromZip());
- return getCompressedDataFromZip();
- }
-
- if (uncompressedData.length && !_isDirectory) {
- var compressedData;
- // Local file header
- switch (_entryHeader.method) {
- case Utils.Constants.STORED:
- _entryHeader.compressedSize = _entryHeader.size;
-
- compressedData = new Buffer(uncompressedData.length);
- uncompressedData.copy(compressedData);
-
- if (async && callback) callback(compressedData);
- return compressedData;
-
- break;
- default:
- case Utils.Constants.DEFLATED:
-
- var deflater = new Methods.Deflater(uncompressedData);
- if (!async) {
- var deflated = deflater.deflate();
- _entryHeader.compressedSize = deflated.length;
- return deflated;
- } else {
- deflater.deflateAsync(function(data) {
- compressedData = new Buffer(data.length);
- _entryHeader.compressedSize = data.length;
- data.copy(compressedData);
- callback && callback(compressedData);
- })
- }
- deflater = null;
- break;
- }
- } else {
- if (async && callback) {
- callback(new Buffer(0));
- } else {
- return new Buffer(0);
- }
- }
- }
-
- return {
- get entryName () { return _entryName.toString(); },
- get rawEntryName() { return _entryName; },
- set entryName (val) {
- _entryName = Utils.toBuffer(val);
- var lastChar = _entryName[_entryName.length - 1];
- _isDirectory = (lastChar == 47) || (lastChar == 92);
- _entryHeader.fileNameLength = _entryName.length;
- },
-
- get extra () { return _extra; },
- set extra (val) {
- _extra = val;
- _entryHeader.extraLength = val.length;
- },
-
- get comment () { return _comment.toString(); },
- set comment (val) {
- _comment = Utils.toBuffer(val);
- _entryHeader.commentLength = _comment.length;
- },
-
- get name () { var n = _entryName.toString(); return _isDirectory ? n.substr(n.length - 1).split("/").pop() : n.split("/").pop(); },
- get isDirectory () { return _isDirectory },
-
- getCompressedData : function() {
- return compress(false, null)
- },
-
- getCompressedDataAsync : function(/*Function*/callback) {
- compress(true, callback)
- },
-
- setData : function(value) {
- uncompressedData = Utils.toBuffer(value);
- if (!_isDirectory && uncompressedData.length) {
- _entryHeader.size = uncompressedData.length;
- _entryHeader.method = Utils.Constants.DEFLATED;
- _entryHeader.crc = Utils.crc32(value);
- } else { // folders and blank files should be stored
- _entryHeader.method = Utils.Constants.STORED;
- }
- },
-
- getData : function() {
- return decompress(false, null);
- },
-
- getDataAsync : function(/*Function*/callback) {
- decompress(true, callback)
- },
-
- set header(/*Buffer*/data) {
- _entryHeader.loadFromBinary(data);
- },
-
- get header() {
- return _entryHeader;
- },
-
- packHeader : function() {
- var header = _entryHeader.entryHeaderToBinary();
- // add
- _entryName.copy(header, Utils.Constants.CENHDR);
- if (_entryHeader.extraLength) {
- _extra.copy(header, Utils.Constants.CENHDR + _entryName.length)
- }
- if (_entryHeader.commentLength) {
- _comment.copy(header, Utils.Constants.CENHDR + _entryName.length + _entryHeader.extraLength, _comment.length);
- }
- return header;
- },
-
- toString : function() {
- return '{\n' +
- '\t"entryName" : "' + _entryName.toString() + "\",\n" +
- '\t"name" : "' + _entryName.toString().split("/").pop() + "\",\n" +
- '\t"comment" : "' + _comment.toString() + "\",\n" +
- '\t"isDirectory" : ' + _isDirectory + ",\n" +
- '\t"header" : ' + _entryHeader.toString().replace(/\t/mg, "\t\t") + ",\n" +
- '\t"compressedData" : <' + (input && input.length + " bytes buffer" || "null") + ">\n" +
- '\t"data" : <' + (uncompressedData && uncompressedData.length + " bytes buffer" || "null") + ">\n" +
- '}';
- }
- }
-};
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/adm-zip/zipFile.js
----------------------------------------------------------------------
diff --git a/node_modules/adm-zip/zipFile.js b/node_modules/adm-zip/zipFile.js
deleted file mode 100644
index d7433b0..0000000
--- a/node_modules/adm-zip/zipFile.js
+++ /dev/null
@@ -1,311 +0,0 @@
-var ZipEntry = require("./zipEntry"),
- Headers = require("./headers"),
- Utils = require("./util");
-
-module.exports = function(/*String|Buffer*/input, /*Number*/inputType) {
- var entryList = [],
- entryTable = {},
- _comment = new Buffer(0),
- filename = "",
- fs = require("fs"),
- inBuffer = null,
- mainHeader = new Headers.MainHeader();
-
- if (inputType == Utils.Constants.FILE) {
- // is a filename
- filename = input;
- inBuffer = fs.readFileSync(filename);
- readMainHeader();
- } else if (inputType == Utils.Constants.BUFFER) {
- // is a memory buffer
- inBuffer = input;
- readMainHeader();
- } else {
- // none. is a new file
- }
-
- function readEntries() {
- entryTable = {};
- entryList = new Array(mainHeader.diskEntries); // total number of entries
- var index = mainHeader.offset; // offset of first CEN header
- for(var i = 0; i < entryList.length; i++) {
-
- var tmp = index,
- entry = new ZipEntry(inBuffer);
- entry.header = inBuffer.slice(tmp, tmp += Utils.Constants.CENHDR);
-
- entry.entryName = inBuffer.slice(tmp, tmp += entry.header.fileNameLength);
-
- if (entry.header.extraLength) {
- entry.extra = inBuffer.slice(tmp, tmp += entry.header.extraLength);
- }
-
- if (entry.header.commentLength)
- entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
-
- index += entry.header.entryHeaderSize;
-
- entryList[i] = entry;
- entryTable[entry.entryName] = entry;
- }
- }
-
- function readMainHeader() {
- var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
- n = Math.max(0, i - 0xFFFF), // 0xFFFF is the max zip file comment length
- endOffset = 0; // Start offset of the END header
-
- for (i; i >= n; i--) {
- if (inBuffer[i] != 0x50) continue; // quick check that the byte is 'P'
- if (inBuffer.readUInt32LE(i) == Utils.Constants.ENDSIG) { // "PK\005\006"
- endOffset = i;
- break;
- }
- }
- if (!endOffset)
- throw Utils.Errors.INVALID_FORMAT;
-
- mainHeader.loadFromBinary(inBuffer.slice(endOffset, endOffset + Utils.Constants.ENDHDR));
- if (mainHeader.commentLength) {
- _comment = inBuffer.slice(endOffset + Utils.Constants.ENDHDR);
- }
- readEntries();
- }
-
- return {
- /**
- * Returns an array of ZipEntry objects existent in the current opened archive
- * @return Array
- */
- get entries () {
- return entryList;
- },
-
- /**
- * Archive comment
- * @return {String}
- */
- get comment () { return _comment.toString(); },
- set comment(val) {
- mainHeader.commentLength = val.length;
- _comment = val;
- },
-
- /**
- * Returns a reference to the entry with the given name or null if entry is inexistent
- *
- * @param entryName
- * @return ZipEntry
- */
- getEntry : function(/*String*/entryName) {
- return entryTable[entryName] || null;
- },
-
- /**
- * Adds the given entry to the entry list
- *
- * @param entry
- */
- setEntry : function(/*ZipEntry*/entry) {
- entryList.push(entry);
- entryTable[entry.entryName] = entry;
- mainHeader.totalEntries = entryList.length;
- },
-
- /**
- * Removes the entry with the given name from the entry list.
- *
- * If the entry is a directory, then all nested files and directories will be removed
- * @param entryName
- */
- deleteEntry : function(/*String*/entryName) {
- var entry = entryTable[entryName];
- if (entry && entry.isDirectory) {
- var _self = this;
- this.getEntryChildren(entry).forEach(function(child) {
- if (child.entryName != entryName) {
- _self.deleteEntry(child.entryName)
- }
- })
- }
- entryList.splice(entryList.indexOf(entry), 1);
- delete(entryTable[entryName]);
- mainHeader.totalEntries = entryList.length;
- },
-
- /**
- * Iterates and returns all nested files and directories of the given entry
- *
- * @param entry
- * @return Array
- */
- getEntryChildren : function(/*ZipEntry*/entry) {
- if (entry.isDirectory) {
- var list = [],
- name = entry.entryName,
- len = name.length;
-
- entryList.forEach(function(zipEntry) {
- if (zipEntry.entryName.substr(0, len) == name) {
- list.push(zipEntry);
- }
- });
- return list;
- }
- return []
- },
-
- /**
- * Returns the zip file
- *
- * @return Buffer
- */
- compressToBuffer : function() {
- if (entryList.length > 1) {
- entryList.sort(function(a, b) {
- var nameA = a.entryName.toLowerCase();
- var nameB = b.entryName.toLowerCase();
- if (nameA < nameB) {return -1}
- if (nameA > nameB) {return 1}
- return 0;
- });
- }
-
- var totalSize = 0,
- dataBlock = [],
- entryHeaders = [],
- dindex = 0;
-
- mainHeader.size = 0;
- mainHeader.offset = 0;
-
- entryList.forEach(function(entry) {
- entry.header.offset = dindex;
-
- // compress data and set local and entry header accordingly. Reason why is called first
- var compressedData = entry.getCompressedData();
- // data header
- var dataHeader = entry.header.dataHeaderToBinary();
- var postHeader = new Buffer(entry.entryName + entry.extra.toString());
- var dataLength = dataHeader.length + postHeader.length + compressedData.length;
-
- dindex += dataLength;
-
- dataBlock.push(dataHeader);
- dataBlock.push(postHeader);
- dataBlock.push(compressedData);
-
- var entryHeader = entry.packHeader();
- entryHeaders.push(entryHeader);
- mainHeader.size += entryHeader.length;
- totalSize += (dataLength + entryHeader.length);
- });
-
- totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
- // point to end of data and begining of central directory first record
- mainHeader.offset = dindex;
-
- dindex = 0;
- var outBuffer = new Buffer(totalSize);
- dataBlock.forEach(function(content) {
- content.copy(outBuffer, dindex); // write data blocks
- dindex += content.length;
- });
- entryHeaders.forEach(function(content) {
- content.copy(outBuffer, dindex); // write central directory entries
- dindex += content.length;
- });
-
- var mh = mainHeader.toBinary();
- if (_comment) {
- _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
- }
-
- mh.copy(outBuffer, dindex); // write main header
-
- return outBuffer
- },
-
- toAsyncBuffer : function(/*Function*/onSuccess,/*Function*/onFail,/*Function*/onItemStart,/*Function*/onItemEnd) {
- if (entryList.length > 1) {
- entryList.sort(function(a, b) {
- var nameA = a.entryName.toLowerCase();
- var nameB = b.entryName.toLowerCase();
- if (nameA > nameB) {return -1}
- if (nameA < nameB) {return 1}
- return 0;
- });
- }
-
- var totalSize = 0,
- dataBlock = [],
- entryHeaders = [],
- dindex = 0;
-
- mainHeader.size = 0;
- mainHeader.offset = 0;
-
- var compress=function(entryList){
- var self=arguments.callee;
- var entry;
- if(entryList.length){
- var entry=entryList.pop();
- var name=entry.entryName + entry.extra.toString();
- if(onItemStart)onItemStart(name);
- entry.getCompressedDataAsync(function(compressedData){
- if(onItemEnd)onItemEnd(name);
-
- entry.header.offset = dindex;
- // data header
- var dataHeader = entry.header.dataHeaderToBinary();
- var postHeader = new Buffer(name);
- var dataLength = dataHeader.length + postHeader.length + compressedData.length;
-
- dindex += dataLength;
-
- dataBlock.push(dataHeader);
- dataBlock.push(postHeader);
- dataBlock.push(compressedData);
-
- var entryHeader = entry.packHeader();
- entryHeaders.push(entryHeader);
- mainHeader.size += entryHeader.length;
- totalSize += (dataLength + entryHeader.length);
-
- if(entryList.length){
- self(entryList);
- }else{
-
-
- totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
- // point to end of data and begining of central directory first record
- mainHeader.offset = dindex;
-
- dindex = 0;
- var outBuffer = new Buffer(totalSize);
- dataBlock.forEach(function(content) {
- content.copy(outBuffer, dindex); // write data blocks
- dindex += content.length;
- });
- entryHeaders.forEach(function(content) {
- content.copy(outBuffer, dindex); // write central directory entries
- dindex += content.length;
- });
-
- var mh = mainHeader.toBinary();
- if (_comment) {
- _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
- }
-
- mh.copy(outBuffer, dindex); // write main header
-
- onSuccess(outBuffer);
- }
- });
- }
- };
-
- compress(entryList);
- }
- }
-};
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/LICENSE-MIT
----------------------------------------------------------------------
diff --git a/node_modules/archiver/LICENSE-MIT b/node_modules/archiver/LICENSE-MIT
new file mode 100644
index 0000000..bc56a8a
--- /dev/null
+++ b/node_modules/archiver/LICENSE-MIT
@@ -0,0 +1,22 @@
+Copyright (c) 2012-2014 Chris Talkington, contributors.
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use,
+copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/README.md
----------------------------------------------------------------------
diff --git a/node_modules/archiver/README.md b/node_modules/archiver/README.md
new file mode 100644
index 0000000..94e6a1f
--- /dev/null
+++ b/node_modules/archiver/README.md
@@ -0,0 +1,215 @@
+# Archiver v0.14.3 [![Build Status](https://travis-ci.org/archiverjs/node-archiver.svg?branch=master)](https://travis-ci.org/archiverjs/node-archiver)
+
+a streaming interface for archive generation
+
+[![NPM](https://nodei.co/npm/archiver.png)](https://nodei.co/npm/archiver/)
+
+## Install
+
+```bash
+npm install archiver --save
+```
+
+You can also use `npm install https://github.com/archiverjs/node-archiver/archive/master.tar.gz` to test upcoming versions.
+
+## Archiver
+
+#### create(format, options)
+
+Creates an Archiver instance based on the format (zip, tar, etc) passed. Parameters can be passed directly to `Archiver` constructor for convenience.
+
+#### registerFormat(format, module)
+
+Registers an archive format. Format modules are essentially transform streams with a few required methods. They will be further documented once a formal spec is in place.
+
+### Instance Methods
+
+Inherits [Transform Stream](http://nodejs.org/api/stream.html#stream_class_stream_transform) methods.
+
+#### abort()
+
+Aborts the archiving process, taking a best-effort approach, by:
+
+* removing any pending queue tasks
+* allowing any active queue workers to finish
+* detaching internal module pipes
+* ending both sides of the Transform stream
+
+*It will NOT drain any remaining sources.*
+
+#### append(input, data)
+
+Appends an input source (text string, buffer, or stream) to the instance. When the instance has received, processed, and emitted the input, the `entry` event is fired.
+
+Replaced `#addFile` in v0.5.
+
+```js
+archive.append('string', { name:'string.txt' });
+archive.append(new Buffer('string'), { name:'buffer.txt' });
+archive.append(fs.createReadStream('mydir/file.txt'), { name:'stream.txt' });
+archive.append(null, { name:'dir/' });
+```
+
+#### bulk(mappings)
+
+Appends multiple entries from passed array of src-dest mappings. A [lazystream](https://github.com/jpommerening/node-lazystream) wrapper is used to prevent issues with open file limits.
+
+Globbing patterns are supported through use of the bundled [file-utils](https://github.com/SBoudrias/file-utils) module.
+
+The `data` property can be set (per src-dest mapping) to define data for matched entries.
+
+```js
+archive.bulk([
+ { src: ['mydir/**'], data: { date: new Date() } },
+ { expand: true, cwd: 'mydir', src: ['**'], dest: 'newdir' }
+]);
+```
+
+For more detail on this feature, please see [BULK.md](https://github.com/archiverjs/node-archiver/blob/master/BULK.md).
+
+#### directory(dirpath[, destpath, data])
+
+Appends a directory and its files, recursively, given its dirpath. This is meant to be a simpler approach to something previously only possible with `bulk`. The use of `destpath` allows one to define a custom destination path within the resulting archive and `data` allows for setting data on each entry appended.
+
+```js
+// mydir/ -> archive.ext/mydir/
+archive.directory('mydir');
+
+// mydir/ -> archive.ext/abc/
+archive.directory('mydir', 'abc');
+
+// mydir/ -> archive.ext/
+archive.directory('mydir', false, { date: new Date() });
+```
+
+#### file(filepath, data)
+
+Appends a file given its filepath using a [lazystream](https://github.com/jpommerening/node-lazystream) wrapper to prevent issues with open file limits. When the instance has received, processed, and emitted the file, the `entry` event is fired.
+
+```js
+archive.file('mydir/file.txt', { name:'file.txt' });
+```
+
+#### finalize()
+
+Finalizes the instance and prevents further appending to the archive structure (queue will continue until drained). The `end`, `close` or `finish` events on the destination stream may fire right after calling this method so you should set listeners beforehand to properly detect stream completion.
+
+*You must call this method to get a valid archive and end the instance stream.*
+
+#### pointer()
+
+Returns the current byte length emitted by archiver. Use this in your end callback to log generated size.
+
+## Events
+
+Inherits [Transform Stream](http://nodejs.org/api/stream.html#stream_class_stream_transform) events.
+
+#### entry
+
+Fired when the entry's input has been processed and appended to the archive. Passes entry data as first argument.
+
+## Zip
+
+### Options
+
+#### comment `string`
+
+Sets the zip comment.
+
+#### statConcurrency `number`
+
+Sets the number of workers used to process the internal fs stat queue. Defaults to 4.
+
+#### store `boolean`
+
+If true, all entries will be archived without compression. Defaults to `false`.
+
+#### zlib `object`
+
+Passed to node's [zlib](http://nodejs.org/api/zlib.html#zlib_options) module to control compression. Options may vary by node version.
+
+### Entry Data
+
+#### name `string` `required`
+
+Sets the entry name including internal path.
+
+#### date `string|Date`
+
+Sets the entry date. This can be any valid date string or instance. Defaults to current time in locale.
+
+When using the `bulk` or `file` methods, fs stat data is used as the default value.
+
+#### store `boolean`
+
+If true, this entry will be archived without compression. Defaults to global `store` option.
+
+#### comment `string`
+
+Sets the entry comment.
+
+#### mode `number`
+
+Sets the entry permissions. Defaults to octal 0755 (directory) or 0644 (file).
+
+When using the `bulk` or `file` methods, fs stat data is used as the default value.
+
+#### stats `fs.Stats`
+
+Sets the fs stat data for this entry. This allows for reduction of fs stat calls when stat data is already known.
+
+## Tar
+
+### Options
+
+#### gzip `boolean`
+
+Compresses the tar archive using gzip, default is false.
+
+#### gzipOptions `object`
+
+Passed to node's [zlib](http://nodejs.org/api/zlib.html#zlib_options) module to control compression. Options may vary by node version.
+
+#### statConcurrency `number`
+
+Sets the number of workers used to process the internal fs stat queue. Defaults to 4.
+
+### Entry Data
+
+#### name `string` `required`
+
+Sets the entry name including internal path.
+
+#### date `string|Date`
+
+Sets the entry date. This can be any valid date string or instance. Defaults to current time in locale.
+
+When using the `bulk` or `file` methods, fs stat data is used as the default value.
+
+#### mode `number`
+
+Sets the entry permissions. Defaults to octal 0755 (directory) or 0644 (file).
+
+When using the `bulk` or `file` methods, fs stat data is used as the default value.
+
+#### stats `fs.Stats`
+
+Sets the fs stat data for this entry. This allows for reduction of fs stat calls when stat data is already known.
+
+## Custom Formats
+
+Archiver ships with out of the box support for TAR and ZIP archives. You can register additional formats with `registerFormat`.
+
+## Libraries
+
+Archiver makes use of several libraries/modules to avoid duplication of efforts.
+
+- [zip-stream](https://npmjs.org/package/zip-stream)
+- [tar-stream](https://npmjs.org/package/tar-stream)
+
+## Things of Interest
+
+- [Examples](https://github.com/archiverjs/node-archiver/blob/master/examples)
+- [Changelog](https://github.com/archiverjs/node-archiver/releases)
+- [Contributing](https://github.com/archiverjs/node-archiver/blob/master/CONTRIBUTING.md)
+- [MIT License](https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT)
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/archiver.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/archiver.js b/node_modules/archiver/lib/archiver.js
new file mode 100644
index 0000000..73a80e8
--- /dev/null
+++ b/node_modules/archiver/lib/archiver.js
@@ -0,0 +1,51 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var ArchiverCore = require('./core');
+var formats = {};
+
+var archiver = module.exports = function(format, options) {
+ return archiver.create(format, options);
+};
+
+archiver.create = function(format, options) {
+ if (formats[format]) {
+ var instance = new ArchiverCore(options);
+ instance.setFormat(format);
+ instance.setModule(new formats[format](options));
+
+ return instance;
+ } else {
+ throw new Error('create(' + format + '): format not registered');
+ }
+};
+
+archiver.registerFormat = function(format, module) {
+ if (formats[format]) {
+ throw new Error('register(' + format + '): format already registered');
+ }
+
+ if (typeof module !== 'function') {
+ throw new Error('register(' + format + '): format module invalid');
+ }
+
+ if (typeof module.prototype.append !== 'function' || typeof module.prototype.finalize !== 'function') {
+ throw new Error('register(' + format + '): format module missing methods');
+ }
+
+ formats[format] = module;
+
+ // backwards compat - to be removed in 0.14
+ var compatName = 'create' + format.charAt(0).toUpperCase() + format.slice(1);
+ archiver[compatName] = function(options) {
+ return archiver.create(format, options);
+ };
+};
+
+archiver.registerFormat('zip', require('./plugins/zip'));
+archiver.registerFormat('tar', require('./plugins/tar'));
+archiver.registerFormat('json', require('./plugins/json'));
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/core.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/core.js b/node_modules/archiver/lib/core.js
new file mode 100644
index 0000000..d5103f3
--- /dev/null
+++ b/node_modules/archiver/lib/core.js
@@ -0,0 +1,488 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var fs = require('fs');
+var inherits = require('util').inherits;
+var Transform = require('readable-stream').Transform;
+
+var async = require('async');
+
+var util = require('./util');
+
+var Archiver = module.exports = function(options) {
+ if (!(this instanceof Archiver)) {
+ return new Archiver(options);
+ }
+
+ options = this.options = util.defaults(options, {
+ highWaterMark: 1024 * 1024,
+ statConcurrency: 4
+ });
+
+ Transform.call(this, options);
+
+ this._entries = [];
+ this._format = false;
+ this._module = false;
+ this._pending = 0;
+ this._pointer = 0;
+
+ this._queue = async.queue(this._onQueueTask.bind(this), 1);
+ this._queue.drain = this._onQueueDrain.bind(this);
+
+ this._statQueue = async.queue(this._onStatQueueTask.bind(this), options.statConcurrency);
+
+ this._state = {
+ aborted: false,
+ finalize: false,
+ finalizing: false,
+ finalized: false,
+ modulePiped: false
+ };
+};
+
+inherits(Archiver, Transform);
+
+Archiver.prototype._abort = function() {
+ this._state.aborted = true;
+ this._queue.kill();
+ this._statQueue.kill();
+
+ if (this._queue.idle()) {
+ this._shutdown();
+ }
+};
+
+Archiver.prototype._append = function(filepath, data) {
+ data = data || {};
+
+ var task = {
+ source: null,
+ filepath: filepath
+ };
+
+ if (!data.name) {
+ data.name = filepath;
+ }
+
+ data.sourcePath = filepath;
+ task.data = data;
+
+ if (data.stats && data.stats instanceof fs.Stats) {
+ task = this._updateQueueTaskWithStats(task, data.stats);
+ this._queue.push(task);
+ } else {
+ this._statQueue.push(task);
+ }
+};
+
+Archiver.prototype._finalize = function() {
+ if (this._state.finalizing || this._state.finalized || this._state.aborted) {
+ return;
+ }
+
+ this._state.finalizing = true;
+
+ this._moduleFinalize();
+
+ this._state.finalizing = false;
+ this._state.finalized = true;
+};
+
+Archiver.prototype._maybeFinalize = function() {
+ if (this._state.finalizing || this._state.finalized || this._state.aborted) {
+ return false;
+ }
+
+ if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
+ this._finalize();
+ return true;
+ }
+
+ return false;
+};
+
+Archiver.prototype._moduleAppend = function(source, data, callback) {
+ if (this._state.aborted) {
+ callback();
+ return;
+ }
+
+ this._module.append(source, data, function(err) {
+ this._task = null;
+
+ if (this._state.aborted) {
+ this._shutdown();
+ return;
+ }
+
+ if (err) {
+ this.emit('error', err);
+ setImmediate(callback);
+ return;
+ }
+
+ this.emit('entry', data);
+ this._entries.push(data);
+
+ setImmediate(callback);
+ }.bind(this));
+};
+
+Archiver.prototype._moduleFinalize = function() {
+ if (typeof this._module.finalize === 'function') {
+ this._module.finalize();
+ } else if (typeof this._module.end === 'function') {
+ this._module.end();
+ } else {
+ this.emit('error', new Error('module: no suitable finalize/end method found'));
+ return;
+ }
+};
+
+Archiver.prototype._modulePipe = function() {
+ this._module.on('error', this._onModuleError.bind(this));
+ this._module.pipe(this);
+ this._state.modulePiped = true;
+};
+
+Archiver.prototype._moduleSupports = function(key) {
+ if (!this._module.supports || !this._module.supports[key]) {
+ return false;
+ }
+
+ return this._module.supports[key];
+};
+
+Archiver.prototype._moduleUnpipe = function() {
+ this._module.unpipe(this);
+ this._state.modulePiped = false;
+};
+
+Archiver.prototype._normalizeEntryData = function(data, stats) {
+ data = util.defaults(data, {
+ type: 'file',
+ name: null,
+ date: null,
+ mode: null,
+ sourcePath: null,
+ stats: false
+ });
+
+ if (stats && data.stats === false) {
+ data.stats = stats;
+ }
+
+ var isDir = data.type === 'directory';
+
+ if (data.name) {
+ data.name = util.sanitizePath(data.name);
+
+ if (data.name.slice(-1) === '/') {
+ isDir = true;
+ data.type = 'directory';
+ } else if (isDir) {
+ data.name += '/';
+ }
+ }
+
+ if (typeof data.mode === 'number') {
+ data.mode &= 0777;
+ } else if (data.stats && data.mode === null) {
+ data.mode = data.stats.mode & 0777;
+ } else if (data.mode === null) {
+ data.mode = isDir ? 0755 : 0644;
+ }
+
+ if (data.stats && data.date === null) {
+ data.date = data.stats.mtime;
+ } else {
+ data.date = util.dateify(data.date);
+ }
+
+ return data;
+};
+
+Archiver.prototype._onModuleError = function(err) {
+ this.emit('error', err);
+};
+
+Archiver.prototype._onQueueDrain = function() {
+ if (this._state.finalizing || this._state.finalized || this._state.aborted) {
+ return;
+ }
+
+ if (this._state.finalize && this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
+ this._finalize();
+ return;
+ }
+};
+
+Archiver.prototype._onQueueTask = function(task, callback) {
+ if (this._state.finalizing || this._state.finalized || this._state.aborted) {
+ callback();
+ return;
+ }
+
+ this._task = task;
+ this._moduleAppend(task.source, task.data, callback);
+};
+
+Archiver.prototype._onStatQueueTask = function(task, callback) {
+ if (this._state.finalizing || this._state.finalized || this._state.aborted) {
+ callback();
+ return;
+ }
+
+ fs.stat(task.filepath, function(err, stats) {
+ if (this._state.aborted) {
+ setImmediate(callback);
+ return;
+ }
+
+ if (err) {
+ this.emit('error', err);
+ setImmediate(callback);
+ return;
+ }
+
+ task = this._updateQueueTaskWithStats(task, stats);
+
+ if (task.source !== null) {
+ this._queue.push(task);
+ setImmediate(callback);
+ } else {
+ this.emit('error', new Error('unsupported entry: ' + task.filepath));
+ setImmediate(callback);
+ return;
+ }
+ }.bind(this));
+};
+
+Archiver.prototype._shutdown = function() {
+ this._moduleUnpipe();
+ this.end();
+};
+
+Archiver.prototype._transform = function(chunk, encoding, callback) {
+ if (chunk) {
+ this._pointer += chunk.length;
+ }
+
+ callback(null, chunk);
+};
+
+Archiver.prototype._updateQueueTaskWithStats = function(task, stats) {
+ if (stats.isFile()) {
+ task.data.type = 'file';
+ task.data.sourceType = 'stream';
+ task.source = util.lazyReadStream(task.filepath);
+ } else if (stats.isDirectory() && this._moduleSupports('directory')) {
+ task.data.name = util.trailingSlashIt(task.data.name);
+ task.data.type = 'directory';
+ task.data.sourcePath = util.trailingSlashIt(task.filepath);
+ task.data.sourceType = 'buffer';
+ task.source = new Buffer(0);
+ } else {
+ return task;
+ }
+
+ task.data = this._normalizeEntryData(task.data, stats);
+ return task;
+};
+
+Archiver.prototype.abort = function() {
+ if (this._state.aborted || this._state.finalized) {
+ return this;
+ }
+
+ this._abort();
+
+ return this;
+};
+
+Archiver.prototype.append = function(source, data) {
+ if (this._state.finalize || this._state.aborted) {
+ this.emit('error', new Error('append: queue closed'));
+ return this;
+ }
+
+ data = this._normalizeEntryData(data);
+
+ if (typeof data.name !== 'string' || data.name.length === 0) {
+ this.emit('error', new Error('append: entry name must be a non-empty string value'));
+ return this;
+ }
+
+ if (data.type === 'directory' && !this._moduleSupports('directory')) {
+ this.emit('error', new Error('append: entries of "directory" type not currently supported by this module'));
+ return this;
+ }
+
+ source = util.normalizeInputSource(source);
+
+ if (Buffer.isBuffer(source)) {
+ data.sourceType = 'buffer';
+ } else if (util.isStream(source)) {
+ data.sourceType = 'stream';
+ } else {
+ this.emit('error', new Error('append: input source must be valid Stream or Buffer instance'));
+ return this;
+ }
+
+ this._queue.push({
+ data: data,
+ source: source
+ });
+
+ return this;
+};
+
+Archiver.prototype.bulk = function(mappings) {
+ if (this._state.finalize || this._state.aborted) {
+ this.emit('error', new Error('bulk: queue closed'));
+ return this;
+ }
+
+ if (!Array.isArray(mappings)) {
+ mappings = [mappings];
+ }
+
+ var self = this;
+ var files = util.file.normalizeFilesArray(mappings);
+
+ files.forEach(function(file){
+ var isExpandedPair = file.orig.expand || false;
+ var fileData = file.data || {};
+
+ file.src.forEach(function(filepath) {
+ var data = util._.extend({}, fileData);
+ data.name = isExpandedPair ? util.unixifyPath(file.dest) : util.unixifyPath(file.dest || '', filepath);
+
+ if (data.name === '.') {
+ return;
+ }
+
+ self._append(filepath, data);
+ });
+ });
+
+ return this;
+};
+
+Archiver.prototype.directory = function(dirpath, destpath, data) {
+ if (this._state.finalize || this._state.aborted) {
+ this.emit('error', new Error('directory: queue closed'));
+ return this;
+ }
+
+ if (typeof dirpath !== 'string' || dirpath.length === 0) {
+ this.emit('error', new Error('directory: dirpath must be a non-empty string value'));
+ return this;
+ }
+
+ this._pending++;
+
+ if (destpath === false) {
+ destpath = '';
+ } else if (typeof destpath !== 'string'){
+ destpath = dirpath;
+ }
+
+ if (typeof data !== 'object') {
+ data = {};
+ }
+
+ var self = this;
+
+ util.walkdir(dirpath, function(err, results) {
+ if (err) {
+ self.emit('error', err);
+ } else {
+ results.forEach(function(file) {
+ var entryData = util._.extend({}, data);
+ entryData.name = util.sanitizePath(destpath, file.relative);
+ entryData.stats = file.stats;
+
+ self._append(file.path, entryData);
+ });
+ }
+
+ self._pending--;
+ self._maybeFinalize();
+ });
+
+ return this;
+};
+
+Archiver.prototype.file = function(filepath, data) {
+ if (this._state.finalize || this._state.aborted) {
+ this.emit('error', new Error('file: queue closed'));
+ return this;
+ }
+
+ if (typeof filepath !== 'string' || filepath.length === 0) {
+ this.emit('error', new Error('file: filepath must be a non-empty string value'));
+ return this;
+ }
+
+ this._append(filepath, data);
+
+ return this;
+};
+
+Archiver.prototype.finalize = function() {
+ if (this._state.aborted) {
+ this.emit('error', new Error('finalize: archive was aborted'));
+ return this;
+ }
+
+ if (this._state.finalize) {
+ this.emit('error', new Error('finalize: archive already finalizing'));
+ return this;
+ }
+
+ this._state.finalize = true;
+
+ if (this._pending === 0 && this._queue.idle() && this._statQueue.idle()) {
+ this._finalize();
+ }
+
+ return this;
+};
+
+Archiver.prototype.setFormat = function(format) {
+ if (this._format) {
+ this.emit('error', new Error('format: archive format already set'));
+ return this;
+ }
+
+ this._format = format;
+
+ return this;
+};
+
+Archiver.prototype.setModule = function(module) {
+ if (this._state.aborted) {
+ this.emit('error', new Error('module: archive was aborted'));
+ return this;
+ }
+
+ if (this._state.module) {
+ this.emit('error', new Error('module: module already set'));
+ return this;
+ }
+
+ this._module = module;
+ this._modulePipe();
+
+ return this;
+};
+
+Archiver.prototype.pointer = function() {
+ return this._pointer;
+};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/plugins/json.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/plugins/json.js b/node_modules/archiver/lib/plugins/json.js
new file mode 100644
index 0000000..4659dc3
--- /dev/null
+++ b/node_modules/archiver/lib/plugins/json.js
@@ -0,0 +1,70 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var inherits = require('util').inherits;
+var Transform = require('readable-stream').Transform;
+
+var crc32 = require('buffer-crc32');
+var util = require('../util');
+
+var Json = module.exports = function(options) {
+ if (!(this instanceof Json)) {
+ return new Json(options);
+ }
+
+ options = this.options = util.defaults(options, {});
+
+ Transform.call(this, options);
+
+ this.supports = {
+ directory: true
+ };
+
+ this.files = [];
+};
+
+inherits(Json, Transform);
+
+Json.prototype._transform = function(chunk, encoding, callback) {
+ callback(null, chunk);
+};
+
+Json.prototype._writeStringified = function() {
+ var fileString = JSON.stringify(this.files);
+ this.write(fileString);
+};
+
+Json.prototype.append = function(source, data, callback) {
+ var self = this;
+
+ data.crc32 = 0;
+
+ function onend(err, sourceBuffer) {
+ if (err) {
+ callback(err);
+ return;
+ }
+
+ data.size = sourceBuffer.length || 0;
+ data.crc32 = crc32.unsigned(sourceBuffer);
+
+ self.files.push(data);
+
+ callback(null, data);
+ }
+
+ if (data.sourceType === 'buffer') {
+ onend(null, source);
+ } else if (data.sourceType === 'stream') {
+ util.collectStream(source, onend);
+ }
+};
+
+Json.prototype.finalize = function() {
+ this._writeStringified();
+ this.end();
+};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/plugins/tar.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/plugins/tar.js b/node_modules/archiver/lib/plugins/tar.js
new file mode 100644
index 0000000..c0816e7
--- /dev/null
+++ b/node_modules/archiver/lib/plugins/tar.js
@@ -0,0 +1,96 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var zlib = require('zlib');
+
+var engine = require('tar-stream');
+var util = require('../util');
+
+var Tar = module.exports = function(options) {
+ if (!(this instanceof Tar)) {
+ return new Tar(options);
+ }
+
+ options = this.options = util.defaults(options, {
+ gzip: false
+ });
+
+ if (typeof options.gzipOptions !== 'object') {
+ options.gzipOptions = {};
+ }
+
+ this.supports = {
+ directory: true
+ };
+
+ this.engine = engine.pack(options);
+ this.compressor = false;
+
+ if (options.gzip) {
+ this.compressor = zlib.createGzip(options.gzipOptions);
+ this.compressor.on('error', this._onCompressorError.bind(this));
+ }
+};
+
+Tar.prototype._onCompressorError = function(err) {
+ this.engine.emit('error', err);
+};
+
+Tar.prototype.append = function(source, data, callback) {
+ var self = this;
+
+ data.mtime = data.date;
+
+ function append(err, sourceBuffer) {
+ if (err) {
+ callback(err);
+ return;
+ }
+
+ self.engine.entry(data, sourceBuffer, function(err) {
+ callback(err, data);
+ });
+ }
+
+ if (data.sourceType === 'buffer') {
+ append(null, source);
+ } else if (data.sourceType === 'stream' && data._stats) {
+ data.size = data._stats.size;
+
+ var entry = self.engine.entry(data, function(err) {
+ callback(err, data);
+ });
+
+ source.pipe(entry);
+ } else if (data.sourceType === 'stream') {
+ util.collectStream(source, append);
+ }
+};
+
+Tar.prototype.finalize = function() {
+ this.engine.finalize();
+};
+
+Tar.prototype.on = function() {
+ return this.engine.on.apply(this.engine, arguments);
+};
+
+Tar.prototype.pipe = function(destination, options) {
+ if (this.compressor) {
+ return this.engine.pipe.apply(this.engine, [this.compressor]).pipe(destination, options);
+ } else {
+ return this.engine.pipe.apply(this.engine, arguments);
+ }
+};
+
+Tar.prototype.unpipe = function() {
+ if (this.compressor) {
+ return this.compressor.unpipe.apply(this.compressor, arguments);
+ } else {
+ return this.engine.unpipe.apply(this.engine, arguments);
+ }
+};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/plugins/zip.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/plugins/zip.js b/node_modules/archiver/lib/plugins/zip.js
new file mode 100644
index 0000000..c46ffc4
--- /dev/null
+++ b/node_modules/archiver/lib/plugins/zip.js
@@ -0,0 +1,47 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var engine = require('zip-stream');
+var util = require('../util');
+
+var Zip = module.exports = function(options) {
+ if (!(this instanceof Zip)) {
+ return new Zip(options);
+ }
+
+ options = this.options = util.defaults(options, {
+ comment: '',
+ forceUTC: false,
+ store: false
+ });
+
+ this.supports = {
+ directory: true
+ };
+
+ this.engine = new engine(options);
+};
+
+Zip.prototype.append = function(source, data, callback) {
+ this.engine.entry(source, data, callback);
+};
+
+Zip.prototype.finalize = function() {
+ this.engine.finalize();
+};
+
+Zip.prototype.on = function() {
+ return this.engine.on.apply(this.engine, arguments);
+};
+
+Zip.prototype.pipe = function() {
+ return this.engine.pipe.apply(this.engine, arguments);
+};
+
+Zip.prototype.unpipe = function() {
+ return this.engine.unpipe.apply(this.engine, arguments);
+};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/util/file.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/util/file.js b/node_modules/archiver/lib/util/file.js
new file mode 100644
index 0000000..4d671de
--- /dev/null
+++ b/node_modules/archiver/lib/util/file.js
@@ -0,0 +1,206 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var fs = require('fs');
+var path = require('path');
+
+var _ = require('lodash');
+var glob = require('glob');
+
+var file = module.exports = {};
+
+var pathSeparatorRe = /[\/\\]/g;
+
+// Process specified wildcard glob patterns or filenames against a
+// callback, excluding and uniquing files in the result set.
+var processPatterns = function(patterns, fn) {
+ // Filepaths to return.
+ var result = [];
+ // Iterate over flattened patterns array.
+ _.flatten(patterns).forEach(function(pattern) {
+ // If the first character is ! it should be omitted
+ var exclusion = pattern.indexOf('!') === 0;
+ // If the pattern is an exclusion, remove the !
+ if (exclusion) { pattern = pattern.slice(1); }
+ // Find all matching files for this pattern.
+ var matches = fn(pattern);
+ if (exclusion) {
+ // If an exclusion, remove matching files.
+ result = _.difference(result, matches);
+ } else {
+ // Otherwise add matching files.
+ result = _.union(result, matches);
+ }
+ });
+ return result;
+};
+
+// True if the file path exists.
+file.exists = function() {
+ var filepath = path.join.apply(path, arguments);
+ return fs.existsSync(filepath);
+};
+
+// Return an array of all file paths that match the given wildcard patterns.
+file.expand = function() {
+ var args = _.toArray(arguments);
+ // If the first argument is an options object, save those options to pass
+ // into the File.prototype.glob.sync method.
+ var options = _.isPlainObject(args[0]) ? args.shift() : {};
+ // Use the first argument if it's an Array, otherwise convert the arguments
+ // object to an array and use that.
+ var patterns = Array.isArray(args[0]) ? args[0] : args;
+ // Return empty set if there are no patterns or filepaths.
+ if (patterns.length === 0) { return []; }
+ // Return all matching filepaths.
+ var matches = processPatterns(patterns, function(pattern) {
+ // Find all matching files for this pattern.
+ return glob.sync(pattern, options);
+ });
+ // Filter result set?
+ if (options.filter) {
+ matches = matches.filter(function(filepath) {
+ filepath = path.join(options.cwd || '', filepath);
+ try {
+ if (typeof options.filter === 'function') {
+ return options.filter(filepath);
+ } else {
+ // If the file is of the right type and exists, this should work.
+ return fs.statSync(filepath)[options.filter]();
+ }
+ } catch(e) {
+ // Otherwise, it's probably not the right type.
+ return false;
+ }
+ });
+ }
+ return matches;
+};
+
+// Build a multi task "files" object dynamically.
+file.expandMapping = function(patterns, destBase, options) {
+ options = _.defaults({}, options, {
+ rename: function(destBase, destPath) {
+ return path.join(destBase || '', destPath);
+ }
+ });
+ var files = [];
+ var fileByDest = {};
+ // Find all files matching pattern, using passed-in options.
+ file.expand(options, patterns).forEach(function(src) {
+ var destPath = src;
+ // Flatten?
+ if (options.flatten) {
+ destPath = path.basename(destPath);
+ }
+ // Change the extension?
+ if (options.ext) {
+ destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext);
+ }
+ // Generate destination filename.
+ var dest = options.rename(destBase, destPath, options);
+ // Prepend cwd to src path if necessary.
+ if (options.cwd) { src = path.join(options.cwd, src); }
+ // Normalize filepaths to be unix-style.
+ dest = dest.replace(pathSeparatorRe, '/');
+ src = src.replace(pathSeparatorRe, '/');
+ // Map correct src path to dest path.
+ if (fileByDest[dest]) {
+ // If dest already exists, push this src onto that dest's src array.
+ fileByDest[dest].src.push(src);
+ } else {
+ // Otherwise create a new src-dest file mapping object.
+ files.push({
+ src: [src],
+ dest: dest,
+ });
+ // And store a reference for later use.
+ fileByDest[dest] = files[files.length - 1];
+ }
+ });
+ return files;
+};
+
+// reusing bits of grunt's multi-task source normalization
+file.normalizeFilesArray = function(data) {
+ var files = [];
+
+ data.forEach(function(obj) {
+ var prop;
+ if ('src' in obj || 'dest' in obj) {
+ files.push(obj);
+ }
+ });
+
+ if (files.length === 0) {
+ return [];
+ }
+
+ files = _(files).chain().forEach(function(obj) {
+ if (!('src' in obj) || !obj.src) { return; }
+ // Normalize .src properties to flattened array.
+ if (Array.isArray(obj.src)) {
+ obj.src = _.flatten(obj.src);
+ } else {
+ obj.src = [obj.src];
+ }
+ }).map(function(obj) {
+ // Build options object, removing unwanted properties.
+ var expandOptions = _.extend({}, obj);
+ delete expandOptions.src;
+ delete expandOptions.dest;
+
+ // Expand file mappings.
+ if (obj.expand) {
+ return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) {
+ // Copy obj properties to result.
+ var result = _.extend({}, obj);
+ // Make a clone of the orig obj available.
+ result.orig = _.extend({}, obj);
+ // Set .src and .dest, processing both as templates.
+ result.src = mapObj.src;
+ result.dest = mapObj.dest;
+ // Remove unwanted properties.
+ ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) {
+ delete result[prop];
+ });
+ return result;
+ });
+ }
+
+ // Copy obj properties to result, adding an .orig property.
+ var result = _.extend({}, obj);
+ // Make a clone of the orig obj available.
+ result.orig = _.extend({}, obj);
+
+ if ('src' in result) {
+ // Expose an expand-on-demand getter method as .src.
+ Object.defineProperty(result, 'src', {
+ enumerable: true,
+ get: function fn() {
+ var src;
+ if (!('result' in fn)) {
+ src = obj.src;
+ // If src is an array, flatten it. Otherwise, make it into an array.
+ src = Array.isArray(src) ? _.flatten(src) : [src];
+ // Expand src files, memoizing result.
+ fn.result = file.expand(expandOptions, src);
+ }
+ return fn.result;
+ }
+ });
+ }
+
+ if ('dest' in result) {
+ result.dest = obj.dest;
+ }
+
+ return result;
+ }).flatten().value();
+
+ return files;
+};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/lib/util/index.js
----------------------------------------------------------------------
diff --git a/node_modules/archiver/lib/util/index.js b/node_modules/archiver/lib/util/index.js
new file mode 100644
index 0000000..132b406
--- /dev/null
+++ b/node_modules/archiver/lib/util/index.js
@@ -0,0 +1,149 @@
+/**
+ * node-archiver
+ *
+ * Copyright (c) 2012-2014 Chris Talkington, contributors.
+ * Licensed under the MIT license.
+ * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT
+ */
+var fs = require('fs');
+var path = require('path');
+
+var Stream = require('stream').Stream;
+var PassThrough = require('readable-stream').PassThrough;
+
+var util = module.exports = {};
+
+util._ = require('lodash');
+util.lazystream = require('lazystream');
+util.file = require('./file');
+
/**
 * Buffers an entire readable stream into memory and hands the concatenated
 * result to `callback(null, buffer)`. Stream errors are forwarded to
 * `callback` as the first argument.
 *
 * NOTE(review): if the stream emits 'error' and later still emits 'end', the
 * callback can fire twice — that matches the original behavior, preserved
 * here; confirm callers tolerate it.
 *
 * @param {Stream} source - Readable stream to drain.
 * @param {Function} callback - callback(err, buffer).
 */
util.collectStream = function(source, callback) {
  var collection = [];
  var size = 0;

  source.on('error', callback);

  source.on('data', function(chunk) {
    collection.push(chunk);
    size += chunk.length;
  });

  source.on('end', function() {
    // Buffer.concat (available since Node 0.8) replaces the previous manual
    // copy into `new Buffer(size, 'utf8')`: the encoding argument was
    // meaningless for a numeric size, and the Buffer constructor is
    // deprecated (returns uninitialized memory).
    callback(null, Buffer.concat(collection, size));
  });
};
+
/**
 * Coerces a date-ish value into a real Date instance.
 *
 * @param {Date|String|*} dateish - A Date (returned as-is), a date string
 *   (parsed via the Date constructor), or anything else / falsy (replaced
 *   by the current time).
 * @return {Date} A Date instance.
 */
util.dateify = function(dateish) {
  // Falsy input (undefined, null, '') means "use the current time".
  if (!dateish) {
    return new Date();
  }

  if (dateish instanceof Date) {
    return dateish;
  }

  if (typeof dateish === 'string') {
    return new Date(dateish);
  }

  // Any other truthy value (numbers, plain objects, ...) also falls back
  // to "now".
  return new Date();
};
+
/**
 * Thin wrapper over lodash's `defaults` that differs in one way: a missing
 * or falsy target object is replaced with a fresh `{}` instead of being
 * passed through.
 *
 * @param {Object} object - Target object (falsy values become `{}`).
 * @param {Object} source - Source(s) of default values.
 * @param {*} guard - Unused; kept for lodash callback-signature parity.
 * @return {Object} The filled-in target object.
 */
util.defaults = function(object, source, guard) {
  var args = Array.prototype.slice.call(arguments);
  args[0] = args[0] || {};

  return util._.defaults.apply(util._, args);
};
+
/**
 * Returns true when `source` is a stream of any vintage (streams1 or
 * streams2+), i.e. it inherits from core `Stream`.
 *
 * @param {*} source - Candidate value.
 * @return {Boolean}
 */
util.isStream = function(source) {
  return source instanceof Stream;
};
+
/**
 * Wraps `filepath` in a lazystream Readable so the underlying
 * `fs.createReadStream` is not invoked until the stream is first read
 * (per lazystream's contract — presumably to avoid exhausting file
 * descriptors when many files are queued; confirm against callers).
 *
 * @param {String} filepath - Path forwarded verbatim to fs.createReadStream.
 * @return {util.lazystream.Readable} Lazily-opened readable stream.
 */
util.lazyReadStream = function(filepath) {
  return new util.lazystream.Readable(function() {
    return fs.createReadStream(filepath);
  });
};
+
/**
 * Normalizes a user-supplied entry source into something the archive
 * pipeline can consume: null becomes an empty Buffer, a string becomes a
 * Buffer of its contents, an old-style (streams1) stream is wrapped in a
 * PassThrough, and everything else passes through untouched.
 *
 * @param {null|String|Stream|*} source - Raw input source.
 * @return {Buffer|Stream|*} Normalized source.
 */
util.normalizeInputSource = function(source) {
  if (source === null) {
    return new Buffer(0);
  }

  if (typeof source === 'string') {
    return new Buffer(source);
  }

  if (util.isStream(source) && !source._readableState) {
    // streams1 streams lack _readableState; piping through a PassThrough
    // upgrades them to a streams2+ interface.
    var wrapped = new PassThrough();
    source.pipe(wrapped);

    return wrapped;
  }

  return source;
};
+
/**
 * Joins the given path segments and scrubs the result into a safe,
 * archive-friendly relative path: backslashes become forward slashes,
 * colons (drive letters) are stripped, and any leading '/', './' or '../'
 * run is removed.
 *
 * @param {...String} segments - Path segments, as accepted by path.join.
 * @return {String} Sanitized relative path.
 */
util.sanitizePath = function() {
  var joined = path.join.apply(path, arguments);
  var unixified = joined.replace(/\\/g, '/');
  var noColons = unixified.replace(/:/g, '');

  return noColons.replace(/^(\.\.\/|\.\/|\/)+/, '');
};
+
/**
 * Ensures `str` ends with exactly one appended '/' (no-op when it already
 * ends with one).
 *
 * @param {String} str - Input path/string.
 * @return {String} The string with a trailing slash.
 */
util.trailingSlashIt = function(str) {
  if (str.slice(-1) === '/') {
    return str;
  }

  return str + '/';
};
+
/**
 * Joins the given path segments and converts every backslash to a forward
 * slash, yielding a unix-style path on any platform.
 *
 * @param {...String} segments - Path segments, as accepted by path.join.
 * @return {String} Unix-style joined path.
 */
util.unixifyPath = function() {
  var joined = path.join.apply(path, arguments);

  return joined.split('\\').join('/');
};
+
/**
 * Recursively walks `dirpath` and collects one { path, relative, stats }
 * record per directory entry (directories included), finally invoking
 * `callback(err, results)`. `relative` is computed against `base` with
 * backslashes normalized to forward slashes.
 *
 * @param {String} dirpath - Directory to start walking from.
 * @param {String|Function} base - Base directory for relative paths; may be
 *   omitted, in which case the callback is passed here and `dirpath` itself
 *   is used as the base.
 * @param {Function} [callback] - Completion callback: callback(err, results).
 */
util.walkdir = function(dirpath, base, callback) {
  var results = [];

  // Two-argument form: walkdir(dirpath, callback).
  if (typeof base === 'function') {
    callback = base;
    base = dirpath;
  }

  fs.readdir(dirpath, function(err, list) {
    var i = 0;
    var file;
    var filepath;

    if (err) {
      return callback(err);
    }

    // Entries are processed strictly one at a time; next() advances the walk.
    (function next() {
      file = list[i++];

      if (!file) {
        // List exhausted: report everything gathered at this level and below.
        return callback(null, results);
      }

      filepath = path.join(dirpath, file);

      fs.stat(filepath, function(err, stats) {
        // NOTE(review): a stat error is silently ignored here, so `stats`
        // may be undefined in the pushed record — confirm callers handle it.
        results.push({
          path: filepath,
          relative: path.relative(base, filepath).replace(/\\/g, '/'),
          stats: stats
        });

        if (stats && stats.isDirectory()) {
          // Recurse into the subdirectory and merge its records into ours.
          // NOTE(review): errors from the recursive call are swallowed
          // (err is unused in this callback).
          util.walkdir(filepath, base, function(err, res) {
            results = results.concat(res);
            next();
          });
        } else {
          next();
        }
      });
    })();
  });
};
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/node_modules/async/.travis.yml
----------------------------------------------------------------------
diff --git a/node_modules/archiver/node_modules/async/.travis.yml b/node_modules/archiver/node_modules/async/.travis.yml
new file mode 100644
index 0000000..6e5919d
--- /dev/null
+++ b/node_modules/archiver/node_modules/async/.travis.yml
@@ -0,0 +1,3 @@
+language: node_js
+node_js:
+ - "0.10"
http://git-wip-us.apache.org/repos/asf/cordova-firefoxos/blob/bd21ce3b/node_modules/archiver/node_modules/async/LICENSE
----------------------------------------------------------------------
diff --git a/node_modules/archiver/node_modules/async/LICENSE b/node_modules/archiver/node_modules/async/LICENSE
new file mode 100644
index 0000000..8f29698
--- /dev/null
+++ b/node_modules/archiver/node_modules/async/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2010-2014 Caolan McMahon
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@cordova.apache.org
For additional commands, e-mail: commits-help@cordova.apache.org