TheDesk Akane (16.0.1)
70 node_modules/jszip/lib/base64.js generated vendored Normal file
@@ -0,0 +1,70 @@
'use strict';
// private property
var _keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";


// public method for encoding
exports.encode = function(input, utf8) {
    var output = "";
    var chr1, chr2, chr3, enc1, enc2, enc3, enc4;
    var i = 0;

    while (i < input.length) {

        chr1 = input.charCodeAt(i++);
        chr2 = input.charCodeAt(i++);
        chr3 = input.charCodeAt(i++);

        enc1 = chr1 >> 2;
        enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
        enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
        enc4 = chr3 & 63;

        if (isNaN(chr2)) {
            enc3 = enc4 = 64;
        }
        else if (isNaN(chr3)) {
            enc4 = 64;
        }

        output = output + _keyStr.charAt(enc1) + _keyStr.charAt(enc2) + _keyStr.charAt(enc3) + _keyStr.charAt(enc4);

    }

    return output;
};

// public method for decoding
exports.decode = function(input, utf8) {
    var output = "";
    var chr1, chr2, chr3;
    var enc1, enc2, enc3, enc4;
    var i = 0;

    input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");

    while (i < input.length) {

        enc1 = _keyStr.indexOf(input.charAt(i++));
        enc2 = _keyStr.indexOf(input.charAt(i++));
        enc3 = _keyStr.indexOf(input.charAt(i++));
        enc4 = _keyStr.indexOf(input.charAt(i++));

        chr1 = (enc1 << 2) | (enc2 >> 4);
        chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
        chr3 = ((enc3 & 3) << 6) | enc4;

        output = output + String.fromCharCode(chr1);

        if (enc3 != 64) {
            output = output + String.fromCharCode(chr2);
        }
        if (enc4 != 64) {
            output = output + String.fromCharCode(chr3);
        }

    }

    return output;
};
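Reviewer note: this is jszip's internal base64 codec; it works on binary strings (one char per byte), not arbitrary unicode. A minimal round-trip sketch, assuming the module is required straight from the vendored path (illustrative only):

    var base64 = require('./node_modules/jszip/lib/base64');
    var encoded = base64.encode("PK\x03\x04");   // binary string in, base64 out
    var decoded = base64.decode(encoded);        // back to "PK\x03\x04"
    console.log(encoded, decoded.length);        // "UEsDBA==" 4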
28 node_modules/jszip/lib/compressedObject.js generated vendored Normal file
@@ -0,0 +1,28 @@
'use strict';
function CompressedObject() {
    this.compressedSize = 0;
    this.uncompressedSize = 0;
    this.crc32 = 0;
    this.compressionMethod = null;
    this.compressedContent = null;
}

CompressedObject.prototype = {
    /**
     * Return the decompressed content in an unspecified format.
     * The format will depend on the decompressor.
     * @return {Object} the decompressed content.
     */
    getContent: function() {
        return null; // see implementation
    },
    /**
     * Return the compressed content in an unspecified format.
     * The format will depend on the compressed conten source.
     * @return {Object} the compressed content.
     */
    getCompressedContent: function() {
        return null; // see implementation
    }
};
module.exports = CompressedObject;
13 node_modules/jszip/lib/compressions.js generated vendored Normal file
@@ -0,0 +1,13 @@
'use strict';
exports.STORE = {
    magic: "\x00\x00",
    compress: function(content, compressionOptions) {
        return content; // no compression
    },
    uncompress: function(content) {
        return content; // no compression
    },
    compressInputType: null,
    uncompressInputType: null
};
exports.DEFLATE = require('./flate');
102 node_modules/jszip/lib/crc32.js generated vendored Normal file
@@ -0,0 +1,102 @@
'use strict';

var utils = require('./utils');

var table = [
    0x00000000, 0x77073096, 0xEE0E612C, 0x990951BA,
    0x076DC419, 0x706AF48F, 0xE963A535, 0x9E6495A3,
    0x0EDB8832, 0x79DCB8A4, 0xE0D5E91E, 0x97D2D988,
    0x09B64C2B, 0x7EB17CBD, 0xE7B82D07, 0x90BF1D91,
    0x1DB71064, 0x6AB020F2, 0xF3B97148, 0x84BE41DE,
    0x1ADAD47D, 0x6DDDE4EB, 0xF4D4B551, 0x83D385C7,
    0x136C9856, 0x646BA8C0, 0xFD62F97A, 0x8A65C9EC,
    0x14015C4F, 0x63066CD9, 0xFA0F3D63, 0x8D080DF5,
    0x3B6E20C8, 0x4C69105E, 0xD56041E4, 0xA2677172,
    0x3C03E4D1, 0x4B04D447, 0xD20D85FD, 0xA50AB56B,
    0x35B5A8FA, 0x42B2986C, 0xDBBBC9D6, 0xACBCF940,
    0x32D86CE3, 0x45DF5C75, 0xDCD60DCF, 0xABD13D59,
    0x26D930AC, 0x51DE003A, 0xC8D75180, 0xBFD06116,
    0x21B4F4B5, 0x56B3C423, 0xCFBA9599, 0xB8BDA50F,
    0x2802B89E, 0x5F058808, 0xC60CD9B2, 0xB10BE924,
    0x2F6F7C87, 0x58684C11, 0xC1611DAB, 0xB6662D3D,
    0x76DC4190, 0x01DB7106, 0x98D220BC, 0xEFD5102A,
    0x71B18589, 0x06B6B51F, 0x9FBFE4A5, 0xE8B8D433,
    0x7807C9A2, 0x0F00F934, 0x9609A88E, 0xE10E9818,
    0x7F6A0DBB, 0x086D3D2D, 0x91646C97, 0xE6635C01,
    0x6B6B51F4, 0x1C6C6162, 0x856530D8, 0xF262004E,
    0x6C0695ED, 0x1B01A57B, 0x8208F4C1, 0xF50FC457,
    0x65B0D9C6, 0x12B7E950, 0x8BBEB8EA, 0xFCB9887C,
    0x62DD1DDF, 0x15DA2D49, 0x8CD37CF3, 0xFBD44C65,
    0x4DB26158, 0x3AB551CE, 0xA3BC0074, 0xD4BB30E2,
    0x4ADFA541, 0x3DD895D7, 0xA4D1C46D, 0xD3D6F4FB,
    0x4369E96A, 0x346ED9FC, 0xAD678846, 0xDA60B8D0,
    0x44042D73, 0x33031DE5, 0xAA0A4C5F, 0xDD0D7CC9,
    0x5005713C, 0x270241AA, 0xBE0B1010, 0xC90C2086,
    0x5768B525, 0x206F85B3, 0xB966D409, 0xCE61E49F,
    0x5EDEF90E, 0x29D9C998, 0xB0D09822, 0xC7D7A8B4,
    0x59B33D17, 0x2EB40D81, 0xB7BD5C3B, 0xC0BA6CAD,
    0xEDB88320, 0x9ABFB3B6, 0x03B6E20C, 0x74B1D29A,
    0xEAD54739, 0x9DD277AF, 0x04DB2615, 0x73DC1683,
    0xE3630B12, 0x94643B84, 0x0D6D6A3E, 0x7A6A5AA8,
    0xE40ECF0B, 0x9309FF9D, 0x0A00AE27, 0x7D079EB1,
    0xF00F9344, 0x8708A3D2, 0x1E01F268, 0x6906C2FE,
    0xF762575D, 0x806567CB, 0x196C3671, 0x6E6B06E7,
    0xFED41B76, 0x89D32BE0, 0x10DA7A5A, 0x67DD4ACC,
    0xF9B9DF6F, 0x8EBEEFF9, 0x17B7BE43, 0x60B08ED5,
    0xD6D6A3E8, 0xA1D1937E, 0x38D8C2C4, 0x4FDFF252,
    0xD1BB67F1, 0xA6BC5767, 0x3FB506DD, 0x48B2364B,
    0xD80D2BDA, 0xAF0A1B4C, 0x36034AF6, 0x41047A60,
    0xDF60EFC3, 0xA867DF55, 0x316E8EEF, 0x4669BE79,
    0xCB61B38C, 0xBC66831A, 0x256FD2A0, 0x5268E236,
    0xCC0C7795, 0xBB0B4703, 0x220216B9, 0x5505262F,
    0xC5BA3BBE, 0xB2BD0B28, 0x2BB45A92, 0x5CB36A04,
    0xC2D7FFA7, 0xB5D0CF31, 0x2CD99E8B, 0x5BDEAE1D,
    0x9B64C2B0, 0xEC63F226, 0x756AA39C, 0x026D930A,
    0x9C0906A9, 0xEB0E363F, 0x72076785, 0x05005713,
    0x95BF4A82, 0xE2B87A14, 0x7BB12BAE, 0x0CB61B38,
    0x92D28E9B, 0xE5D5BE0D, 0x7CDCEFB7, 0x0BDBDF21,
    0x86D3D2D4, 0xF1D4E242, 0x68DDB3F8, 0x1FDA836E,
    0x81BE16CD, 0xF6B9265B, 0x6FB077E1, 0x18B74777,
    0x88085AE6, 0xFF0F6A70, 0x66063BCA, 0x11010B5C,
    0x8F659EFF, 0xF862AE69, 0x616BFFD3, 0x166CCF45,
    0xA00AE278, 0xD70DD2EE, 0x4E048354, 0x3903B3C2,
    0xA7672661, 0xD06016F7, 0x4969474D, 0x3E6E77DB,
    0xAED16A4A, 0xD9D65ADC, 0x40DF0B66, 0x37D83BF0,
    0xA9BCAE53, 0xDEBB9EC5, 0x47B2CF7F, 0x30B5FFE9,
    0xBDBDF21C, 0xCABAC28A, 0x53B39330, 0x24B4A3A6,
    0xBAD03605, 0xCDD70693, 0x54DE5729, 0x23D967BF,
    0xB3667A2E, 0xC4614AB8, 0x5D681B02, 0x2A6F2B94,
    0xB40BBE37, 0xC30C8EA1, 0x5A05DF1B, 0x2D02EF8D
];

/**
 *
 * Javascript crc32
 * http://www.webtoolkit.info/
 *
 */
module.exports = function crc32(input, crc) {
    if (typeof input === "undefined" || !input.length) {
        return 0;
    }

    var isArray = utils.getTypeOf(input) !== "string";

    if (typeof(crc) == "undefined") {
        crc = 0;
    }
    var x = 0;
    var y = 0;
    var b = 0;

    crc = crc ^ (-1);
    for (var i = 0, iTop = input.length; i < iTop; i++) {
        b = isArray ? input[i] : input.charCodeAt(i);
        y = (crc ^ b) & 0xFF;
        x = table[y];
        crc = (crc >>> 8) ^ x;
    }

    return crc ^ (-1);
};
// vim: set shiftwidth=4 softtabstop=4:
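Reviewer note: this is the standard CRC-32 used by the zip format, accepting either a binary string or a byte array. A minimal sketch, with the vendored path used purely for illustration; the function returns a signed 32-bit integer, so mask with >>> 0 for the usual unsigned value:

    var crc32 = require('./node_modules/jszip/lib/crc32');
    var crc = crc32("hello") >>> 0;
    console.log(crc.toString(16));   // expected: 3610a686 (the standard CRC-32 of "hello")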
107 node_modules/jszip/lib/dataReader.js generated vendored Normal file
@@ -0,0 +1,107 @@
'use strict';
var utils = require('./utils');

function DataReader(data) {
    this.data = null; // type : see implementation
    this.length = 0;
    this.index = 0;
}
DataReader.prototype = {
    /**
     * Check that the offset will not go too far.
     * @param {string} offset the additional offset to check.
     * @throws {Error} an Error if the offset is out of bounds.
     */
    checkOffset: function(offset) {
        this.checkIndex(this.index + offset);
    },
    /**
     * Check that the specifed index will not be too far.
     * @param {string} newIndex the index to check.
     * @throws {Error} an Error if the index is out of bounds.
     */
    checkIndex: function(newIndex) {
        if (this.length < newIndex || newIndex < 0) {
            throw new Error("End of data reached (data length = " + this.length + ", asked index = " + (newIndex) + "). Corrupted zip ?");
        }
    },
    /**
     * Change the index.
     * @param {number} newIndex The new index.
     * @throws {Error} if the new index is out of the data.
     */
    setIndex: function(newIndex) {
        this.checkIndex(newIndex);
        this.index = newIndex;
    },
    /**
     * Skip the next n bytes.
     * @param {number} n the number of bytes to skip.
     * @throws {Error} if the new index is out of the data.
     */
    skip: function(n) {
        this.setIndex(this.index + n);
    },
    /**
     * Get the byte at the specified index.
     * @param {number} i the index to use.
     * @return {number} a byte.
     */
    byteAt: function(i) {
        // see implementations
    },
    /**
     * Get the next number with a given byte size.
     * @param {number} size the number of bytes to read.
     * @return {number} the corresponding number.
     */
    readInt: function(size) {
        var result = 0,
            i;
        this.checkOffset(size);
        for (i = this.index + size - 1; i >= this.index; i--) {
            result = (result << 8) + this.byteAt(i);
        }
        this.index += size;
        return result;
    },
    /**
     * Get the next string with a given byte size.
     * @param {number} size the number of bytes to read.
     * @return {string} the corresponding string.
     */
    readString: function(size) {
        return utils.transformTo("string", this.readData(size));
    },
    /**
     * Get raw data without conversion, <size> bytes.
     * @param {number} size the number of bytes to read.
     * @return {Object} the raw data, implementation specific.
     */
    readData: function(size) {
        // see implementations
    },
    /**
     * Find the last occurence of a zip signature (4 bytes).
     * @param {string} sig the signature to find.
     * @return {number} the index of the last occurence, -1 if not found.
     */
    lastIndexOfSignature: function(sig) {
        // see implementations
    },
    /**
     * Get the next date.
     * @return {Date} the date.
     */
    readDate: function() {
        var dostime = this.readInt(4);
        return new Date(
            ((dostime >> 25) & 0x7f) + 1980, // year
            ((dostime >> 21) & 0x0f) - 1, // month
            (dostime >> 16) & 0x1f, // day
            (dostime >> 11) & 0x1f, // hour
            (dostime >> 5) & 0x3f, // minute
            (dostime & 0x1f) << 1); // second
    }
};
module.exports = DataReader;
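Reviewer note: readDate unpacks the 4-byte MS-DOS date/time stored in zip headers (readInt assembles it little-endian first). As a worked check of the bit shifts above, with the illustrative value dostime = 0x4A2A8C40:

    year   = ((0x4A2A8C40 >> 25) & 0x7f) + 1980 = 37 + 1980 = 2017
    month  = ((0x4A2A8C40 >> 21) & 0x0f) - 1    = 0          (zero-based for Date, i.e. January)
    day    = (0x4A2A8C40 >> 16) & 0x1f          = 10
    hour   = (0x4A2A8C40 >> 11) & 0x1f          = 17
    minute = (0x4A2A8C40 >> 5) & 0x3f           = 34
    second = (0x4A2A8C40 & 0x1f) << 1           = 0

so the reader returns new Date(2017, 0, 10, 17, 34, 0), with the usual two-second DOS resolution.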
11 node_modules/jszip/lib/defaults.js generated vendored Normal file
@@ -0,0 +1,11 @@
'use strict';
exports.base64 = false;
exports.binary = false;
exports.dir = false;
exports.createFolders = false;
exports.date = null;
exports.compression = null;
exports.compressionOptions = null;
exports.comment = null;
exports.unixPermissions = null;
exports.dosPermissions = null;
105 node_modules/jszip/lib/deprecatedPublicUtils.js generated vendored Normal file
@@ -0,0 +1,105 @@
'use strict';
var utils = require('./utils');

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.string2binary = function(str) {
    return utils.string2binary(str);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.string2Uint8Array = function(str) {
    return utils.transformTo("uint8array", str);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.uint8Array2String = function(array) {
    return utils.transformTo("string", array);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.string2Blob = function(str) {
    var buffer = utils.transformTo("arraybuffer", str);
    return utils.arrayBuffer2Blob(buffer);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.arrayBuffer2Blob = function(buffer) {
    return utils.arrayBuffer2Blob(buffer);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.transformTo = function(outputType, input) {
    return utils.transformTo(outputType, input);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.getTypeOf = function(input) {
    return utils.getTypeOf(input);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.checkSupport = function(type) {
    return utils.checkSupport(type);
};

/**
 * @deprecated
 * This value will be removed in a future version without replacement.
 */
exports.MAX_VALUE_16BITS = utils.MAX_VALUE_16BITS;

/**
 * @deprecated
 * This value will be removed in a future version without replacement.
 */
exports.MAX_VALUE_32BITS = utils.MAX_VALUE_32BITS;


/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.pretty = function(str) {
    return utils.pretty(str);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.findCompression = function(compressionMethod) {
    return utils.findCompression(compressionMethod);
};

/**
 * @deprecated
 * This function will be removed in a future version without replacement.
 */
exports.isRegExp = function (object) {
    return utils.isRegExp(object);
};
16 node_modules/jszip/lib/flate.js generated vendored Normal file
@@ -0,0 +1,16 @@
'use strict';
var USE_TYPEDARRAY = (typeof Uint8Array !== 'undefined') && (typeof Uint16Array !== 'undefined') && (typeof Uint32Array !== 'undefined');

var pako = require("pako");
exports.uncompressInputType = USE_TYPEDARRAY ? "uint8array" : "array";
exports.compressInputType = USE_TYPEDARRAY ? "uint8array" : "array";

exports.magic = "\x08\x00";
exports.compress = function(input, compressionOptions) {
    return pako.deflateRaw(input, {
        level : compressionOptions.level || -1 // default compression
    });
};
exports.uncompress = function(input) {
    return pako.inflateRaw(input);
};
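Reviewer note: a minimal round-trip sketch for this wrapper (the vendored require path is illustrative only). pako's deflateRaw/inflateRaw produce and consume the raw DEFLATE stream, matching the "\x08\x00" zip method written above:

    var flate = require('./node_modules/jszip/lib/flate');
    var packed = flate.compress(new Uint8Array([1, 2, 3]), { level: 6 });
    console.log(flate.uncompress(packed));   // Uint8Array [1, 2, 3]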
79 node_modules/jszip/lib/index.js generated vendored Normal file
@@ -0,0 +1,79 @@
'use strict';

var base64 = require('./base64');

/**
Usage:
   zip = new JSZip();
   zip.file("hello.txt", "Hello, World!").file("tempfile", "nothing");
   zip.folder("images").file("smile.gif", base64Data, {base64: true});
   zip.file("Xmas.txt", "Ho ho ho !", {date : new Date("December 25, 2007 00:00:01")});
   zip.remove("tempfile");

   base64zip = zip.generate();

**/

/**
 * Representation a of zip file in js
 * @constructor
 * @param {String=|ArrayBuffer=|Uint8Array=} data the data to load, if any (optional).
 * @param {Object=} options the options for creating this objects (optional).
 */
function JSZip(data, options) {
    // if this constructor is used without `new`, it adds `new` before itself:
    if(!(this instanceof JSZip)) return new JSZip(data, options);

    // object containing the files :
    // {
    //   "folder/" : {...},
    //   "folder/data.txt" : {...}
    // }
    this.files = {};

    this.comment = null;

    // Where we are in the hierarchy
    this.root = "";
    if (data) {
        this.load(data, options);
    }
    this.clone = function() {
        var newObj = new JSZip();
        for (var i in this) {
            if (typeof this[i] !== "function") {
                newObj[i] = this[i];
            }
        }
        return newObj;
    };
}
JSZip.prototype = require('./object');
JSZip.prototype.load = require('./load');
JSZip.support = require('./support');
JSZip.defaults = require('./defaults');

/**
 * @deprecated
 * This namespace will be removed in a future version without replacement.
 */
JSZip.utils = require('./deprecatedPublicUtils');

JSZip.base64 = {
    /**
     * @deprecated
     * This method will be removed in a future version without replacement.
     */
    encode : function(input) {
        return base64.encode(input);
    },
    /**
     * @deprecated
     * This method will be removed in a future version without replacement.
     */
    decode : function(input) {
        return base64.decode(input);
    }
};
JSZip.compressions = require('./compressions');
module.exports = JSZip;
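Reviewer note: the usage comment in the file covers creation; a minimal end-to-end sketch for this vendored 2.x API, where generate() is synchronous (unlike the promise-based 3.x API). File names and contents here are illustrative:

    var JSZip = require('jszip');              // resolves to this vendored copy
    var zip = new JSZip();
    zip.file("hello.txt", "Hello, World!");
    zip.folder("images");                      // returns a JSZip rooted at "images/"
    var buffer = zip.generate({ type: "nodebuffer" });   // Buffer ready for fs.writeFile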
11 node_modules/jszip/lib/license_header.js generated vendored Normal file
@@ -0,0 +1,11 @@
/*!

JSZip - A Javascript class for generating and reading zip files
<http://stuartk.com/jszip>

(c) 2009-2014 Stuart Knightley <stuart [at] stuartk.com>
Dual licenced under the MIT license or GPLv3. See https://raw.github.com/Stuk/jszip/master/LICENSE.markdown.

JSZip uses the library pako released under the MIT license :
https://github.com/nodeca/pako/blob/master/LICENSE
*/
31 node_modules/jszip/lib/load.js generated vendored Normal file
@@ -0,0 +1,31 @@
'use strict';
var base64 = require('./base64');
var ZipEntries = require('./zipEntries');
module.exports = function(data, options) {
    var files, zipEntries, i, input;
    options = options || {};
    if (options.base64) {
        data = base64.decode(data);
    }

    zipEntries = new ZipEntries(data, options);
    files = zipEntries.files;
    for (i = 0; i < files.length; i++) {
        input = files[i];
        this.file(input.fileName, input.decompressed, {
            binary: true,
            optimizedBinaryString: true,
            date: input.date,
            dir: input.dir,
            comment : input.fileComment.length ? input.fileComment : null,
            unixPermissions : input.unixPermissions,
            dosPermissions : input.dosPermissions,
            createFolders: options.createFolders
        });
    }
    if (zipEntries.zipComment.length) {
        this.comment = zipEntries.zipComment;
    }

    return this;
};
7 node_modules/jszip/lib/nodeBuffer.js generated vendored Normal file
@@ -0,0 +1,7 @@
'use strict';
module.exports = function(data, encoding){
    return new Buffer(data, encoding);
};
module.exports.test = function(b){
    return Buffer.isBuffer(b);
};
20 node_modules/jszip/lib/nodeBufferReader.js generated vendored Normal file
@@ -0,0 +1,20 @@
'use strict';
var Uint8ArrayReader = require('./uint8ArrayReader');

function NodeBufferReader(data) {
    this.data = data;
    this.length = this.data.length;
    this.index = 0;
}
NodeBufferReader.prototype = new Uint8ArrayReader();

/**
 * @see DataReader.readData
 */
NodeBufferReader.prototype.readData = function(size) {
    this.checkOffset(size);
    var result = this.data.slice(this.index, this.index + size);
    this.index += size;
    return result;
};
module.exports = NodeBufferReader;
883 node_modules/jszip/lib/object.js generated vendored Normal file
@@ -0,0 +1,883 @@
'use strict';
var support = require('./support');
var utils = require('./utils');
var crc32 = require('./crc32');
var signature = require('./signature');
var defaults = require('./defaults');
var base64 = require('./base64');
var compressions = require('./compressions');
var CompressedObject = require('./compressedObject');
var nodeBuffer = require('./nodeBuffer');
var utf8 = require('./utf8');
var StringWriter = require('./stringWriter');
var Uint8ArrayWriter = require('./uint8ArrayWriter');

/**
 * Returns the raw data of a ZipObject, decompress the content if necessary.
 * @param {ZipObject} file the file to use.
 * @return {String|ArrayBuffer|Uint8Array|Buffer} the data.
 */
var getRawData = function(file) {
    if (file._data instanceof CompressedObject) {
        file._data = file._data.getContent();
        file.options.binary = true;
        file.options.base64 = false;

        if (utils.getTypeOf(file._data) === "uint8array") {
            var copy = file._data;
            // when reading an arraybuffer, the CompressedObject mechanism will keep it and subarray() a Uint8Array.
            // if we request a file in the same format, we might get the same Uint8Array or its ArrayBuffer (the original zip file).
            file._data = new Uint8Array(copy.length);
            // with an empty Uint8Array, Opera fails with a "Offset larger than array size"
            if (copy.length !== 0) {
                file._data.set(copy, 0);
            }
        }
    }
    return file._data;
};

/**
 * Returns the data of a ZipObject in a binary form. If the content is an unicode string, encode it.
 * @param {ZipObject} file the file to use.
 * @return {String|ArrayBuffer|Uint8Array|Buffer} the data.
 */
var getBinaryData = function(file) {
    var result = getRawData(file),
        type = utils.getTypeOf(result);
    if (type === "string") {
        if (!file.options.binary) {
            // unicode text !
            // unicode string => binary string is a painful process, check if we can avoid it.
            if (support.nodebuffer) {
                return nodeBuffer(result, "utf-8");
            }
        }
        return file.asBinary();
    }
    return result;
};

/**
 * Transform this._data into a string.
 * @param {function} filter a function String -> String, applied if not null on the result.
 * @return {String} the string representing this._data.
 */
var dataToString = function(asUTF8) {
    var result = getRawData(this);
    if (result === null || typeof result === "undefined") {
        return "";
    }
    // if the data is a base64 string, we decode it before checking the encoding !
    if (this.options.base64) {
        result = base64.decode(result);
    }
    if (asUTF8 && this.options.binary) {
        // JSZip.prototype.utf8decode supports arrays as input
        // skip to array => string step, utf8decode will do it.
        result = out.utf8decode(result);
    }
    else {
        // no utf8 transformation, do the array => string step.
        result = utils.transformTo("string", result);
    }

    if (!asUTF8 && !this.options.binary) {
        result = utils.transformTo("string", out.utf8encode(result));
    }
    return result;
};
/**
 * A simple object representing a file in the zip file.
 * @constructor
 * @param {string} name the name of the file
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data
 * @param {Object} options the options of the file
 */
var ZipObject = function(name, data, options) {
    this.name = name;
    this.dir = options.dir;
    this.date = options.date;
    this.comment = options.comment;
    this.unixPermissions = options.unixPermissions;
    this.dosPermissions = options.dosPermissions;

    this._data = data;
    this.options = options;

    /*
     * This object contains initial values for dir and date.
     * With them, we can check if the user changed the deprecated metadata in
     * `ZipObject#options` or not.
     */
    this._initialMetadata = {
        dir : options.dir,
        date : options.date
    };
};

ZipObject.prototype = {
    /**
     * Return the content as UTF8 string.
     * @return {string} the UTF8 string.
     */
    asText: function() {
        return dataToString.call(this, true);
    },
    /**
     * Returns the binary content.
     * @return {string} the content as binary.
     */
    asBinary: function() {
        return dataToString.call(this, false);
    },
    /**
     * Returns the content as a nodejs Buffer.
     * @return {Buffer} the content as a Buffer.
     */
    asNodeBuffer: function() {
        var result = getBinaryData(this);
        return utils.transformTo("nodebuffer", result);
    },
    /**
     * Returns the content as an Uint8Array.
     * @return {Uint8Array} the content as an Uint8Array.
     */
    asUint8Array: function() {
        var result = getBinaryData(this);
        return utils.transformTo("uint8array", result);
    },
    /**
     * Returns the content as an ArrayBuffer.
     * @return {ArrayBuffer} the content as an ArrayBufer.
     */
    asArrayBuffer: function() {
        return this.asUint8Array().buffer;
    }
};

/**
 * Transform an integer into a string in hexadecimal.
 * @private
 * @param {number} dec the number to convert.
 * @param {number} bytes the number of bytes to generate.
 * @returns {string} the result.
 */
var decToHex = function(dec, bytes) {
    var hex = "",
        i;
    for (i = 0; i < bytes; i++) {
        hex += String.fromCharCode(dec & 0xff);
        dec = dec >>> 8;
    }
    return hex;
};

/**
 * Merge the objects passed as parameters into a new one.
 * @private
 * @param {...Object} var_args All objects to merge.
 * @return {Object} a new object with the data of the others.
 */
var extend = function() {
    var result = {}, i, attr;
    for (i = 0; i < arguments.length; i++) { // arguments is not enumerable in some browsers
        for (attr in arguments[i]) {
            if (arguments[i].hasOwnProperty(attr) && typeof result[attr] === "undefined") {
                result[attr] = arguments[i][attr];
            }
        }
    }
    return result;
};

/**
 * Transforms the (incomplete) options from the user into the complete
 * set of options to create a file.
 * @private
 * @param {Object} o the options from the user.
 * @return {Object} the complete set of options.
 */
var prepareFileAttrs = function(o) {
    o = o || {};
    if (o.base64 === true && (o.binary === null || o.binary === undefined)) {
        o.binary = true;
    }
    o = extend(o, defaults);
    o.date = o.date || new Date();
    if (o.compression !== null) o.compression = o.compression.toUpperCase();

    return o;
};

/**
 * Add a file in the current folder.
 * @private
 * @param {string} name the name of the file
 * @param {String|ArrayBuffer|Uint8Array|Buffer} data the data of the file
 * @param {Object} o the options of the file
 * @return {Object} the new file.
 */
var fileAdd = function(name, data, o) {
    // be sure sub folders exist
    var dataType = utils.getTypeOf(data),
        parent;

    o = prepareFileAttrs(o);

    if (typeof o.unixPermissions === "string") {
        o.unixPermissions = parseInt(o.unixPermissions, 8);
    }

    // UNX_IFDIR  0040000 see zipinfo.c
    if (o.unixPermissions && (o.unixPermissions & 0x4000)) {
        o.dir = true;
    }
    // Bit 4    Directory
    if (o.dosPermissions && (o.dosPermissions & 0x0010)) {
        o.dir = true;
    }

    if (o.dir) {
        name = forceTrailingSlash(name);
    }

    if (o.createFolders && (parent = parentFolder(name))) {
        folderAdd.call(this, parent, true);
    }

    if (o.dir || data === null || typeof data === "undefined") {
        o.base64 = false;
        o.binary = false;
        data = null;
        dataType = null;
    }
    else if (dataType === "string") {
        if (o.binary && !o.base64) {
            // optimizedBinaryString == true means that the file has already been filtered with a 0xFF mask
            if (o.optimizedBinaryString !== true) {
                // this is a string, not in a base64 format.
                // Be sure that this is a correct "binary string"
                data = utils.string2binary(data);
            }
        }
    }
    else { // arraybuffer, uint8array, ...
        o.base64 = false;
        o.binary = true;

        if (!dataType && !(data instanceof CompressedObject)) {
            throw new Error("The data of '" + name + "' is in an unsupported format !");
        }

        // special case : it's way easier to work with Uint8Array than with ArrayBuffer
        if (dataType === "arraybuffer") {
            data = utils.transformTo("uint8array", data);
        }
    }

    var object = new ZipObject(name, data, o);
    this.files[name] = object;
    return object;
};

/**
 * Find the parent folder of the path.
 * @private
 * @param {string} path the path to use
 * @return {string} the parent folder, or ""
 */
var parentFolder = function (path) {
    if (path.slice(-1) == '/') {
        path = path.substring(0, path.length - 1);
    }
    var lastSlash = path.lastIndexOf('/');
    return (lastSlash > 0) ? path.substring(0, lastSlash) : "";
};


/**
 * Returns the path with a slash at the end.
 * @private
 * @param {String} path the path to check.
 * @return {String} the path with a trailing slash.
 */
var forceTrailingSlash = function(path) {
    // Check the name ends with a /
    if (path.slice(-1) != "/") {
        path += "/"; // IE doesn't like substr(-1)
    }
    return path;
};
/**
 * Add a (sub) folder in the current folder.
 * @private
 * @param {string} name the folder's name
 * @param {boolean=} [createFolders] If true, automatically create sub
 *  folders. Defaults to false.
 * @return {Object} the new folder.
 */
var folderAdd = function(name, createFolders) {
    createFolders = (typeof createFolders !== 'undefined') ? createFolders : false;

    name = forceTrailingSlash(name);

    // Does this folder already exist?
    if (!this.files[name]) {
        fileAdd.call(this, name, null, {
            dir: true,
            createFolders: createFolders
        });
    }
    return this.files[name];
};

/**
 * Generate a JSZip.CompressedObject for a given zipOject.
 * @param {ZipObject} file the object to read.
 * @param {JSZip.compression} compression the compression to use.
 * @param {Object} compressionOptions the options to use when compressing.
 * @return {JSZip.CompressedObject} the compressed result.
 */
var generateCompressedObjectFrom = function(file, compression, compressionOptions) {
    var result = new CompressedObject(),
        content;

    // the data has not been decompressed, we might reuse things !
    if (file._data instanceof CompressedObject) {
        result.uncompressedSize = file._data.uncompressedSize;
        result.crc32 = file._data.crc32;

        if (result.uncompressedSize === 0 || file.dir) {
            compression = compressions['STORE'];
            result.compressedContent = "";
            result.crc32 = 0;
        }
        else if (file._data.compressionMethod === compression.magic) {
            result.compressedContent = file._data.getCompressedContent();
        }
        else {
            content = file._data.getContent();
            // need to decompress / recompress
            result.compressedContent = compression.compress(utils.transformTo(compression.compressInputType, content), compressionOptions);
        }
    }
    else {
        // have uncompressed data
        content = getBinaryData(file);
        if (!content || content.length === 0 || file.dir) {
            compression = compressions['STORE'];
            content = "";
        }
        result.uncompressedSize = content.length;
        result.crc32 = crc32(content);
        result.compressedContent = compression.compress(utils.transformTo(compression.compressInputType, content), compressionOptions);
    }

    result.compressedSize = result.compressedContent.length;
    result.compressionMethod = compression.magic;

    return result;
};




/**
 * Generate the UNIX part of the external file attributes.
 * @param {Object} unixPermissions the unix permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * adapted from http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute :
 *
 * TTTTsstrwxrwxrwx0000000000ADVSHR
 * ^^^^____________________________ file type, see zipinfo.c (UNX_*)
 *     ^^^_________________________ setuid, setgid, sticky
 *        ^^^^^^^^^________________ permissions
 *                 ^^^^^^^^^^______ not used ?
 *                           ^^^^^^ DOS attribute bits : Archive, Directory, Volume label, System file, Hidden, Read only
 */
var generateUnixExternalFileAttr = function (unixPermissions, isDir) {

    var result = unixPermissions;
    if (!unixPermissions) {
        // I can't use octal values in strict mode, hence the hexa.
        //  040775 => 0x41fd
        // 0100664 => 0x81b4
        result = isDir ? 0x41fd : 0x81b4;
    }

    return (result & 0xFFFF) << 16;
};

/**
 * Generate the DOS part of the external file attributes.
 * @param {Object} dosPermissions the dos permissions or null.
 * @param {Boolean} isDir true if the entry is a directory, false otherwise.
 * @return {Number} a 32 bit integer.
 *
 * Bit 0     Read-Only
 * Bit 1     Hidden
 * Bit 2     System
 * Bit 3     Volume Label
 * Bit 4     Directory
 * Bit 5     Archive
 */
var generateDosExternalFileAttr = function (dosPermissions, isDir) {

    // the dir flag is already set for compatibility

    return (dosPermissions || 0) & 0x3F;
};

/**
 * Generate the various parts used in the construction of the final zip file.
 * @param {string} name the file name.
 * @param {ZipObject} file the file content.
 * @param {JSZip.CompressedObject} compressedObject the compressed object.
 * @param {number} offset the current offset from the start of the zip file.
 * @param {String} platform let's pretend we are this platform (change platform dependents fields)
 * @return {object} the zip parts.
 */
var generateZipParts = function(name, file, compressedObject, offset, platform) {
    var data = compressedObject.compressedContent,
        utfEncodedFileName = utils.transformTo("string", utf8.utf8encode(file.name)),
        comment = file.comment || "",
        utfEncodedComment = utils.transformTo("string", utf8.utf8encode(comment)),
        useUTF8ForFileName = utfEncodedFileName.length !== file.name.length,
        useUTF8ForComment = utfEncodedComment.length !== comment.length,
        o = file.options,
        dosTime,
        dosDate,
        extraFields = "",
        unicodePathExtraField = "",
        unicodeCommentExtraField = "",
        dir, date;


    // handle the deprecated options.dir
    if (file._initialMetadata.dir !== file.dir) {
        dir = file.dir;
    } else {
        dir = o.dir;
    }

    // handle the deprecated options.date
    if(file._initialMetadata.date !== file.date) {
        date = file.date;
    } else {
        date = o.date;
    }

    var extFileAttr = 0;
    var versionMadeBy = 0;
    if (dir) {
        // dos or unix, we set the dos dir flag
        extFileAttr |= 0x00010;
    }
    if(platform === "UNIX") {
        versionMadeBy = 0x031E; // UNIX, version 3.0
        extFileAttr |= generateUnixExternalFileAttr(file.unixPermissions, dir);
    } else { // DOS or other, fallback to DOS
        versionMadeBy = 0x0014; // DOS, version 2.0
        extFileAttr |= generateDosExternalFileAttr(file.dosPermissions, dir);
    }

    // date
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/52/13.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/65/16.html
    // @see http://www.delorie.com/djgpp/doc/rbinter/it/66/16.html

    dosTime = date.getHours();
    dosTime = dosTime << 6;
    dosTime = dosTime | date.getMinutes();
    dosTime = dosTime << 5;
    dosTime = dosTime | date.getSeconds() / 2;

    dosDate = date.getFullYear() - 1980;
    dosDate = dosDate << 4;
    dosDate = dosDate | (date.getMonth() + 1);
    dosDate = dosDate << 5;
    dosDate = dosDate | date.getDate();

    if (useUTF8ForFileName) {
        // set the unicode path extra field. unzip needs at least one extra
        // field to correctly handle unicode path, so using the path is as good
        // as any other information. This could improve the situation with
        // other archive managers too.
        // This field is usually used without the utf8 flag, with a non
        // unicode path in the header (winrar, winzip). This helps (a bit)
        // with the messy Windows' default compressed folders feature but
        // breaks on p7zip which doesn't seek the unicode path extra field.
        // So for now, UTF-8 everywhere !
        unicodePathExtraField =
            // Version
            decToHex(1, 1) +
            // NameCRC32
            decToHex(crc32(utfEncodedFileName), 4) +
            // UnicodeName
            utfEncodedFileName;

        extraFields +=
            // Info-ZIP Unicode Path Extra Field
            "\x75\x70" +
            // size
            decToHex(unicodePathExtraField.length, 2) +
            // content
            unicodePathExtraField;
    }

    if(useUTF8ForComment) {

        unicodeCommentExtraField =
            // Version
            decToHex(1, 1) +
            // CommentCRC32
            decToHex(this.crc32(utfEncodedComment), 4) +
            // UnicodeName
            utfEncodedComment;

        extraFields +=
            // Info-ZIP Unicode Path Extra Field
            "\x75\x63" +
            // size
            decToHex(unicodeCommentExtraField.length, 2) +
            // content
            unicodeCommentExtraField;
    }

    var header = "";

    // version needed to extract
    header += "\x0A\x00";
    // general purpose bit flag
    // set bit 11 if utf8
    header += (useUTF8ForFileName || useUTF8ForComment) ? "\x00\x08" : "\x00\x00";
    // compression method
    header += compressedObject.compressionMethod;
    // last mod file time
    header += decToHex(dosTime, 2);
    // last mod file date
    header += decToHex(dosDate, 2);
    // crc-32
    header += decToHex(compressedObject.crc32, 4);
    // compressed size
    header += decToHex(compressedObject.compressedSize, 4);
    // uncompressed size
    header += decToHex(compressedObject.uncompressedSize, 4);
    // file name length
    header += decToHex(utfEncodedFileName.length, 2);
    // extra field length
    header += decToHex(extraFields.length, 2);


    var fileRecord = signature.LOCAL_FILE_HEADER + header + utfEncodedFileName + extraFields;

    var dirRecord = signature.CENTRAL_FILE_HEADER +
        // version made by (00: DOS)
        decToHex(versionMadeBy, 2) +
        // file header (common to file and central directory)
        header +
        // file comment length
        decToHex(utfEncodedComment.length, 2) +
        // disk number start
        "\x00\x00" +
        // internal file attributes TODO
        "\x00\x00" +
        // external file attributes
        decToHex(extFileAttr, 4) +
        // relative offset of local header
        decToHex(offset, 4) +
        // file name
        utfEncodedFileName +
        // extra field
        extraFields +
        // file comment
        utfEncodedComment;

    return {
        fileRecord: fileRecord,
        dirRecord: dirRecord,
        compressedObject: compressedObject
    };
};


// return the actual prototype of JSZip
var out = {
    /**
     * Read an existing zip and merge the data in the current JSZip object.
     * The implementation is in jszip-load.js, don't forget to include it.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} stream The stream to load
     * @param {Object} options Options for loading the stream.
     *  options.base64 : is the stream in base64 ? default : false
     * @return {JSZip} the current JSZip object
     */
    load: function(stream, options) {
        throw new Error("Load method is not defined. Is the file jszip-load.js included ?");
    },

    /**
     * Filter nested files/folders with the specified function.
     * @param {Function} search the predicate to use :
     *  function (relativePath, file) {...}
     *  It takes 2 arguments : the relative path and the file.
     * @return {Array} An array of matching elements.
     */
    filter: function(search) {
        var result = [],
            filename, relativePath, file, fileClone;
        for (filename in this.files) {
            if (!this.files.hasOwnProperty(filename)) {
                continue;
            }
            file = this.files[filename];
            // return a new object, don't let the user mess with our internal objects :)
            fileClone = new ZipObject(file.name, file._data, extend(file.options));
            relativePath = filename.slice(this.root.length, filename.length);
            if (filename.slice(0, this.root.length) === this.root && // the file is in the current root
                search(relativePath, fileClone)) { // and the file matches the function
                result.push(fileClone);
            }
        }
        return result;
    },

    /**
     * Add a file to the zip file, or search a file.
     * @param {string|RegExp} name The name of the file to add (if data is defined),
     *  the name of the file to find (if no data) or a regex to match files.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} data The file data, either raw or base64 encoded
     * @param {Object} o File options
     * @return {JSZip|Object|Array} this JSZip object (when adding a file),
     *  a file (when searching by string) or an array of files (when searching by regex).
     */
    file: function(name, data, o) {
        if (arguments.length === 1) {
            if (utils.isRegExp(name)) {
                var regexp = name;
                return this.filter(function(relativePath, file) {
                    return !file.dir && regexp.test(relativePath);
                });
            }
            else { // text
                return this.filter(function(relativePath, file) {
                    return !file.dir && relativePath === name;
                })[0] || null;
            }
        }
        else { // more than one argument : we have data !
            name = this.root + name;
            fileAdd.call(this, name, data, o);
        }
        return this;
    },

    /**
     * Add a directory to the zip file, or search.
     * @param {String|RegExp} arg The name of the directory to add, or a regex to search folders.
     * @return {JSZip} an object with the new directory as the root, or an array containing matching folders.
     */
    folder: function(arg) {
        if (!arg) {
            return this;
        }

        if (utils.isRegExp(arg)) {
            return this.filter(function(relativePath, file) {
                return file.dir && arg.test(relativePath);
            });
        }

        // else, name is a new folder
        var name = this.root + arg;
        var newFolder = folderAdd.call(this, name);

        // Allow chaining by returning a new object with this folder as the root
        var ret = this.clone();
        ret.root = newFolder.name;
        return ret;
    },

    /**
     * Delete a file, or a directory and all sub-files, from the zip
     * @param {string} name the name of the file to delete
     * @return {JSZip} this JSZip object
     */
    remove: function(name) {
        name = this.root + name;
        var file = this.files[name];
        if (!file) {
            // Look for any folders
            if (name.slice(-1) != "/") {
                name += "/";
            }
            file = this.files[name];
        }

        if (file && !file.dir) {
            // file
            delete this.files[name];
        } else {
            // maybe a folder, delete recursively
            var kids = this.filter(function(relativePath, file) {
                return file.name.slice(0, name.length) === name;
            });
            for (var i = 0; i < kids.length; i++) {
                delete this.files[kids[i].name];
            }
        }

        return this;
    },

    /**
     * Generate the complete zip file
     * @param {Object} options the options to generate the zip file :
     * - base64, (deprecated, use type instead) true to generate base64.
     * - compression, "STORE" by default.
     * - type, "base64" by default. Values are : string, base64, uint8array, arraybuffer, blob.
     * @return {String|Uint8Array|ArrayBuffer|Buffer|Blob} the zip file
     */
    generate: function(options) {
        options = extend(options || {}, {
            base64: true,
            compression: "STORE",
            compressionOptions : null,
            type: "base64",
            platform: "DOS",
            comment: null,
            mimeType: 'application/zip'
        });

        utils.checkSupport(options.type);

        // accept nodejs `process.platform`
        if(
            options.platform === 'darwin' ||
            options.platform === 'freebsd' ||
            options.platform === 'linux' ||
            options.platform === 'sunos'
        ) {
            options.platform = "UNIX";
        }
        if (options.platform === 'win32') {
            options.platform = "DOS";
        }

        var zipData = [],
            localDirLength = 0,
            centralDirLength = 0,
            writer, i,
            utfEncodedComment = utils.transformTo("string", this.utf8encode(options.comment || this.comment || ""));

        // first, generate all the zip parts.
        for (var name in this.files) {
            if (!this.files.hasOwnProperty(name)) {
                continue;
            }
            var file = this.files[name];

            var compressionName = file.options.compression || options.compression.toUpperCase();
            var compression = compressions[compressionName];
            if (!compression) {
                throw new Error(compressionName + " is not a valid compression method !");
            }
            var compressionOptions = file.options.compressionOptions || options.compressionOptions || {};

            var compressedObject = generateCompressedObjectFrom.call(this, file, compression, compressionOptions);

            var zipPart = generateZipParts.call(this, name, file, compressedObject, localDirLength, options.platform);
            localDirLength += zipPart.fileRecord.length + compressedObject.compressedSize;
            centralDirLength += zipPart.dirRecord.length;
            zipData.push(zipPart);
        }

        var dirEnd = "";

        // end of central dir signature
        dirEnd = signature.CENTRAL_DIRECTORY_END +
            // number of this disk
            "\x00\x00" +
            // number of the disk with the start of the central directory
            "\x00\x00" +
            // total number of entries in the central directory on this disk
            decToHex(zipData.length, 2) +
            // total number of entries in the central directory
            decToHex(zipData.length, 2) +
            // size of the central directory   4 bytes
            decToHex(centralDirLength, 4) +
            // offset of start of central directory with respect to the starting disk number
            decToHex(localDirLength, 4) +
            // .ZIP file comment length
            decToHex(utfEncodedComment.length, 2) +
            // .ZIP file comment
            utfEncodedComment;


        // we have all the parts (and the total length)
        // time to create a writer !
        var typeName = options.type.toLowerCase();
        if(typeName==="uint8array"||typeName==="arraybuffer"||typeName==="blob"||typeName==="nodebuffer") {
            writer = new Uint8ArrayWriter(localDirLength + centralDirLength + dirEnd.length);
        }else{
            writer = new StringWriter(localDirLength + centralDirLength + dirEnd.length);
        }

        for (i = 0; i < zipData.length; i++) {
            writer.append(zipData[i].fileRecord);
            writer.append(zipData[i].compressedObject.compressedContent);
        }
        for (i = 0; i < zipData.length; i++) {
            writer.append(zipData[i].dirRecord);
        }

        writer.append(dirEnd);

        var zip = writer.finalize();



        switch(options.type.toLowerCase()) {
            // case "zip is an Uint8Array"
            case "uint8array" :
            case "arraybuffer" :
            case "nodebuffer" :
                return utils.transformTo(options.type.toLowerCase(), zip);
            case "blob" :
                return utils.arrayBuffer2Blob(utils.transformTo("arraybuffer", zip), options.mimeType);
            // case "zip is a string"
            case "base64" :
                return (options.base64) ? base64.encode(zip) : zip;
            default : // case "string" :
                return zip;
        }

    },

    /**
     * @deprecated
     * This method will be removed in a future version without replacement.
     */
    crc32: function (input, crc) {
        return crc32(input, crc);
    },

    /**
     * @deprecated
     * This method will be removed in a future version without replacement.
     */
    utf8encode: function (string) {
        return utils.transformTo("string", utf8.utf8encode(string));
    },

    /**
     * @deprecated
     * This method will be removed in a future version without replacement.
     */
    utf8decode: function (input) {
        return utf8.utf8decode(input);
    }
};
module.exports = out;
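Reviewer note: generate() also maps Node's process.platform values onto the "UNIX"/"DOS" platform option, which selects how the external file attributes above are written. A hedged sketch of preserving unix permissions with this vendored API (file name and mode are illustrative):

    var zip = new JSZip();
    // "755" is a string, so fileAdd parses it with parseInt(str, 8)
    zip.file("bin/run.sh", "#!/bin/sh\necho hi\n", { unixPermissions: "755" });
    var buf = zip.generate({ type: "nodebuffer", platform: process.platform });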
7 node_modules/jszip/lib/signature.js generated vendored Normal file
@@ -0,0 +1,7 @@
'use strict';
exports.LOCAL_FILE_HEADER = "PK\x03\x04";
exports.CENTRAL_FILE_HEADER = "PK\x01\x02";
exports.CENTRAL_DIRECTORY_END = "PK\x05\x06";
exports.ZIP64_CENTRAL_DIRECTORY_LOCATOR = "PK\x06\x07";
exports.ZIP64_CENTRAL_DIRECTORY_END = "PK\x06\x06";
exports.DATA_DESCRIPTOR = "PK\x07\x08";
36 node_modules/jszip/lib/stringReader.js generated vendored Normal file
@@ -0,0 +1,36 @@
'use strict';
var DataReader = require('./dataReader');
var utils = require('./utils');

function StringReader(data, optimizedBinaryString) {
    this.data = data;
    if (!optimizedBinaryString) {
        this.data = utils.string2binary(this.data);
    }
    this.length = this.data.length;
    this.index = 0;
}
StringReader.prototype = new DataReader();
/**
 * @see DataReader.byteAt
 */
StringReader.prototype.byteAt = function(i) {
    return this.data.charCodeAt(i);
};
/**
 * @see DataReader.lastIndexOfSignature
 */
StringReader.prototype.lastIndexOfSignature = function(sig) {
    return this.data.lastIndexOf(sig);
};
/**
 * @see DataReader.readData
 */
StringReader.prototype.readData = function(size) {
    this.checkOffset(size);
    // this will work because the constructor applied the "& 0xff" mask.
    var result = this.data.slice(this.index, this.index + size);
    this.index += size;
    return result;
};
module.exports = StringReader;
30 node_modules/jszip/lib/stringWriter.js generated vendored Normal file
@@ -0,0 +1,30 @@
'use strict';

var utils = require('./utils');

/**
 * An object to write any content to a string.
 * @constructor
 */
var StringWriter = function() {
    this.data = [];
};
StringWriter.prototype = {
    /**
     * Append any content to the current string.
     * @param {Object} input the content to add.
     */
    append: function(input) {
        input = utils.transformTo("string", input);
        this.data.push(input);
    },
    /**
     * Finalize the construction an return the result.
     * @return {string} the generated string.
     */
    finalize: function() {
        return this.data.join("");
    }
};

module.exports = StringWriter;
34
node_modules/jszip/lib/support.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
'use strict';
exports.base64 = true;
exports.array = true;
exports.string = true;
exports.arraybuffer = typeof ArrayBuffer !== "undefined" && typeof Uint8Array !== "undefined";
// contains true if JSZip can read/generate nodejs Buffer, false otherwise.
// Browserify will provide a Buffer implementation for browsers, which is
// an augmented Uint8Array (i.e., can be used as either Buffer or U8).
exports.nodebuffer = typeof Buffer !== "undefined";
// contains true if JSZip can read/generate Uint8Array, false otherwise.
exports.uint8array = typeof Uint8Array !== "undefined";

if (typeof ArrayBuffer === "undefined") {
    exports.blob = false;
}
else {
    var buffer = new ArrayBuffer(0);
    try {
        exports.blob = new Blob([buffer], {
            type: "application/zip"
        }).size === 0;
    }
    catch (e) {
        try {
            var Builder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder;
            var builder = new Builder();
            builder.append(buffer);
            exports.blob = builder.getBlob('application/zip').size === 0;
        }
        catch (e) {
            exports.blob = false;
        }
    }
}
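Editorial sketch (not in the commit): these flags are computed once at require time, so the rest of the library just branches on them. The variable names below are illustrative.

var support = require('./support');

// pick the richest binary container available in this environment
var outputType = support.uint8array ? "uint8array" : "string";
if (!support.blob) {
    // a caller would fall back to e.g. a base64 string instead of building a Blob
}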
47
node_modules/jszip/lib/uint8ArrayReader.js
generated
vendored
Normal file
@@ -0,0 +1,47 @@
'use strict';
var DataReader = require('./dataReader');

function Uint8ArrayReader(data) {
    if (data) {
        this.data = data;
        this.length = this.data.length;
        this.index = 0;
    }
}
Uint8ArrayReader.prototype = new DataReader();
/**
 * @see DataReader.byteAt
 */
Uint8ArrayReader.prototype.byteAt = function(i) {
    return this.data[i];
};
/**
 * @see DataReader.lastIndexOfSignature
 */
Uint8ArrayReader.prototype.lastIndexOfSignature = function(sig) {
    var sig0 = sig.charCodeAt(0),
        sig1 = sig.charCodeAt(1),
        sig2 = sig.charCodeAt(2),
        sig3 = sig.charCodeAt(3);
    for (var i = this.length - 4; i >= 0; --i) {
        if (this.data[i] === sig0 && this.data[i + 1] === sig1 && this.data[i + 2] === sig2 && this.data[i + 3] === sig3) {
            return i;
        }
    }

    return -1;
};
/**
 * @see DataReader.readData
 */
Uint8ArrayReader.prototype.readData = function(size) {
    this.checkOffset(size);
    if(size === 0) {
        // in IE10, when using subarray(idx, idx), we get the array [0x00] instead of [].
        return new Uint8Array(0);
    }
    var result = this.data.subarray(this.index, this.index + size);
    this.index += size;
    return result;
};
module.exports = Uint8ArrayReader;
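Editorial sketch (not in the commit): lastIndexOfSignature is what lets the parser find the end-of-central-directory record by scanning backwards, four bytes at a time, without converting the buffer to a string. The sample buffer is made up.

var Uint8ArrayReader = require('./uint8ArrayReader');

// "PK\x05\x06" (end of central directory) buried in a tiny made-up buffer
var bytes = new Uint8Array([0x00, 0x50, 0x4b, 0x05, 0x06, 0x00]);
var reader = new Uint8ArrayReader(bytes);
var offset = reader.lastIndexOfSignature("PK\x05\x06"); // 1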
36
node_modules/jszip/lib/uint8ArrayWriter.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
'use strict';

var utils = require('./utils');

/**
 * An object to write any content to an Uint8Array.
 * @constructor
 * @param {number} length The length of the array.
 */
var Uint8ArrayWriter = function(length) {
    this.data = new Uint8Array(length);
    this.index = 0;
};
Uint8ArrayWriter.prototype = {
    /**
     * Append any content to the current array.
     * @param {Object} input the content to add.
     */
    append: function(input) {
        if (input.length !== 0) {
            // with an empty Uint8Array, Opera fails with an "Offset larger than array size" error
            input = utils.transformTo("uint8array", input);
            this.data.set(input, this.index);
            this.index += input.length;
        }
    },
    /**
     * Finalize the construction and return the result.
     * @return {Uint8Array} the generated array.
     */
    finalize: function() {
        return this.data;
    }
};

module.exports = Uint8ArrayWriter;
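Editorial sketch (not in the commit): unlike StringWriter, this writer needs the total size up front because a Uint8Array cannot grow.

var Uint8ArrayWriter = require('./uint8ArrayWriter');

var writer = new Uint8ArrayWriter(4);          // total length must be known in advance
writer.append(new Uint8Array([0x50, 0x4b]));   // written at index 0
writer.append("\x03\x04");                     // converted via utils.transformTo, written at index 2
var bytes = writer.finalize();                 // Uint8Array [0x50, 0x4b, 0x03, 0x04]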
207
node_modules/jszip/lib/utf8.js
generated
vendored
Normal file
@@ -0,0 +1,207 @@
'use strict';

var utils = require('./utils');
var support = require('./support');
var nodeBuffer = require('./nodeBuffer');

/**
 * The following functions come from pako, from pako/lib/utils/strings
 * released under the MIT license, see pako https://github.com/nodeca/pako/
 */

// Table with utf8 lengths (calculated by first byte of sequence)
// Note, that 5 & 6-byte values and some 4-byte values can not be represented in JS,
// because max possible codepoint is 0x10ffff
var _utf8len = new Array(256);
for (var i=0; i<256; i++) {
    _utf8len[i] = (i >= 252 ? 6 : i >= 248 ? 5 : i >= 240 ? 4 : i >= 224 ? 3 : i >= 192 ? 2 : 1);
}
_utf8len[254]=_utf8len[254]=1; // Invalid sequence start

// convert string to array (typed, when possible)
var string2buf = function (str) {
    var buf, c, c2, m_pos, i, str_len = str.length, buf_len = 0;

    // count binary size
    for (m_pos = 0; m_pos < str_len; m_pos++) {
        c = str.charCodeAt(m_pos);
        if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
            c2 = str.charCodeAt(m_pos+1);
            if ((c2 & 0xfc00) === 0xdc00) {
                c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
                m_pos++;
            }
        }
        buf_len += c < 0x80 ? 1 : c < 0x800 ? 2 : c < 0x10000 ? 3 : 4;
    }

    // allocate buffer
    if (support.uint8array) {
        buf = new Uint8Array(buf_len);
    } else {
        buf = new Array(buf_len);
    }

    // convert
    for (i=0, m_pos = 0; i < buf_len; m_pos++) {
        c = str.charCodeAt(m_pos);
        if ((c & 0xfc00) === 0xd800 && (m_pos+1 < str_len)) {
            c2 = str.charCodeAt(m_pos+1);
            if ((c2 & 0xfc00) === 0xdc00) {
                c = 0x10000 + ((c - 0xd800) << 10) + (c2 - 0xdc00);
                m_pos++;
            }
        }
        if (c < 0x80) {
            /* one byte */
            buf[i++] = c;
        } else if (c < 0x800) {
            /* two bytes */
            buf[i++] = 0xC0 | (c >>> 6);
            buf[i++] = 0x80 | (c & 0x3f);
        } else if (c < 0x10000) {
            /* three bytes */
            buf[i++] = 0xE0 | (c >>> 12);
            buf[i++] = 0x80 | (c >>> 6 & 0x3f);
            buf[i++] = 0x80 | (c & 0x3f);
        } else {
            /* four bytes */
            buf[i++] = 0xf0 | (c >>> 18);
            buf[i++] = 0x80 | (c >>> 12 & 0x3f);
            buf[i++] = 0x80 | (c >>> 6 & 0x3f);
            buf[i++] = 0x80 | (c & 0x3f);
        }
    }

    return buf;
};

// Calculate max possible position in utf8 buffer,
// that will not break sequence. If that's not possible
// - (very small limits) return max size as is.
//
// buf[] - utf8 bytes array
// max - length limit (mandatory);
var utf8border = function(buf, max) {
    var pos;

    max = max || buf.length;
    if (max > buf.length) { max = buf.length; }

    // go back from last position, until start of sequence found
    pos = max-1;
    while (pos >= 0 && (buf[pos] & 0xC0) === 0x80) { pos--; }

    // Fuckup - very small and broken sequence,
    // return max, because we should return something anyway.
    if (pos < 0) { return max; }

    // If we came to start of buffer - that means buffer is too small,
    // return max too.
    if (pos === 0) { return max; }

    return (pos + _utf8len[buf[pos]] > max) ? pos : max;
};

// convert array to string
var buf2string = function (buf) {
    var str, i, out, c, c_len;
    var len = buf.length;

    // Reserve max possible length (2 words per char)
    // NB: for unknown reasons, Array is significantly faster for
    //     String.fromCharCode.apply than Uint16Array.
    var utf16buf = new Array(len*2);

    for (out=0, i=0; i<len;) {
        c = buf[i++];
        // quick process ascii
        if (c < 0x80) { utf16buf[out++] = c; continue; }

        c_len = _utf8len[c];
        // skip 5 & 6 byte codes
        if (c_len > 4) { utf16buf[out++] = 0xfffd; i += c_len-1; continue; }

        // apply mask on first byte
        c &= c_len === 2 ? 0x1f : c_len === 3 ? 0x0f : 0x07;
        // join the rest
        while (c_len > 1 && i < len) {
            c = (c << 6) | (buf[i++] & 0x3f);
            c_len--;
        }

        // terminated by end of string?
        if (c_len > 1) { utf16buf[out++] = 0xfffd; continue; }

        if (c < 0x10000) {
            utf16buf[out++] = c;
        } else {
            c -= 0x10000;
            utf16buf[out++] = 0xd800 | ((c >> 10) & 0x3ff);
            utf16buf[out++] = 0xdc00 | (c & 0x3ff);
        }
    }

    // shrinkBuf(utf16buf, out)
    if (utf16buf.length !== out) {
        if(utf16buf.subarray) {
            utf16buf = utf16buf.subarray(0, out);
        } else {
            utf16buf.length = out;
        }
    }

    // return String.fromCharCode.apply(null, utf16buf);
    return utils.applyFromCharCode(utf16buf);
};


// That's all for the pako functions.


/**
 * Transform a javascript string into an array (typed if possible) of bytes,
 * UTF-8 encoded.
 * @param {String} str the string to encode
 * @return {Array|Uint8Array|Buffer} the UTF-8 encoded string.
 */
exports.utf8encode = function utf8encode(str) {
    if (support.nodebuffer) {
        return nodeBuffer(str, "utf-8");
    }

    return string2buf(str);
};


/**
 * Transform a bytes array (or a representation) representing an UTF-8 encoded
 * string into a javascript string.
 * @param {Array|Uint8Array|Buffer} buf the data to decode
 * @return {String} the decoded string.
 */
exports.utf8decode = function utf8decode(buf) {
    if (support.nodebuffer) {
        return utils.transformTo("nodebuffer", buf).toString("utf-8");
    }

    buf = utils.transformTo(support.uint8array ? "uint8array" : "array", buf);

    // return buf2string(buf);
    // Chrome prefers to work with "small" chunks of data
    // for the method buf2string.
    // Firefox and Chrome have their own shortcut, IE doesn't seem to really care.
    var result = [], k = 0, len = buf.length, chunk = 65536;
    while (k < len) {
        var nextBoundary = utf8border(buf, Math.min(k + chunk, len));
        if (support.uint8array) {
            result.push(buf2string(buf.subarray(k, nextBoundary)));
        } else {
            result.push(buf2string(buf.slice(k, nextBoundary)));
        }
        k = nextBoundary;
    }
    return result.join("");

};
// vim: set shiftwidth=4 softtabstop=4:
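Editorial sketch (not in the commit): utf8encode/utf8decode round-trip a JavaScript string through UTF-8 bytes. In Node the Buffer fast path is used; in browsers the pako-derived string2buf/buf2string path runs, chunked at utf8border so multi-byte sequences are never split.

var utf8 = require('./utf8');

var encoded = utf8.utf8encode("déjà vu"); // Buffer in Node, Uint8Array/Array otherwise; 9 bytes, since "é" and "à" take 2 bytes each
var decoded = utf8.utf8decode(encoded);   // "déjà vu" again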
326
node_modules/jszip/lib/utils.js
generated
vendored
Normal file
@@ -0,0 +1,326 @@
'use strict';
var support = require('./support');
var compressions = require('./compressions');
var nodeBuffer = require('./nodeBuffer');
/**
 * Convert a string to a "binary string" : a string containing only char codes between 0 and 255.
 * @param {string} str the string to transform.
 * @return {String} the binary string.
 */
exports.string2binary = function(str) {
    var result = "";
    for (var i = 0; i < str.length; i++) {
        result += String.fromCharCode(str.charCodeAt(i) & 0xff);
    }
    return result;
};
exports.arrayBuffer2Blob = function(buffer, mimeType) {
    exports.checkSupport("blob");
    mimeType = mimeType || 'application/zip';

    try {
        // Blob constructor
        return new Blob([buffer], {
            type: mimeType
        });
    }
    catch (e) {

        try {
            // deprecated, browser only, old way
            var Builder = window.BlobBuilder || window.WebKitBlobBuilder || window.MozBlobBuilder || window.MSBlobBuilder;
            var builder = new Builder();
            builder.append(buffer);
            return builder.getBlob(mimeType);
        }
        catch (e) {

            // well, fuck ?!
            throw new Error("Bug : can't construct the Blob.");
        }
    }


};
/**
 * The identity function.
 * @param {Object} input the input.
 * @return {Object} the same input.
 */
function identity(input) {
    return input;
}

/**
 * Fill in an array with a string.
 * @param {String} str the string to use.
 * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to fill in (will be mutated).
 * @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated array.
 */
function stringToArrayLike(str, array) {
    for (var i = 0; i < str.length; ++i) {
        array[i] = str.charCodeAt(i) & 0xFF;
    }
    return array;
}

/**
 * Transform an array-like object to a string.
 * @param {Array|ArrayBuffer|Uint8Array|Buffer} array the array to transform.
 * @return {String} the result.
 */
function arrayLikeToString(array) {
    // Performances notes :
    // --------------------
    // String.fromCharCode.apply(null, array) is the fastest, see
    // http://jsperf.com/converting-a-uint8array-to-a-string/2
    // but the stack is limited (and we can get huge arrays !).
    //
    // result += String.fromCharCode(array[i]); generates too many strings !
    //
    // This code is inspired by http://jsperf.com/arraybuffer-to-string-apply-performance/2
    var chunk = 65536;
    var result = [],
        len = array.length,
        type = exports.getTypeOf(array),
        k = 0,
        canUseApply = true;
    try {
        switch(type) {
            case "uint8array":
                String.fromCharCode.apply(null, new Uint8Array(0));
                break;
            case "nodebuffer":
                String.fromCharCode.apply(null, nodeBuffer(0));
                break;
        }
    } catch(e) {
        canUseApply = false;
    }

    // no apply : slow and painful algorithm
    // default browser on android 4.*
    if (!canUseApply) {
        var resultStr = "";
        for(var i = 0; i < array.length;i++) {
            resultStr += String.fromCharCode(array[i]);
        }
        return resultStr;
    }
    while (k < len && chunk > 1) {
        try {
            if (type === "array" || type === "nodebuffer") {
                result.push(String.fromCharCode.apply(null, array.slice(k, Math.min(k + chunk, len))));
            }
            else {
                result.push(String.fromCharCode.apply(null, array.subarray(k, Math.min(k + chunk, len))));
            }
            k += chunk;
        }
        catch (e) {
            chunk = Math.floor(chunk / 2);
        }
    }
    return result.join("");
}

exports.applyFromCharCode = arrayLikeToString;


/**
 * Copy the data from an array-like to another array-like.
 * @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayFrom the origin array.
 * @param {Array|ArrayBuffer|Uint8Array|Buffer} arrayTo the destination array which will be mutated.
 * @return {Array|ArrayBuffer|Uint8Array|Buffer} the updated destination array.
 */
function arrayLikeToArrayLike(arrayFrom, arrayTo) {
    for (var i = 0; i < arrayFrom.length; i++) {
        arrayTo[i] = arrayFrom[i];
    }
    return arrayTo;
}

// a matrix containing functions to transform everything into everything.
var transform = {};

// string to ?
transform["string"] = {
    "string": identity,
    "array": function(input) {
        return stringToArrayLike(input, new Array(input.length));
    },
    "arraybuffer": function(input) {
        return transform["string"]["uint8array"](input).buffer;
    },
    "uint8array": function(input) {
        return stringToArrayLike(input, new Uint8Array(input.length));
    },
    "nodebuffer": function(input) {
        return stringToArrayLike(input, nodeBuffer(input.length));
    }
};

// array to ?
transform["array"] = {
    "string": arrayLikeToString,
    "array": identity,
    "arraybuffer": function(input) {
        return (new Uint8Array(input)).buffer;
    },
    "uint8array": function(input) {
        return new Uint8Array(input);
    },
    "nodebuffer": function(input) {
        return nodeBuffer(input);
    }
};

// arraybuffer to ?
transform["arraybuffer"] = {
    "string": function(input) {
        return arrayLikeToString(new Uint8Array(input));
    },
    "array": function(input) {
        return arrayLikeToArrayLike(new Uint8Array(input), new Array(input.byteLength));
    },
    "arraybuffer": identity,
    "uint8array": function(input) {
        return new Uint8Array(input);
    },
    "nodebuffer": function(input) {
        return nodeBuffer(new Uint8Array(input));
    }
};

// uint8array to ?
transform["uint8array"] = {
    "string": arrayLikeToString,
    "array": function(input) {
        return arrayLikeToArrayLike(input, new Array(input.length));
    },
    "arraybuffer": function(input) {
        return input.buffer;
    },
    "uint8array": identity,
    "nodebuffer": function(input) {
        return nodeBuffer(input);
    }
};

// nodebuffer to ?
transform["nodebuffer"] = {
    "string": arrayLikeToString,
    "array": function(input) {
        return arrayLikeToArrayLike(input, new Array(input.length));
    },
    "arraybuffer": function(input) {
        return transform["nodebuffer"]["uint8array"](input).buffer;
    },
    "uint8array": function(input) {
        return arrayLikeToArrayLike(input, new Uint8Array(input.length));
    },
    "nodebuffer": identity
};

/**
 * Transform an input into any type.
 * The supported output types are : string, array, uint8array, arraybuffer, nodebuffer.
 * If no output type is specified, the unmodified input will be returned.
 * @param {String} outputType the output type.
 * @param {String|Array|ArrayBuffer|Uint8Array|Buffer} input the input to convert.
 * @throws {Error} an Error if the browser doesn't support the requested output type.
 */
exports.transformTo = function(outputType, input) {
    if (!input) {
        // undefined, null, etc
        // an empty string won't harm.
        input = "";
    }
    if (!outputType) {
        return input;
    }
    exports.checkSupport(outputType);
    var inputType = exports.getTypeOf(input);
    var result = transform[inputType][outputType](input);
    return result;
};

/**
 * Return the type of the input.
 * The type will be in a format valid for JSZip.utils.transformTo : string, array, uint8array, arraybuffer.
 * @param {Object} input the input to identify.
 * @return {String} the (lowercase) type of the input.
 */
exports.getTypeOf = function(input) {
    if (typeof input === "string") {
        return "string";
    }
    if (Object.prototype.toString.call(input) === "[object Array]") {
        return "array";
    }
    if (support.nodebuffer && nodeBuffer.test(input)) {
        return "nodebuffer";
    }
    if (support.uint8array && input instanceof Uint8Array) {
        return "uint8array";
    }
    if (support.arraybuffer && input instanceof ArrayBuffer) {
        return "arraybuffer";
    }
};

/**
 * Throw an exception if the type is not supported.
 * @param {String} type the type to check.
 * @throws {Error} an Error if the browser doesn't support the requested type.
 */
exports.checkSupport = function(type) {
    var supported = support[type.toLowerCase()];
    if (!supported) {
        throw new Error(type + " is not supported by this browser");
    }
};
exports.MAX_VALUE_16BITS = 65535;
exports.MAX_VALUE_32BITS = -1; // well, "\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF" is parsed as -1

/**
 * Prettify a string read as binary.
 * @param {string} str the string to prettify.
 * @return {string} a pretty string.
 */
exports.pretty = function(str) {
    var res = '',
        code, i;
    for (i = 0; i < (str || "").length; i++) {
        code = str.charCodeAt(i);
        res += '\\x' + (code < 16 ? "0" : "") + code.toString(16).toUpperCase();
    }
    return res;
};

/**
 * Find a compression registered in JSZip.
 * @param {string} compressionMethod the method magic to find.
 * @return {Object|null} the JSZip compression object, null if none found.
 */
exports.findCompression = function(compressionMethod) {
    for (var method in compressions) {
        if (!compressions.hasOwnProperty(method)) {
            continue;
        }
        if (compressions[method].magic === compressionMethod) {
            return compressions[method];
        }
    }
    return null;
};
/**
 * Cross-window, cross-Node-context regular expression detection
 * @param {Object} object Anything
 * @return {Boolean} true if the object is a regular expression,
 * false otherwise
 */
exports.isRegExp = function (object) {
    return Object.prototype.toString.call(object) === "[object RegExp]";
};
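Editorial sketch (not in the commit): transformTo is a lookup in the transform matrix keyed by getTypeOf(input) and the requested output type, after checkSupport has vetoed unsupported targets.

var utils = require('./utils');

utils.getTypeOf("PK");              // "string"
utils.getTypeOf(new Uint8Array(2)); // "uint8array"

// string -> uint8array goes through stringToArrayLike, one char code (& 0xFF) per byte
var bytes = utils.transformTo("uint8array", "PK\x03\x04");
// and back through arrayLikeToString, which applies String.fromCharCode in chunks
var str = utils.transformTo("string", bytes); // "PK\x03\x04"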
221
node_modules/jszip/lib/zipEntries.js
generated
vendored
Normal file
@@ -0,0 +1,221 @@
'use strict';
var StringReader = require('./stringReader');
var NodeBufferReader = require('./nodeBufferReader');
var Uint8ArrayReader = require('./uint8ArrayReader');
var utils = require('./utils');
var sig = require('./signature');
var ZipEntry = require('./zipEntry');
var support = require('./support');
var jszipProto = require('./object');
// class ZipEntries {{{
/**
 * All the entries in the zip file.
 * @constructor
 * @param {String|ArrayBuffer|Uint8Array} data the binary stream to load.
 * @param {Object} loadOptions Options for loading the stream.
 */
function ZipEntries(data, loadOptions) {
    this.files = [];
    this.loadOptions = loadOptions;
    if (data) {
        this.load(data);
    }
}
ZipEntries.prototype = {
    /**
     * Check that the reader is on the specified signature.
     * @param {string} expectedSignature the expected signature.
     * @throws {Error} if it is another signature.
     */
    checkSignature: function(expectedSignature) {
        var signature = this.reader.readString(4);
        if (signature !== expectedSignature) {
            throw new Error("Corrupted zip or bug : unexpected signature " + "(" + utils.pretty(signature) + ", expected " + utils.pretty(expectedSignature) + ")");
        }
    },
    /**
     * Read the end of the central directory.
     */
    readBlockEndOfCentral: function() {
        this.diskNumber = this.reader.readInt(2);
        this.diskWithCentralDirStart = this.reader.readInt(2);
        this.centralDirRecordsOnThisDisk = this.reader.readInt(2);
        this.centralDirRecords = this.reader.readInt(2);
        this.centralDirSize = this.reader.readInt(4);
        this.centralDirOffset = this.reader.readInt(4);

        this.zipCommentLength = this.reader.readInt(2);
        // warning : the encoding depends on the system locale
        // On a linux machine with LANG=en_US.utf8, this field is utf8 encoded.
        // On a windows machine, this field is encoded with the localized windows code page.
        this.zipComment = this.reader.readString(this.zipCommentLength);
        // To get consistent behavior with the generation part, we will assume that
        // this is utf8 encoded.
        this.zipComment = jszipProto.utf8decode(this.zipComment);
    },
    /**
     * Read the end of the Zip 64 central directory.
     * Not merged with the method readEndOfCentral :
     * The end of central can coexist with its Zip64 brother,
     * I don't want to read the wrong number of bytes !
     */
    readBlockZip64EndOfCentral: function() {
        this.zip64EndOfCentralSize = this.reader.readInt(8);
        this.versionMadeBy = this.reader.readString(2);
        this.versionNeeded = this.reader.readInt(2);
        this.diskNumber = this.reader.readInt(4);
        this.diskWithCentralDirStart = this.reader.readInt(4);
        this.centralDirRecordsOnThisDisk = this.reader.readInt(8);
        this.centralDirRecords = this.reader.readInt(8);
        this.centralDirSize = this.reader.readInt(8);
        this.centralDirOffset = this.reader.readInt(8);

        this.zip64ExtensibleData = {};
        var extraDataSize = this.zip64EndOfCentralSize - 44,
            index = 0,
            extraFieldId,
            extraFieldLength,
            extraFieldValue;
        while (index < extraDataSize) {
            extraFieldId = this.reader.readInt(2);
            extraFieldLength = this.reader.readInt(4);
            extraFieldValue = this.reader.readString(extraFieldLength);
            this.zip64ExtensibleData[extraFieldId] = {
                id: extraFieldId,
                length: extraFieldLength,
                value: extraFieldValue
            };
        }
    },
    /**
     * Read the end of the Zip 64 central directory locator.
     */
    readBlockZip64EndOfCentralLocator: function() {
        this.diskWithZip64CentralDirStart = this.reader.readInt(4);
        this.relativeOffsetEndOfZip64CentralDir = this.reader.readInt(8);
        this.disksCount = this.reader.readInt(4);
        if (this.disksCount > 1) {
            throw new Error("Multi-volumes zip are not supported");
        }
    },
    /**
     * Read the local files, based on the offset read in the central part.
     */
    readLocalFiles: function() {
        var i, file;
        for (i = 0; i < this.files.length; i++) {
            file = this.files[i];
            this.reader.setIndex(file.localHeaderOffset);
            this.checkSignature(sig.LOCAL_FILE_HEADER);
            file.readLocalPart(this.reader);
            file.handleUTF8();
            file.processAttributes();
        }
    },
    /**
     * Read the central directory.
     */
    readCentralDir: function() {
        var file;

        this.reader.setIndex(this.centralDirOffset);
        while (this.reader.readString(4) === sig.CENTRAL_FILE_HEADER) {
            file = new ZipEntry({
                zip64: this.zip64
            }, this.loadOptions);
            file.readCentralPart(this.reader);
            this.files.push(file);
        }
    },
    /**
     * Read the end of central directory.
     */
    readEndOfCentral: function() {
        var offset = this.reader.lastIndexOfSignature(sig.CENTRAL_DIRECTORY_END);
        if (offset === -1) {
            // Check if the content is a truncated zip or complete garbage.
            // A "LOCAL_FILE_HEADER" is not required at the beginning (a self-extracting
            // zip for example) but it can give a good hint.
            // If an ajax request was used without responseType, we will also
            // get unreadable data.
            var isGarbage = true;
            try {
                this.reader.setIndex(0);
                this.checkSignature(sig.LOCAL_FILE_HEADER);
                isGarbage = false;
            } catch (e) {}

            if (isGarbage) {
                throw new Error("Can't find end of central directory : is this a zip file ? " +
                    "If it is, see http://stuk.github.io/jszip/documentation/howto/read_zip.html");
            } else {
                throw new Error("Corrupted zip : can't find end of central directory");
            }
        }
        this.reader.setIndex(offset);
        this.checkSignature(sig.CENTRAL_DIRECTORY_END);
        this.readBlockEndOfCentral();


        /* extract from the zip spec :
            4) If one of the fields in the end of central directory
               record is too small to hold required data, the field
               should be set to -1 (0xFFFF or 0xFFFFFFFF) and the
               ZIP64 format record should be created.
            5) The end of central directory record and the
               Zip64 end of central directory locator record must
               reside on the same disk when splitting or spanning
               an archive.
        */
        if (this.diskNumber === utils.MAX_VALUE_16BITS || this.diskWithCentralDirStart === utils.MAX_VALUE_16BITS || this.centralDirRecordsOnThisDisk === utils.MAX_VALUE_16BITS || this.centralDirRecords === utils.MAX_VALUE_16BITS || this.centralDirSize === utils.MAX_VALUE_32BITS || this.centralDirOffset === utils.MAX_VALUE_32BITS) {
            this.zip64 = true;

            /*
            Warning : the zip64 extension is supported, but ONLY if the 64bits integer read from
            the zip file can fit into a 32bits integer. This cannot be solved : Javascript represents
            all numbers as 64-bit double precision IEEE 754 floating point numbers.
            So, we have 53bits for integers and bitwise operations treat everything as 32bits.
            see https://developer.mozilla.org/en-US/docs/JavaScript/Reference/Operators/Bitwise_Operators
            and http://www.ecma-international.org/publications/files/ECMA-ST/ECMA-262.pdf section 8.5
            */

            // should look for a zip64 EOCD locator
            offset = this.reader.lastIndexOfSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
            if (offset === -1) {
                throw new Error("Corrupted zip : can't find the ZIP64 end of central directory locator");
            }
            this.reader.setIndex(offset);
            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_LOCATOR);
            this.readBlockZip64EndOfCentralLocator();

            // now the zip64 EOCD record
            this.reader.setIndex(this.relativeOffsetEndOfZip64CentralDir);
            this.checkSignature(sig.ZIP64_CENTRAL_DIRECTORY_END);
            this.readBlockZip64EndOfCentral();
        }
    },
    prepareReader: function(data) {
        var type = utils.getTypeOf(data);
        if (type === "string" && !support.uint8array) {
            this.reader = new StringReader(data, this.loadOptions.optimizedBinaryString);
        }
        else if (type === "nodebuffer") {
            this.reader = new NodeBufferReader(data);
        }
        else {
            this.reader = new Uint8ArrayReader(utils.transformTo("uint8array", data));
        }
    },
    /**
     * Read a zip file and create ZipEntries.
     * @param {String|ArrayBuffer|Uint8Array|Buffer} data the binary string representing a zip file.
     */
    load: function(data) {
        this.prepareReader(data);
        this.readEndOfCentral();
        this.readCentralDir();
        this.readLocalFiles();
    }
};
// }}} end of ZipEntries
module.exports = ZipEntries;
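Editorial sketch (not in the commit): ZipEntries is the low-level parser behind JSZip's load(); it picks a reader for the input type, locates the end of central directory, walks the central directory, then re-reads each local header. The data variable below is a placeholder for a whole zip file as a Uint8Array, Buffer or binary string, and the option names mirror the fields the code reads (checkCRC32, optimizedBinaryString).

var ZipEntries = require('./zipEntries');

var entries = new ZipEntries(data, { checkCRC32: false, optimizedBinaryString: false });
entries.files.forEach(function (entry) {
    console.log(entry.fileName, entry.uncompressedSize, entry.dir);
});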
310
node_modules/jszip/lib/zipEntry.js
generated
vendored
Normal file
@@ -0,0 +1,310 @@
'use strict';
var StringReader = require('./stringReader');
var utils = require('./utils');
var CompressedObject = require('./compressedObject');
var jszipProto = require('./object');

var MADE_BY_DOS = 0x00;
var MADE_BY_UNIX = 0x03;

// class ZipEntry {{{
/**
 * An entry in the zip file.
 * @constructor
 * @param {Object} options Options of the current file.
 * @param {Object} loadOptions Options for loading the stream.
 */
function ZipEntry(options, loadOptions) {
    this.options = options;
    this.loadOptions = loadOptions;
}
ZipEntry.prototype = {
    /**
     * Say if the file is encrypted.
     * @return {boolean} true if the file is encrypted, false otherwise.
     */
    isEncrypted: function() {
        // bit 1 is set
        return (this.bitFlag & 0x0001) === 0x0001;
    },
    /**
     * Say if the file has a utf-8 filename/comment.
     * @return {boolean} true if the filename/comment is in utf-8, false otherwise.
     */
    useUTF8: function() {
        // bit 11 is set
        return (this.bitFlag & 0x0800) === 0x0800;
    },
    /**
     * Prepare the function used to generate the compressed content from this ZipFile.
     * @param {DataReader} reader the reader to use.
     * @param {number} from the offset from where we should read the data.
     * @param {number} length the length of the data to read.
     * @return {Function} the callback to get the compressed content (the type depends on the DataReader class).
     */
    prepareCompressedContent: function(reader, from, length) {
        return function() {
            var previousIndex = reader.index;
            reader.setIndex(from);
            var compressedFileData = reader.readData(length);
            reader.setIndex(previousIndex);

            return compressedFileData;
        };
    },
    /**
     * Prepare the function used to generate the uncompressed content from this ZipFile.
     * @param {DataReader} reader the reader to use.
     * @param {number} from the offset from where we should read the data.
     * @param {number} length the length of the data to read.
     * @param {JSZip.compression} compression the compression used on this file.
     * @param {number} uncompressedSize the uncompressed size to expect.
     * @return {Function} the callback to get the uncompressed content (the type depends on the DataReader class).
     */
    prepareContent: function(reader, from, length, compression, uncompressedSize) {
        return function() {

            var compressedFileData = utils.transformTo(compression.uncompressInputType, this.getCompressedContent());
            var uncompressedFileData = compression.uncompress(compressedFileData);

            if (uncompressedFileData.length !== uncompressedSize) {
                throw new Error("Bug : uncompressed data size mismatch");
            }

            return uncompressedFileData;
        };
    },
    /**
     * Read the local part of a zip file and add the info in this object.
     * @param {DataReader} reader the reader to use.
     */
    readLocalPart: function(reader) {
        var compression, localExtraFieldsLength;

        // we already know everything from the central dir !
        // If the central dir data are false, we are doomed.
        // On the bright side, the local part is scary : zip64, data descriptors, both, etc.
        // The less data we get here, the more reliable this should be.
        // Let's skip the whole header and dash to the data !
        reader.skip(22);
        // in some zip created on windows, the filename stored in the central dir contains \ instead of /.
        // Strangely, the filename here is OK.
        // I would love to treat these zip files as corrupted (see http://www.info-zip.org/FAQ.html#backslashes
        // or APPNOTE#4.4.17.1, "All slashes MUST be forward slashes '/'") but there are a lot of bad zip generators...
        // Search "unzip mismatching "local" filename continuing with "central" filename version" on
        // the internet.
        //
        // I think I see the logic here : the central directory is used to display
        // content and the local directory is used to extract the files. Mixing / and \
        // may be used to display \ to windows users and use / when extracting the files.
        // Unfortunately, this also leads to some issues : http://seclists.org/fulldisclosure/2009/Sep/394
        this.fileNameLength = reader.readInt(2);
        localExtraFieldsLength = reader.readInt(2); // can't be sure this will be the same as the central dir
        this.fileName = reader.readString(this.fileNameLength);
        reader.skip(localExtraFieldsLength);

        if (this.compressedSize == -1 || this.uncompressedSize == -1) {
            throw new Error("Bug or corrupted zip : didn't get enough informations from the central directory " + "(compressedSize == -1 || uncompressedSize == -1)");
        }

        compression = utils.findCompression(this.compressionMethod);
        if (compression === null) { // no compression found
            throw new Error("Corrupted zip : compression " + utils.pretty(this.compressionMethod) + " unknown (inner file : " + this.fileName + ")");
        }
        this.decompressed = new CompressedObject();
        this.decompressed.compressedSize = this.compressedSize;
        this.decompressed.uncompressedSize = this.uncompressedSize;
        this.decompressed.crc32 = this.crc32;
        this.decompressed.compressionMethod = this.compressionMethod;
        this.decompressed.getCompressedContent = this.prepareCompressedContent(reader, reader.index, this.compressedSize, compression);
        this.decompressed.getContent = this.prepareContent(reader, reader.index, this.compressedSize, compression, this.uncompressedSize);

        // we need to compute the crc32...
        if (this.loadOptions.checkCRC32) {
            this.decompressed = utils.transformTo("string", this.decompressed.getContent());
            if (jszipProto.crc32(this.decompressed) !== this.crc32) {
                throw new Error("Corrupted zip : CRC32 mismatch");
            }
        }
    },

    /**
     * Read the central part of a zip file and add the info in this object.
     * @param {DataReader} reader the reader to use.
     */
    readCentralPart: function(reader) {
        this.versionMadeBy = reader.readInt(2);
        this.versionNeeded = reader.readInt(2);
        this.bitFlag = reader.readInt(2);
        this.compressionMethod = reader.readString(2);
        this.date = reader.readDate();
        this.crc32 = reader.readInt(4);
        this.compressedSize = reader.readInt(4);
        this.uncompressedSize = reader.readInt(4);
        this.fileNameLength = reader.readInt(2);
        this.extraFieldsLength = reader.readInt(2);
        this.fileCommentLength = reader.readInt(2);
        this.diskNumberStart = reader.readInt(2);
        this.internalFileAttributes = reader.readInt(2);
        this.externalFileAttributes = reader.readInt(4);
        this.localHeaderOffset = reader.readInt(4);

        if (this.isEncrypted()) {
            throw new Error("Encrypted zip are not supported");
        }

        this.fileName = reader.readString(this.fileNameLength);
        this.readExtraFields(reader);
        this.parseZIP64ExtraField(reader);
        this.fileComment = reader.readString(this.fileCommentLength);
    },

    /**
     * Parse the external file attributes and get the unix/dos permissions.
     */
    processAttributes: function () {
        this.unixPermissions = null;
        this.dosPermissions = null;
        var madeBy = this.versionMadeBy >> 8;

        // Check if we have the DOS directory flag set.
        // We look for it in the DOS and UNIX permissions
        // but some unknown platform could set it as a compatibility flag.
        this.dir = this.externalFileAttributes & 0x0010 ? true : false;

        if(madeBy === MADE_BY_DOS) {
            // first 6 bits (0 to 5)
            this.dosPermissions = this.externalFileAttributes & 0x3F;
        }

        if(madeBy === MADE_BY_UNIX) {
            this.unixPermissions = (this.externalFileAttributes >> 16) & 0xFFFF;
            // the octal permissions are in (this.unixPermissions & 0x01FF).toString(8);
        }

        // fail safe : if the name ends with a / it probably means a folder
        if (!this.dir && this.fileName.slice(-1) === '/') {
            this.dir = true;
        }
    },

    /**
     * Parse the ZIP64 extra field and merge the info in the current ZipEntry.
     * @param {DataReader} reader the reader to use.
     */
    parseZIP64ExtraField: function(reader) {

        if (!this.extraFields[0x0001]) {
            return;
        }

        // should be something, preparing the extra reader
        var extraReader = new StringReader(this.extraFields[0x0001].value);

        // I really hope that these 64bits integer can fit in 32 bits integer, because js
        // won't let us have more.
        if (this.uncompressedSize === utils.MAX_VALUE_32BITS) {
            this.uncompressedSize = extraReader.readInt(8);
        }
        if (this.compressedSize === utils.MAX_VALUE_32BITS) {
            this.compressedSize = extraReader.readInt(8);
        }
        if (this.localHeaderOffset === utils.MAX_VALUE_32BITS) {
            this.localHeaderOffset = extraReader.readInt(8);
        }
        if (this.diskNumberStart === utils.MAX_VALUE_32BITS) {
            this.diskNumberStart = extraReader.readInt(4);
        }
    },
    /**
     * Read the extra fields of a zip file and add the info in this object.
     * @param {DataReader} reader the reader to use.
     */
    readExtraFields: function(reader) {
        var start = reader.index,
            extraFieldId,
            extraFieldLength,
            extraFieldValue;

        this.extraFields = this.extraFields || {};

        while (reader.index < start + this.extraFieldsLength) {
            extraFieldId = reader.readInt(2);
            extraFieldLength = reader.readInt(2);
            extraFieldValue = reader.readString(extraFieldLength);

            this.extraFields[extraFieldId] = {
                id: extraFieldId,
                length: extraFieldLength,
                value: extraFieldValue
            };
        }
    },
    /**
     * Apply an UTF8 transformation if needed.
     */
    handleUTF8: function() {
        if (this.useUTF8()) {
            this.fileName = jszipProto.utf8decode(this.fileName);
            this.fileComment = jszipProto.utf8decode(this.fileComment);
        } else {
            var upath = this.findExtraFieldUnicodePath();
            if (upath !== null) {
                this.fileName = upath;
            }
            var ucomment = this.findExtraFieldUnicodeComment();
            if (ucomment !== null) {
                this.fileComment = ucomment;
            }
        }
    },

    /**
     * Find the unicode path declared in the extra field, if any.
     * @return {String} the unicode path, null otherwise.
     */
    findExtraFieldUnicodePath: function() {
        var upathField = this.extraFields[0x7075];
        if (upathField) {
            var extraReader = new StringReader(upathField.value);

            // wrong version
            if (extraReader.readInt(1) !== 1) {
                return null;
            }

            // the crc of the filename changed, this field is out of date.
            if (jszipProto.crc32(this.fileName) !== extraReader.readInt(4)) {
                return null;
            }

            return jszipProto.utf8decode(extraReader.readString(upathField.length - 5));
        }
        return null;
    },

    /**
     * Find the unicode comment declared in the extra field, if any.
     * @return {String} the unicode comment, null otherwise.
     */
    findExtraFieldUnicodeComment: function() {
        var ucommentField = this.extraFields[0x6375];
        if (ucommentField) {
            var extraReader = new StringReader(ucommentField.value);

            // wrong version
            if (extraReader.readInt(1) !== 1) {
                return null;
            }

            // the crc of the comment changed, this field is out of date.
            if (jszipProto.crc32(this.fileComment) !== extraReader.readInt(4)) {
                return null;
            }

            return jszipProto.utf8decode(extraReader.readString(ucommentField.length - 5));
        }
        return null;
    }
};
module.exports = ZipEntry;
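Editorial worked example (not in the commit) for processAttributes: entries written on UNIX store the file mode in the upper 16 bits of externalFileAttributes, so a regular file with mode 0644 decodes as follows.

// 0100644 (regular file, rw-r--r--) shifted into the high 16 bits
var externalFileAttributes = 0x81A40000;
var unixPermissions = (externalFileAttributes >> 16) & 0xFFFF; // 0x81A4
(unixPermissions & 0x01FF).toString(8);                        // "644", as the comment in processAttributes notes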