// @ts-nocheck

// node_modules/@zip.js/zip.js/lib/core/constants.js
var MAX_32_BITS = 4294967295;
var MAX_16_BITS = 65535;
var COMPRESSION_METHOD_DEFLATE = 8;
var COMPRESSION_METHOD_STORE = 0;
var COMPRESSION_METHOD_AES = 99;
var LOCAL_FILE_HEADER_SIGNATURE = 67324752;
var SPLIT_ZIP_FILE_SIGNATURE = 134695760;
var DATA_DESCRIPTOR_RECORD_SIGNATURE = SPLIT_ZIP_FILE_SIGNATURE;
var CENTRAL_FILE_HEADER_SIGNATURE = 33639248;
var END_OF_CENTRAL_DIR_SIGNATURE = 101010256;
var ZIP64_END_OF_CENTRAL_DIR_SIGNATURE = 101075792;
var ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIGNATURE = 117853008;
var END_OF_CENTRAL_DIR_LENGTH = 22;
var ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH = 20;
var ZIP64_END_OF_CENTRAL_DIR_LENGTH = 56;
var ZIP64_END_OF_CENTRAL_DIR_TOTAL_LENGTH = END_OF_CENTRAL_DIR_LENGTH + ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH + ZIP64_END_OF_CENTRAL_DIR_LENGTH;
var EXTRAFIELD_TYPE_ZIP64 = 1;
var EXTRAFIELD_TYPE_AES = 39169;
var EXTRAFIELD_TYPE_NTFS = 10;
var EXTRAFIELD_TYPE_NTFS_TAG1 = 1;
var EXTRAFIELD_TYPE_EXTENDED_TIMESTAMP = 21589;
var EXTRAFIELD_TYPE_UNICODE_PATH = 28789;
var EXTRAFIELD_TYPE_UNICODE_COMMENT = 25461;
var EXTRAFIELD_TYPE_USDZ = 6534;
var BITFLAG_ENCRYPTED = 1;
var BITFLAG_LEVEL = 6;
var BITFLAG_DATA_DESCRIPTOR = 8;
var BITFLAG_LANG_ENCODING_FLAG = 2048;
var FILE_ATTR_MSDOS_DIR_MASK = 16;
var VERSION_DEFLATE = 20;
var VERSION_ZIP64 = 45;
var VERSION_AES = 51;
var DIRECTORY_SIGNATURE = "/";
var MAX_DATE = new Date(2107, 11, 31);
var MIN_DATE = new Date(1980, 0, 1);
var UNDEFINED_VALUE = void 0;
var UNDEFINED_TYPE = "undefined";
var FUNCTION_TYPE = "function";

// node_modules/@zip.js/zip.js/lib/core/streams/stream-adapter.js
var StreamAdapter = class {
  constructor(Codec) {
    return class extends TransformStream {
      constructor(_format, options) {
        const codec2 = new Codec(options);
        super({
          transform(chunk, controller) {
            controller.enqueue(codec2.append(chunk));
          },
          flush(controller) {
            const chunk = codec2.flush();
            if (chunk) {
              controller.enqueue(chunk);
            }
          }
        });
      }
    };
  }
};
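
// Note: StreamAdapter's constructor returns a TransformStream subclass rather
// than an instance, so a synchronous codec exposing append()/flush() can be
// used wherever a CompressionStream/DecompressionStream constructor is
// expected by the rest of the pipeline.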

// node_modules/@zip.js/zip.js/lib/core/configuration.js
var MINIMUM_CHUNK_SIZE = 64;
var maxWorkers = 2;
try {
  if (typeof navigator != UNDEFINED_TYPE && navigator.hardwareConcurrency) {
    maxWorkers = navigator.hardwareConcurrency;
  }
} catch (_error) {
}
var DEFAULT_CONFIGURATION = {
  chunkSize: 512 * 1024,
  maxWorkers,
  terminateWorkerTimeout: 5e3,
  useWebWorkers: true,
  useCompressionStream: true,
  workerScripts: UNDEFINED_VALUE,
  CompressionStreamNative: typeof CompressionStream != UNDEFINED_TYPE && CompressionStream,
  DecompressionStreamNative: typeof DecompressionStream != UNDEFINED_TYPE && DecompressionStream
};
var config = Object.assign({}, DEFAULT_CONFIGURATION);
function getConfiguration() {
  return config;
}
function getChunkSize(config2) {
  return Math.max(config2.chunkSize, MINIMUM_CHUNK_SIZE);
}
function configure(configuration) {
  const {
    baseURL: baseURL2,
    chunkSize,
    maxWorkers: maxWorkers2,
    terminateWorkerTimeout,
    useCompressionStream,
    useWebWorkers,
    Deflate,
    Inflate,
    CompressionStream: CompressionStream2,
    DecompressionStream: DecompressionStream2,
    workerScripts
  } = configuration;
  setIfDefined("baseURL", baseURL2);
  setIfDefined("chunkSize", chunkSize);
  setIfDefined("maxWorkers", maxWorkers2);
  setIfDefined("terminateWorkerTimeout", terminateWorkerTimeout);
  setIfDefined("useCompressionStream", useCompressionStream);
  setIfDefined("useWebWorkers", useWebWorkers);
  if (Deflate) {
    config.CompressionStream = new StreamAdapter(Deflate);
  }
  if (Inflate) {
    config.DecompressionStream = new StreamAdapter(Inflate);
  }
  setIfDefined("CompressionStream", CompressionStream2);
  setIfDefined("DecompressionStream", DecompressionStream2);
  if (workerScripts !== UNDEFINED_VALUE) {
    const { deflate, inflate } = workerScripts;
    if (deflate || inflate) {
      if (!config.workerScripts) {
        config.workerScripts = {};
      }
    }
    if (deflate) {
      if (!Array.isArray(deflate)) {
        throw new Error("workerScripts.deflate must be an array");
      }
      config.workerScripts.deflate = deflate;
    }
    if (inflate) {
      if (!Array.isArray(inflate)) {
        throw new Error("workerScripts.inflate must be an array");
      }
      config.workerScripts.inflate = inflate;
    }
  }
}
function setIfDefined(propertyName, propertyValue) {
  if (propertyValue !== UNDEFINED_VALUE) {
    config[propertyName] = propertyValue;
  }
}
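
// Usage sketch (hypothetical values): configure() merges every defined option
// into the module-level config, e.g.
//   configure({ chunkSize: 1024 * 1024, useWebWorkers: false });
// Passing custom Deflate/Inflate codec classes wraps them with StreamAdapter
// above so they slot into the same streaming interface as the native codecs.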

// node_modules/@zip.js/zip.js/lib/z-worker-inline.js
function e(e2) {
const t = () => URL.createObjectURL(new Blob(['const{Array:e,Object:t,Number:n,Math:r,Error:s,Uint8Array:i,Uint16Array:o,Uint32Array:c,Int32Array:f,Map:a,DataView:l,Promise:u,TextEncoder:w,crypto:h,postMessage:d,TransformStream:p,ReadableStream:y,WritableStream:m,CompressionStream:b,DecompressionStream:g}=self,k=void 0,v="undefined",S="function";class z{constructor(e){return class extends p{constructor(t,n){const r=new e(n);super({transform(e,t){t.enqueue(r.append(e))},flush(e){const t=r.flush();t&&e.enqueue(t)}})}}}}const C=[];for(let e=0;256>e;e++){let t=e;for(let e=0;8>e;e++)1&t?t=t>>>1^3988292384:t>>>=1;C[e]=t}class x{constructor(e){this.t=e||-1}append(e){let t=0|this.t;for(let n=0,r=0|e.length;r>n;n++)t=t>>>8^C[255&(t^e[n])];this.t=t}get(){return~this.t}}class A extends p{constructor(){let e;const t=new x;super({transform(e,n){t.append(e),n.enqueue(e)},flush(){const n=new i(4);new l(n.buffer).setUint32(0,t.get()),e.value=n}}),e=this}}const _={concat(e,t){if(0===e.length||0===t.length)return e.concat(t);const n=e[e.length-1],r=_.i(n);return 32===r?e.concat(t):_.o(t,r,0|n,e.slice(0,e.length-1))},l(e){const t=e.length;if(0===t)return 0;const n=e[t-1];return 32*(t-1)+_.i(n)},u(e,t){if(32*e.length<t)return e;const n=(e=e.slice(0,r.ceil(t/32))).length;return t&=31,n>0&&t&&(e[n-1]=_.h(t,e[n-1]&2147483648>>t-1,1)),e},h:(e,t,n)=>32===e?t:(n?0|t:t<<32-e)+1099511627776*e,i:e=>r.round(e/1099511627776)||32,o(e,t,n,r){for(void 0===r&&(r=[]);t>=32;t-=32)r.push(n),n=0;if(0===t)return r.concat(e);for(let s=0;s<e.length;s++)r.push(n|e[s]>>>t),n=e[s]<<32-t;const s=e.length?e[e.length-1]:0,i=_.i(s);return r.push(_.h(t+i&31,t+i>32?n:r.pop(),1)),r}},I={p:{m(e){const t=_.l(e)/8,n=new i(t);let r;for(let s=0;t>s;s++)3&s||(r=e[s/4]),n[s]=r>>>24,r<<=8;return n},k(e){const t=[];let n,r=0;for(n=0;n<e.length;n++)r=r<<8|e[n],3&~n||(t.push(r),r=0);return 3&n&&t.push(_.h(8*(3&n),r)),t}}},P=class{constructor(e){const t=this;t.blockSize=512,t.v=[1732584193,4023233417,2562383102,271733878,3285377520],t.S=[1518500249,1859775393,2400959708,3395469782],e?(t.C=e.C.slice(0),t.A=e.A.slice(0),t._=e._):t.reset()}reset(){const e=this;return e.C=e.v.slice(0),e.A=[],e._=0,e}update(e){const t=this;"string"==typeof e&&(e=I.I.k(e));const n=t.A=_.concat(t.A,e),r=t._,i=t._=r+_.l(e);if(i>9007199254740991)throw new s("Cannot hash more than 2^53 - 1 bits");const o=new c(n);let f=0;for(let e=t.blockSize+r-(t.blockSize+r&t.blockSize-1);i>=e;e+=t.blockSize)t.P(o.subarray(16*f,16*(f+1))),f+=1;return n.splice(0,16*f),t}D(){const e=this;let t=e.A;const n=e.C;t=_.concat(t,[_.h(1,1)]);for(let e=t.length+2;15&e;e++)t.push(0);for(t.push(r.floor(e._/4294967296)),t.push(0|e._);t.length;)e.P(t.splice(0,16));return e.reset(),n}V(e,t,n,r){return e>19?e>39?e>59?e>79?void 0:t^n^r:t&n|t&r|n&r:t^n^r:t&n|~t&r}R(e,t){return t<<e|t>>>32-e}P(t){const n=this,s=n.C,i=e(80);for(let e=0;16>e;e++)i[e]=t[e];let o=s[0],c=s[1],f=s[2],a=s[3],l=s[4];for(let e=0;79>=e;e++){16>e||(i[e]=n.R(1,i[e-3]^i[e-8]^i[e-14]^i[e-16]));const t=n.R(5,o)+n.V(e,c,f,a)+l+i[e]+n.S[r.floor(e/20)]|0;l=a,a=f,f=n.R(30,c),c=o,o=t}s[0]=s[0]+o|0,s[1]=s[1]+c|0,s[2]=s[2]+f|0,s[3]=s[3]+a|0,s[4]=s[4]+l|0}},D={getRandomValues(e){const t=new c(e.buffer),n=e=>{let t=987654321;const n=4294967295;return()=>(t=36969*(65535&t)+(t>>16)&n,(((t<<16)+(e=18e3*(65535&e)+(e>>16)&n)&n)/4294967296+.5)*(r.random()>.5?1:-1))};for(let s,i=0;i<e.length;i+=4){const e=n(4294967296*(s||r.random()));s=987654071*e(),t[i/4]=4294967296*e()|0}return e}},V={importKey:e=>new V.B(I.p.k(e)),M(e,t,n,r){if(n=n||1e4,0>r||0>n)throw new 
s("invalid params to pbkdf2");const i=1+(r>>5)<<2;let o,c,f,a,u;const w=new ArrayBuffer(i),h=new l(w);let d=0;const p=_;for(t=I.p.k(t),u=1;(i||1)>d;u++){for(o=c=e.encrypt(p.concat(t,[u])),f=1;n>f;f++)for(c=e.encrypt(c),a=0;a<c.length;a++)o[a]^=c[a];for(f=0;(i||1)>d&&f<o.length;f++)h.setInt32(d,o[f]),d+=4}return w.slice(0,r/8)},B:class{constructor(e){const t=this,n=t.U=P,r=[[],[]];t.K=[new n,new n];const s=t.K[0].blockSize/32;e.length>s&&(e=(new n).update(e).D());for(let t=0;s>t;t++)r[0][t]=909522486^e[t],r[1][t]=1549556828^e[t];t.K[0
  e2({ workerScripts: { inflate: [t], deflate: [t] } });
}

// node_modules/@zip.js/zip.js/lib/core/util/default-mime-type.js
function getMimeType() {
  return "application/octet-stream";
}

// node_modules/@zip.js/zip.js/lib/core/util/stream-codec-shim.js
function initShimAsyncCodec(library, options = {}, registerDataHandler) {
  return {
    Deflate: createCodecClass(library.Deflate, options.deflate, registerDataHandler),
    Inflate: createCodecClass(library.Inflate, options.inflate, registerDataHandler)
  };
}
function objectHasOwn(object, propertyName) {
  return typeof Object.hasOwn === FUNCTION_TYPE ? Object.hasOwn(object, propertyName) : object.hasOwnProperty(propertyName);
}
function createCodecClass(constructor, constructorOptions, registerDataHandler) {
  return class {
    constructor(options) {
      const codecAdapter = this;
      const onData = (data) => {
        if (codecAdapter.pendingData) {
          const previousPendingData = codecAdapter.pendingData;
          codecAdapter.pendingData = new Uint8Array(previousPendingData.length + data.length);
          const { pendingData } = codecAdapter;
          pendingData.set(previousPendingData, 0);
          pendingData.set(data, previousPendingData.length);
        } else {
          codecAdapter.pendingData = new Uint8Array(data);
        }
      };
      if (objectHasOwn(options, "level") && options.level === UNDEFINED_VALUE) {
        delete options.level;
      }
      codecAdapter.codec = new constructor(Object.assign({}, constructorOptions, options));
      registerDataHandler(codecAdapter.codec, onData);
    }
    append(data) {
      this.codec.push(data);
      return getResponse(this);
    }
    flush() {
      this.codec.push(new Uint8Array(), true);
      return getResponse(this);
    }
  };
  function getResponse(codec2) {
    if (codec2.pendingData) {
      const output = codec2.pendingData;
      codec2.pendingData = null;
      return output;
    } else {
      return new Uint8Array();
    }
  }
}

// node_modules/@zip.js/zip.js/lib/core/streams/codecs/crc32.js
var table = [];
for (let i = 0; i < 256; i++) {
  let t = i;
  for (let j = 0; j < 8; j++) {
    if (t & 1) {
      t = t >>> 1 ^ 3988292384;
    } else {
      t = t >>> 1;
    }
  }
  table[i] = t;
}
var Crc32 = class {
  constructor(crc) {
    this.crc = crc || -1;
  }
  append(data) {
    let crc = this.crc | 0;
    for (let offset = 0, length = data.length | 0; offset < length; offset++) {
      crc = crc >>> 8 ^ table[(crc ^ data[offset]) & 255];
    }
    this.crc = crc;
  }
  get() {
    return ~this.crc;
  }
};
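
// Note: the table above is for the reflected CRC-32 polynomial 0xEDB88320
// (3988292384). As a sanity check, appending the ASCII bytes of "123456789"
// to a fresh Crc32 instance should yield the standard check value 0xCBF43926.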

// node_modules/@zip.js/zip.js/lib/core/streams/crc32-stream.js
var Crc32Stream = class extends TransformStream {
  constructor() {
    let stream;
    const crc32 = new Crc32();
    super({
      transform(chunk, controller) {
        crc32.append(chunk);
        controller.enqueue(chunk);
      },
      flush() {
        const value = new Uint8Array(4);
        const dataView = new DataView(value.buffer);
        dataView.setUint32(0, crc32.get());
        stream.value = value;
      }
    });
    stream = this;
  }
};

// node_modules/@zip.js/zip.js/lib/core/util/encode-text.js
function encodeText(value) {
  if (typeof TextEncoder == UNDEFINED_TYPE) {
    value = unescape(encodeURIComponent(value));
    const result = new Uint8Array(value.length);
    for (let i = 0; i < result.length; i++) {
      result[i] = value.charCodeAt(i);
    }
    return result;
  } else {
    return new TextEncoder().encode(value);
  }
}

// node_modules/@zip.js/zip.js/lib/core/streams/codecs/sjcl.js
var bitArray = {
  /**
   * Concatenate two bit arrays.
   * @param {bitArray} a1 The first array.
   * @param {bitArray} a2 The second array.
   * @return {bitArray} The concatenation of a1 and a2.
   */
  concat(a1, a2) {
    if (a1.length === 0 || a2.length === 0) {
      return a1.concat(a2);
    }
    const last = a1[a1.length - 1], shift = bitArray.getPartial(last);
    if (shift === 32) {
      return a1.concat(a2);
    } else {
      return bitArray._shiftRight(a2, shift, last | 0, a1.slice(0, a1.length - 1));
    }
  },
  /**
   * Find the length of an array of bits.
   * @param {bitArray} a The array.
   * @return {Number} The length of a, in bits.
   */
  bitLength(a) {
    const l = a.length;
    if (l === 0) {
      return 0;
    }
    const x = a[l - 1];
    return (l - 1) * 32 + bitArray.getPartial(x);
  },
  /**
   * Truncate an array.
   * @param {bitArray} a The array.
   * @param {Number} len The length to truncate to, in bits.
   * @return {bitArray} A new array, truncated to len bits.
   */
  clamp(a, len) {
    if (a.length * 32 < len) {
      return a;
    }
    a = a.slice(0, Math.ceil(len / 32));
    const l = a.length;
    len = len & 31;
    if (l > 0 && len) {
      a[l - 1] = bitArray.partial(len, a[l - 1] & 2147483648 >> len - 1, 1);
    }
    return a;
  },
  /**
   * Make a partial word for a bit array.
   * @param {Number} len The number of bits in the word.
   * @param {Number} x The bits.
   * @param {Number} [_end=0] Pass 1 if x has already been shifted to the high side.
   * @return {Number} The partial word.
   */
  partial(len, x, _end) {
    if (len === 32) {
      return x;
    }
    return (_end ? x | 0 : x << 32 - len) + len * 1099511627776;
  },
  /**
   * Get the number of bits used by a partial word.
   * @param {Number} x The partial word.
   * @return {Number} The number of bits used by the partial word.
   */
  getPartial(x) {
    return Math.round(x / 1099511627776) || 32;
  },
  /** Shift an array right.
   * @param {bitArray} a The array to shift.
   * @param {Number} shift The number of bits to shift.
   * @param {Number} [carry=0] A byte to carry in
   * @param {bitArray} [out=[]] An array to prepend to the output.
   * @private
   */
  _shiftRight(a, shift, carry, out) {
    if (out === void 0) {
      out = [];
    }
    for (; shift >= 32; shift -= 32) {
      out.push(carry);
      carry = 0;
    }
    if (shift === 0) {
      return out.concat(a);
    }
    for (let i = 0; i < a.length; i++) {
      out.push(carry | a[i] >>> shift);
      carry = a[i] << 32 - shift;
    }
    const last2 = a.length ? a[a.length - 1] : 0;
    const shift2 = bitArray.getPartial(last2);
    out.push(bitArray.partial(shift + shift2 & 31, shift + shift2 > 32 ? carry : out.pop(), 1));
    return out;
  }
};
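
// Note: in this SJCL representation a "bitArray" is a plain array of 32-bit
// words; a final partial word encodes its own bit count as count * 2^40
// (1099511627776) added to the high-aligned bits, which is what partial() and
// getPartial() pack and unpack above.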
var codec = {
  bytes: {
    /** Convert from a bitArray to an array of bytes. */
    fromBits(arr) {
      const bl = bitArray.bitLength(arr);
      const byteLength = bl / 8;
      const out = new Uint8Array(byteLength);
      let tmp;
      for (let i = 0; i < byteLength; i++) {
        if ((i & 3) === 0) {
          tmp = arr[i / 4];
        }
        out[i] = tmp >>> 24;
        tmp <<= 8;
      }
      return out;
    },
    /** Convert from an array of bytes to a bitArray. */
    toBits(bytes) {
      const out = [];
      let i;
      let tmp = 0;
      for (i = 0; i < bytes.length; i++) {
        tmp = tmp << 8 | bytes[i];
        if ((i & 3) === 3) {
          out.push(tmp);
          tmp = 0;
        }
      }
      if (i & 3) {
        out.push(bitArray.partial(8 * (i & 3), tmp));
      }
      return out;
    }
  }
};
var hash = {};
hash.sha1 = class {
  constructor(hash2) {
    const sha1 = this;
    sha1.blockSize = 512;
    sha1._init = [1732584193, 4023233417, 2562383102, 271733878, 3285377520];
    sha1._key = [1518500249, 1859775393, 2400959708, 3395469782];
    if (hash2) {
      sha1._h = hash2._h.slice(0);
      sha1._buffer = hash2._buffer.slice(0);
      sha1._length = hash2._length;
    } else {
      sha1.reset();
    }
  }
  /**
   * Reset the hash state.
   * @return this
   */
  reset() {
    const sha1 = this;
    sha1._h = sha1._init.slice(0);
    sha1._buffer = [];
    sha1._length = 0;
    return sha1;
  }
  /**
   * Input several words to the hash.
   * @param {bitArray|String} data the data to hash.
   * @return this
   */
  update(data) {
    const sha1 = this;
    if (typeof data === "string") {
      data = codec.utf8String.toBits(data);
    }
    const b = sha1._buffer = bitArray.concat(sha1._buffer, data);
    const ol = sha1._length;
    const nl = sha1._length = ol + bitArray.bitLength(data);
    if (nl > 9007199254740991) {
      throw new Error("Cannot hash more than 2^53 - 1 bits");
    }
    const c = new Uint32Array(b);
    let j = 0;
    for (let i = sha1.blockSize + ol - (sha1.blockSize + ol & sha1.blockSize - 1); i <= nl; i += sha1.blockSize) {
      sha1._block(c.subarray(16 * j, 16 * (j + 1)));
      j += 1;
    }
    b.splice(0, 16 * j);
    return sha1;
  }
  /**
   * Complete hashing and output the hash value.
   * @return {bitArray} The hash value, an array of 5 big-endian words. TODO
   */
  finalize() {
    const sha1 = this;
    let b = sha1._buffer;
    const h = sha1._h;
    b = bitArray.concat(b, [bitArray.partial(1, 1)]);
    for (let i = b.length + 2; i & 15; i++) {
      b.push(0);
    }
    b.push(Math.floor(sha1._length / 4294967296));
    b.push(sha1._length | 0);
    while (b.length) {
      sha1._block(b.splice(0, 16));
    }
    sha1.reset();
    return h;
  }
  /**
   * The SHA-1 logical functions f(0), f(1), ..., f(79).
   * @private
   */
  _f(t, b, c, d) {
    if (t <= 19) {
      return b & c | ~b & d;
    } else if (t <= 39) {
      return b ^ c ^ d;
    } else if (t <= 59) {
      return b & c | b & d | c & d;
    } else if (t <= 79) {
      return b ^ c ^ d;
    }
  }
  /**
   * Circular left-shift operator.
   * @private
   */
  _S(n, x) {
    return x << n | x >>> 32 - n;
  }
  /**
   * Perform one cycle of SHA-1.
   * @param {Uint32Array|bitArray} words one block of words.
   * @private
   */
  _block(words) {
    const sha1 = this;
    const h = sha1._h;
    const w = Array(80);
    for (let j = 0; j < 16; j++) {
      w[j] = words[j];
    }
    let a = h[0];
    let b = h[1];
    let c = h[2];
    let d = h[3];
    let e2 = h[4];
    for (let t = 0; t <= 79; t++) {
      if (t >= 16) {
        w[t] = sha1._S(1, w[t - 3] ^ w[t - 8] ^ w[t - 14] ^ w[t - 16]);
      }
      const tmp = sha1._S(5, a) + sha1._f(t, b, c, d) + e2 + w[t] + sha1._key[Math.floor(t / 20)] | 0;
      e2 = d;
      d = c;
      c = sha1._S(30, b);
      b = a;
      a = tmp;
    }
    h[0] = h[0] + a | 0;
    h[1] = h[1] + b | 0;
    h[2] = h[2] + c | 0;
    h[3] = h[3] + d | 0;
    h[4] = h[4] + e2 | 0;
  }
};
var cipher = {};
cipher.aes = class {
  constructor(key) {
    const aes = this;
    aes._tables = [[[], [], [], [], []], [[], [], [], [], []]];
    if (!aes._tables[0][0][0]) {
      aes._precompute();
    }
    const sbox = aes._tables[0][4];
    const decTable = aes._tables[1];
    const keyLen = key.length;
    let i, encKey, decKey, rcon = 1;
    if (keyLen !== 4 && keyLen !== 6 && keyLen !== 8) {
      throw new Error("invalid aes key size");
    }
    aes._key = [encKey = key.slice(0), decKey = []];
    for (i = keyLen; i < 4 * keyLen + 28; i++) {
      let tmp = encKey[i - 1];
      if (i % keyLen === 0 || keyLen === 8 && i % keyLen === 4) {
        tmp = sbox[tmp >>> 24] << 24 ^ sbox[tmp >> 16 & 255] << 16 ^ sbox[tmp >> 8 & 255] << 8 ^ sbox[tmp & 255];
        if (i % keyLen === 0) {
          tmp = tmp << 8 ^ tmp >>> 24 ^ rcon << 24;
          rcon = rcon << 1 ^ (rcon >> 7) * 283;
        }
      }
      encKey[i] = encKey[i - keyLen] ^ tmp;
    }
    for (let j = 0; i; j++, i--) {
      const tmp = encKey[j & 3 ? i : i - 4];
      if (i <= 4 || j < 4) {
        decKey[j] = tmp;
      } else {
        decKey[j] = decTable[0][sbox[tmp >>> 24]] ^ decTable[1][sbox[tmp >> 16 & 255]] ^ decTable[2][sbox[tmp >> 8 & 255]] ^ decTable[3][sbox[tmp & 255]];
      }
    }
  }
  // public
  /* Something like this might appear here eventually
  name: "AES",
  blockSize: 4,
  keySizes: [4,6,8],
  */
  /**
   * Encrypt an array of 4 big-endian words.
   * @param {Array} data The plaintext.
   * @return {Array} The ciphertext.
   */
  encrypt(data) {
    return this._crypt(data, 0);
  }
  /**
   * Decrypt an array of 4 big-endian words.
   * @param {Array} data The ciphertext.
   * @return {Array} The plaintext.
   */
  decrypt(data) {
    return this._crypt(data, 1);
  }
  /**
   * Expand the S-box tables.
   *
   * @private
   */
  _precompute() {
    const encTable = this._tables[0];
    const decTable = this._tables[1];
    const sbox = encTable[4];
    const sboxInv = decTable[4];
    const d = [];
    const th = [];
    let xInv, x2, x4, x8;
    for (let i = 0; i < 256; i++) {
      th[(d[i] = i << 1 ^ (i >> 7) * 283) ^ i] = i;
    }
    for (let x = xInv = 0; !sbox[x]; x ^= x2 || 1, xInv = th[xInv] || 1) {
      let s = xInv ^ xInv << 1 ^ xInv << 2 ^ xInv << 3 ^ xInv << 4;
      s = s >> 8 ^ s & 255 ^ 99;
      sbox[x] = s;
      sboxInv[s] = x;
      x8 = d[x4 = d[x2 = d[x]]];
      let tDec = x8 * 16843009 ^ x4 * 65537 ^ x2 * 257 ^ x * 16843008;
      let tEnc = d[s] * 257 ^ s * 16843008;
      for (let i = 0; i < 4; i++) {
        encTable[i][x] = tEnc = tEnc << 24 ^ tEnc >>> 8;
        decTable[i][s] = tDec = tDec << 24 ^ tDec >>> 8;
      }
    }
    for (let i = 0; i < 5; i++) {
      encTable[i] = encTable[i].slice(0);
      decTable[i] = decTable[i].slice(0);
    }
  }
  /**
   * Encryption and decryption core.
   * @param {Array} input Four words to be encrypted or decrypted.
   * @param dir The direction, 0 for encrypt and 1 for decrypt.
   * @return {Array} The four encrypted or decrypted words.
   * @private
   */
  _crypt(input, dir) {
    if (input.length !== 4) {
      throw new Error("invalid aes block size");
    }
    const key = this._key[dir];
    const nInnerRounds = key.length / 4 - 2;
    const out = [0, 0, 0, 0];
    const table2 = this._tables[dir];
    const t0 = table2[0];
    const t1 = table2[1];
    const t2 = table2[2];
    const t3 = table2[3];
    const sbox = table2[4];
    let a = input[0] ^ key[0];
    let b = input[dir ? 3 : 1] ^ key[1];
    let c = input[2] ^ key[2];
    let d = input[dir ? 1 : 3] ^ key[3];
    let kIndex = 4;
    let a2, b2, c2;
    for (let i = 0; i < nInnerRounds; i++) {
      a2 = t0[a >>> 24] ^ t1[b >> 16 & 255] ^ t2[c >> 8 & 255] ^ t3[d & 255] ^ key[kIndex];
      b2 = t0[b >>> 24] ^ t1[c >> 16 & 255] ^ t2[d >> 8 & 255] ^ t3[a & 255] ^ key[kIndex + 1];
      c2 = t0[c >>> 24] ^ t1[d >> 16 & 255] ^ t2[a >> 8 & 255] ^ t3[b & 255] ^ key[kIndex + 2];
      d = t0[d >>> 24] ^ t1[a >> 16 & 255] ^ t2[b >> 8 & 255] ^ t3[c & 255] ^ key[kIndex + 3];
      kIndex += 4;
      a = a2;
      b = b2;
      c = c2;
    }
    for (let i = 0; i < 4; i++) {
      out[dir ? 3 & -i : i] = sbox[a >>> 24] << 24 ^ sbox[b >> 16 & 255] << 16 ^ sbox[c >> 8 & 255] << 8 ^ sbox[d & 255] ^ key[kIndex++];
      a2 = a;
      a = b;
      b = c;
      c = d;
      d = a2;
    }
    return out;
  }
};
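
// Note: this is SJCL's table-based AES. Keys are arrays of 4/6/8 32-bit words
// (AES-128/192/256), blocks are arrays of 4 words, and the encrypt/decrypt
// round tables are generated once by _precompute().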
var random = {
  /**
   * Generate random words with pure js, cryptographically not as strong & safe as native implementation.
   * @param {TypedArray} typedArray The array to fill.
   * @return {TypedArray} The random values.
   */
  getRandomValues(typedArray) {
    const words = new Uint32Array(typedArray.buffer);
    const r = (m_w) => {
      let m_z = 987654321;
      const mask = 4294967295;
      return function() {
        m_z = 36969 * (m_z & 65535) + (m_z >> 16) & mask;
        m_w = 18e3 * (m_w & 65535) + (m_w >> 16) & mask;
        const result = ((m_z << 16) + m_w & mask) / 4294967296 + 0.5;
        return result * (Math.random() > 0.5 ? 1 : -1);
      };
    };
    for (let i = 0, rcache; i < typedArray.length; i += 4) {
      const _r = r((rcache || Math.random()) * 4294967296);
      rcache = _r() * 987654071;
      words[i / 4] = _r() * 4294967296 | 0;
    }
    return typedArray;
  }
};
var mode = {};
mode.ctrGladman = class {
  constructor(prf, iv) {
    this._prf = prf;
    this._initIv = iv;
    this._iv = iv;
  }
  reset() {
    this._iv = this._initIv;
  }
  /** Input some data to calculate.
   * @param {bitArray} data the data to process, it must be an integral multiple of 128 bits unless it's the last.
   */
  update(data) {
    return this.calculate(this._prf, data, this._iv);
  }
  incWord(word) {
    if ((word >> 24 & 255) === 255) {
      let b1 = word >> 16 & 255;
      let b2 = word >> 8 & 255;
      let b3 = word & 255;
      if (b1 === 255) {
        b1 = 0;
        if (b2 === 255) {
          b2 = 0;
          if (b3 === 255) {
            b3 = 0;
          } else {
            ++b3;
          }
        } else {
          ++b2;
        }
      } else {
        ++b1;
      }
      word = 0;
      word += b1 << 16;
      word += b2 << 8;
      word += b3;
    } else {
      word += 1 << 24;
    }
    return word;
  }
  incCounter(counter) {
    if ((counter[0] = this.incWord(counter[0])) === 0) {
      counter[1] = this.incWord(counter[1]);
    }
  }
  calculate(prf, data, iv) {
    let l;
    if (!(l = data.length)) {
      return [];
    }
    const bl = bitArray.bitLength(data);
    for (let i = 0; i < l; i += 4) {
      this.incCounter(iv);
      const e2 = prf.encrypt(iv);
      data[i] ^= e2[0];
      data[i + 1] ^= e2[1];
      data[i + 2] ^= e2[2];
      data[i + 3] ^= e2[3];
    }
    return bitArray.clamp(data, bl);
  }
};
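
// Note: ctrGladman appears to follow the CTR variant from Brian Gladman's
// fileenc code (the counter scheme used by WinZip-style AES): the first two
// 32-bit words of the IV form the block counter, incremented in little-endian
// byte order by incWord()/incCounter() above.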
var misc = {
  importKey(password) {
    return new misc.hmacSha1(codec.bytes.toBits(password));
  },
  pbkdf2(prf, salt, count, length) {
    count = count || 1e4;
    if (length < 0 || count < 0) {
      throw new Error("invalid params to pbkdf2");
    }
    const byteLength = (length >> 5) + 1 << 2;
    let u, ui, i, j, k;
    const arrayBuffer = new ArrayBuffer(byteLength);
    const out = new DataView(arrayBuffer);
    let outLength = 0;
    const b = bitArray;
    salt = codec.bytes.toBits(salt);
    for (k = 1; outLength < (byteLength || 1); k++) {
      u = ui = prf.encrypt(b.concat(salt, [k]));
      for (i = 1; i < count; i++) {
        ui = prf.encrypt(ui);
        for (j = 0; j < ui.length; j++) {
          u[j] ^= ui[j];
        }
      }
      for (i = 0; outLength < (byteLength || 1) && i < u.length; i++) {
        out.setInt32(outLength, u[i]);
        outLength += 4;
      }
    }
    return arrayBuffer.slice(0, length / 8);
  }
};
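
// Note: misc.pbkdf2 is the pure-JS PBKDF2 fallback (with HMAC-SHA1 as the
// PRF) used further below when crypto.subtle.deriveBits is unavailable or
// throws; "length" is in bits, hence the final arrayBuffer.slice(0, length / 8).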
misc.hmacSha1 = class {
  constructor(key) {
    const hmac = this;
    const Hash = hmac._hash = hash.sha1;
    const exKey = [[], []];
    hmac._baseHash = [new Hash(), new Hash()];
    const bs = hmac._baseHash[0].blockSize / 32;
    if (key.length > bs) {
      key = new Hash().update(key).finalize();
    }
    for (let i = 0; i < bs; i++) {
      exKey[0][i] = key[i] ^ 909522486;
      exKey[1][i] = key[i] ^ 1549556828;
    }
    hmac._baseHash[0].update(exKey[0]);
    hmac._baseHash[1].update(exKey[1]);
    hmac._resultHash = new Hash(hmac._baseHash[0]);
  }
  reset() {
    const hmac = this;
    hmac._resultHash = new hmac._hash(hmac._baseHash[0]);
    hmac._updated = false;
  }
  update(data) {
    const hmac = this;
    hmac._updated = true;
    hmac._resultHash.update(data);
  }
  digest() {
    const hmac = this;
    const w = hmac._resultHash.finalize();
    const result = new hmac._hash(hmac._baseHash[1]).update(w).finalize();
    hmac.reset();
    return result;
  }
  encrypt(data) {
    if (!this._updated) {
      this.update(data);
      return this.digest(data);
    } else {
      throw new Error("encrypt on already updated hmac called!");
    }
  }
};

// node_modules/@zip.js/zip.js/lib/core/streams/common-crypto.js
var GET_RANDOM_VALUES_SUPPORTED = typeof crypto != UNDEFINED_TYPE && typeof crypto.getRandomValues == FUNCTION_TYPE;
var ERR_INVALID_PASSWORD = "Invalid password";
var ERR_INVALID_SIGNATURE = "Invalid signature";
var ERR_ABORT_CHECK_PASSWORD = "zipjs-abort-check-password";
function getRandomValues(array) {
  if (GET_RANDOM_VALUES_SUPPORTED) {
    return crypto.getRandomValues(array);
  } else {
    return random.getRandomValues(array);
  }
}

// node_modules/@zip.js/zip.js/lib/core/streams/aes-crypto-stream.js
var BLOCK_LENGTH = 16;
var RAW_FORMAT = "raw";
var PBKDF2_ALGORITHM = { name: "PBKDF2" };
var HASH_ALGORITHM = { name: "HMAC" };
var HASH_FUNCTION = "SHA-1";
var BASE_KEY_ALGORITHM = Object.assign({ hash: HASH_ALGORITHM }, PBKDF2_ALGORITHM);
var DERIVED_BITS_ALGORITHM = Object.assign({ iterations: 1e3, hash: { name: HASH_FUNCTION } }, PBKDF2_ALGORITHM);
var DERIVED_BITS_USAGE = ["deriveBits"];
var SALT_LENGTH = [8, 12, 16];
var KEY_LENGTH = [16, 24, 32];
var SIGNATURE_LENGTH = 10;
var COUNTER_DEFAULT_VALUE = [0, 0, 0, 0];
var CRYPTO_API_SUPPORTED = typeof crypto != UNDEFINED_TYPE;
var subtle = CRYPTO_API_SUPPORTED && crypto.subtle;
var SUBTLE_API_SUPPORTED = CRYPTO_API_SUPPORTED && typeof subtle != UNDEFINED_TYPE;
var codecBytes = codec.bytes;
var Aes = cipher.aes;
var CtrGladman = mode.ctrGladman;
var HmacSha1 = misc.hmacSha1;
var IMPORT_KEY_SUPPORTED = CRYPTO_API_SUPPORTED && SUBTLE_API_SUPPORTED && typeof subtle.importKey == FUNCTION_TYPE;
var DERIVE_BITS_SUPPORTED = CRYPTO_API_SUPPORTED && SUBTLE_API_SUPPORTED && typeof subtle.deriveBits == FUNCTION_TYPE;
var AESDecryptionStream = class extends TransformStream {
  constructor({ password, rawPassword, signed, encryptionStrength, checkPasswordOnly }) {
    super({
      start() {
        Object.assign(this, {
          ready: new Promise((resolve) => this.resolveReady = resolve),
          password: encodePassword(password, rawPassword),
          signed,
          strength: encryptionStrength - 1,
          pending: new Uint8Array()
        });
      },
      async transform(chunk, controller) {
        const aesCrypto = this;
        const {
          password: password2,
          strength,
          resolveReady,
          ready
        } = aesCrypto;
        if (password2) {
          await createDecryptionKeys(aesCrypto, strength, password2, subarray(chunk, 0, SALT_LENGTH[strength] + 2));
          chunk = subarray(chunk, SALT_LENGTH[strength] + 2);
          if (checkPasswordOnly) {
            controller.error(new Error(ERR_ABORT_CHECK_PASSWORD));
          } else {
            resolveReady();
          }
        } else {
          await ready;
        }
        const output = new Uint8Array(chunk.length - SIGNATURE_LENGTH - (chunk.length - SIGNATURE_LENGTH) % BLOCK_LENGTH);
        controller.enqueue(append(aesCrypto, chunk, output, 0, SIGNATURE_LENGTH, true));
      },
      async flush(controller) {
        const {
          signed: signed2,
          ctr,
          hmac,
          pending,
          ready
        } = this;
        if (hmac && ctr) {
          await ready;
          const chunkToDecrypt = subarray(pending, 0, pending.length - SIGNATURE_LENGTH);
          const originalSignature = subarray(pending, pending.length - SIGNATURE_LENGTH);
          let decryptedChunkArray = new Uint8Array();
          if (chunkToDecrypt.length) {
            const encryptedChunk = toBits(codecBytes, chunkToDecrypt);
            hmac.update(encryptedChunk);
            const decryptedChunk = ctr.update(encryptedChunk);
            decryptedChunkArray = fromBits(codecBytes, decryptedChunk);
          }
          if (signed2) {
            const signature = subarray(fromBits(codecBytes, hmac.digest()), 0, SIGNATURE_LENGTH);
            for (let indexSignature = 0; indexSignature < SIGNATURE_LENGTH; indexSignature++) {
              if (signature[indexSignature] != originalSignature[indexSignature]) {
                throw new Error(ERR_INVALID_SIGNATURE);
              }
            }
          }
          controller.enqueue(decryptedChunkArray);
        }
      }
    });
  }
};
var AESEncryptionStream = class extends TransformStream {
  constructor({ password, rawPassword, encryptionStrength }) {
    let stream;
    super({
      start() {
        Object.assign(this, {
          ready: new Promise((resolve) => this.resolveReady = resolve),
          password: encodePassword(password, rawPassword),
          strength: encryptionStrength - 1,
          pending: new Uint8Array()
        });
      },
      async transform(chunk, controller) {
        const aesCrypto = this;
        const {
          password: password2,
          strength,
          resolveReady,
          ready
        } = aesCrypto;
        let preamble = new Uint8Array();
        if (password2) {
          preamble = await createEncryptionKeys(aesCrypto, strength, password2);
          resolveReady();
        } else {
          await ready;
        }
        const output = new Uint8Array(preamble.length + chunk.length - chunk.length % BLOCK_LENGTH);
        output.set(preamble, 0);
        controller.enqueue(append(aesCrypto, chunk, output, preamble.length, 0));
      },
      async flush(controller) {
        const {
          ctr,
          hmac,
          pending,
          ready
        } = this;
        if (hmac && ctr) {
          await ready;
          let encryptedChunkArray = new Uint8Array();
          if (pending.length) {
            const encryptedChunk = ctr.update(toBits(codecBytes, pending));
            hmac.update(encryptedChunk);
            encryptedChunkArray = fromBits(codecBytes, encryptedChunk);
          }
          stream.signature = fromBits(codecBytes, hmac.digest()).slice(0, SIGNATURE_LENGTH);
          controller.enqueue(concat(encryptedChunkArray, stream.signature));
        }
      }
    });
    stream = this;
  }
};
function append(aesCrypto, input, output, paddingStart, paddingEnd, verifySignature) {
  const {
    ctr,
    hmac,
    pending
  } = aesCrypto;
  const inputLength = input.length - paddingEnd;
  if (pending.length) {
    input = concat(pending, input);
    output = expand(output, inputLength - inputLength % BLOCK_LENGTH);
  }
  let offset;
  for (offset = 0; offset <= inputLength - BLOCK_LENGTH; offset += BLOCK_LENGTH) {
    const inputChunk = toBits(codecBytes, subarray(input, offset, offset + BLOCK_LENGTH));
    if (verifySignature) {
      hmac.update(inputChunk);
    }
    const outputChunk = ctr.update(inputChunk);
    if (!verifySignature) {
      hmac.update(outputChunk);
    }
    output.set(fromBits(codecBytes, outputChunk), offset + paddingStart);
  }
  aesCrypto.pending = subarray(input, offset);
  return output;
}
async function createDecryptionKeys(decrypt2, strength, password, preamble) {
  const passwordVerificationKey = await createKeys(decrypt2, strength, password, subarray(preamble, 0, SALT_LENGTH[strength]));
  const passwordVerification = subarray(preamble, SALT_LENGTH[strength]);
  if (passwordVerificationKey[0] != passwordVerification[0] || passwordVerificationKey[1] != passwordVerification[1]) {
    throw new Error(ERR_INVALID_PASSWORD);
  }
}
async function createEncryptionKeys(encrypt2, strength, password) {
  const salt = getRandomValues(new Uint8Array(SALT_LENGTH[strength]));
  const passwordVerification = await createKeys(encrypt2, strength, password, salt);
  return concat(salt, passwordVerification);
}
async function createKeys(aesCrypto, strength, password, salt) {
  aesCrypto.password = null;
  const baseKey = await importKey(RAW_FORMAT, password, BASE_KEY_ALGORITHM, false, DERIVED_BITS_USAGE);
  const derivedBits = await deriveBits(Object.assign({ salt }, DERIVED_BITS_ALGORITHM), baseKey, 8 * (KEY_LENGTH[strength] * 2 + 2));
  const compositeKey = new Uint8Array(derivedBits);
  const key = toBits(codecBytes, subarray(compositeKey, 0, KEY_LENGTH[strength]));
  const authentication = toBits(codecBytes, subarray(compositeKey, KEY_LENGTH[strength], KEY_LENGTH[strength] * 2));
  const passwordVerification = subarray(compositeKey, KEY_LENGTH[strength] * 2);
  Object.assign(aesCrypto, {
    keys: {
      key,
      authentication,
      passwordVerification
    },
    ctr: new CtrGladman(new Aes(key), Array.from(COUNTER_DEFAULT_VALUE)),
    hmac: new HmacSha1(authentication)
  });
  return passwordVerification;
}
async function importKey(format, password, algorithm, extractable, keyUsages) {
  if (IMPORT_KEY_SUPPORTED) {
    try {
      return await subtle.importKey(format, password, algorithm, extractable, keyUsages);
    } catch (_error) {
      IMPORT_KEY_SUPPORTED = false;
      return misc.importKey(password);
    }
  } else {
    return misc.importKey(password);
  }
}
async function deriveBits(algorithm, baseKey, length) {
  if (DERIVE_BITS_SUPPORTED) {
    try {
      return await subtle.deriveBits(algorithm, baseKey, length);
    } catch (_error) {
      DERIVE_BITS_SUPPORTED = false;
      return misc.pbkdf2(baseKey, algorithm.salt, DERIVED_BITS_ALGORITHM.iterations, length);
    }
  } else {
    return misc.pbkdf2(baseKey, algorithm.salt, DERIVED_BITS_ALGORITHM.iterations, length);
  }
}
function encodePassword(password, rawPassword) {
  if (rawPassword === UNDEFINED_VALUE) {
    return encodeText(password);
  } else {
    return rawPassword;
  }
}
function concat(leftArray, rightArray) {
  let array = leftArray;
  if (leftArray.length + rightArray.length) {
    array = new Uint8Array(leftArray.length + rightArray.length);
    array.set(leftArray, 0);
    array.set(rightArray, leftArray.length);
  }
  return array;
}
function expand(inputArray, length) {
  if (length && length > inputArray.length) {
    const array = inputArray;
    inputArray = new Uint8Array(length);
    inputArray.set(array, 0);
  }
  return inputArray;
}
function subarray(array, begin, end) {
  return array.subarray(begin, end);
}
function fromBits(codecBytes2, chunk) {
  return codecBytes2.fromBits(chunk);
}
function toBits(codecBytes2, chunk) {
  return codecBytes2.toBits(chunk);
}
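
// Note: these streams implement WinZip-style AES entry encryption. On the
// wire an encrypted entry is: salt (8/12/16 bytes for AES-128/192/256) +
// 2-byte password verifier + CTR-encrypted data + 10-byte truncated
// HMAC-SHA1, which is what SALT_LENGTH, KEY_LENGTH and SIGNATURE_LENGTH
// above encode; createKeys() derives key + HMAC key + verifier in one
// PBKDF2 pass of 8 * (KEY_LENGTH[strength] * 2 + 2) bits.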

// node_modules/@zip.js/zip.js/lib/core/streams/zip-crypto-stream.js
var HEADER_LENGTH = 12;
var ZipCryptoDecryptionStream = class extends TransformStream {
  constructor({ password, passwordVerification, checkPasswordOnly }) {
    super({
      start() {
        Object.assign(this, {
          password,
          passwordVerification
        });
        createKeys2(this, password);
      },
      transform(chunk, controller) {
        const zipCrypto = this;
        if (zipCrypto.password) {
          const decryptedHeader = decrypt(zipCrypto, chunk.subarray(0, HEADER_LENGTH));
          zipCrypto.password = null;
          if (decryptedHeader[HEADER_LENGTH - 1] != zipCrypto.passwordVerification) {
            throw new Error(ERR_INVALID_PASSWORD);
          }
          chunk = chunk.subarray(HEADER_LENGTH);
        }
        if (checkPasswordOnly) {
          controller.error(new Error(ERR_ABORT_CHECK_PASSWORD));
        } else {
          controller.enqueue(decrypt(zipCrypto, chunk));
        }
      }
    });
  }
};
var ZipCryptoEncryptionStream = class extends TransformStream {
  constructor({ password, passwordVerification }) {
    super({
      start() {
        Object.assign(this, {
          password,
          passwordVerification
        });
        createKeys2(this, password);
      },
      transform(chunk, controller) {
        const zipCrypto = this;
        let output;
        let offset;
        if (zipCrypto.password) {
          zipCrypto.password = null;
          const header = getRandomValues(new Uint8Array(HEADER_LENGTH));
          header[HEADER_LENGTH - 1] = zipCrypto.passwordVerification;
          output = new Uint8Array(chunk.length + header.length);
          output.set(encrypt(zipCrypto, header), 0);
          offset = HEADER_LENGTH;
        } else {
          output = new Uint8Array(chunk.length);
          offset = 0;
        }
        output.set(encrypt(zipCrypto, chunk), offset);
        controller.enqueue(output);
      }
    });
  }
};
function decrypt(target, input) {
  const output = new Uint8Array(input.length);
  for (let index = 0; index < input.length; index++) {
    output[index] = getByte(target) ^ input[index];
    updateKeys(target, output[index]);
  }
  return output;
}
function encrypt(target, input) {
  const output = new Uint8Array(input.length);
  for (let index = 0; index < input.length; index++) {
    output[index] = getByte(target) ^ input[index];
    updateKeys(target, input[index]);
  }
  return output;
}
function createKeys2(target, password) {
  const keys = [305419896, 591751049, 878082192];
  Object.assign(target, {
    keys,
    crcKey0: new Crc32(keys[0]),
    crcKey2: new Crc32(keys[2])
  });
  for (let index = 0; index < password.length; index++) {
    updateKeys(target, password.charCodeAt(index));
  }
}
function updateKeys(target, byte) {
  let [key0, key1, key2] = target.keys;
  target.crcKey0.append([byte]);
  key0 = ~target.crcKey0.get();
  key1 = getInt32(Math.imul(getInt32(key1 + getInt8(key0)), 134775813) + 1);
  target.crcKey2.append([key1 >>> 24]);
  key2 = ~target.crcKey2.get();
  target.keys = [key0, key1, key2];
}
function getByte(target) {
  const temp = target.keys[2] | 2;
  return getInt8(Math.imul(temp, temp ^ 1) >>> 8);
}
function getInt8(number) {
  return number & 255;
}
function getInt32(number) {
  return number & 4294967295;
}
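
// Note: this is the traditional PKWARE "ZipCrypto" stream cipher: three
// 32-bit keys seeded with 0x12345678 / 0x23456789 / 0x34567890 (305419896 /
// 591751049 / 878082192) and updated per byte via CRC-32 and the multiplier
// 0x08088405 (134775813); the last byte of the 12-byte random header doubles
// as the password check value.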

// node_modules/@zip.js/zip.js/lib/core/streams/zip-entry-stream.js
var COMPRESSION_FORMAT = "deflate-raw";
var DeflateStream = class extends TransformStream {
  constructor(options, { chunkSize, CompressionStream: CompressionStream2, CompressionStreamNative }) {
    super({});
    const { compressed, encrypted, useCompressionStream, zipCrypto, signed, level } = options;
    const stream = this;
    let crc32Stream, encryptionStream;
    let readable = filterEmptyChunks(super.readable);
    if ((!encrypted || zipCrypto) && signed) {
      crc32Stream = new Crc32Stream();
      readable = pipeThrough(readable, crc32Stream);
    }
    if (compressed) {
      readable = pipeThroughCommpressionStream(readable, useCompressionStream, { level, chunkSize }, CompressionStreamNative, CompressionStream2);
    }
    if (encrypted) {
      if (zipCrypto) {
        readable = pipeThrough(readable, new ZipCryptoEncryptionStream(options));
      } else {
        encryptionStream = new AESEncryptionStream(options);
        readable = pipeThrough(readable, encryptionStream);
      }
    }
    setReadable(stream, readable, () => {
      let signature;
      if (encrypted && !zipCrypto) {
        signature = encryptionStream.signature;
      }
      if ((!encrypted || zipCrypto) && signed) {
        signature = new DataView(crc32Stream.value.buffer).getUint32(0);
      }
      stream.signature = signature;
    });
  }
};
var InflateStream = class extends TransformStream {
  constructor(options, { chunkSize, DecompressionStream: DecompressionStream2, DecompressionStreamNative }) {
    super({});
    const { zipCrypto, encrypted, signed, signature, compressed, useCompressionStream } = options;
    let crc32Stream, decryptionStream;
    let readable = filterEmptyChunks(super.readable);
    if (encrypted) {
      if (zipCrypto) {
        readable = pipeThrough(readable, new ZipCryptoDecryptionStream(options));
      } else {
        decryptionStream = new AESDecryptionStream(options);
        readable = pipeThrough(readable, decryptionStream);
      }
    }
    if (compressed) {
      readable = pipeThroughCommpressionStream(readable, useCompressionStream, { chunkSize }, DecompressionStreamNative, DecompressionStream2);
    }
    if ((!encrypted || zipCrypto) && signed) {
      crc32Stream = new Crc32Stream();
      readable = pipeThrough(readable, crc32Stream);
    }
    setReadable(this, readable, () => {
      if ((!encrypted || zipCrypto) && signed) {
        const dataViewSignature = new DataView(crc32Stream.value.buffer);
        if (signature != dataViewSignature.getUint32(0, false)) {
          throw new Error(ERR_INVALID_SIGNATURE);
        }
      }
    });
  }
};
function filterEmptyChunks(readable) {
  return pipeThrough(readable, new TransformStream({
    transform(chunk, controller) {
      if (chunk && chunk.length) {
        controller.enqueue(chunk);
      }
    }
  }));
}
function setReadable(stream, readable, flush) {
  readable = pipeThrough(readable, new TransformStream({ flush }));
  Object.defineProperty(stream, "readable", {
    get() {
      return readable;
    }
  });
}
function pipeThroughCommpressionStream(readable, useCompressionStream, options, CodecStreamNative, CodecStream2) {
  try {
    const CompressionStream2 = useCompressionStream && CodecStreamNative ? CodecStreamNative : CodecStream2;
    readable = pipeThrough(readable, new CompressionStream2(COMPRESSION_FORMAT, options));
  } catch (error) {
    if (useCompressionStream) {
      try {
        readable = pipeThrough(readable, new CodecStream2(COMPRESSION_FORMAT, options));
      } catch (error2) {
        return readable;
      }
    } else {
      return readable;
    }
  }
  return readable;
}
function pipeThrough(readable, transformStream) {
  return readable.pipeThrough(transformStream);
}
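
// Note: the two pipelines mirror each other. Writing: CRC-32 -> deflate ->
// encrypt (the CRC is only computed for ZipCrypto or unencrypted entries,
// since AES entries are authenticated by the HMAC signature instead).
// Reading: decrypt -> inflate -> CRC-32 check against the recorded signature.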

// node_modules/@zip.js/zip.js/lib/core/streams/codec-stream.js
var MESSAGE_EVENT_TYPE = "message";
var MESSAGE_START = "start";
var MESSAGE_PULL = "pull";
var MESSAGE_DATA = "data";
var MESSAGE_ACK_DATA = "ack";
var MESSAGE_CLOSE = "close";
var CODEC_DEFLATE = "deflate";
var CODEC_INFLATE = "inflate";
var CodecStream = class extends TransformStream {
  constructor(options, config2) {
    super({});
    const codec2 = this;
    const { codecType } = options;
    let Stream2;
    if (codecType.startsWith(CODEC_DEFLATE)) {
      Stream2 = DeflateStream;
    } else if (codecType.startsWith(CODEC_INFLATE)) {
      Stream2 = InflateStream;
    }
    let outputSize = 0;
    let inputSize = 0;
    const stream = new Stream2(options, config2);
    const readable = super.readable;
    const inputSizeStream = new TransformStream({
      transform(chunk, controller) {
        if (chunk && chunk.length) {
          inputSize += chunk.length;
          controller.enqueue(chunk);
        }
      },
      flush() {
        Object.assign(codec2, {
          inputSize
        });
      }
    });
    const outputSizeStream = new TransformStream({
      transform(chunk, controller) {
        if (chunk && chunk.length) {
          outputSize += chunk.length;
          controller.enqueue(chunk);
        }
      },
      flush() {
        const { signature } = stream;
        Object.assign(codec2, {
          signature,
          outputSize,
          inputSize
        });
      }
    });
    Object.defineProperty(codec2, "readable", {
      get() {
        return readable.pipeThrough(inputSizeStream).pipeThrough(stream).pipeThrough(outputSizeStream);
      }
    });
  }
};
var ChunkStream = class extends TransformStream {
  constructor(chunkSize) {
    let pendingChunk;
    super({
      transform,
      flush(controller) {
        if (pendingChunk && pendingChunk.length) {
          controller.enqueue(pendingChunk);
        }
      }
    });
    function transform(chunk, controller) {
      if (pendingChunk) {
        const newChunk = new Uint8Array(pendingChunk.length + chunk.length);
        newChunk.set(pendingChunk);
        newChunk.set(chunk, pendingChunk.length);
        chunk = newChunk;
        pendingChunk = null;
      }
      if (chunk.length > chunkSize) {
        controller.enqueue(chunk.slice(0, chunkSize));
        transform(chunk.slice(chunkSize), controller);
      } else {
        pendingChunk = chunk;
      }
    }
  }
};
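
// Note: ChunkStream re-slices an arbitrary byte stream into chunks of at most
// chunkSize bytes, buffering the remainder until flush; CodecStream wraps a
// Deflate/Inflate pipeline and records inputSize, outputSize and signature on
// itself for the caller to read back after the pipe completes.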
|
||
|
|
||
|
// node_modules/@zip.js/zip.js/lib/core/codec-worker.js
|
||
|
var WEB_WORKERS_SUPPORTED = typeof Worker != UNDEFINED_TYPE;
|
||
|
var CodecWorker = class {
|
||
|
constructor(workerData, { readable, writable }, { options, config: config2, streamOptions, useWebWorkers, transferStreams, scripts }, onTaskFinished) {
|
||
|
const { signal } = streamOptions;
|
||
|
Object.assign(workerData, {
|
||
|
busy: true,
|
||
|
readable: readable.pipeThrough(new ChunkStream(config2.chunkSize)).pipeThrough(new ProgressWatcherStream(readable, streamOptions), { signal }),
|
||
|
writable,
|
||
|
options: Object.assign({}, options),
|
||
|
scripts,
|
||
|
transferStreams,
|
||
|
terminate() {
|
||
|
return new Promise((resolve) => {
|
||
|
const { worker, busy } = workerData;
|
||
|
if (worker) {
|
||
|
if (busy) {
|
||
|
workerData.resolveTerminated = resolve;
|
||
|
} else {
|
||
|
worker.terminate();
|
||
|
resolve();
|
||
|
}
|
||
|
workerData.interface = null;
|
||
|
} else {
|
||
|
resolve();
|
||
|
}
|
||
|
});
|
||
|
},
|
||
|
onTaskFinished() {
|
||
|
const { resolveTerminated } = workerData;
|
||
|
if (resolveTerminated) {
|
||
|
workerData.resolveTerminated = null;
|
||
|
workerData.terminated = true;
|
||
|
workerData.worker.terminate();
|
||
|
resolveTerminated();
|
||
|
}
|
||
|
workerData.busy = false;
|
||
|
onTaskFinished(workerData);
|
||
|
}
|
||
|
});
|
||
|
return (useWebWorkers && WEB_WORKERS_SUPPORTED ? createWebWorkerInterface : createWorkerInterface)(workerData, config2);
|
||
|
}
|
||
|
};
|
||
|
var ProgressWatcherStream = class extends TransformStream {
|
||
|
constructor(readableSource, { onstart, onprogress, size, onend }) {
|
||
|
let chunkOffset = 0;
|
||
|
super({
|
||
|
async start() {
|
||
|
if (onstart) {
|
||
|
await callHandler(onstart, size);
|
||
|
}
|
||
|
},
|
||
|
async transform(chunk, controller) {
|
||
|
chunkOffset += chunk.length;
|
||
|
if (onprogress) {
|
||
|
await callHandler(onprogress, chunkOffset, size);
|
||
|
}
|
||
|
controller.enqueue(chunk);
|
||
|
},
|
||
|
async flush() {
|
||
|
readableSource.size = chunkOffset;
|
||
|
if (onend) {
|
||
|
await callHandler(onend, chunkOffset);
|
||
|
}
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
};
|
||
|
async function callHandler(handler, ...parameters) {
|
||
|
try {
|
||
|
await handler(...parameters);
|
||
|
} catch (_error) {
|
||
|
}
|
||
|
}
|
||
|
function createWorkerInterface(workerData, config2) {
|
||
|
return {
|
||
|
run: () => runWorker(workerData, config2)
|
||
|
};
|
||
|
}
|
||
|
function createWebWorkerInterface(workerData, config2) {
|
||
|
const { baseURL: baseURL2, chunkSize } = config2;
|
||
|
if (!workerData.interface) {
|
||
|
let worker;
|
||
|
try {
|
||
|
worker = getWebWorker(workerData.scripts[0], baseURL2, workerData);
|
||
|
} catch (error) {
|
||
|
WEB_WORKERS_SUPPORTED = false;
|
||
|
return createWorkerInterface(workerData, config2);
|
||
|
}
|
||
|
Object.assign(workerData, {
|
||
|
worker,
|
||
|
interface: {
|
||
|
run: () => runWebWorker(workerData, { chunkSize })
|
||
|
}
|
||
|
});
|
||
|
}
|
||
|
return workerData.interface;
|
||
|
}
|
||
|
async function runWorker({ options, readable, writable, onTaskFinished }, config2) {
|
||
|
try {
|
||
|
const codecStream = new CodecStream(options, config2);
|
||
|
await readable.pipeThrough(codecStream).pipeTo(writable, { preventClose: true, preventAbort: true });
|
||
|
const {
|
||
|
signature,
|
||
|
inputSize,
|
||
|
outputSize
|
||
|
} = codecStream;
|
||
|
return {
|
||
|
signature,
|
||
|
inputSize,
|
||
|
outputSize
|
||
|
};
|
||
|
} finally {
|
||
|
onTaskFinished();
|
||
|
}
|
||
|
}
|
||
|
async function runWebWorker(workerData, config2) {
|
||
|
let resolveResult, rejectResult;
|
||
|
const result = new Promise((resolve, reject) => {
|
||
|
resolveResult = resolve;
|
||
|
rejectResult = reject;
|
||
|
});
|
||
|
Object.assign(workerData, {
|
||
|
reader: null,
|
||
|
writer: null,
|
||
|
resolveResult,
|
||
|
rejectResult,
|
||
|
result
|
||
|
});
|
||
|
const { readable, options, scripts } = workerData;
|
||
|
const { writable, closed } = watchClosedStream(workerData.writable);
|
||
|
const streamsTransferred = sendMessage({
|
||
|
type: MESSAGE_START,
|
||
|
scripts: scripts.slice(1),
|
||
|
options,
|
||
|
config: config2,
|
||
|
readable,
|
||
|
writable
|
||
|
}, workerData);
|
||
|
if (!streamsTransferred) {
|
||
|
Object.assign(workerData, {
|
||
|
reader: readable.getReader(),
|
||
|
writer: writable.getWriter()
|
||
|
});
|
||
|
}
|
||
|
const resultValue = await result;
|
||
|
if (!streamsTransferred) {
|
||
|
await writable.getWriter().close();
|
||
|
}
|
||
|
await closed;
|
||
|
return resultValue;
|
||
|
}
|
||
|
function watchClosedStream(writableSource) {
  let resolveStreamClosed;
  const closed = new Promise((resolve) => resolveStreamClosed = resolve);
  const writable = new WritableStream({
    async write(chunk) {
      const writer = writableSource.getWriter();
      await writer.ready;
      await writer.write(chunk);
      writer.releaseLock();
    },
    close() {
      resolveStreamClosed();
    },
    abort(reason) {
      const writer = writableSource.getWriter();
      return writer.abort(reason);
    }
  });
  return { writable, closed };
}
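// Creates the worker, trying a classic worker first and falling back to a module
// worker ({ type: "module" }) when the classic constructor throws.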
var classicWorkersSupported = true;
var transferStreamsSupported = true;
function getWebWorker(url, baseURL2, workerData) {
  const workerOptions = { type: "module" };
  let scriptUrl, worker;
  if (typeof url == FUNCTION_TYPE) {
    url = url();
  }
  try {
    scriptUrl = new URL(url, baseURL2);
  } catch (_error) {
    scriptUrl = url;
  }
  if (classicWorkersSupported) {
    try {
      worker = new Worker(scriptUrl);
    } catch (_error) {
      classicWorkersSupported = false;
      worker = new Worker(scriptUrl, workerOptions);
    }
  } else {
    worker = new Worker(scriptUrl, workerOptions);
  }
  worker.addEventListener(MESSAGE_EVENT_TYPE, (event) => onMessage(event, workerData));
  return worker;
}
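// Posts a message to the worker, transferring the chunk's ArrayBuffer and, when
// supported, the readable/writable streams themselves; if transferring streams
// fails at runtime, support is switched off and the message is re-sent without them.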
function sendMessage(message, { worker, writer, onTaskFinished, transferStreams }) {
  try {
    let { value, readable, writable } = message;
    const transferables = [];
    if (value) {
      if (value.byteLength < value.buffer.byteLength) {
        message.value = value.buffer.slice(0, value.byteLength);
      } else {
        message.value = value.buffer;
      }
      transferables.push(message.value);
    }
    if (transferStreams && transferStreamsSupported) {
      if (readable) {
        transferables.push(readable);
      }
      if (writable) {
        transferables.push(writable);
      }
    } else {
      message.readable = message.writable = null;
    }
    if (transferables.length) {
      try {
        worker.postMessage(message, transferables);
        return true;
      } catch (_error) {
        transferStreamsSupported = false;
        message.readable = message.writable = null;
        worker.postMessage(message);
      }
    } else {
      worker.postMessage(message);
    }
  } catch (error) {
    if (writer) {
      writer.releaseLock();
    }
    onTaskFinished();
    throw error;
  }
}
async function onMessage({ data }, workerData) {
  const { type, value, messageId, result, error } = data;
  const { reader, writer, resolveResult, rejectResult, onTaskFinished } = workerData;
  try {
    if (error) {
      const { message, stack, code, name } = error;
      const responseError = new Error(message);
      Object.assign(responseError, { stack, code, name });
      close(responseError);
    } else {
      if (type == MESSAGE_PULL) {
        const { value: value2, done } = await reader.read();
        sendMessage({ type: MESSAGE_DATA, value: value2, done, messageId }, workerData);
      }
      if (type == MESSAGE_DATA) {
        await writer.ready;
        await writer.write(new Uint8Array(value));
        sendMessage({ type: MESSAGE_ACK_DATA, messageId }, workerData);
      }
      if (type == MESSAGE_CLOSE) {
        close(null, result);
      }
    }
  } catch (error2) {
    sendMessage({ type: MESSAGE_CLOSE, messageId }, workerData);
    close(error2);
  }
  function close(error2, result2) {
    if (error2) {
      rejectResult(error2);
    } else {
      resolveResult(result2);
    }
    if (writer) {
      writer.releaseLock();
    }
    onTaskFinished();
  }
}

// node_modules/@zip.js/zip.js/lib/core/codec-pool.js
var pool = [];
var pendingRequests = [];
var indexWorker = 0;
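// Codec pool entry point: reuse an idle worker, spawn a new one while the pool is
// below maxWorkers, or queue the request. A finishing worker either picks up a
// pending request or arms its termination timer.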
async function runWorker2(stream, workerOptions) {
  const { options, config: config2 } = workerOptions;
  const { transferStreams, useWebWorkers, useCompressionStream, codecType, compressed, signed, encrypted } = options;
  const { workerScripts, maxWorkers: maxWorkers2 } = config2;
  workerOptions.transferStreams = transferStreams || transferStreams === UNDEFINED_VALUE;
  const streamCopy = !compressed && !signed && !encrypted && !workerOptions.transferStreams;
  workerOptions.useWebWorkers = !streamCopy && (useWebWorkers || useWebWorkers === UNDEFINED_VALUE && config2.useWebWorkers);
  workerOptions.scripts = workerOptions.useWebWorkers && workerScripts ? workerScripts[codecType] : [];
  options.useCompressionStream = useCompressionStream || useCompressionStream === UNDEFINED_VALUE && config2.useCompressionStream;
  return (await getWorker()).run();
  async function getWorker() {
    const workerData = pool.find((workerData2) => !workerData2.busy);
    if (workerData) {
      clearTerminateTimeout(workerData);
      return new CodecWorker(workerData, stream, workerOptions, onTaskFinished);
    } else if (pool.length < maxWorkers2) {
      const workerData2 = { indexWorker };
      indexWorker++;
      pool.push(workerData2);
      return new CodecWorker(workerData2, stream, workerOptions, onTaskFinished);
    } else {
      return new Promise((resolve) => pendingRequests.push({ resolve, stream, workerOptions }));
    }
  }
  function onTaskFinished(workerData) {
    if (pendingRequests.length) {
      const [{ resolve, stream: stream2, workerOptions: workerOptions2 }] = pendingRequests.splice(0, 1);
      resolve(new CodecWorker(workerData, stream2, workerOptions2, onTaskFinished));
    } else if (workerData.worker) {
      clearTerminateTimeout(workerData);
      terminateWorker(workerData, workerOptions);
    } else {
      pool = pool.filter((data) => data != workerData);
    }
  }
}
function terminateWorker(workerData, workerOptions) {
  const { config: config2 } = workerOptions;
  const { terminateWorkerTimeout } = config2;
  if (Number.isFinite(terminateWorkerTimeout) && terminateWorkerTimeout >= 0) {
    if (workerData.terminated) {
      workerData.terminated = false;
    } else {
      workerData.terminateTimeout = setTimeout(async () => {
        pool = pool.filter((data) => data != workerData);
        try {
          await workerData.terminate();
        } catch (_error) {
        }
      }, terminateWorkerTimeout);
    }
  }
}
function clearTerminateTimeout(workerData) {
  const { terminateTimeout } = workerData;
  if (terminateTimeout) {
    clearTimeout(terminateTimeout);
    workerData.terminateTimeout = null;
  }
}
async function terminateWorkers() {
  await Promise.allSettled(pool.map((workerData) => {
    clearTerminateTimeout(workerData);
    return workerData.terminate();
  }));
}

// node_modules/@zip.js/zip.js/lib/core/io.js
var ERR_HTTP_STATUS = "HTTP error ";
var ERR_HTTP_RANGE = "HTTP Range not supported";
var ERR_ITERATOR_COMPLETED_TOO_SOON = "Writer iterator completed too soon";
var CONTENT_TYPE_TEXT_PLAIN = "text/plain";
var HTTP_HEADER_CONTENT_LENGTH = "Content-Length";
var HTTP_HEADER_CONTENT_RANGE = "Content-Range";
var HTTP_HEADER_ACCEPT_RANGES = "Accept-Ranges";
var HTTP_HEADER_RANGE = "Range";
var HTTP_HEADER_CONTENT_TYPE = "Content-Type";
var HTTP_METHOD_HEAD = "HEAD";
var HTTP_METHOD_GET = "GET";
var HTTP_RANGE_UNIT = "bytes";
var DEFAULT_CHUNK_SIZE = 64 * 1024;
var PROPERTY_NAME_WRITABLE = "writable";
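// Base I/O classes: Reader subclasses implement readUint8Array(offset, length) and
// inherit a pull-based `readable` stream; Writer subclasses implement
// writeUint8Array(chunk) behind a WritableStream facade.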
var Stream = class {
  constructor() {
    this.size = 0;
  }
  init() {
    this.initialized = true;
  }
};
var Reader = class extends Stream {
  get readable() {
    const reader = this;
    const { chunkSize = DEFAULT_CHUNK_SIZE } = reader;
    const readable = new ReadableStream({
      start() {
        this.chunkOffset = 0;
      },
      async pull(controller) {
        const { offset = 0, size, diskNumberStart } = readable;
        const { chunkOffset } = this;
        controller.enqueue(await readUint8Array(reader, offset + chunkOffset, Math.min(chunkSize, size - chunkOffset), diskNumberStart));
        if (chunkOffset + chunkSize > size) {
          controller.close();
        } else {
          this.chunkOffset += chunkSize;
        }
      }
    });
    return readable;
  }
};
var Writer = class extends Stream {
  constructor() {
    super();
    const writer = this;
    const writable = new WritableStream({
      write(chunk) {
        return writer.writeUint8Array(chunk);
      }
    });
    Object.defineProperty(writer, PROPERTY_NAME_WRITABLE, {
      get() {
        return writable;
      }
    });
  }
  writeUint8Array() {
  }
};
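// Data64URIReader maps byte offsets onto the base64 payload of a data URI: every
// 4 base64 characters encode 3 bytes, so a byte range is served by decoding the
// enclosing run of quartets and skipping `delta` leading bytes.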
var Data64URIReader = class extends Reader {
  constructor(dataURI) {
    super();
    let dataEnd = dataURI.length;
    while (dataURI.charAt(dataEnd - 1) == "=") {
      dataEnd--;
    }
    const dataStart = dataURI.indexOf(",") + 1;
    Object.assign(this, {
      dataURI,
      dataStart,
      size: Math.floor((dataEnd - dataStart) * 0.75)
    });
  }
  readUint8Array(offset, length) {
    const {
      dataStart,
      dataURI
    } = this;
    const dataArray = new Uint8Array(length);
    const start = Math.floor(offset / 3) * 4;
    const bytes = atob(dataURI.substring(start + dataStart, Math.ceil((offset + length) / 3) * 4 + dataStart));
    const delta = offset - Math.floor(start / 4) * 3;
    for (let indexByte = delta; indexByte < delta + length; indexByte++) {
      dataArray[indexByte - delta] = bytes.charCodeAt(indexByte);
    }
    return dataArray;
  }
};
var Data64URIWriter = class extends Writer {
  constructor(contentType) {
    super();
    Object.assign(this, {
      data: "data:" + (contentType || "") + ";base64,",
      pending: []
    });
  }
  writeUint8Array(array) {
    const writer = this;
    let indexArray = 0;
    let dataString = writer.pending;
    const delta = writer.pending.length;
    writer.pending = "";
    for (indexArray = 0; indexArray < Math.floor((delta + array.length) / 3) * 3 - delta; indexArray++) {
      dataString += String.fromCharCode(array[indexArray]);
    }
    for (; indexArray < array.length; indexArray++) {
      writer.pending += String.fromCharCode(array[indexArray]);
    }
    if (dataString.length > 2) {
      writer.data += btoa(dataString);
    } else {
      writer.pending = dataString;
    }
  }
  getData() {
    return this.data + btoa(this.pending);
  }
};
var BlobReader = class extends Reader {
  constructor(blob) {
    super();
    Object.assign(this, {
      blob,
      size: blob.size
    });
  }
  async readUint8Array(offset, length) {
    const reader = this;
    const offsetEnd = offset + length;
    const blob = offset || offsetEnd < reader.size ? reader.blob.slice(offset, offsetEnd) : reader.blob;
    let arrayBuffer = await blob.arrayBuffer();
    if (arrayBuffer.byteLength > length) {
      arrayBuffer = arrayBuffer.slice(offset, offsetEnd);
    }
    return new Uint8Array(arrayBuffer);
  }
};
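// BlobWriter funnels written chunks through a TransformStream into
// new Response(...).blob(), letting the platform assemble the Blob (with the
// optional Content-Type) instead of buffering chunks in JS.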
var BlobWriter = class extends Stream {
  constructor(contentType) {
    super();
    const writer = this;
    const transformStream = new TransformStream();
    const headers = [];
    if (contentType) {
      headers.push([HTTP_HEADER_CONTENT_TYPE, contentType]);
    }
    Object.defineProperty(writer, PROPERTY_NAME_WRITABLE, {
      get() {
        return transformStream.writable;
      }
    });
    writer.blob = new Response(transformStream.readable, { headers }).blob();
  }
  getData() {
    return this.blob;
  }
};
var TextReader = class extends BlobReader {
  constructor(text) {
    super(new Blob([text], { type: CONTENT_TYPE_TEXT_PLAIN }));
  }
};
var TextWriter = class extends BlobWriter {
  constructor(encoding) {
    super(encoding);
    Object.assign(this, {
      encoding,
      utf8: !encoding || encoding.toLowerCase() == "utf-8"
    });
  }
  async getData() {
    const {
      encoding,
      utf8
    } = this;
    const blob = await super.getData();
    if (blob.text && utf8) {
      return blob.text();
    } else {
      const reader = new FileReader();
      return new Promise((resolve, reject) => {
        Object.assign(reader, {
          onload: ({ target }) => resolve(target.result),
          onerror: () => reject(reader.error)
        });
        reader.readAsText(blob, encoding);
      });
    }
  }
};
var FetchReader = class extends Reader {
  constructor(url, options) {
    super();
    createHttpReader(this, url, options);
  }
  async init() {
    await initHttpReader(this, sendFetchRequest, getFetchRequestData);
    super.init();
  }
  readUint8Array(index, length) {
    return readUint8ArrayHttpReader(this, index, length, sendFetchRequest, getFetchRequestData);
  }
};
var XHRReader = class extends Reader {
  constructor(url, options) {
    super();
    createHttpReader(this, url, options);
  }
  async init() {
    await initHttpReader(this, sendXMLHttpRequest, getXMLHttpRequestData);
    super.init();
  }
  readUint8Array(index, length) {
    return readUint8ArrayHttpReader(this, index, length, sendXMLHttpRequest, getXMLHttpRequestData);
  }
};
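// HTTP readers. The reader-specific options (preventHeadRequest, useRangeHeader,
// forceRangeRequests, combineSizeEocd, useXHR) are stripped from the copy of the
// options that gets forwarded to fetch()/XMLHttpRequest.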
function createHttpReader(httpReader, url, options) {
  const {
    preventHeadRequest,
    useRangeHeader,
    forceRangeRequests,
    combineSizeEocd
  } = options;
  options = Object.assign({}, options);
  delete options.preventHeadRequest;
  delete options.useRangeHeader;
  delete options.forceRangeRequests;
  delete options.combineSizeEocd;
  delete options.useXHR;
  Object.assign(httpReader, {
    url,
    options,
    preventHeadRequest,
    useRangeHeader,
    forceRangeRequests,
    combineSizeEocd
  });
}
async function initHttpReader(httpReader, sendRequest, getRequestData2) {
  const {
    url,
    preventHeadRequest,
    useRangeHeader,
    forceRangeRequests,
    combineSizeEocd
  } = httpReader;
  if (isHttpFamily(url) && (useRangeHeader || forceRangeRequests) && (typeof preventHeadRequest == "undefined" || preventHeadRequest)) {
    const response = await sendRequest(HTTP_METHOD_GET, httpReader, getRangeHeaders(httpReader, combineSizeEocd ? -END_OF_CENTRAL_DIR_LENGTH : void 0));
    if (!forceRangeRequests && response.headers.get(HTTP_HEADER_ACCEPT_RANGES) != HTTP_RANGE_UNIT) {
      throw new Error(ERR_HTTP_RANGE);
    } else {
      if (combineSizeEocd) {
        httpReader.eocdCache = new Uint8Array(await response.arrayBuffer());
      }
      let contentSize;
      const contentRangeHeader = response.headers.get(HTTP_HEADER_CONTENT_RANGE);
      if (contentRangeHeader) {
        const splitHeader = contentRangeHeader.trim().split(/\s*\/\s*/);
        if (splitHeader.length) {
          const headerValue = splitHeader[1];
          if (headerValue && headerValue != "*") {
            contentSize = Number(headerValue);
          }
        }
      }
      if (contentSize === UNDEFINED_VALUE) {
        await getContentLength(httpReader, sendRequest, getRequestData2);
      } else {
        httpReader.size = contentSize;
      }
    }
  } else {
    await getContentLength(httpReader, sendRequest, getRequestData2);
  }
}
async function readUint8ArrayHttpReader(httpReader, index, length, sendRequest, getRequestData2) {
  const {
    useRangeHeader,
    forceRangeRequests,
    eocdCache,
    size,
    options
  } = httpReader;
  if (useRangeHeader || forceRangeRequests) {
    if (eocdCache && index == size - END_OF_CENTRAL_DIR_LENGTH && length == END_OF_CENTRAL_DIR_LENGTH) {
      return eocdCache;
    }
    const response = await sendRequest(HTTP_METHOD_GET, httpReader, getRangeHeaders(httpReader, index, length));
    if (response.status != 206) {
      throw new Error(ERR_HTTP_RANGE);
    }
    return new Uint8Array(await response.arrayBuffer());
  } else {
    const { data } = httpReader;
    if (!data) {
      await getRequestData2(httpReader, options);
    }
    return new Uint8Array(httpReader.data.subarray(index, index + length));
  }
}
function getRangeHeaders(httpReader, index = 0, length = 1) {
  return Object.assign({}, getHeaders(httpReader), { [HTTP_HEADER_RANGE]: HTTP_RANGE_UNIT + "=" + (index < 0 ? index : index + "-" + (index + length - 1)) });
}
function getHeaders({ options }) {
  const { headers } = options;
  if (headers) {
    if (Symbol.iterator in headers) {
      return Object.fromEntries(headers);
    } else {
      return headers;
    }
  }
}
async function getFetchRequestData(httpReader) {
  await getRequestData(httpReader, sendFetchRequest);
}
async function getXMLHttpRequestData(httpReader) {
  await getRequestData(httpReader, sendXMLHttpRequest);
}
async function getRequestData(httpReader, sendRequest) {
  const response = await sendRequest(HTTP_METHOD_GET, httpReader, getHeaders(httpReader));
  httpReader.data = new Uint8Array(await response.arrayBuffer());
  if (!httpReader.size) {
    httpReader.size = httpReader.data.length;
  }
}
async function getContentLength(httpReader, sendRequest, getRequestData2) {
  if (httpReader.preventHeadRequest) {
    await getRequestData2(httpReader, httpReader.options);
  } else {
    const response = await sendRequest(HTTP_METHOD_HEAD, httpReader, getHeaders(httpReader));
    const contentLength = response.headers.get(HTTP_HEADER_CONTENT_LENGTH);
    if (contentLength) {
      httpReader.size = Number(contentLength);
    } else {
      await getRequestData2(httpReader, httpReader.options);
    }
  }
}
async function sendFetchRequest(method, { options, url }, headers) {
  const response = await fetch(url, Object.assign({}, options, { method, headers }));
  if (response.status < 400) {
    return response;
  } else {
    throw response.status == 416 ? new Error(ERR_HTTP_RANGE) : new Error(ERR_HTTP_STATUS + (response.statusText || response.status));
  }
}
function sendXMLHttpRequest(method, { url }, headers) {
  return new Promise((resolve, reject) => {
    const request = new XMLHttpRequest();
    request.addEventListener("load", () => {
      if (request.status < 400) {
        const headers2 = [];
        request.getAllResponseHeaders().trim().split(/[\r\n]+/).forEach((header) => {
          const splitHeader = header.trim().split(/\s*:\s*/);
          splitHeader[0] = splitHeader[0].trim().replace(/^[a-z]|-[a-z]/g, (value) => value.toUpperCase());
          headers2.push(splitHeader);
        });
        resolve({
          status: request.status,
          arrayBuffer: () => request.response,
          headers: new Map(headers2)
        });
      } else {
        reject(request.status == 416 ? new Error(ERR_HTTP_RANGE) : new Error(ERR_HTTP_STATUS + (request.statusText || request.status)));
      }
    }, false);
    request.addEventListener("error", (event) => reject(event.detail ? event.detail.error : new Error("Network error")), false);
    request.open(method, url);
    if (headers) {
      for (const entry of Object.entries(headers)) {
        request.setRequestHeader(entry[0], entry[1]);
      }
    }
    request.responseType = "arraybuffer";
    request.send();
  });
}
var HttpReader = class extends Reader {
  constructor(url, options = {}) {
    super();
    Object.assign(this, {
      url,
      reader: options.useXHR ? new XHRReader(url, options) : new FetchReader(url, options)
    });
  }
  set size(value) {
  }
  get size() {
    return this.reader.size;
  }
  async init() {
    await this.reader.init();
    super.init();
  }
  readUint8Array(index, length) {
    return this.reader.readUint8Array(index, length);
  }
};
var HttpRangeReader = class extends HttpReader {
  constructor(url, options = {}) {
    options.useRangeHeader = true;
    super(url, options);
  }
};
var Uint8ArrayReader = class extends Reader {
  constructor(array) {
    super();
    Object.assign(this, {
      array,
      size: array.length
    });
  }
  readUint8Array(index, length) {
    return this.array.slice(index, index + length);
  }
};
var Uint8ArrayWriter = class extends Writer {
  init(initSize = 0) {
    Object.assign(this, {
      offset: 0,
      array: new Uint8Array(initSize)
    });
    super.init();
  }
  writeUint8Array(array) {
    const writer = this;
    if (writer.offset + array.length > writer.array.length) {
      const previousArray = writer.array;
      writer.array = new Uint8Array(previousArray.length + array.length);
      writer.array.set(previousArray);
    }
    writer.array.set(array, writer.offset);
    writer.offset += array.length;
  }
  getData() {
    return this.array;
  }
};
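// SplitDataReader presents a list of per-disk readers as one contiguous byte
// range; a read that crosses a disk boundary reads to the end of the current disk
// and recurses for the remainder.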
var SplitDataReader = class extends Reader {
  constructor(readers) {
    super();
    this.readers = readers;
  }
  async init() {
    const reader = this;
    const { readers } = reader;
    reader.lastDiskNumber = 0;
    reader.lastDiskOffset = 0;
    await Promise.all(readers.map(async (diskReader, indexDiskReader) => {
      await diskReader.init();
      if (indexDiskReader != readers.length - 1) {
        reader.lastDiskOffset += diskReader.size;
      }
      reader.size += diskReader.size;
    }));
    super.init();
  }
  async readUint8Array(offset, length, diskNumber = 0) {
    const reader = this;
    const { readers } = this;
    let result;
    let currentDiskNumber = diskNumber;
    if (currentDiskNumber == -1) {
      currentDiskNumber = readers.length - 1;
    }
    let currentReaderOffset = offset;
    while (currentReaderOffset >= readers[currentDiskNumber].size) {
      currentReaderOffset -= readers[currentDiskNumber].size;
      currentDiskNumber++;
    }
    const currentReader = readers[currentDiskNumber];
    const currentReaderSize = currentReader.size;
    if (currentReaderOffset + length <= currentReaderSize) {
      result = await readUint8Array(currentReader, currentReaderOffset, length);
    } else {
      const chunkLength = currentReaderSize - currentReaderOffset;
      result = new Uint8Array(length);
      result.set(await readUint8Array(currentReader, currentReaderOffset, chunkLength));
      result.set(await reader.readUint8Array(offset + chunkLength, length - chunkLength, diskNumber), chunkLength);
    }
    reader.lastDiskNumber = Math.max(currentDiskNumber, reader.lastDiskNumber);
    return result;
  }
};
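// SplitDataWriter spreads output over writers produced on demand by an async
// generator: when the current disk reaches maxSize the chunk is split, the disk is
// closed, and writing continues on the next writer from the generator.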
var SplitDataWriter = class extends Stream {
  constructor(writerGenerator, maxSize = 4294967295) {
    super();
    const writer = this;
    Object.assign(writer, {
      diskNumber: 0,
      diskOffset: 0,
      size: 0,
      maxSize,
      availableSize: maxSize
    });
    let diskSourceWriter, diskWritable, diskWriter;
    const writable = new WritableStream({
      async write(chunk) {
        const { availableSize } = writer;
        if (!diskWriter) {
          const { value, done } = await writerGenerator.next();
          if (done && !value) {
            throw new Error(ERR_ITERATOR_COMPLETED_TOO_SOON);
          } else {
            diskSourceWriter = value;
            diskSourceWriter.size = 0;
            if (diskSourceWriter.maxSize) {
              writer.maxSize = diskSourceWriter.maxSize;
            }
            writer.availableSize = writer.maxSize;
            await initStream(diskSourceWriter);
            diskWritable = value.writable;
            diskWriter = diskWritable.getWriter();
          }
          await this.write(chunk);
        } else if (chunk.length >= availableSize) {
          await writeChunk(chunk.slice(0, availableSize));
          await closeDisk();
          writer.diskOffset += diskSourceWriter.size;
          writer.diskNumber++;
          diskWriter = null;
          await this.write(chunk.slice(availableSize));
        } else {
          await writeChunk(chunk);
        }
      },
      async close() {
        await diskWriter.ready;
        await closeDisk();
      }
    });
    Object.defineProperty(writer, PROPERTY_NAME_WRITABLE, {
      get() {
        return writable;
      }
    });
    async function writeChunk(chunk) {
      const chunkLength = chunk.length;
      if (chunkLength) {
        await diskWriter.ready;
        await diskWriter.write(chunk);
        diskSourceWriter.size += chunkLength;
        writer.size += chunkLength;
        writer.availableSize -= chunkLength;
      }
    }
    async function closeDisk() {
      diskWritable.size = diskSourceWriter.size;
      await diskWriter.close();
    }
  }
};
function isHttpFamily(url) {
  const { baseURL: baseURL2 } = getConfiguration();
  const { protocol } = new URL(url, baseURL2);
  return protocol == "http:" || protocol == "https:";
}
async function initStream(stream, initSize) {
  if (stream.init && !stream.initialized) {
    await stream.init(initSize);
  } else {
    return Promise.resolve();
  }
}
function initReader(reader) {
  if (Array.isArray(reader)) {
    reader = new SplitDataReader(reader);
  }
  if (reader instanceof ReadableStream) {
    reader = {
      readable: reader
    };
  }
  return reader;
}
function initWriter(writer) {
  if (writer.writable === UNDEFINED_VALUE && typeof writer.next == FUNCTION_TYPE) {
    writer = new SplitDataWriter(writer);
  }
  if (writer instanceof WritableStream) {
    writer = {
      writable: writer
    };
  }
  const { writable } = writer;
  if (writable.size === UNDEFINED_VALUE) {
    writable.size = 0;
  }
  if (!(writer instanceof SplitDataWriter)) {
    Object.assign(writer, {
      diskNumber: 0,
      diskOffset: 0,
      availableSize: Infinity,
      maxSize: Infinity
    });
  }
  return writer;
}
function readUint8Array(reader, offset, size, diskNumber) {
  return reader.readUint8Array(offset, size, diskNumber);
}
var SplitZipReader = SplitDataReader;
var SplitZipWriter = SplitDataWriter;

// node_modules/@zip.js/zip.js/lib/core/util/cp437-decode.js
var CP437 = "\0\u263A\u263B\u2665\u2666\u2663\u2660\u2022\u25D8\u25CB\u25D9\u2642\u2640\u266A\u266B\u263C\u25BA\u25C4\u2195\u203C\xB6\xA7\u25AC\u21A8\u2191\u2193\u2192\u2190\u221F\u2194\u25B2\u25BC !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u2302\xC7\xFC\xE9\xE2\xE4\xE0\xE5\xE7\xEA\xEB\xE8\xEF\xEE\xEC\xC4\xC5\xC9\xE6\xC6\xF4\xF6\xF2\xFB\xF9\xFF\xD6\xDC\xA2\xA3\xA5\u20A7\u0192\xE1\xED\xF3\xFA\xF1\xD1\xAA\xBA\xBF\u2310\xAC\xBD\xBC\xA1\xAB\xBB\u2591\u2592\u2593\u2502\u2524\u2561\u2562\u2556\u2555\u2563\u2551\u2557\u255D\u255C\u255B\u2510\u2514\u2534\u252C\u251C\u2500\u253C\u255E\u255F\u255A\u2554\u2569\u2566\u2560\u2550\u256C\u2567\u2568\u2564\u2565\u2559\u2558\u2552\u2553\u256B\u256A\u2518\u250C\u2588\u2584\u258C\u2590\u2580\u03B1\xDF\u0393\u03C0\u03A3\u03C3\xB5\u03C4\u03A6\u0398\u03A9\u03B4\u221E\u03C6\u03B5\u2229\u2261\xB1\u2265\u2264\u2320\u2321\xF7\u2248\xB0\u2219\xB7\u221A\u207F\xB2\u25A0 ".split("");
var VALID_CP437 = CP437.length == 256;
function decodeCP437(stringValue) {
  if (VALID_CP437) {
    let result = "";
    for (let indexCharacter = 0; indexCharacter < stringValue.length; indexCharacter++) {
      result += CP437[stringValue[indexCharacter]];
    }
    return result;
  } else {
    return new TextDecoder().decode(stringValue);
  }
}

// node_modules/@zip.js/zip.js/lib/core/util/decode-text.js
function decodeText(value, encoding) {
  if (encoding && encoding.trim().toLowerCase() == "cp437") {
    return decodeCP437(value);
  } else {
    return new TextDecoder(encoding).decode(value);
  }
}

// node_modules/@zip.js/zip.js/lib/core/zip-entry.js
var PROPERTY_NAME_FILENAME = "filename";
var PROPERTY_NAME_RAW_FILENAME = "rawFilename";
var PROPERTY_NAME_COMMENT = "comment";
var PROPERTY_NAME_RAW_COMMENT = "rawComment";
var PROPERTY_NAME_UNCOMPPRESSED_SIZE = "uncompressedSize";
var PROPERTY_NAME_COMPPRESSED_SIZE = "compressedSize";
var PROPERTY_NAME_OFFSET = "offset";
var PROPERTY_NAME_DISK_NUMBER_START = "diskNumberStart";
var PROPERTY_NAME_LAST_MODIFICATION_DATE = "lastModDate";
var PROPERTY_NAME_RAW_LAST_MODIFICATION_DATE = "rawLastModDate";
var PROPERTY_NAME_LAST_ACCESS_DATE = "lastAccessDate";
var PROPERTY_NAME_RAW_LAST_ACCESS_DATE = "rawLastAccessDate";
var PROPERTY_NAME_CREATION_DATE = "creationDate";
var PROPERTY_NAME_RAW_CREATION_DATE = "rawCreationDate";
var PROPERTY_NAME_INTERNAL_FILE_ATTRIBUTE = "internalFileAttribute";
var PROPERTY_NAME_EXTERNAL_FILE_ATTRIBUTE = "externalFileAttribute";
var PROPERTY_NAME_MS_DOS_COMPATIBLE = "msDosCompatible";
var PROPERTY_NAME_ZIP64 = "zip64";
var PROPERTY_NAMES = [
  PROPERTY_NAME_FILENAME,
  PROPERTY_NAME_RAW_FILENAME,
  PROPERTY_NAME_COMPPRESSED_SIZE,
  PROPERTY_NAME_UNCOMPPRESSED_SIZE,
  PROPERTY_NAME_LAST_MODIFICATION_DATE,
  PROPERTY_NAME_RAW_LAST_MODIFICATION_DATE,
  PROPERTY_NAME_COMMENT,
  PROPERTY_NAME_RAW_COMMENT,
  PROPERTY_NAME_LAST_ACCESS_DATE,
  PROPERTY_NAME_CREATION_DATE,
  PROPERTY_NAME_OFFSET,
  PROPERTY_NAME_DISK_NUMBER_START,
  PROPERTY_NAME_DISK_NUMBER_START,
  PROPERTY_NAME_INTERNAL_FILE_ATTRIBUTE,
  PROPERTY_NAME_EXTERNAL_FILE_ATTRIBUTE,
  PROPERTY_NAME_MS_DOS_COMPATIBLE,
  PROPERTY_NAME_ZIP64,
  "directory",
  "bitFlag",
  "encrypted",
  "signature",
  "filenameUTF8",
  "commentUTF8",
  "compressionMethod",
  "version",
  "versionMadeBy",
  "extraField",
  "rawExtraField",
  "extraFieldZip64",
  "extraFieldUnicodePath",
  "extraFieldUnicodeComment",
  "extraFieldAES",
  "extraFieldNTFS",
  "extraFieldExtendedTimestamp"
];
var Entry = class {
  constructor(data) {
    PROPERTY_NAMES.forEach((name) => this[name] = data[name]);
  }
};

// node_modules/@zip.js/zip.js/lib/core/zip-reader.js
var ERR_BAD_FORMAT = "File format is not recognized";
var ERR_EOCDR_NOT_FOUND = "End of central directory not found";
var ERR_EOCDR_LOCATOR_ZIP64_NOT_FOUND = "End of Zip64 central directory locator not found";
var ERR_CENTRAL_DIRECTORY_NOT_FOUND = "Central directory header not found";
var ERR_LOCAL_FILE_HEADER_NOT_FOUND = "Local file header not found";
var ERR_EXTRAFIELD_ZIP64_NOT_FOUND = "Zip64 extra field not found";
var ERR_ENCRYPTED = "File contains encrypted entry";
var ERR_UNSUPPORTED_ENCRYPTION = "Encryption method not supported";
var ERR_UNSUPPORTED_COMPRESSION = "Compression method not supported";
var ERR_SPLIT_ZIP_FILE = "Split zip file";
var CHARSET_UTF8 = "utf-8";
var CHARSET_CP437 = "cp437";
var ZIP64_PROPERTIES = [
  [PROPERTY_NAME_UNCOMPPRESSED_SIZE, MAX_32_BITS],
  [PROPERTY_NAME_COMPPRESSED_SIZE, MAX_32_BITS],
  [PROPERTY_NAME_OFFSET, MAX_32_BITS],
  [PROPERTY_NAME_DISK_NUMBER_START, MAX_16_BITS]
];
var ZIP64_EXTRACTION = {
  [MAX_16_BITS]: {
    getValue: getUint32,
    bytes: 4
  },
  [MAX_32_BITS]: {
    getValue: getBigUint64,
    bytes: 8
  }
};
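// ZipReader locates the end-of-central-directory record by scanning backwards from
// the end of the file, follows the Zip64 locator when the 16/32-bit fields are
// saturated, then yields one Entry per central directory header.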
var ZipReader = class {
  constructor(reader, options = {}) {
    Object.assign(this, {
      reader: initReader(reader),
      options,
      config: getConfiguration()
    });
  }
  async *getEntriesGenerator(options = {}) {
    const zipReader = this;
    let { reader } = zipReader;
    const { config: config2 } = zipReader;
    await initStream(reader);
    if (reader.size === UNDEFINED_VALUE || !reader.readUint8Array) {
      reader = new BlobReader(await new Response(reader.readable).blob());
      await initStream(reader);
    }
    if (reader.size < END_OF_CENTRAL_DIR_LENGTH) {
      throw new Error(ERR_BAD_FORMAT);
    }
    reader.chunkSize = getChunkSize(config2);
    const endOfDirectoryInfo = await seekSignature(reader, END_OF_CENTRAL_DIR_SIGNATURE, reader.size, END_OF_CENTRAL_DIR_LENGTH, MAX_16_BITS * 16);
    if (!endOfDirectoryInfo) {
      const signatureArray = await readUint8Array(reader, 0, 4);
      const signatureView = getDataView(signatureArray);
      if (getUint32(signatureView) == SPLIT_ZIP_FILE_SIGNATURE) {
        throw new Error(ERR_SPLIT_ZIP_FILE);
      } else {
        throw new Error(ERR_EOCDR_NOT_FOUND);
      }
    }
    const endOfDirectoryView = getDataView(endOfDirectoryInfo);
    let directoryDataLength = getUint32(endOfDirectoryView, 12);
    let directoryDataOffset = getUint32(endOfDirectoryView, 16);
    const commentOffset = endOfDirectoryInfo.offset;
    const commentLength = getUint16(endOfDirectoryView, 20);
    const appendedDataOffset = commentOffset + END_OF_CENTRAL_DIR_LENGTH + commentLength;
    let lastDiskNumber = getUint16(endOfDirectoryView, 4);
    const expectedLastDiskNumber = reader.lastDiskNumber || 0;
    let diskNumber = getUint16(endOfDirectoryView, 6);
    let filesLength = getUint16(endOfDirectoryView, 8);
    let prependedDataLength = 0;
    let startOffset = 0;
    if (directoryDataOffset == MAX_32_BITS || directoryDataLength == MAX_32_BITS || filesLength == MAX_16_BITS || diskNumber == MAX_16_BITS) {
      const endOfDirectoryLocatorArray = await readUint8Array(reader, endOfDirectoryInfo.offset - ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH, ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH);
      const endOfDirectoryLocatorView = getDataView(endOfDirectoryLocatorArray);
      if (getUint32(endOfDirectoryLocatorView, 0) == ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIGNATURE) {
        directoryDataOffset = getBigUint64(endOfDirectoryLocatorView, 8);
        let endOfDirectoryArray = await readUint8Array(reader, directoryDataOffset, ZIP64_END_OF_CENTRAL_DIR_LENGTH, -1);
        let endOfDirectoryView2 = getDataView(endOfDirectoryArray);
        const expectedDirectoryDataOffset = endOfDirectoryInfo.offset - ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH - ZIP64_END_OF_CENTRAL_DIR_LENGTH;
        if (getUint32(endOfDirectoryView2, 0) != ZIP64_END_OF_CENTRAL_DIR_SIGNATURE && directoryDataOffset != expectedDirectoryDataOffset) {
          const originalDirectoryDataOffset = directoryDataOffset;
          directoryDataOffset = expectedDirectoryDataOffset;
          prependedDataLength = directoryDataOffset - originalDirectoryDataOffset;
          endOfDirectoryArray = await readUint8Array(reader, directoryDataOffset, ZIP64_END_OF_CENTRAL_DIR_LENGTH, -1);
          endOfDirectoryView2 = getDataView(endOfDirectoryArray);
        }
        if (getUint32(endOfDirectoryView2, 0) != ZIP64_END_OF_CENTRAL_DIR_SIGNATURE) {
          throw new Error(ERR_EOCDR_LOCATOR_ZIP64_NOT_FOUND);
        }
        if (lastDiskNumber == MAX_16_BITS) {
          lastDiskNumber = getUint32(endOfDirectoryView2, 16);
        }
        if (diskNumber == MAX_16_BITS) {
          diskNumber = getUint32(endOfDirectoryView2, 20);
        }
        if (filesLength == MAX_16_BITS) {
          filesLength = getBigUint64(endOfDirectoryView2, 32);
        }
        if (directoryDataLength == MAX_32_BITS) {
          directoryDataLength = getBigUint64(endOfDirectoryView2, 40);
        }
        directoryDataOffset -= directoryDataLength;
      }
    }
    if (directoryDataOffset >= reader.size) {
      prependedDataLength = reader.size - directoryDataOffset - directoryDataLength - END_OF_CENTRAL_DIR_LENGTH;
      directoryDataOffset = reader.size - directoryDataLength - END_OF_CENTRAL_DIR_LENGTH;
    }
    if (expectedLastDiskNumber != lastDiskNumber) {
      throw new Error(ERR_SPLIT_ZIP_FILE);
    }
    if (directoryDataOffset < 0) {
      throw new Error(ERR_BAD_FORMAT);
    }
    let offset = 0;
    let directoryArray = await readUint8Array(reader, directoryDataOffset, directoryDataLength, diskNumber);
    let directoryView = getDataView(directoryArray);
    if (directoryDataLength) {
      const expectedDirectoryDataOffset = endOfDirectoryInfo.offset - directoryDataLength;
      if (getUint32(directoryView, offset) != CENTRAL_FILE_HEADER_SIGNATURE && directoryDataOffset != expectedDirectoryDataOffset) {
        const originalDirectoryDataOffset = directoryDataOffset;
        directoryDataOffset = expectedDirectoryDataOffset;
        prependedDataLength += directoryDataOffset - originalDirectoryDataOffset;
        directoryArray = await readUint8Array(reader, directoryDataOffset, directoryDataLength, diskNumber);
        directoryView = getDataView(directoryArray);
      }
    }
    const expectedDirectoryDataLength = endOfDirectoryInfo.offset - directoryDataOffset - (reader.lastDiskOffset || 0);
    if (directoryDataLength != expectedDirectoryDataLength && expectedDirectoryDataLength >= 0) {
      directoryDataLength = expectedDirectoryDataLength;
      directoryArray = await readUint8Array(reader, directoryDataOffset, directoryDataLength, diskNumber);
      directoryView = getDataView(directoryArray);
    }
    if (directoryDataOffset < 0 || directoryDataOffset >= reader.size) {
      throw new Error(ERR_BAD_FORMAT);
    }
    const filenameEncoding = getOptionValue(zipReader, options, "filenameEncoding");
    const commentEncoding = getOptionValue(zipReader, options, "commentEncoding");
    for (let indexFile = 0; indexFile < filesLength; indexFile++) {
      const fileEntry = new ZipEntry(reader, config2, zipReader.options);
      if (getUint32(directoryView, offset) != CENTRAL_FILE_HEADER_SIGNATURE) {
        throw new Error(ERR_CENTRAL_DIRECTORY_NOT_FOUND);
      }
      readCommonHeader(fileEntry, directoryView, offset + 6);
      const languageEncodingFlag = Boolean(fileEntry.bitFlag.languageEncodingFlag);
      const filenameOffset = offset + 46;
      const extraFieldOffset = filenameOffset + fileEntry.filenameLength;
      const commentOffset2 = extraFieldOffset + fileEntry.extraFieldLength;
      const versionMadeBy = getUint16(directoryView, offset + 4);
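      // NOTE: `versionMadeBy & 0` is always 0, so msDosCompatible is always true as
      // written; the apparent intent is to test the host-system byte
      // (versionMadeBy >> 8 == 0 for MS-DOS/FAT). Behavior is kept as-is here.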
      const msDosCompatible = (versionMadeBy & 0) == 0;
      const rawFilename = directoryArray.subarray(filenameOffset, extraFieldOffset);
      const commentLength2 = getUint16(directoryView, offset + 32);
      const endOffset = commentOffset2 + commentLength2;
      const rawComment = directoryArray.subarray(commentOffset2, endOffset);
      const filenameUTF8 = languageEncodingFlag;
      const commentUTF8 = languageEncodingFlag;
      const directory = msDosCompatible && (getUint8(directoryView, offset + 38) & FILE_ATTR_MSDOS_DIR_MASK) == FILE_ATTR_MSDOS_DIR_MASK;
      const offsetFileEntry = getUint32(directoryView, offset + 42) + prependedDataLength;
      Object.assign(fileEntry, {
        versionMadeBy,
        msDosCompatible,
        compressedSize: 0,
        uncompressedSize: 0,
        commentLength: commentLength2,
        directory,
        offset: offsetFileEntry,
        diskNumberStart: getUint16(directoryView, offset + 34),
        internalFileAttribute: getUint16(directoryView, offset + 36),
        externalFileAttribute: getUint32(directoryView, offset + 38),
        rawFilename,
        filenameUTF8,
        commentUTF8,
        rawExtraField: directoryArray.subarray(extraFieldOffset, commentOffset2)
      });
      const decode = getOptionValue(zipReader, options, "decodeText") || decodeText;
      const rawFilenameEncoding = filenameUTF8 ? CHARSET_UTF8 : filenameEncoding || CHARSET_CP437;
      const rawCommentEncoding = commentUTF8 ? CHARSET_UTF8 : commentEncoding || CHARSET_CP437;
      let filename = decode(rawFilename, rawFilenameEncoding);
      if (filename === UNDEFINED_VALUE) {
        filename = decodeText(rawFilename, rawFilenameEncoding);
      }
      let comment = decode(rawComment, rawCommentEncoding);
      if (comment === UNDEFINED_VALUE) {
        comment = decodeText(rawComment, rawCommentEncoding);
      }
      Object.assign(fileEntry, {
        rawComment,
        filename,
        comment,
        directory: directory || filename.endsWith(DIRECTORY_SIGNATURE)
      });
      startOffset = Math.max(offsetFileEntry, startOffset);
      await readCommonFooter(fileEntry, fileEntry, directoryView, offset + 6);
      const entry = new Entry(fileEntry);
      entry.getData = (writer, options2) => fileEntry.getData(writer, entry, options2);
      offset = endOffset;
      const { onprogress } = options;
      if (onprogress) {
        try {
          await onprogress(indexFile + 1, filesLength, new Entry(fileEntry));
        } catch (_error) {
        }
      }
      yield entry;
    }
    const extractPrependedData = getOptionValue(zipReader, options, "extractPrependedData");
    const extractAppendedData = getOptionValue(zipReader, options, "extractAppendedData");
    if (extractPrependedData) {
      zipReader.prependedData = startOffset > 0 ? await readUint8Array(reader, 0, startOffset) : new Uint8Array();
    }
    zipReader.comment = commentLength ? await readUint8Array(reader, commentOffset + END_OF_CENTRAL_DIR_LENGTH, commentLength) : new Uint8Array();
    if (extractAppendedData) {
      zipReader.appendedData = appendedDataOffset < reader.size ? await readUint8Array(reader, appendedDataOffset, reader.size - appendedDataOffset) : new Uint8Array();
    }
    return true;
  }
  async getEntries(options = {}) {
    const entries = [];
    for await (const entry of this.getEntriesGenerator(options)) {
      entries.push(entry);
    }
    return entries;
  }
  async close() {
  }
};
var ZipReaderStream = class {
  constructor(options = {}) {
    const { readable, writable } = new TransformStream();
    const gen = new ZipReader(readable, options).getEntriesGenerator();
    this.readable = new ReadableStream({
      async pull(controller) {
        const { done, value } = await gen.next();
        if (done)
          return controller.close();
        const chunk = {
          ...value,
          readable: function() {
            const { readable: readable2, writable: writable2 } = new TransformStream();
            if (value.getData) {
              value.getData(writable2);
              return readable2;
            }
          }()
        };
        delete chunk.getData;
        controller.enqueue(chunk);
      }
    });
    this.writable = writable;
  }
};
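// ZipEntry.getData reads and validates the local file header, then pipes the raw
// bytes through the codec pool (inflate, AES or ZipCrypto decryption, optional
// CRC-32 check) into the supplied writer.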
var ZipEntry = class {
  constructor(reader, config2, options) {
    Object.assign(this, {
      reader,
      config: config2,
      options
    });
  }
  async getData(writer, fileEntry, options = {}) {
    const zipEntry = this;
    const {
      reader,
      offset,
      diskNumberStart,
      extraFieldAES,
      compressionMethod,
      config: config2,
      bitFlag,
      signature,
      rawLastModDate,
      uncompressedSize,
      compressedSize
    } = zipEntry;
    const localDirectory = fileEntry.localDirectory = {};
    const dataArray = await readUint8Array(reader, offset, 30, diskNumberStart);
    const dataView = getDataView(dataArray);
    let password = getOptionValue(zipEntry, options, "password");
    let rawPassword = getOptionValue(zipEntry, options, "rawPassword");
    password = password && password.length && password;
    rawPassword = rawPassword && rawPassword.length && rawPassword;
    if (extraFieldAES) {
      if (extraFieldAES.originalCompressionMethod != COMPRESSION_METHOD_AES) {
        throw new Error(ERR_UNSUPPORTED_COMPRESSION);
      }
    }
    if (compressionMethod != COMPRESSION_METHOD_STORE && compressionMethod != COMPRESSION_METHOD_DEFLATE) {
      throw new Error(ERR_UNSUPPORTED_COMPRESSION);
    }
    if (getUint32(dataView, 0) != LOCAL_FILE_HEADER_SIGNATURE) {
      throw new Error(ERR_LOCAL_FILE_HEADER_NOT_FOUND);
    }
    readCommonHeader(localDirectory, dataView, 4);
    localDirectory.rawExtraField = localDirectory.extraFieldLength ? await readUint8Array(reader, offset + 30 + localDirectory.filenameLength, localDirectory.extraFieldLength, diskNumberStart) : new Uint8Array();
    await readCommonFooter(zipEntry, localDirectory, dataView, 4, true);
    Object.assign(fileEntry, {
      lastAccessDate: localDirectory.lastAccessDate,
      creationDate: localDirectory.creationDate
    });
    const encrypted = zipEntry.encrypted && localDirectory.encrypted;
    const zipCrypto = encrypted && !extraFieldAES;
    if (encrypted) {
      if (!zipCrypto && extraFieldAES.strength === UNDEFINED_VALUE) {
        throw new Error(ERR_UNSUPPORTED_ENCRYPTION);
      } else if (!password && !rawPassword) {
        throw new Error(ERR_ENCRYPTED);
      }
    }
    const dataOffset = offset + 30 + localDirectory.filenameLength + localDirectory.extraFieldLength;
    const size = compressedSize;
    const readable = reader.readable;
    Object.assign(readable, {
      diskNumberStart,
      offset: dataOffset,
      size
    });
    const signal = getOptionValue(zipEntry, options, "signal");
    const checkPasswordOnly = getOptionValue(zipEntry, options, "checkPasswordOnly");
    if (checkPasswordOnly) {
      writer = new WritableStream();
    }
    writer = initWriter(writer);
    await initStream(writer, uncompressedSize);
    const { writable } = writer;
    const { onstart, onprogress, onend } = options;
    const workerOptions = {
      options: {
        codecType: CODEC_INFLATE,
        password,
        rawPassword,
        zipCrypto,
        encryptionStrength: extraFieldAES && extraFieldAES.strength,
        signed: getOptionValue(zipEntry, options, "checkSignature"),
        passwordVerification: zipCrypto && (bitFlag.dataDescriptor ? rawLastModDate >>> 8 & 255 : signature >>> 24 & 255),
        signature,
        compressed: compressionMethod != 0,
        encrypted,
        useWebWorkers: getOptionValue(zipEntry, options, "useWebWorkers"),
        useCompressionStream: getOptionValue(zipEntry, options, "useCompressionStream"),
        transferStreams: getOptionValue(zipEntry, options, "transferStreams"),
        checkPasswordOnly
      },
      config: config2,
      streamOptions: { signal, size, onstart, onprogress, onend }
    };
    let outputSize = 0;
    try {
      ({ outputSize } = await runWorker2({ readable, writable }, workerOptions));
    } catch (error) {
      if (!checkPasswordOnly || error.message != ERR_ABORT_CHECK_PASSWORD) {
        throw error;
      }
    } finally {
      const preventClose = getOptionValue(zipEntry, options, "preventClose");
      writable.size += outputSize;
      if (!preventClose && !writable.locked) {
        await writable.getWriter().close();
      }
    }
    return checkPasswordOnly ? UNDEFINED_VALUE : writer.getData ? writer.getData() : writable;
  }
};
function readCommonHeader(directory, dataView, offset) {
  const rawBitFlag = directory.rawBitFlag = getUint16(dataView, offset + 2);
  const encrypted = (rawBitFlag & BITFLAG_ENCRYPTED) == BITFLAG_ENCRYPTED;
  const rawLastModDate = getUint32(dataView, offset + 6);
  Object.assign(directory, {
    encrypted,
    version: getUint16(dataView, offset),
    bitFlag: {
      level: (rawBitFlag & BITFLAG_LEVEL) >> 1,
      dataDescriptor: (rawBitFlag & BITFLAG_DATA_DESCRIPTOR) == BITFLAG_DATA_DESCRIPTOR,
      languageEncodingFlag: (rawBitFlag & BITFLAG_LANG_ENCODING_FLAG) == BITFLAG_LANG_ENCODING_FLAG
    },
    rawLastModDate,
    lastModDate: getDate(rawLastModDate),
    filenameLength: getUint16(dataView, offset + 22),
    extraFieldLength: getUint16(dataView, offset + 24)
  });
}
async function readCommonFooter(fileEntry, directory, dataView, offset, localDirectory) {
  const { rawExtraField } = directory;
  const extraField = directory.extraField = /* @__PURE__ */ new Map();
  const rawExtraFieldView = getDataView(new Uint8Array(rawExtraField));
  let offsetExtraField = 0;
  try {
    while (offsetExtraField < rawExtraField.length) {
      const type = getUint16(rawExtraFieldView, offsetExtraField);
      const size = getUint16(rawExtraFieldView, offsetExtraField + 2);
      extraField.set(type, {
        type,
        data: rawExtraField.slice(offsetExtraField + 4, offsetExtraField + 4 + size)
      });
      offsetExtraField += 4 + size;
    }
  } catch (_error) {
  }
  const compressionMethod = getUint16(dataView, offset + 4);
  Object.assign(directory, {
    signature: getUint32(dataView, offset + 10),
    uncompressedSize: getUint32(dataView, offset + 18),
    compressedSize: getUint32(dataView, offset + 14)
  });
  const extraFieldZip64 = extraField.get(EXTRAFIELD_TYPE_ZIP64);
  if (extraFieldZip64) {
    readExtraFieldZip64(extraFieldZip64, directory);
    directory.extraFieldZip64 = extraFieldZip64;
  }
  const extraFieldUnicodePath = extraField.get(EXTRAFIELD_TYPE_UNICODE_PATH);
  if (extraFieldUnicodePath) {
    await readExtraFieldUnicode(extraFieldUnicodePath, PROPERTY_NAME_FILENAME, PROPERTY_NAME_RAW_FILENAME, directory, fileEntry);
    directory.extraFieldUnicodePath = extraFieldUnicodePath;
  }
  const extraFieldUnicodeComment = extraField.get(EXTRAFIELD_TYPE_UNICODE_COMMENT);
  if (extraFieldUnicodeComment) {
    await readExtraFieldUnicode(extraFieldUnicodeComment, PROPERTY_NAME_COMMENT, PROPERTY_NAME_RAW_COMMENT, directory, fileEntry);
    directory.extraFieldUnicodeComment = extraFieldUnicodeComment;
  }
  const extraFieldAES = extraField.get(EXTRAFIELD_TYPE_AES);
  if (extraFieldAES) {
    readExtraFieldAES(extraFieldAES, directory, compressionMethod);
    directory.extraFieldAES = extraFieldAES;
  } else {
    directory.compressionMethod = compressionMethod;
  }
  const extraFieldNTFS = extraField.get(EXTRAFIELD_TYPE_NTFS);
  if (extraFieldNTFS) {
    readExtraFieldNTFS(extraFieldNTFS, directory);
    directory.extraFieldNTFS = extraFieldNTFS;
  }
  const extraFieldExtendedTimestamp = extraField.get(EXTRAFIELD_TYPE_EXTENDED_TIMESTAMP);
  if (extraFieldExtendedTimestamp) {
    readExtraFieldExtendedTimestamp(extraFieldExtendedTimestamp, directory, localDirectory);
    directory.extraFieldExtendedTimestamp = extraFieldExtendedTimestamp;
  }
  const extraFieldUSDZ = extraField.get(EXTRAFIELD_TYPE_USDZ);
  if (extraFieldUSDZ) {
    directory.extraFieldUSDZ = extraFieldUSDZ;
  }
}
function readExtraFieldZip64(extraFieldZip64, directory) {
  directory.zip64 = true;
  const extraFieldView = getDataView(extraFieldZip64.data);
  const missingProperties = ZIP64_PROPERTIES.filter(([propertyName, max]) => directory[propertyName] == max);
  for (let indexMissingProperty = 0, offset = 0; indexMissingProperty < missingProperties.length; indexMissingProperty++) {
    const [propertyName, max] = missingProperties[indexMissingProperty];
    if (directory[propertyName] == max) {
      const extraction = ZIP64_EXTRACTION[max];
      directory[propertyName] = extraFieldZip64[propertyName] = extraction.getValue(extraFieldView, offset);
      offset += extraction.bytes;
    } else if (extraFieldZip64[propertyName]) {
      throw new Error(ERR_EXTRAFIELD_ZIP64_NOT_FOUND);
    }
  }
}
async function readExtraFieldUnicode(extraFieldUnicode, propertyName, rawPropertyName, directory, fileEntry) {
  const extraFieldView = getDataView(extraFieldUnicode.data);
  const crc32 = new Crc32();
  crc32.append(fileEntry[rawPropertyName]);
  const dataViewSignature = getDataView(new Uint8Array(4));
  dataViewSignature.setUint32(0, crc32.get(), true);
  const signature = getUint32(extraFieldView, 1);
  Object.assign(extraFieldUnicode, {
    version: getUint8(extraFieldView, 0),
    [propertyName]: decodeText(extraFieldUnicode.data.subarray(5)),
    valid: !fileEntry.bitFlag.languageEncodingFlag && signature == getUint32(dataViewSignature, 0)
  });
  if (extraFieldUnicode.valid) {
    directory[propertyName] = extraFieldUnicode[propertyName];
    directory[propertyName + "UTF8"] = true;
  }
}
function readExtraFieldAES(extraFieldAES, directory, compressionMethod) {
  const extraFieldView = getDataView(extraFieldAES.data);
  const strength = getUint8(extraFieldView, 4);
  Object.assign(extraFieldAES, {
    vendorVersion: getUint8(extraFieldView, 0),
    vendorId: getUint8(extraFieldView, 2),
    strength,
    originalCompressionMethod: compressionMethod,
    compressionMethod: getUint16(extraFieldView, 5)
  });
  directory.compressionMethod = extraFieldAES.compressionMethod;
}
function readExtraFieldNTFS(extraFieldNTFS, directory) {
  const extraFieldView = getDataView(extraFieldNTFS.data);
  let offsetExtraField = 4;
  let tag1Data;
  try {
    while (offsetExtraField < extraFieldNTFS.data.length && !tag1Data) {
      const tagValue = getUint16(extraFieldView, offsetExtraField);
      const attributeSize = getUint16(extraFieldView, offsetExtraField + 2);
      if (tagValue == EXTRAFIELD_TYPE_NTFS_TAG1) {
        tag1Data = extraFieldNTFS.data.slice(offsetExtraField + 4, offsetExtraField + 4 + attributeSize);
      }
      offsetExtraField += 4 + attributeSize;
    }
  } catch (_error) {
  }
  try {
    if (tag1Data && tag1Data.length == 24) {
      const tag1View = getDataView(tag1Data);
      const rawLastModDate = tag1View.getBigUint64(0, true);
      const rawLastAccessDate = tag1View.getBigUint64(8, true);
      const rawCreationDate = tag1View.getBigUint64(16, true);
      Object.assign(extraFieldNTFS, {
        rawLastModDate,
        rawLastAccessDate,
        rawCreationDate
      });
      const lastModDate = getDateNTFS(rawLastModDate);
      const lastAccessDate = getDateNTFS(rawLastAccessDate);
      const creationDate = getDateNTFS(rawCreationDate);
      const extraFieldData = { lastModDate, lastAccessDate, creationDate };
      Object.assign(extraFieldNTFS, extraFieldData);
      Object.assign(directory, extraFieldData);
    }
  } catch (_error) {
  }
}
function readExtraFieldExtendedTimestamp(extraFieldExtendedTimestamp, directory, localDirectory) {
  const extraFieldView = getDataView(extraFieldExtendedTimestamp.data);
  const flags = getUint8(extraFieldView, 0);
  const timeProperties = [];
  const timeRawProperties = [];
  if (localDirectory) {
    if ((flags & 1) == 1) {
      timeProperties.push(PROPERTY_NAME_LAST_MODIFICATION_DATE);
      timeRawProperties.push(PROPERTY_NAME_RAW_LAST_MODIFICATION_DATE);
    }
    if ((flags & 2) == 2) {
      timeProperties.push(PROPERTY_NAME_LAST_ACCESS_DATE);
      timeRawProperties.push(PROPERTY_NAME_RAW_LAST_ACCESS_DATE);
    }
    if ((flags & 4) == 4) {
      timeProperties.push(PROPERTY_NAME_CREATION_DATE);
      timeRawProperties.push(PROPERTY_NAME_RAW_CREATION_DATE);
    }
  } else if (extraFieldExtendedTimestamp.data.length >= 5) {
    timeProperties.push(PROPERTY_NAME_LAST_MODIFICATION_DATE);
    timeRawProperties.push(PROPERTY_NAME_RAW_LAST_MODIFICATION_DATE);
  }
  let offset = 1;
  timeProperties.forEach((propertyName, indexProperty) => {
    if (extraFieldExtendedTimestamp.data.length >= offset + 4) {
      const time = getUint32(extraFieldView, offset);
      directory[propertyName] = extraFieldExtendedTimestamp[propertyName] = new Date(time * 1e3);
      const rawPropertyName = timeRawProperties[indexProperty];
      extraFieldExtendedTimestamp[rawPropertyName] = time;
    }
    offset += 4;
  });
}
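// Scans backwards from startOffset for a 4-byte record signature (used to
// locate the end-of-central-directory record): first over the final
// minimumBytes bytes, then over a window extended by maximumLength
// (clamped to the start of the data), which allows for a trailing
// archive comment.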
async function seekSignature(reader, signature, startOffset, minimumBytes, maximumLength) {
  const signatureArray = new Uint8Array(4);
  const signatureView = getDataView(signatureArray);
  setUint32(signatureView, 0, signature);
  const maximumBytes = minimumBytes + maximumLength;
  return await seek(minimumBytes) || await seek(Math.min(maximumBytes, startOffset));
  async function seek(length) {
    const offset = startOffset - length;
    const bytes = await readUint8Array(reader, offset, length);
    for (let indexByte = bytes.length - minimumBytes; indexByte >= 0; indexByte--) {
      if (bytes[indexByte] == signatureArray[0] && bytes[indexByte + 1] == signatureArray[1] && bytes[indexByte + 2] == signatureArray[2] && bytes[indexByte + 3] == signatureArray[3]) {
        return {
          offset: offset + indexByte,
          buffer: bytes.slice(indexByte, indexByte + minimumBytes).buffer
        };
      }
    }
  }
}
function getOptionValue(zipReader, options, name) {
  return options[name] === UNDEFINED_VALUE ? zipReader.options[name] : options[name];
}
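// Decodes an MS-DOS date/time pair as stored in zip headers: the high 16
// bits pack year-since-1980/month/day (7/4/5 bits), the low 16 bits pack
// hours/minutes/two-second units (5/6/5 bits).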
function getDate(timeRaw) {
  const date = (timeRaw & 4294901760) >> 16, time = timeRaw & 65535;
  try {
    return new Date(1980 + ((date & 65024) >> 9), ((date & 480) >> 5) - 1, date & 31, (time & 63488) >> 11, (time & 2016) >> 5, (time & 31) * 2, 0);
  } catch (_error) {
  }
}
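// Converts an NTFS FILETIME (100-nanosecond ticks since 1601-01-01 UTC) to
// a Date: divide by 10^4 to get milliseconds, then subtract the
// 11644473600000 ms offset between the NTFS and Unix epochs.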
function getDateNTFS(timeRaw) {
  return new Date(Number(timeRaw / BigInt(1e4) - BigInt(116444736e5)));
}
function getUint8(view, offset) {
  return view.getUint8(offset);
}
function getUint16(view, offset) {
  return view.getUint16(offset, true);
}
function getUint32(view, offset) {
  return view.getUint32(offset, true);
}
function getBigUint64(view, offset) {
  return Number(view.getBigUint64(offset, true));
}
function setUint32(view, offset, value) {
  view.setUint32(offset, value, true);
}
function getDataView(array) {
  return new DataView(array.buffer);
}

// node_modules/@zip.js/zip.js/lib/core/zip-writer.js
var ERR_DUPLICATED_NAME = "File already exists";
var ERR_INVALID_COMMENT = "Zip file comment exceeds 64KB";
var ERR_INVALID_ENTRY_COMMENT = "File entry comment exceeds 64KB";
var ERR_INVALID_ENTRY_NAME = "File entry name exceeds 64KB";
var ERR_INVALID_VERSION = "Version exceeds 65535";
var ERR_INVALID_ENCRYPTION_STRENGTH = "The strength must equal 1, 2, or 3";
var ERR_INVALID_EXTRAFIELD_TYPE = "Extra field type exceeds 65535";
var ERR_INVALID_EXTRAFIELD_DATA = "Extra field data exceeds 64KB";
var ERR_UNSUPPORTED_FORMAT = "Zip64 is not supported (make sure 'keepOrder' is set to 'true')";
var EXTRAFIELD_DATA_AES = new Uint8Array([7, 0, 2, 0, 65, 69, 3, 0, 0]);
var workers = 0;
var pendingEntries = [];
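// Streams zip entries to the wrapped writer. Entry additions are throttled
// to config.maxWorkers concurrent compressions; additional add() calls wait
// in pendingEntries until a slot frees up. A minimal, illustrative usage
// sketch (BlobWriter and TextReader are exported by this bundle):
//
//   const writer = new ZipWriter(new BlobWriter("application/zip"));
//   await writer.add("hello.txt", new TextReader("Hello world"));
//   const blob = await writer.close();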
var ZipWriter = class {
  constructor(writer, options = {}) {
    writer = initWriter(writer);
    const addSplitZipSignature = writer.availableSize !== UNDEFINED_VALUE && writer.availableSize > 0 && writer.availableSize !== Infinity && writer.maxSize !== UNDEFINED_VALUE && writer.maxSize > 0 && writer.maxSize !== Infinity;
    Object.assign(this, {
      writer,
      addSplitZipSignature,
      options,
      config: getConfiguration(),
      files: /* @__PURE__ */ new Map(),
      filenames: /* @__PURE__ */ new Set(),
      offset: writer.writable.size,
      pendingEntriesSize: 0,
      pendingAddFileCalls: /* @__PURE__ */ new Set(),
      bufferedWrites: 0
    });
  }
  async add(name = "", reader, options = {}) {
    const zipWriter = this;
    const {
      pendingAddFileCalls,
      config: config2
    } = zipWriter;
    if (workers < config2.maxWorkers) {
      workers++;
    } else {
      await new Promise((resolve) => pendingEntries.push(resolve));
    }
    let promiseAddFile;
    try {
      name = name.trim();
      if (zipWriter.filenames.has(name)) {
        throw new Error(ERR_DUPLICATED_NAME);
      }
      zipWriter.filenames.add(name);
      promiseAddFile = addFile(zipWriter, name, reader, options);
      pendingAddFileCalls.add(promiseAddFile);
      return await promiseAddFile;
    } catch (error) {
      zipWriter.filenames.delete(name);
      throw error;
    } finally {
      pendingAddFileCalls.delete(promiseAddFile);
      const pendingEntry = pendingEntries.shift();
      if (pendingEntry) {
        pendingEntry();
      } else {
        workers--;
      }
    }
  }
  async close(comment = new Uint8Array(), options = {}) {
    const zipWriter = this;
    const { pendingAddFileCalls, writer } = this;
    const { writable } = writer;
    while (pendingAddFileCalls.size) {
      await Promise.allSettled(Array.from(pendingAddFileCalls));
    }
    await closeFile(this, comment, options);
    const preventClose = getOptionValue2(zipWriter, options, "preventClose");
    if (!preventClose) {
      await writable.getWriter().close();
    }
    return writer.getData ? writer.getData() : writable;
  }
};
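// TransformStream-friendly wrapper around ZipWriter. An illustrative sketch,
// assuming a source ReadableStream and a destination WritableStream:
//
//   const zipStream = new ZipWriterStream();
//   zipStream.readable.pipeTo(destination);
//   await source.pipeTo(zipStream.writable("entry.bin"));
//   await zipStream.close();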
var ZipWriterStream = class {
  constructor(options = {}) {
    const { readable, writable } = new TransformStream();
    this.readable = readable;
    this.zipWriter = new ZipWriter(writable, options);
  }
  transform(path) {
    const { readable, writable } = new TransformStream({
      flush: () => {
        this.zipWriter.close();
      }
    });
    this.zipWriter.add(path, readable);
    return { readable: this.readable, writable };
  }
  writable(path) {
    const { readable, writable } = new TransformStream();
    this.zipWriter.add(path, readable);
    return writable;
  }
  close(comment = void 0, options = {}) {
    return this.zipWriter.close(comment, options);
  }
};
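// Resolves per-entry options against the ZipWriter defaults, validates
// name/comment/version limits, serializes any custom extra fields, and
// decides up front whether the entry needs Zip64 records (unknown reader
// size, sizes or offsets past 4GB, or disk numbers past 65535).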
async function addFile(zipWriter, name, reader, options) {
  name = name.trim();
  if (options.directory && !name.endsWith(DIRECTORY_SIGNATURE)) {
    name += DIRECTORY_SIGNATURE;
  } else {
    options.directory = name.endsWith(DIRECTORY_SIGNATURE);
  }
  const encode = getOptionValue2(zipWriter, options, "encodeText", encodeText);
  let rawFilename = encode(name);
  if (rawFilename === UNDEFINED_VALUE) {
    rawFilename = encodeText(name);
  }
  if (getLength(rawFilename) > MAX_16_BITS) {
    throw new Error(ERR_INVALID_ENTRY_NAME);
  }
  const comment = options.comment || "";
  let rawComment = encode(comment);
  if (rawComment === UNDEFINED_VALUE) {
    rawComment = encodeText(comment);
  }
  if (getLength(rawComment) > MAX_16_BITS) {
    throw new Error(ERR_INVALID_ENTRY_COMMENT);
  }
  const version = getOptionValue2(zipWriter, options, "version", VERSION_DEFLATE);
  if (version > MAX_16_BITS) {
    throw new Error(ERR_INVALID_VERSION);
  }
  const versionMadeBy = getOptionValue2(zipWriter, options, "versionMadeBy", 20);
  if (versionMadeBy > MAX_16_BITS) {
    throw new Error(ERR_INVALID_VERSION);
  }
  const lastModDate = getOptionValue2(zipWriter, options, PROPERTY_NAME_LAST_MODIFICATION_DATE, /* @__PURE__ */ new Date());
  const lastAccessDate = getOptionValue2(zipWriter, options, PROPERTY_NAME_LAST_ACCESS_DATE);
  const creationDate = getOptionValue2(zipWriter, options, PROPERTY_NAME_CREATION_DATE);
  const msDosCompatible = getOptionValue2(zipWriter, options, PROPERTY_NAME_MS_DOS_COMPATIBLE, true);
  const internalFileAttribute = getOptionValue2(zipWriter, options, PROPERTY_NAME_INTERNAL_FILE_ATTRIBUTE, 0);
  const externalFileAttribute = getOptionValue2(zipWriter, options, PROPERTY_NAME_EXTERNAL_FILE_ATTRIBUTE, 0);
  const password = getOptionValue2(zipWriter, options, "password");
  const rawPassword = getOptionValue2(zipWriter, options, "rawPassword");
  const encryptionStrength = getOptionValue2(zipWriter, options, "encryptionStrength", 3);
  const zipCrypto = getOptionValue2(zipWriter, options, "zipCrypto");
  const extendedTimestamp = getOptionValue2(zipWriter, options, "extendedTimestamp", true);
  const keepOrder = getOptionValue2(zipWriter, options, "keepOrder", true);
  const level = getOptionValue2(zipWriter, options, "level");
  const useWebWorkers = getOptionValue2(zipWriter, options, "useWebWorkers");
  const bufferedWrite = getOptionValue2(zipWriter, options, "bufferedWrite");
  const dataDescriptorSignature = getOptionValue2(zipWriter, options, "dataDescriptorSignature", false);
  const signal = getOptionValue2(zipWriter, options, "signal");
  const useCompressionStream = getOptionValue2(zipWriter, options, "useCompressionStream");
  let dataDescriptor = getOptionValue2(zipWriter, options, "dataDescriptor", true);
  let zip64 = getOptionValue2(zipWriter, options, PROPERTY_NAME_ZIP64);
  if (password !== UNDEFINED_VALUE && encryptionStrength !== UNDEFINED_VALUE && (encryptionStrength < 1 || encryptionStrength > 3)) {
    throw new Error(ERR_INVALID_ENCRYPTION_STRENGTH);
  }
  let rawExtraField = new Uint8Array();
  const { extraField } = options;
  if (extraField) {
    let extraFieldSize = 0;
    let offset = 0;
    extraField.forEach((data) => extraFieldSize += 4 + getLength(data));
    rawExtraField = new Uint8Array(extraFieldSize);
    extraField.forEach((data, type) => {
      if (type > MAX_16_BITS) {
        throw new Error(ERR_INVALID_EXTRAFIELD_TYPE);
      }
      if (getLength(data) > MAX_16_BITS) {
        throw new Error(ERR_INVALID_EXTRAFIELD_DATA);
      }
      arraySet(rawExtraField, new Uint16Array([type]), offset);
      arraySet(rawExtraField, new Uint16Array([getLength(data)]), offset + 2);
      arraySet(rawExtraField, data, offset + 4);
      offset += 4 + getLength(data);
    });
  }
  let maximumCompressedSize = 0;
  let maximumEntrySize = 0;
  let uncompressedSize = 0;
  const zip64Enabled = zip64 === true;
  if (reader) {
    reader = initReader(reader);
    await initStream(reader);
    if (reader.size === UNDEFINED_VALUE) {
      dataDescriptor = true;
      if (zip64 || zip64 === UNDEFINED_VALUE) {
        zip64 = true;
        uncompressedSize = maximumCompressedSize = MAX_32_BITS + 1;
      }
    } else {
      uncompressedSize = reader.size;
      maximumCompressedSize = getMaximumCompressedSize(uncompressedSize);
    }
  }
  const { diskOffset, diskNumber, maxSize } = zipWriter.writer;
  const zip64UncompressedSize = zip64Enabled || uncompressedSize > MAX_32_BITS;
  const zip64CompressedSize = zip64Enabled || maximumCompressedSize > MAX_32_BITS;
  const zip64Offset = zip64Enabled || zipWriter.offset + zipWriter.pendingEntriesSize - diskOffset > MAX_32_BITS;
  const supportZip64SplitFile = getOptionValue2(zipWriter, options, "supportZip64SplitFile", true);
  const zip64DiskNumberStart = supportZip64SplitFile && zip64Enabled || diskNumber + Math.ceil(zipWriter.pendingEntriesSize / maxSize) > MAX_16_BITS;
  if (zip64Offset || zip64UncompressedSize || zip64CompressedSize || zip64DiskNumberStart) {
    if (zip64 === false || !keepOrder) {
      throw new Error(ERR_UNSUPPORTED_FORMAT);
    } else {
      zip64 = true;
    }
  }
  zip64 = zip64 || false;
  options = Object.assign({}, options, {
    rawFilename,
    rawComment,
    version,
    versionMadeBy,
    lastModDate,
    lastAccessDate,
    creationDate,
    rawExtraField,
    zip64,
    zip64UncompressedSize,
    zip64CompressedSize,
    zip64Offset,
    zip64DiskNumberStart,
    password,
    rawPassword,
    level: !useCompressionStream && (zipWriter.config.CompressionStream === UNDEFINED_VALUE && zipWriter.config.CompressionStreamNative === UNDEFINED_VALUE) ? 0 : level,
    useWebWorkers,
    encryptionStrength,
    extendedTimestamp,
    zipCrypto,
    bufferedWrite,
    keepOrder,
    dataDescriptor,
    dataDescriptorSignature,
    signal,
    msDosCompatible,
    internalFileAttribute,
    externalFileAttribute,
    useCompressionStream
  });
  const headerInfo = getHeaderInfo(options);
  const dataDescriptorInfo = getDataDescriptorInfo(options);
  const metadataSize = getLength(headerInfo.localHeaderArray, dataDescriptorInfo.dataDescriptorArray);
  maximumEntrySize = metadataSize + maximumCompressedSize;
  if (zipWriter.options.usdz) {
    maximumEntrySize += maximumEntrySize + 64;
  }
  zipWriter.pendingEntriesSize += maximumEntrySize;
  let fileEntry;
  try {
    fileEntry = await getFileEntry(zipWriter, name, reader, { headerInfo, dataDescriptorInfo, metadataSize }, options);
  } finally {
    zipWriter.pendingEntriesSize -= maximumEntrySize;
  }
  Object.assign(fileEntry, { name, comment, extraField });
  return new Entry(fileEntry);
}
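// Writes one entry while preserving archive order. When the shared writer
// is busy (or a data descriptor cannot be used) the entry is first
// compressed into an in-memory Blob via a TransformStream, then flushed
// once the previous entry releases its lock; otherwise it streams straight
// to the target writer under an exclusive writer lock.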
async function getFileEntry(zipWriter, name, reader, entryInfo, options) {
  const {
    files,
    writer
  } = zipWriter;
  const {
    keepOrder,
    dataDescriptor,
    signal
  } = options;
  const {
    headerInfo
  } = entryInfo;
  const { usdz } = zipWriter.options;
  const previousFileEntry = Array.from(files.values()).pop();
  let fileEntry = {};
  let bufferedWrite;
  let releaseLockWriter;
  let releaseLockCurrentFileEntry;
  let writingBufferedEntryData;
  let writingEntryData;
  let fileWriter;
  let blobPromise;
  files.set(name, fileEntry);
  try {
    let lockPreviousFileEntry;
    if (keepOrder) {
      lockPreviousFileEntry = previousFileEntry && previousFileEntry.lock;
      requestLockCurrentFileEntry();
    }
    if ((options.bufferedWrite || zipWriter.writerLocked || zipWriter.bufferedWrites && keepOrder || !dataDescriptor) && !usdz) {
      fileWriter = new TransformStream();
      blobPromise = new Response(fileWriter.readable).blob();
      fileWriter.writable.size = 0;
      bufferedWrite = true;
      zipWriter.bufferedWrites++;
      await initStream(writer);
    } else {
      fileWriter = writer;
      await requestLockWriter();
    }
    await initStream(fileWriter);
    const { writable } = writer;
    let { diskOffset } = writer;
    if (zipWriter.addSplitZipSignature) {
      delete zipWriter.addSplitZipSignature;
      const signatureArray = new Uint8Array(4);
      const signatureArrayView = getDataView2(signatureArray);
      setUint322(signatureArrayView, 0, SPLIT_ZIP_FILE_SIGNATURE);
      await writeData(writable, signatureArray);
      zipWriter.offset += 4;
    }
    if (usdz) {
      appendExtraFieldUSDZ(entryInfo, zipWriter.offset - diskOffset);
    }
    if (!bufferedWrite) {
      await lockPreviousFileEntry;
      await skipDiskIfNeeded(writable);
    }
    const { diskNumber } = writer;
    writingEntryData = true;
    fileEntry.diskNumberStart = diskNumber;
    fileEntry = await createFileEntry(reader, fileWriter, fileEntry, entryInfo, zipWriter.config, options);
    writingEntryData = false;
    files.set(name, fileEntry);
    fileEntry.filename = name;
    if (bufferedWrite) {
      await fileWriter.writable.getWriter().close();
      let blob = await blobPromise;
      await lockPreviousFileEntry;
      await requestLockWriter();
      writingBufferedEntryData = true;
      if (!dataDescriptor) {
        blob = await writeExtraHeaderInfo(fileEntry, blob, writable, options);
      }
      await skipDiskIfNeeded(writable);
      fileEntry.diskNumberStart = writer.diskNumber;
      diskOffset = writer.diskOffset;
      await blob.stream().pipeTo(writable, { preventClose: true, preventAbort: true, signal });
      writable.size += blob.size;
      writingBufferedEntryData = false;
    }
    fileEntry.offset = zipWriter.offset - diskOffset;
    if (fileEntry.zip64) {
      setZip64ExtraInfo(fileEntry, options);
    } else if (fileEntry.offset > MAX_32_BITS) {
      throw new Error(ERR_UNSUPPORTED_FORMAT);
    }
    zipWriter.offset += fileEntry.size;
    return fileEntry;
  } catch (error) {
    if (bufferedWrite && writingBufferedEntryData || !bufferedWrite && writingEntryData) {
      zipWriter.hasCorruptedEntries = true;
      if (error) {
        try {
          error.corruptedEntry = true;
        } catch (_error) {
        }
      }
      if (bufferedWrite) {
        zipWriter.offset += fileWriter.writable.size;
      } else {
        zipWriter.offset = fileWriter.writable.size;
      }
    }
    files.delete(name);
    throw error;
  } finally {
    if (bufferedWrite) {
      zipWriter.bufferedWrites--;
    }
    if (releaseLockCurrentFileEntry) {
      releaseLockCurrentFileEntry();
    }
    if (releaseLockWriter) {
      releaseLockWriter();
    }
  }
  function requestLockCurrentFileEntry() {
    fileEntry.lock = new Promise((resolve) => releaseLockCurrentFileEntry = resolve);
  }
  async function requestLockWriter() {
    zipWriter.writerLocked = true;
    const { lockWriter } = zipWriter;
    zipWriter.lockWriter = new Promise((resolve) => releaseLockWriter = () => {
      zipWriter.writerLocked = false;
      resolve();
    });
    await lockWriter;
  }
  async function skipDiskIfNeeded(writable) {
    if (getLength(headerInfo.localHeaderArray) > writer.availableSize) {
      writer.availableSize = 0;
      await writeData(writable, new Uint8Array());
    }
  }
}
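// Emits the local file header, pipes the reader through the codec worker
// (deflate and/or AES or ZipCrypto encryption), then writes the data
// descriptor; the CRC-32 signature and input/output sizes come back from
// the worker result.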
async function createFileEntry(reader, writer, { diskNumberStart, lock }, entryInfo, config2, options) {
  const {
    headerInfo,
    dataDescriptorInfo,
    metadataSize
  } = entryInfo;
  const {
    localHeaderArray,
    headerArray,
    lastModDate,
    rawLastModDate,
    encrypted,
    compressed,
    version,
    compressionMethod,
    rawExtraFieldExtendedTimestamp,
    extraFieldExtendedTimestampFlag,
    rawExtraFieldNTFS,
    rawExtraFieldAES
  } = headerInfo;
  const { dataDescriptorArray } = dataDescriptorInfo;
  const {
    rawFilename,
    lastAccessDate,
    creationDate,
    password,
    rawPassword,
    level,
    zip64,
    zip64UncompressedSize,
    zip64CompressedSize,
    zip64Offset,
    zip64DiskNumberStart,
    zipCrypto,
    dataDescriptor,
    directory,
    versionMadeBy,
    rawComment,
    rawExtraField,
    useWebWorkers,
    onstart,
    onprogress,
    onend,
    signal,
    encryptionStrength,
    extendedTimestamp,
    msDosCompatible,
    internalFileAttribute,
    externalFileAttribute,
    useCompressionStream
  } = options;
  const fileEntry = {
    lock,
    versionMadeBy,
    zip64,
    directory: Boolean(directory),
    filenameUTF8: true,
    rawFilename,
    commentUTF8: true,
    rawComment,
    rawExtraFieldExtendedTimestamp,
    rawExtraFieldNTFS,
    rawExtraFieldAES,
    rawExtraField,
    extendedTimestamp,
    msDosCompatible,
    internalFileAttribute,
    externalFileAttribute,
    diskNumberStart
  };
  let compressedSize = 0;
  let uncompressedSize = 0;
  let signature;
  const { writable } = writer;
  if (reader) {
    reader.chunkSize = getChunkSize(config2);
    await writeData(writable, localHeaderArray);
    const readable = reader.readable;
    const size = readable.size = reader.size;
    const workerOptions = {
      options: {
        codecType: CODEC_DEFLATE,
        level,
        rawPassword,
        password,
        encryptionStrength,
        zipCrypto: encrypted && zipCrypto,
        passwordVerification: encrypted && zipCrypto && rawLastModDate >> 8 & 255,
        signed: true,
        compressed,
        encrypted,
        useWebWorkers,
        useCompressionStream,
        transferStreams: false
      },
      config: config2,
      streamOptions: { signal, size, onstart, onprogress, onend }
    };
    const result = await runWorker2({ readable, writable }, workerOptions);
    uncompressedSize = result.inputSize;
    compressedSize = result.outputSize;
    signature = result.signature;
    writable.size += uncompressedSize;
  } else {
    await writeData(writable, localHeaderArray);
  }
  let rawExtraFieldZip64;
  if (zip64) {
    let rawExtraFieldZip64Length = 4;
    if (zip64UncompressedSize) {
      rawExtraFieldZip64Length += 8;
    }
    if (zip64CompressedSize) {
      rawExtraFieldZip64Length += 8;
    }
    if (zip64Offset) {
      rawExtraFieldZip64Length += 8;
    }
    if (zip64DiskNumberStart) {
      rawExtraFieldZip64Length += 4;
    }
    rawExtraFieldZip64 = new Uint8Array(rawExtraFieldZip64Length);
  } else {
    rawExtraFieldZip64 = new Uint8Array();
  }
  setEntryInfo({
    signature,
    rawExtraFieldZip64,
    compressedSize,
    uncompressedSize,
    headerInfo,
    dataDescriptorInfo
  }, options);
  if (dataDescriptor) {
    await writeData(writable, dataDescriptorArray);
  }
  Object.assign(fileEntry, {
    uncompressedSize,
    compressedSize,
    lastModDate,
    rawLastModDate,
    creationDate,
    lastAccessDate,
    encrypted,
    size: metadataSize + compressedSize,
    compressionMethod,
    version,
    headerArray,
    signature,
    rawExtraFieldZip64,
    extraFieldExtendedTimestampFlag,
    zip64UncompressedSize,
    zip64CompressedSize,
    zip64Offset,
    zip64DiskNumberStart
  });
  return fileEntry;
}
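// Builds the 26-byte fixed header shared by the local file header and the
// central directory record: version needed, general purpose bit flag,
// compression method, MS-DOS date/time, and name/extra-field lengths
// (CRC-32 and sizes are backfilled later by setEntryInfo), plus the AES,
// extended timestamp, and NTFS extra fields.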
function getHeaderInfo(options) {
  const {
    rawFilename,
    lastModDate,
    lastAccessDate,
    creationDate,
    rawPassword,
    password,
    level,
    zip64,
    zipCrypto,
    dataDescriptor,
    directory,
    rawExtraField,
    encryptionStrength,
    extendedTimestamp
  } = options;
  const compressed = level !== 0 && !directory;
  const encrypted = Boolean(password && getLength(password) || rawPassword && getLength(rawPassword));
  let version = options.version;
  let rawExtraFieldAES;
  if (encrypted && !zipCrypto) {
    rawExtraFieldAES = new Uint8Array(getLength(EXTRAFIELD_DATA_AES) + 2);
    const extraFieldAESView = getDataView2(rawExtraFieldAES);
    setUint16(extraFieldAESView, 0, EXTRAFIELD_TYPE_AES);
    arraySet(rawExtraFieldAES, EXTRAFIELD_DATA_AES, 2);
    setUint8(extraFieldAESView, 8, encryptionStrength);
  } else {
    rawExtraFieldAES = new Uint8Array();
  }
  let rawExtraFieldNTFS;
  let rawExtraFieldExtendedTimestamp;
  let extraFieldExtendedTimestampFlag;
  if (extendedTimestamp) {
    rawExtraFieldExtendedTimestamp = new Uint8Array(9 + (lastAccessDate ? 4 : 0) + (creationDate ? 4 : 0));
    const extraFieldExtendedTimestampView = getDataView2(rawExtraFieldExtendedTimestamp);
    setUint16(extraFieldExtendedTimestampView, 0, EXTRAFIELD_TYPE_EXTENDED_TIMESTAMP);
    setUint16(extraFieldExtendedTimestampView, 2, getLength(rawExtraFieldExtendedTimestamp) - 4);
    extraFieldExtendedTimestampFlag = 1 + (lastAccessDate ? 2 : 0) + (creationDate ? 4 : 0);
    setUint8(extraFieldExtendedTimestampView, 4, extraFieldExtendedTimestampFlag);
    let offset = 5;
    setUint322(extraFieldExtendedTimestampView, offset, Math.floor(lastModDate.getTime() / 1e3));
    offset += 4;
    if (lastAccessDate) {
      setUint322(extraFieldExtendedTimestampView, offset, Math.floor(lastAccessDate.getTime() / 1e3));
      offset += 4;
    }
    if (creationDate) {
      setUint322(extraFieldExtendedTimestampView, offset, Math.floor(creationDate.getTime() / 1e3));
    }
    try {
      rawExtraFieldNTFS = new Uint8Array(36);
      const extraFieldNTFSView = getDataView2(rawExtraFieldNTFS);
      const lastModTimeNTFS = getTimeNTFS(lastModDate);
      setUint16(extraFieldNTFSView, 0, EXTRAFIELD_TYPE_NTFS);
      setUint16(extraFieldNTFSView, 2, 32);
      setUint16(extraFieldNTFSView, 8, EXTRAFIELD_TYPE_NTFS_TAG1);
      setUint16(extraFieldNTFSView, 10, 24);
      setBigUint64(extraFieldNTFSView, 12, lastModTimeNTFS);
      setBigUint64(extraFieldNTFSView, 20, getTimeNTFS(lastAccessDate) || lastModTimeNTFS);
      setBigUint64(extraFieldNTFSView, 28, getTimeNTFS(creationDate) || lastModTimeNTFS);
    } catch (_error) {
      rawExtraFieldNTFS = new Uint8Array();
    }
  } else {
    rawExtraFieldNTFS = rawExtraFieldExtendedTimestamp = new Uint8Array();
  }
  let bitFlag = BITFLAG_LANG_ENCODING_FLAG;
  if (dataDescriptor) {
    bitFlag = bitFlag | BITFLAG_DATA_DESCRIPTOR;
  }
  let compressionMethod = COMPRESSION_METHOD_STORE;
  if (compressed) {
    compressionMethod = COMPRESSION_METHOD_DEFLATE;
  }
  if (zip64) {
    version = version > VERSION_ZIP64 ? version : VERSION_ZIP64;
  }
  if (encrypted) {
    bitFlag = bitFlag | BITFLAG_ENCRYPTED;
    if (!zipCrypto) {
      version = version > VERSION_AES ? version : VERSION_AES;
      compressionMethod = COMPRESSION_METHOD_AES;
      if (compressed) {
        rawExtraFieldAES[9] = COMPRESSION_METHOD_DEFLATE;
      }
    }
  }
  const headerArray = new Uint8Array(26);
  const headerView = getDataView2(headerArray);
  setUint16(headerView, 0, version);
  setUint16(headerView, 2, bitFlag);
  setUint16(headerView, 4, compressionMethod);
  const dateArray = new Uint32Array(1);
  const dateView = getDataView2(dateArray);
  let lastModDateMsDos;
  if (lastModDate < MIN_DATE) {
    lastModDateMsDos = MIN_DATE;
  } else if (lastModDate > MAX_DATE) {
    lastModDateMsDos = MAX_DATE;
  } else {
    lastModDateMsDos = lastModDate;
  }
  setUint16(dateView, 0, (lastModDateMsDos.getHours() << 6 | lastModDateMsDos.getMinutes()) << 5 | lastModDateMsDos.getSeconds() / 2);
  setUint16(dateView, 2, (lastModDateMsDos.getFullYear() - 1980 << 4 | lastModDateMsDos.getMonth() + 1) << 5 | lastModDateMsDos.getDate());
  const rawLastModDate = dateArray[0];
  setUint322(headerView, 6, rawLastModDate);
  setUint16(headerView, 22, getLength(rawFilename));
  const extraFieldLength = getLength(rawExtraFieldAES, rawExtraFieldExtendedTimestamp, rawExtraFieldNTFS, rawExtraField);
  setUint16(headerView, 24, extraFieldLength);
  const localHeaderArray = new Uint8Array(30 + getLength(rawFilename) + extraFieldLength);
  const localHeaderView = getDataView2(localHeaderArray);
  setUint322(localHeaderView, 0, LOCAL_FILE_HEADER_SIGNATURE);
  arraySet(localHeaderArray, headerArray, 4);
  arraySet(localHeaderArray, rawFilename, 30);
  arraySet(localHeaderArray, rawExtraFieldAES, 30 + getLength(rawFilename));
  arraySet(localHeaderArray, rawExtraFieldExtendedTimestamp, 30 + getLength(rawFilename, rawExtraFieldAES));
  arraySet(localHeaderArray, rawExtraFieldNTFS, 30 + getLength(rawFilename, rawExtraFieldAES, rawExtraFieldExtendedTimestamp));
  arraySet(localHeaderArray, rawExtraField, 30 + getLength(rawFilename, rawExtraFieldAES, rawExtraFieldExtendedTimestamp, rawExtraFieldNTFS));
  return {
    localHeaderArray,
    headerArray,
    headerView,
    lastModDate,
    rawLastModDate,
    encrypted,
    compressed,
    version,
    compressionMethod,
    extraFieldExtendedTimestampFlag,
    rawExtraFieldExtendedTimestamp,
    rawExtraFieldNTFS,
    rawExtraFieldAES,
    extraFieldLength
  };
}
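// USDZ expects each entry's file data to start on a 64-byte boundary; this
// pads the local header with an extra field (type 0x1986) sized so that
// the header ends exactly on the next 64-byte multiple.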
function appendExtraFieldUSDZ(entryInfo, zipWriterOffset) {
  const { headerInfo } = entryInfo;
  let { localHeaderArray, extraFieldLength } = headerInfo;
  let localHeaderArrayView = getDataView2(localHeaderArray);
  let extraBytesLength = 64 - (zipWriterOffset + getLength(localHeaderArray)) % 64;
  if (extraBytesLength < 4) {
    extraBytesLength += 64;
  }
  const rawExtraFieldUSDZ = new Uint8Array(extraBytesLength);
  const extraFieldUSDZView = getDataView2(rawExtraFieldUSDZ);
  setUint16(extraFieldUSDZView, 0, EXTRAFIELD_TYPE_USDZ);
  setUint16(extraFieldUSDZView, 2, extraBytesLength - 2);
  const previousLocalHeaderArray = localHeaderArray;
  headerInfo.localHeaderArray = localHeaderArray = new Uint8Array(getLength(previousLocalHeaderArray) + extraBytesLength);
  arraySet(localHeaderArray, previousLocalHeaderArray);
  arraySet(localHeaderArray, rawExtraFieldUSDZ, getLength(previousLocalHeaderArray));
  localHeaderArrayView = getDataView2(localHeaderArray);
  setUint16(localHeaderArrayView, 28, extraFieldLength + extraBytesLength);
  entryInfo.metadataSize += extraBytesLength;
}
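// Data descriptor sizes: 12 bytes (CRC-32 plus 32-bit compressed and
// uncompressed sizes), 20 bytes when Zip64 needs 64-bit sizes, plus 4 more
// when the optional 0x08074b50 signature prefix is included.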
function getDataDescriptorInfo(options) {
  const {
    zip64,
    dataDescriptor,
    dataDescriptorSignature
  } = options;
  let dataDescriptorArray = new Uint8Array();
  let dataDescriptorView, dataDescriptorOffset = 0;
  if (dataDescriptor) {
    dataDescriptorArray = new Uint8Array(zip64 ? dataDescriptorSignature ? 24 : 20 : dataDescriptorSignature ? 16 : 12);
    dataDescriptorView = getDataView2(dataDescriptorArray);
    if (dataDescriptorSignature) {
      dataDescriptorOffset = 4;
      setUint322(dataDescriptorView, 0, DATA_DESCRIPTOR_RECORD_SIGNATURE);
    }
  }
  return {
    dataDescriptorArray,
    dataDescriptorView,
    dataDescriptorOffset
  };
}
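// Backfills CRC-32 and sizes into the header and data descriptor once the
// entry has been written. For Zip64 entries the 32-bit header fields are
// set to 0xFFFFFFFF and the real 64-bit values go into the Zip64 extra
// field (type 0x0001).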
function setEntryInfo(entryInfo, options) {
  const {
    signature,
    rawExtraFieldZip64,
    compressedSize,
    uncompressedSize,
    headerInfo,
    dataDescriptorInfo
  } = entryInfo;
  const {
    headerView,
    encrypted
  } = headerInfo;
  const {
    dataDescriptorView,
    dataDescriptorOffset
  } = dataDescriptorInfo;
  const {
    zip64,
    zip64UncompressedSize,
    zip64CompressedSize,
    zipCrypto,
    dataDescriptor
  } = options;
  if ((!encrypted || zipCrypto) && signature !== UNDEFINED_VALUE) {
    setUint322(headerView, 10, signature);
    if (dataDescriptor) {
      setUint322(dataDescriptorView, dataDescriptorOffset, signature);
    }
  }
  if (zip64) {
    const rawExtraFieldZip64View = getDataView2(rawExtraFieldZip64);
    setUint16(rawExtraFieldZip64View, 0, EXTRAFIELD_TYPE_ZIP64);
    setUint16(rawExtraFieldZip64View, 2, getLength(rawExtraFieldZip64) - 4);
    let rawExtraFieldZip64Offset = 4;
    if (zip64UncompressedSize) {
      setUint322(headerView, 18, MAX_32_BITS);
      setBigUint64(rawExtraFieldZip64View, rawExtraFieldZip64Offset, BigInt(uncompressedSize));
      rawExtraFieldZip64Offset += 8;
    }
    if (zip64CompressedSize) {
      setUint322(headerView, 14, MAX_32_BITS);
      setBigUint64(rawExtraFieldZip64View, rawExtraFieldZip64Offset, BigInt(compressedSize));
    }
    if (dataDescriptor) {
      setBigUint64(dataDescriptorView, dataDescriptorOffset + 4, BigInt(compressedSize));
      setBigUint64(dataDescriptorView, dataDescriptorOffset + 12, BigInt(uncompressedSize));
    }
  } else {
    setUint322(headerView, 14, compressedSize);
    setUint322(headerView, 18, uncompressedSize);
    if (dataDescriptor) {
      setUint322(dataDescriptorView, dataDescriptorOffset + 4, compressedSize);
      setUint322(dataDescriptorView, dataDescriptorOffset + 8, uncompressedSize);
    }
  }
}
async function writeExtraHeaderInfo(fileEntry, entryData, writable, { zipCrypto }) {
  let arrayBuffer;
  arrayBuffer = await entryData.slice(0, 26).arrayBuffer();
  if (arrayBuffer.byteLength != 26) {
    arrayBuffer = arrayBuffer.slice(0, 26);
  }
  const arrayBufferView = new DataView(arrayBuffer);
  if (!fileEntry.encrypted || zipCrypto) {
    setUint322(arrayBufferView, 14, fileEntry.signature);
  }
  if (fileEntry.zip64) {
    setUint322(arrayBufferView, 18, MAX_32_BITS);
    setUint322(arrayBufferView, 22, MAX_32_BITS);
  } else {
    setUint322(arrayBufferView, 18, fileEntry.compressedSize);
    setUint322(arrayBufferView, 22, fileEntry.uncompressedSize);
  }
  await writeData(writable, new Uint8Array(arrayBuffer));
  return entryData.slice(arrayBuffer.byteLength);
}
function setZip64ExtraInfo(fileEntry, options) {
  const { rawExtraFieldZip64, offset, diskNumberStart } = fileEntry;
  const { zip64UncompressedSize, zip64CompressedSize, zip64Offset, zip64DiskNumberStart } = options;
  const rawExtraFieldZip64View = getDataView2(rawExtraFieldZip64);
  let rawExtraFieldZip64Offset = 4;
  if (zip64UncompressedSize) {
    rawExtraFieldZip64Offset += 8;
  }
  if (zip64CompressedSize) {
    rawExtraFieldZip64Offset += 8;
  }
  if (zip64Offset) {
    setBigUint64(rawExtraFieldZip64View, rawExtraFieldZip64Offset, BigInt(offset));
    rawExtraFieldZip64Offset += 8;
  }
  if (zip64DiskNumberStart) {
    setUint322(rawExtraFieldZip64View, rawExtraFieldZip64Offset, diskNumberStart);
  }
}
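// Writes the central directory (one 46-byte header plus name, extra fields,
// and comment per entry), then the end-of-central-directory record,
// prepending the Zip64 EOCD record and locator when any count, size, or
// offset overflows the classic 16/32-bit fields.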
async function closeFile(zipWriter, comment, options) {
  const { files, writer } = zipWriter;
  const { diskOffset, writable } = writer;
  let { diskNumber } = writer;
  let offset = 0;
  let directoryDataLength = 0;
  let directoryOffset = zipWriter.offset - diskOffset;
  let filesLength = files.size;
  for (const [, fileEntry] of files) {
    const {
      rawFilename,
      rawExtraFieldZip64,
      rawExtraFieldAES,
      rawComment,
      rawExtraFieldNTFS,
      rawExtraField,
      extendedTimestamp,
      extraFieldExtendedTimestampFlag,
      lastModDate
    } = fileEntry;
    let rawExtraFieldTimestamp;
    if (extendedTimestamp) {
      rawExtraFieldTimestamp = new Uint8Array(9);
      const extraFieldExtendedTimestampView = getDataView2(rawExtraFieldTimestamp);
      setUint16(extraFieldExtendedTimestampView, 0, EXTRAFIELD_TYPE_EXTENDED_TIMESTAMP);
      setUint16(extraFieldExtendedTimestampView, 2, 5);
      setUint8(extraFieldExtendedTimestampView, 4, extraFieldExtendedTimestampFlag);
      setUint322(extraFieldExtendedTimestampView, 5, Math.floor(lastModDate.getTime() / 1e3));
    } else {
      rawExtraFieldTimestamp = new Uint8Array();
    }
    fileEntry.rawExtraFieldCDExtendedTimestamp = rawExtraFieldTimestamp;
    directoryDataLength += 46 + getLength(
      rawFilename,
      rawComment,
      rawExtraFieldZip64,
      rawExtraFieldAES,
      rawExtraFieldNTFS,
      rawExtraFieldTimestamp,
      rawExtraField
    );
  }
  const directoryArray = new Uint8Array(directoryDataLength);
  const directoryView = getDataView2(directoryArray);
  await initStream(writer);
  let directoryDiskOffset = 0;
  for (const [indexFileEntry, fileEntry] of Array.from(files.values()).entries()) {
    const {
      offset: fileEntryOffset,
      rawFilename,
      rawExtraFieldZip64,
      rawExtraFieldAES,
      rawExtraFieldCDExtendedTimestamp,
      rawExtraFieldNTFS,
      rawExtraField,
      rawComment,
      versionMadeBy,
      headerArray,
      directory,
      zip64: zip642,
      zip64UncompressedSize,
      zip64CompressedSize,
      zip64DiskNumberStart,
      zip64Offset,
      msDosCompatible,
      internalFileAttribute,
      externalFileAttribute,
      diskNumberStart,
      uncompressedSize,
      compressedSize
    } = fileEntry;
    const extraFieldLength = getLength(rawExtraFieldZip64, rawExtraFieldAES, rawExtraFieldCDExtendedTimestamp, rawExtraFieldNTFS, rawExtraField);
    setUint322(directoryView, offset, CENTRAL_FILE_HEADER_SIGNATURE);
    setUint16(directoryView, offset + 4, versionMadeBy);
    const headerView = getDataView2(headerArray);
    if (!zip64UncompressedSize) {
      setUint322(headerView, 18, uncompressedSize);
    }
    if (!zip64CompressedSize) {
      setUint322(headerView, 14, compressedSize);
    }
    arraySet(directoryArray, headerArray, offset + 6);
    setUint16(directoryView, offset + 30, extraFieldLength);
    setUint16(directoryView, offset + 32, getLength(rawComment));
    setUint16(directoryView, offset + 34, zip642 && zip64DiskNumberStart ? MAX_16_BITS : diskNumberStart);
    setUint16(directoryView, offset + 36, internalFileAttribute);
    if (externalFileAttribute) {
      setUint322(directoryView, offset + 38, externalFileAttribute);
    } else if (directory && msDosCompatible) {
      setUint8(directoryView, offset + 38, FILE_ATTR_MSDOS_DIR_MASK);
    }
    setUint322(directoryView, offset + 42, zip642 && zip64Offset ? MAX_32_BITS : fileEntryOffset);
    arraySet(directoryArray, rawFilename, offset + 46);
    arraySet(directoryArray, rawExtraFieldZip64, offset + 46 + getLength(rawFilename));
    arraySet(directoryArray, rawExtraFieldAES, offset + 46 + getLength(rawFilename, rawExtraFieldZip64));
    arraySet(directoryArray, rawExtraFieldCDExtendedTimestamp, offset + 46 + getLength(rawFilename, rawExtraFieldZip64, rawExtraFieldAES));
    arraySet(directoryArray, rawExtraFieldNTFS, offset + 46 + getLength(rawFilename, rawExtraFieldZip64, rawExtraFieldAES, rawExtraFieldCDExtendedTimestamp));
    arraySet(directoryArray, rawExtraField, offset + 46 + getLength(rawFilename, rawExtraFieldZip64, rawExtraFieldAES, rawExtraFieldCDExtendedTimestamp, rawExtraFieldNTFS));
    arraySet(directoryArray, rawComment, offset + 46 + getLength(rawFilename) + extraFieldLength);
    const directoryEntryLength = 46 + getLength(rawFilename, rawComment) + extraFieldLength;
    if (offset - directoryDiskOffset > writer.availableSize) {
      writer.availableSize = 0;
      await writeData(writable, directoryArray.slice(directoryDiskOffset, offset));
      directoryDiskOffset = offset;
    }
    offset += directoryEntryLength;
    if (options.onprogress) {
      try {
        await options.onprogress(indexFileEntry + 1, files.size, new Entry(fileEntry));
      } catch (_error) {
      }
    }
  }
  await writeData(writable, directoryDiskOffset ? directoryArray.slice(directoryDiskOffset) : directoryArray);
  let lastDiskNumber = writer.diskNumber;
  const { availableSize } = writer;
  if (availableSize < END_OF_CENTRAL_DIR_LENGTH) {
    lastDiskNumber++;
  }
  let zip64 = getOptionValue2(zipWriter, options, "zip64");
  if (directoryOffset > MAX_32_BITS || directoryDataLength > MAX_32_BITS || filesLength > MAX_16_BITS || lastDiskNumber > MAX_16_BITS) {
    if (zip64 === false) {
      throw new Error(ERR_UNSUPPORTED_FORMAT);
    } else {
      zip64 = true;
    }
  }
  const endOfdirectoryArray = new Uint8Array(zip64 ? ZIP64_END_OF_CENTRAL_DIR_TOTAL_LENGTH : END_OF_CENTRAL_DIR_LENGTH);
  const endOfdirectoryView = getDataView2(endOfdirectoryArray);
  offset = 0;
  if (zip64) {
    setUint322(endOfdirectoryView, 0, ZIP64_END_OF_CENTRAL_DIR_SIGNATURE);
    setBigUint64(endOfdirectoryView, 4, BigInt(44));
    setUint16(endOfdirectoryView, 12, 45);
    setUint16(endOfdirectoryView, 14, 45);
    setUint322(endOfdirectoryView, 16, lastDiskNumber);
    setUint322(endOfdirectoryView, 20, diskNumber);
    setBigUint64(endOfdirectoryView, 24, BigInt(filesLength));
    setBigUint64(endOfdirectoryView, 32, BigInt(filesLength));
    setBigUint64(endOfdirectoryView, 40, BigInt(directoryDataLength));
    setBigUint64(endOfdirectoryView, 48, BigInt(directoryOffset));
    setUint322(endOfdirectoryView, 56, ZIP64_END_OF_CENTRAL_DIR_LOCATOR_SIGNATURE);
    setBigUint64(endOfdirectoryView, 64, BigInt(directoryOffset) + BigInt(directoryDataLength));
    setUint322(endOfdirectoryView, 72, lastDiskNumber + 1);
    const supportZip64SplitFile = getOptionValue2(zipWriter, options, "supportZip64SplitFile", true);
    if (supportZip64SplitFile) {
      lastDiskNumber = MAX_16_BITS;
      diskNumber = MAX_16_BITS;
    }
    filesLength = MAX_16_BITS;
    directoryOffset = MAX_32_BITS;
    directoryDataLength = MAX_32_BITS;
    offset += ZIP64_END_OF_CENTRAL_DIR_LENGTH + ZIP64_END_OF_CENTRAL_DIR_LOCATOR_LENGTH;
  }
  setUint322(endOfdirectoryView, offset, END_OF_CENTRAL_DIR_SIGNATURE);
  setUint16(endOfdirectoryView, offset + 4, lastDiskNumber);
  setUint16(endOfdirectoryView, offset + 6, diskNumber);
  setUint16(endOfdirectoryView, offset + 8, filesLength);
  setUint16(endOfdirectoryView, offset + 10, filesLength);
  setUint322(endOfdirectoryView, offset + 12, directoryDataLength);
  setUint322(endOfdirectoryView, offset + 16, directoryOffset);
  const commentLength = getLength(comment);
  if (commentLength) {
    if (commentLength <= MAX_16_BITS) {
      setUint16(endOfdirectoryView, offset + 20, commentLength);
    } else {
      throw new Error(ERR_INVALID_COMMENT);
    }
  }
  await writeData(writable, endOfdirectoryArray);
  if (commentLength) {
    await writeData(writable, comment);
  }
}
async function writeData(writable, array) {
  const streamWriter = writable.getWriter();
  try {
    await streamWriter.ready;
    writable.size += getLength(array);
    await streamWriter.write(array);
  } finally {
    streamWriter.releaseLock();
  }
}
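// Inverse of getDateNTFS: Date -> NTFS FILETIME (100-nanosecond ticks
// since 1601-01-01 UTC); returns undefined when no date is given.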
function getTimeNTFS(date) {
  if (date) {
    return (BigInt(date.getTime()) + BigInt(116444736e5)) * BigInt(1e4);
  }
}
function getOptionValue2(zipWriter, options, name, defaultValue) {
  const result = options[name] === UNDEFINED_VALUE ? zipWriter.options[name] : options[name];
  return result === UNDEFINED_VALUE ? defaultValue : result;
}
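// Upper bound on deflate output, allowing 5 bytes of stored-block overhead
// per started 16383-byte chunk: e.g. a 32768-byte input is bounded by
// 32768 + 5 * (2 + 1) = 32783 bytes.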
function getMaximumCompressedSize(uncompressedSize) {
  return uncompressedSize + 5 * (Math.floor(uncompressedSize / 16383) + 1);
}
function setUint8(view, offset, value) {
  view.setUint8(offset, value);
}
function setUint16(view, offset, value) {
  view.setUint16(offset, value, true);
}
function setUint322(view, offset, value) {
  view.setUint32(offset, value, true);
}
function setBigUint64(view, offset, value) {
  view.setBigUint64(offset, value, true);
}
function arraySet(array, typedArray, offset) {
  array.set(typedArray, offset);
}
function getDataView2(array) {
  return new DataView(array.buffer);
}
function getLength(...arrayLikes) {
  let result = 0;
  arrayLikes.forEach((arrayLike) => arrayLike && (result += arrayLike.length));
  return result;
}

// node_modules/@zip.js/zip.js/lib/zip.js
var baseURL;
try {
  baseURL = import.meta.url;
} catch (_error) {
}
configure({ baseURL });
e(configure);
export {
  BlobReader,
  BlobWriter,
  Data64URIReader,
  Data64URIWriter,
  ERR_BAD_FORMAT,
  ERR_CENTRAL_DIRECTORY_NOT_FOUND,
  ERR_DUPLICATED_NAME,
  ERR_ENCRYPTED,
  ERR_EOCDR_LOCATOR_ZIP64_NOT_FOUND,
  ERR_EOCDR_NOT_FOUND,
  ERR_EXTRAFIELD_ZIP64_NOT_FOUND,
  ERR_HTTP_RANGE,
  ERR_INVALID_COMMENT,
  ERR_INVALID_ENCRYPTION_STRENGTH,
  ERR_INVALID_ENTRY_COMMENT,
  ERR_INVALID_ENTRY_NAME,
  ERR_INVALID_EXTRAFIELD_DATA,
  ERR_INVALID_EXTRAFIELD_TYPE,
  ERR_INVALID_PASSWORD,
  ERR_INVALID_SIGNATURE,
  ERR_INVALID_VERSION,
  ERR_ITERATOR_COMPLETED_TOO_SOON,
  ERR_LOCAL_FILE_HEADER_NOT_FOUND,
  ERR_SPLIT_ZIP_FILE,
  ERR_UNSUPPORTED_COMPRESSION,
  ERR_UNSUPPORTED_ENCRYPTION,
  ERR_UNSUPPORTED_FORMAT,
  HttpRangeReader,
  HttpReader,
  Reader,
  SplitDataReader,
  SplitDataWriter,
  SplitZipReader,
  SplitZipWriter,
  TextReader,
  TextWriter,
  Uint8ArrayReader,
  Uint8ArrayWriter,
  Writer,
  ZipReader,
  ZipReaderStream,
  ZipWriter,
  ZipWriterStream,
  configure,
  getMimeType,
  initReader,
  initShimAsyncCodec,
  initStream,
  initWriter,
  readUint8Array,
  terminateWorkers
};
//# sourceMappingURL=zip.js.map