diff --git a/headers/entryHeader.js b/headers/entryHeader.js index 5c64ad0..c153059 100644 --- a/headers/entryHeader.js +++ b/headers/entryHeader.js @@ -25,7 +25,7 @@ module.exports = function () { // Without it file names may be corrupted for other apps when file names use unicode chars _flags |= Constants.FLG_EFS; - var _dataHeader = {}; + var _localHeader = {}; function setTime(val) { val = new Date(val); @@ -165,25 +165,25 @@ module.exports = function () { return (_flags & 1) === 1; }, - get entryHeaderSize() { + get centralHeaderSize() { return Constants.CENHDR + _fnameLen + _extraLen + _comLen; }, get realDataOffset() { - return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; + return _offset + Constants.LOCHDR + _localHeader.fnameLen + _localHeader.extraLen; }, - get dataHeader() { - return _dataHeader; + get localHeader() { + return _localHeader; }, - loadDataHeaderFromBinary: function (/*Buffer*/ input) { + loadLocalHeaderFromBinary: function (/*Buffer*/ input) { var data = input.slice(_offset, _offset + Constants.LOCHDR); // 30 bytes and should start with "PK\003\004" if (data.readUInt32LE(0) !== Constants.LOCSIG) { throw new Error(Utils.Errors.INVALID_LOC); } - _dataHeader = { + _localHeader = { // version needed to extract version: data.readUInt16LE(Constants.LOCVER), // general purpose bit flag @@ -242,7 +242,7 @@ module.exports = function () { _offset = data.readUInt32LE(Constants.CENOFF); }, - dataHeaderToBinary: function () { + localHeaderToBinary: function () { // LOC header size (30 bytes) var data = Buffer.alloc(Constants.LOCHDR); // "PK\003\004" @@ -268,7 +268,7 @@ module.exports = function () { return data; }, - entryHeaderToBinary: function () { + centralHeaderToBinary: function () { // CEN header size (46 bytes) var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); // "PK\001\002" @@ -329,7 +329,7 @@ module.exports = function () { inAttr: _inattr, attr: _attr, offset: _offset, - 
entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + centralHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) }; }, diff --git a/test/header.js b/test/header.js index bfc8d8c..fe3c347 100644 --- a/test/header.js +++ b/test/header.js @@ -81,7 +81,7 @@ describe("headers", () => { }); describe("entry-header", () => { - const entryHeader = require("../headers/entryHeader"); + const centralHeader = require("../headers/entryHeader"); const datestamp = [1981, 3, 1, 12, 10, 10]; const readBuf = Buffer.from("504b0102140014000008080045618102efbeadde0001000000020000000000000000000000000000000000000000", "hex"); @@ -106,7 +106,7 @@ describe("headers", () => { }; it("compare binary header values with some predetermined values", () => { - const head = new entryHeader(); + const head = new centralHeader(); head.loadFromBinary(readBuf); for (const name in readBufValues) { @@ -114,7 +114,7 @@ describe("headers", () => { head[name] = readBufValues[name]; } - expect(head.entryHeaderSize).to.equal(46); + expect(head.centralHeaderSize).to.equal(46); // split into individual values by local time or timezone messes up our results expect([head.time.getFullYear(), head.time.getMonth(), head.time.getDate(), head.time.getHours(), head.time.getMinutes(), head.time.getSeconds()]).to.eql(datestamp); @@ -135,7 +135,7 @@ describe("headers", () => { inAttr: 0, attr: 0, offset: 0, - entryHeaderSize: "46 bytes" + centralHeaderSize: "46 bytes" }; headerdata.time = head.time; @@ -143,16 +143,16 @@ describe("headers", () => { }); it("read binary and create new binary from it, they have to be equal", () => { - const head = new entryHeader(); + const head = new centralHeader(); head.loadFromBinary(readBuf); - const buf = head.entryHeaderToBinary(); + const buf = head.centralHeaderToBinary(); expect(buf.length).to.equal(readBuf.length); expect(buf).to.eql(readBuf); }); it("construct header with values and compare, binaries have to be equal", () => { - const head 
= new entryHeader(); + const head = new centralHeader(); // Set Values for (const name in readBufValues) { @@ -164,26 +164,26 @@ describe("headers", () => { // if time is constructed by new Date() it is also in local zone and so it cancels possible timezone difference head.time = new Date(...datestamp); - const buf = head.entryHeaderToBinary(); + const buf = head.centralHeaderToBinary(); expect(buf.length).to.equal(readBuf.length); expect(buf).to.eql(readBuf); }); - it("entryHeaderSize results if postdata is specified", () => { - const head = new entryHeader(); + it("centralHeaderSize results if postdata is specified", () => { + const head = new centralHeader(); head.fileNameLength = 100; head.commentLength = 200; head.extraLength = 100; - expect(head.entryHeaderSize).to.equal(446); + expect(head.centralHeaderSize).to.equal(446); }); - describe("data-header", () => { - const dataheader = Buffer.from("504b030414000008080045618102efbeadde000100000002000000000000", "hex"); + describe("local-header", () => { + const localHeader = Buffer.from("504b030414000008080045618102efbeadde000100000002000000000000", "hex"); - const dataHeaderValues = { + const localHeaderValues = { compressedSize: 0x100, crc: 0xdeadbeef, extraLen: 0, @@ -195,28 +195,28 @@ describe("headers", () => { }; it("compare binary header values with predetermined values", () => { - const head = new entryHeader(); + const head = new centralHeader(); head.loadFromBinary(readBuf); - head.loadDataHeaderFromBinary(dataheader); + head.loadLocalHeaderFromBinary(localHeader); - for (const name in dataHeaderValues) { - expect(head.dataHeader[name]).to.equal(dataHeaderValues[name]); + for (const name in localHeaderValues) { + expect(head.localHeader[name]).to.equal(localHeaderValues[name]); } }); it("read binary and create new binary from it, they have to be equal", () => { - const head = new entryHeader(); + const head = new centralHeader(); head.loadFromBinary(readBuf); - head.loadDataHeaderFromBinary(dataheader); 
+ head.loadLocalHeaderFromBinary(localHeader); - const buf = head.dataHeaderToBinary(); + const buf = head.localHeaderToBinary(); - expect(buf.length).to.equal(dataheader.length); - expect(buf).to.eql(dataheader); + expect(buf.length).to.equal(localHeader.length); + expect(buf).to.eql(localHeader); }); it("construct header by values and compare binaries have to be equal", () => { - const head = new entryHeader(); + const head = new centralHeader(); head.loadFromBinary(readBuf); // Set Values @@ -229,10 +229,10 @@ describe("headers", () => { // if time is constructed by new Date() it is also in local zone and so it cancels possible timezone difference head.time = new Date(...datestamp); - const buf = head.dataHeaderToBinary(); + const buf = head.localHeaderToBinary(); - expect(buf.length).to.equal(dataheader.length); - expect(buf).to.eql(dataheader); + expect(buf.length).to.equal(localHeader.length); + expect(buf).to.eql(localHeader); }); }); }); diff --git a/zipEntry.js b/zipEntry.js index f52d361..9deed1f 100644 --- a/zipEntry.js +++ b/zipEntry.js @@ -4,7 +4,7 @@ var Utils = require("./util"), Methods = require("./methods"); module.exports = function (/*Buffer*/ input) { - var _entryHeader = new Headers.EntryHeader(), + var _centralHeader = new Headers.EntryHeader(), _entryName = Buffer.alloc(0), _comment = Buffer.alloc(0), _isDirectory = false, @@ -15,14 +15,14 @@ module.exports = function (/*Buffer*/ input) { if (!input || !Buffer.isBuffer(input)) { return Buffer.alloc(0); } - _entryHeader.loadDataHeaderFromBinary(input); - return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); + _centralHeader.loadLocalHeaderFromBinary(input); + return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize); } function crc32OK(data) { // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written - if 
((_entryHeader.flags & 0x8) !== 0x8) { - if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { + if ((_centralHeader.flags & 0x8) !== 0x8) { + if (Utils.crc32(data) !== _centralHeader.localHeader.crc) { return false; } } else { @@ -53,16 +53,16 @@ module.exports = function (/*Buffer*/ input) { return compressedData; } - if (_entryHeader.encrypted) { + if (_centralHeader.encrypted) { if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { throw new Error("ADM-ZIP: Incompatible password parameter"); } - compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); + compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass); } - var data = Buffer.alloc(_entryHeader.size); + var data = Buffer.alloc(_centralHeader.size); - switch (_entryHeader.method) { + switch (_centralHeader.method) { case Utils.Constants.STORED: compressedData.copy(data); if (!crc32OK(data)) { @@ -74,7 +74,7 @@ module.exports = function (/*Buffer*/ input) { return data; } case Utils.Constants.DEFLATED: - var inflater = new Methods.Inflater(compressedData, _entryHeader.size); + var inflater = new Methods.Inflater(compressedData, _centralHeader.size); if (!async) { const result = inflater.inflate(data); result.copy(data, 0); @@ -111,9 +111,9 @@ module.exports = function (/*Buffer*/ input) { if (uncompressedData.length && !_isDirectory) { var compressedData; // Local file header - switch (_entryHeader.method) { + switch (_centralHeader.method) { case Utils.Constants.STORED: - _entryHeader.compressedSize = _entryHeader.size; + _centralHeader.compressedSize = _centralHeader.size; compressedData = Buffer.alloc(uncompressedData.length); uncompressedData.copy(compressedData); @@ -125,12 +125,12 @@ module.exports = function (/*Buffer*/ input) { var deflater = new Methods.Deflater(uncompressedData); if (!async) { var deflated = deflater.deflate(); - _entryHeader.compressedSize = deflated.length; + _centralHeader.compressedSize = deflated.length; return deflated; } 
else { deflater.deflateAsync(function (data) { compressedData = Buffer.alloc(data.length); - _entryHeader.compressedSize = data.length; + _centralHeader.compressedSize = data.length; data.copy(compressedData); callback && callback(compressedData); }); @@ -171,26 +171,26 @@ module.exports = function (/*Buffer*/ input) { if (data.length >= Constants.EF_ZIP64_SCOMP) { size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); - if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { - _entryHeader.size = size; + if (_centralHeader.size === Constants.EF_ZIP64_OR_32) { + _centralHeader.size = size; } } if (data.length >= Constants.EF_ZIP64_RHO) { compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); - if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { - _entryHeader.compressedSize = compressedSize; + if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + _centralHeader.compressedSize = compressedSize; } } if (data.length >= Constants.EF_ZIP64_DSN) { offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); - if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { - _entryHeader.offset = offset; + if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) { + _centralHeader.offset = offset; } } if (data.length >= Constants.EF_ZIP64_DSN + 4) { diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); - if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { - _entryHeader.diskNumStart = diskNumStart; + if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + _centralHeader.diskNumStart = diskNumStart; } } } @@ -206,7 +206,7 @@ module.exports = function (/*Buffer*/ input) { _entryName = Utils.toBuffer(val); var lastChar = _entryName[_entryName.length - 1]; _isDirectory = lastChar === 47 || lastChar === 92; - _entryHeader.fileNameLength = _entryName.length; + _centralHeader.fileNameLength = _entryName.length; }, get extra() { @@ -214,7 +214,7 @@ module.exports = function (/*Buffer*/ input) { }, set extra(val) { _extra = val; - 
_entryHeader.extraLength = val.length; + _centralHeader.extraLength = val.length; parseExtra(val); }, @@ -223,7 +223,7 @@ module.exports = function (/*Buffer*/ input) { }, set comment(val) { _comment = Utils.toBuffer(val); - _entryHeader.commentLength = _comment.length; + _centralHeader.commentLength = _comment.length; }, get name() { @@ -250,18 +250,18 @@ module.exports = function (/*Buffer*/ input) { setData: function (value) { uncompressedData = Utils.toBuffer(value); if (!_isDirectory && uncompressedData.length) { - _entryHeader.size = uncompressedData.length; - _entryHeader.method = Utils.Constants.DEFLATED; - _entryHeader.crc = Utils.crc32(value); - _entryHeader.changed = true; + _centralHeader.size = uncompressedData.length; + _centralHeader.method = Utils.Constants.DEFLATED; + _centralHeader.crc = Utils.crc32(value); + _centralHeader.changed = true; } else { // folders and blank files should be stored - _entryHeader.method = Utils.Constants.STORED; + _centralHeader.method = Utils.Constants.STORED; } }, getData: function (pass) { - if (_entryHeader.changed) { + if (_centralHeader.changed) { return uncompressedData; } else { return decompress(false, null, pass); @@ -269,7 +269,7 @@ module.exports = function (/*Buffer*/ input) { }, getDataAsync: function (/*Function*/ callback, pass) { - if (_entryHeader.changed) { + if (_centralHeader.changed) { callback(uncompressedData); } else { decompress(true, callback, pass); @@ -277,39 +277,59 @@ module.exports = function (/*Buffer*/ input) { }, set attr(attr) { - _entryHeader.attr = attr; + _centralHeader.attr = attr; }, get attr() { - return _entryHeader.attr; + return _centralHeader.attr; }, set header(/*Buffer*/ data) { - _entryHeader.loadFromBinary(data); + _centralHeader.loadFromBinary(data); }, get header() { - return _entryHeader; + return _centralHeader; }, - packHeader: function () { + packCentralHeader: function () { // 1. 
create header (buffer) - var header = _entryHeader.entryHeaderToBinary(); + var header = _centralHeader.centralHeaderToBinary(); var addpos = Utils.Constants.CENHDR; // 2. add file name _entryName.copy(header, addpos); addpos += _entryName.length; // 3. add extra data - if (_entryHeader.extraLength) { + if (_centralHeader.extraLength) { _extra.copy(header, addpos); - addpos += _entryHeader.extraLength; + addpos += _centralHeader.extraLength; } // 4. add file comment - if (_entryHeader.commentLength) { + if (_centralHeader.commentLength) { _comment.copy(header, addpos); } return header; }, + packLocalHeader: function () { + let addpos = 0; + + // 1. construct local header Buffer + const localHeaderBuf = _centralHeader.localHeaderToBinary(); + // 2. localHeader - crate header buffer + const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _extra.length); + // 2.1 add localheader + localHeaderBuf.copy(localHeader, addpos); + addpos += localHeaderBuf.length; + // 2.2 add file name + _entryName.copy(localHeader, addpos); + addpos += _entryName.length; + // 2.3 add extra field + _extra.copy(localHeader, addpos); + addpos += _extra.length; + + return localHeader; + }, + toJSON: function () { const bytes = function (nr) { return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; @@ -320,7 +340,7 @@ module.exports = function (/*Buffer*/ input) { name: this.name, comment: this.comment, isDirectory: this.isDirectory, - header: _entryHeader.toJSON(), + header: _centralHeader.toJSON(), compressedData: bytes(input), data: bytes(uncompressedData) }; diff --git a/zipFile.js b/zipFile.js index 904bd77..d7debe7 100644 --- a/zipFile.js +++ b/zipFile.js @@ -34,7 +34,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - index += entry.header.entryHeaderSize; + index += 
entry.header.centralHeaderSize; callback(entry); } @@ -58,7 +58,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); - index += entry.header.entryHeaderSize; + index += entry.header.centralHeaderSize; entryList[i] = entry; entryTable[entry.entryName] = entry; @@ -243,7 +243,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { sortEntries(); const dataBlock = []; - const entryHeaders = []; + const headerBlocks = []; let totalSize = 0; let dindex = 0; @@ -253,30 +253,25 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { for (const entry of entryList) { // compress data and set local and entry header accordingly. Reason why is called first const compressedData = entry.getCompressedData(); - // 1. construct data header entry.header.offset = dindex; - const dataHeader = entry.header.dataHeaderToBinary(); - const entryNameLen = entry.rawEntryName.length; - // 1.2. postheader - data after data header - const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); - entry.rawEntryName.copy(postHeader, 0); - entry.extra.copy(postHeader, entryNameLen); + + // 1. construct local header + const localHeader = entry.packLocalHeader(); // 2. offsets - const dataLength = dataHeader.length + postHeader.length + compressedData.length; + const dataLength = localHeader.length + compressedData.length; dindex += dataLength; // 3. store values in sequence - dataBlock.push(dataHeader); - dataBlock.push(postHeader); + dataBlock.push(localHeader); dataBlock.push(compressedData); - // 4. construct entry header - const entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); + // 4. construct central header + const centralHeader = entry.packCentralHeader(); + headerBlocks.push(centralHeader); // 5. 
update main header - mainHeader.size += entryHeader.length; - totalSize += dataLength + entryHeader.length; + mainHeader.size += centralHeader.length; + totalSize += dataLength + centralHeader.length; } totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length @@ -292,7 +287,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { } // write central directory entries - for (const content of entryHeaders) { + for (const content of headerBlocks) { content.copy(outBuffer, dindex); dindex += content.length; } @@ -315,7 +310,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { sortEntries(); const dataBlock = []; - const entryHeaders = []; + const centralHeaders = []; let totalSize = 0; let dindex = 0; @@ -323,29 +318,31 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { mainHeader.offset = 0; const compress2Buffer = function (entryLists) { - if (entryLists.length) { - const entry = entryLists.pop(); + if (entryLists.length > 0) { + const entry = entryLists.shift(); const name = entry.entryName + entry.extra.toString(); if (onItemStart) onItemStart(name); entry.getCompressedDataAsync(function (compressedData) { if (onItemEnd) onItemEnd(name); entry.header.offset = dindex; - // data header - const dataHeader = entry.header.dataHeaderToBinary(); - const postHeader = Buffer.alloc(name.length, name); - const dataLength = dataHeader.length + postHeader.length + compressedData.length; + // 1. construct local header + const localHeader = entry.packLocalHeader(); + + // 2. offsets + const dataLength = localHeader.length + compressedData.length; dindex += dataLength; - dataBlock.push(dataHeader); - dataBlock.push(postHeader); + // 3. 
store values in sequence + dataBlock.push(localHeader); dataBlock.push(compressedData); - const entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += dataLength + entryHeader.length; + // central header + const centralHeader = entry.packCentralHeader(); + centralHeaders.push(centralHeader); + mainHeader.size += centralHeader.length; + totalSize += dataLength + centralHeader.length; compress2Buffer(entryLists); }); @@ -360,7 +356,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { content.copy(outBuffer, dindex); // write data blocks dindex += content.length; }); - entryHeaders.forEach(function (content) { + centralHeaders.forEach(function (content) { content.copy(outBuffer, dindex); // write central directory entries dindex += content.length; }); @@ -376,7 +372,7 @@ module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { } }; - compress2Buffer(entryList); + compress2Buffer(Array.from(entryList)); } catch (e) { onFail(e); }