diff --git a/headers/entryHeader.js b/headers/entryHeader.js
index 051d389..a32def1 100644
--- a/headers/entryHeader.js
+++ b/headers/entryHeader.js
@@ -170,39 +170,60 @@ module.exports = function () {
         },
 
         get realDataOffset() {
-            return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
+            return _offset + Constants.LOCHDR + _fnameLen + _extraLen;
         },
 
         get dataHeader() {
-            return _dataHeader;
-        },
-
-        loadDataHeaderFromBinary: function (/*Buffer*/ input) {
-            var data = input.slice(_offset, _offset + Constants.LOCHDR);
-            // 30 bytes and should start with "PK\003\004"
-            if (data.readUInt32LE(0) !== Constants.LOCSIG) {
-                throw new Error(Utils.Errors.INVALID_LOC);
-            }
             _dataHeader = {
                 // version needed to extract
-                version: data.readUInt16LE(Constants.LOCVER),
+                version: _version,
                 // general purpose bit flag
-                flags: data.readUInt16LE(Constants.LOCFLG),
+                flags: _flags,
                 // compression method
-                method: data.readUInt16LE(Constants.LOCHOW),
+                method: _method,
                 // modification time (2 bytes time, 2 bytes date)
-                time: data.readUInt32LE(Constants.LOCTIM),
+                time: _time,
                 // uncompressed file crc-32 value
-                crc: data.readUInt32LE(Constants.LOCCRC),
+                crc: _crc,
                 // compressed size
-                compressedSize: data.readUInt32LE(Constants.LOCSIZ),
+                compressedSize: _compressedSize,
                 // uncompressed size
-                size: data.readUInt32LE(Constants.LOCLEN),
+                size: _size,
                 // filename length
-                fnameLen: data.readUInt16LE(Constants.LOCNAM),
+                fnameLen: _fnameLen,
                 // extra field length
-                extraLen: data.readUInt16LE(Constants.LOCEXT)
+                extraLen: _extraLen
             };
+
+            return _dataHeader;
+        },
+
+        loadDataHeaderFromBinary: function (/*Buffer*/ input) {
+            var data = input.slice(_offset, _offset + Constants.LOCHDR);
+            // 30 bytes and should start with "PK\003\004"
+            if (data.readUInt32LE(0) !== Constants.LOCSIG) {
+                throw new Error(Utils.Errors.INVALID_LOC);
+            }
+
+            // version needed to extract
+            _version = data.readUInt16LE(Constants.LOCVER);
+            // general purpose bit flag
+            _flags = data.readUInt16LE(Constants.LOCFLG);
+            // compression method
+            _method = data.readUInt16LE(Constants.LOCHOW);
+            // modification time (2 bytes time, 2 bytes date)
+            _time = data.readUInt32LE(Constants.LOCTIM);
+            // uncompressed file crc-32 value
+            _crc = data.readUInt32LE(Constants.LOCCRC);
+            // compressed size
+            _compressedSize = data.readUInt32LE(Constants.LOCSIZ);
+            // uncompressed size
+            _size = data.readUInt32LE(Constants.LOCLEN);
+            // filename length
+            _fnameLen = data.readUInt16LE(Constants.LOCNAM);
+            // extra field length
+            _extraLen = data.readUInt16LE(Constants.LOCEXT);
+
         },
 
         loadFromBinary: function (/*Buffer*/ data) {
diff --git a/zipEntry.js b/zipEntry.js
index 8c3053b..5345ce1 100644
--- a/zipEntry.js
+++ b/zipEntry.js
@@ -15,7 +15,8 @@ module.exports = function (/*Buffer*/ input) {
         if (!input || !Buffer.isBuffer(input)) {
             return Buffer.alloc(0);
         }
-        _entryHeader.loadDataHeaderFromBinary(input);
+        // Scanning local file headers is not necessary (except in the case of corrupted archives)
+        if (!_entryHeader.compressedSize) _entryHeader.loadDataHeaderFromBinary(input);
         return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize);