yazl-2.5.1/.gitignore
node_modules/
coverage

yazl-2.5.1/.travis.yml
language: node_js
node_js:
  - 10
  - 8
  - 6
  - 4
  - "0.10"
script:
  - "npm run test-travis"
after_script:
  - "npm install coveralls@2 && cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"

yazl-2.5.1/LICENSE
The MIT License (MIT)

Copyright (c) 2014 Josh Wolfe

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

yazl-2.5.1/README.md
# yazl

[![Build Status](https://travis-ci.com/thejoshwolfe/yazl.svg?branch=master)](https://travis-ci.com/thejoshwolfe/yazl)
[![Coverage Status](https://coveralls.io/repos/github/thejoshwolfe/yazl/badge.svg?branch=master)](https://coveralls.io/github/thejoshwolfe/yazl?branch=master)

yet another zip library for node. For unzipping, see [yauzl](https://github.com/thejoshwolfe/yauzl).

Design principles:

 * Don't block the JavaScript thread. Use and provide async APIs.
 * Keep memory usage under control. Don't attempt to buffer entire files in RAM at once.
 * Prefer to open input files one at a time rather than all at once. This is slightly suboptimal for time performance, but avoids OS-imposed limits on the number of simultaneously open file handles.

## Usage

```js
var yazl = require("yazl");

var zipfile = new yazl.ZipFile();
zipfile.addFile("file1.txt", "file1.txt");
// (add only files, not directories)
zipfile.addFile("path/to/file.txt", "path/in/zipfile.txt");
// pipe() can be called any time after the constructor
zipfile.outputStream.pipe(fs.createWriteStream("output.zip")).on("close", function() {
  console.log("done");
});
// alternate apis for adding files:
zipfile.addReadStream(process.stdin, "stdin.txt");
zipfile.addBuffer(Buffer.from("hello"), "hello.txt");
// call end() after all the files have been added
zipfile.end();
```

## API

### Class: ZipFile

#### new ZipFile()

No parameters. Nothing can go wrong.

#### addFile(realPath, metadataPath, [options])

Adds a file from the file system at `realPath` into the zipfile as `metadataPath`.
Typically `metadataPath` would be calculated as `path.relative(root, realPath)`. Unzip programs would extract the file from the zipfile as `metadataPath`. `realPath` is not stored in the zipfile. A valid `metadataPath` must not be blank. If a `metadataPath` contains `"\\"` characters, they will be replaced by `"/"` characters. After this substitution, a valid `metadataPath` must not start with `"/"` or `/[A-Za-z]:\//`, and must not contain `".."` path segments. File paths must not end with `"/"`, but see `addEmptyDirectory()`. After UTF-8 encoding, `metadataPath` must be at most `0xffff` bytes in length. `options` may be omitted or null and has the following structure and default values: ```js { mtime: stats.mtime, mode: stats.mode, compress: true, forceZip64Format: false, fileComment: "", // or a UTF-8 Buffer } ``` Use `mtime` and/or `mode` to override the values that would normally be obtained by the `fs.Stats` for the `realPath`. The mode is the unix permission bits and file type. The mtime and mode are stored in the zip file in the fields "last mod file time", "last mod file date", and "external file attributes". yazl does not store group and user ids in the zip file. If `compress` is `true`, the file data will be deflated (compression method 8). If `compress` is `false`, the file data will be stored (compression method 0). If `forceZip64Format` is `true`, yazl will use ZIP64 format in this entry's Data Descriptor and Central Directory Record regardless of if it's required or not (this may be useful for testing.). Otherwise, yazl will use ZIP64 format where necessary. If `fileComment` is a `string`, it will be encoded with UTF-8. If `fileComment` is a `Buffer`, it should be a UTF-8 encoded string. In UTF-8, `fileComment` must be at most `0xffff` bytes in length. This becomes the "file comment" field in this entry's central directory file header. Internally, `fs.stat()` is called immediately in the `addFile` function, and `fs.createReadStream()` is used later when the file data is actually required. Throughout adding and encoding `n` files with `addFile()`, the number of simultaneous open files is `O(1)`, probably just 1 at a time. #### addReadStream(readStream, metadataPath, [options]) Adds a file to the zip file whose content is read from `readStream`. See `addFile()` for info about the `metadataPath` parameter. `options` may be omitted or null and has the following structure and default values: ```js { mtime: new Date(), mode: 0o100664, compress: true, forceZip64Format: false, fileComment: "", // or a UTF-8 Buffer size: 12345, // example value } ``` See `addFile()` for the meaning of `mtime`, `mode`, `compress`, `forceZip64Format`, and `fileComment`. If `size` is given, it will be checked against the actual number of bytes in the `readStream`, and an error will be emitted if there is a mismatch. Note that yazl will `.pipe()` data from `readStream`, so be careful using `.on('data')`. In certain versions of node, `.on('data')` makes `.pipe()` behave incorrectly. #### addBuffer(buffer, metadataPath, [options]) Adds a file to the zip file whose content is `buffer`. See below for info on the limitations on the size of `buffer`. See `addFile()` for info about the `metadataPath` parameter. 

`options` may be omitted or null and has the following structure and default values:

```js
{
  mtime: new Date(),
  mode: 0o100664,
  compress: true,
  forceZip64Format: false,
  fileComment: "", // or a UTF-8 Buffer
}
```

See `addFile()` for the meaning of `mtime`, `mode`, `compress`, `forceZip64Format`, and `fileComment`.

This method has the unique property that General Purpose Bit `3` will not be used in the Local File Header. This doesn't matter for unzip implementations that conform to the Zip File Spec. However, 7-Zip 9.20 has a known bug where General Purpose Bit `3` is declared an unsupported compression method (note that it really has nothing to do with the compression method). See [issue #11](https://github.com/thejoshwolfe/yazl/issues/11). If you would like to create zip files that 7-Zip 9.20 can understand, you must use `addBuffer()` instead of `addFile()` or `addReadStream()` for all entries in the zip file (and `addEmptyDirectory()` is fine too).

Note that even when yazl provides the file sizes in the Local File Header, yazl never uses ZIP64 format for Local File Headers due to the size limit on `buffer` (see below).

##### Size limitation on buffer

In order to require the ZIP64 format for a local file header, the provided `buffer` parameter would need to exceed `0xfffffffe` in length. Alternatively, the `buffer` parameter might not exceed `0xfffffffe` in length, but zlib compression fails to compress the buffer and actually inflates the data to more than `0xfffffffe` in length. yazl disallows both of these scenarios by enforcing a size limit on the `buffer` parameter.

According to [this zlib documentation](http://www.zlib.net/zlib_tech.html), the worst case compression results in "an expansion of at most 13.5%, plus eleven bytes". Furthermore, some configurations of Node.js impose a size limit of `0x3fffffff` on every `Buffer` object. Running this size through the worst case compression of zlib still produces a size less than `0xfffffffe` bytes. Therefore, yazl enforces that the provided `buffer` parameter must be at most `0x3fffffff` bytes long.

#### addEmptyDirectory(metadataPath, [options])

Adds an entry to the zip file that indicates a directory should be created, even if no other items in the zip file are contained in the directory. This method is only required if the zip file is intended to contain an empty directory.

See `addFile()` for info about the `metadataPath` parameter. If `metadataPath` does not end with a `"/"`, a `"/"` will be appended.

`options` may be omitted or null and has the following structure and default values:

```js
{
  mtime: new Date(),
  mode: 0o40775,
}
```

See `addFile()` for the meaning of `mtime` and `mode`.

#### end([options], [finalSizeCallback])

Indicates that no more files will be added via `addFile()`, `addReadStream()`, or `addBuffer()`, and causes the eventual close of `outputStream`.

`options` may be omitted or null and has the following structure and default values:

```js
{
  forceZip64Format: false,
  comment: "", // or a CP437 Buffer
}
```

If `forceZip64Format` is `true`, yazl will include the ZIP64 End of Central Directory Locator and ZIP64 End of Central Directory Record regardless of whether or not they are required (this may be useful for testing). Otherwise, yazl will include these structures if necessary.

If `comment` is a `string`, it will be encoded with CP437. If `comment` is a `Buffer`, it should be a CP437 encoded string. `comment` must be at most `0xffff` bytes in length and must not include the byte sequence `[0x50,0x4b,0x05,0x06]`. This becomes the ".ZIP file comment" field in the end of central directory record.

Note that in practice, most zipfile readers interpret this field in UTF-8 instead of CP437. If your string uses only codepoints in the range `0x20...0x7e` (printable ASCII, no whitespace except for single space `' '`), then UTF-8 and CP437 (and ASCII) encodings are all identical. This restriction is recommended for maximum compatibility. To use UTF-8 encoding at your own risk, pass a `Buffer` into this function; it will not be validated.
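
For example (the comment text below is illustrative, and `zipfile` is a `ZipFile` as in the usage example above), a printable-ASCII comment can safely be passed as a string, while anything outside that range is best passed as a pre-encoded `Buffer` at your own risk:

```js
// Safe: printable ASCII is identical in CP437, ASCII, and UTF-8.
zipfile.end({comment: "archive generated by example-tool"});

// At your own risk: a pre-encoded UTF-8 Buffer is written as-is, unvalidated.
// zipfile.end({comment: Buffer.from("snowman: \u2603", "utf-8")});
```
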

If specified and non-null, `finalSizeCallback` is given the parameters `(finalSize)` sometime during or after the call to `end()`. `finalSize` is of type `Number` and can either be `-1` or the guaranteed eventual size in bytes of the output data that can be read from `outputStream`.

Note that `finalSizeCallback` is usually called well before `outputStream` has piped all its data; this callback does not mean that the stream is done.

If `finalSize` is `-1`, it means the final size is too hard to guess before processing the input file data. This will happen if and only if the `compress` option is `true` on any call to `addFile()`, `addReadStream()`, or `addBuffer()`, or if `addReadStream()` is called and the optional `size` option is not given. In other words, clients should know whether they're going to get a `-1` or a real value by looking at how they are using this library.

The call to `finalSizeCallback` might be delayed if yazl is still waiting for `fs.Stats` for an `addFile()` entry. If `addFile()` was never called, `finalSizeCallback` will be called during the call to `end()`. It is not required to start piping data from `outputStream` before `finalSizeCallback` is called. `finalSizeCallback` will be called only once, and only if this is the first call to `end()`.

#### outputStream

A readable stream that will produce the contents of the zip file. It is typical to pipe this stream to a writable stream created from `fs.createWriteStream()`.

Internally, large amounts of file data are piped to `outputStream` using `pipe()`, which means throttling happens appropriately when this stream is piped to a slow destination.

Data becomes available in this stream soon after calling one of `addFile()`, `addReadStream()`, or `addBuffer()`. Clients can call `pipe()` on this stream at any time, such as immediately after getting a new `ZipFile` instance, or long after calling `end()`. This stream will remain open while you add entries until you `end()` the zip file.

As a reminder, be careful using both `.on('data')` and `.pipe()` with this stream. In certain versions of node, you cannot use both `.on('data')` and `.pipe()` successfully.

### dateToDosDateTime(jsDate)

`jsDate` is a `Date` instance.

Returns `{date: date, time: time}`, where `date` and `time` are unsigned 16-bit integers.

## Regarding ZIP64 Support

yazl automatically uses ZIP64 format to support files and archives over `2^32 - 2` bytes (~4GB) in size and to support archives with more than `2^16 - 2` (65534) files. (See the `forceZip64Format` option in the API above for more control over this behavior.) ZIP64 format is necessary to exceed the limits inherent in the original zip file format. ZIP64 format is supported by most popular zipfile readers, but not by all of them.
Notably, the Mac Archive Utility does not understand ZIP64 format (as of writing this), and will behave very strangely when presented with such an archive. ## Output Structure The Zip File Spec leaves a lot of flexibility up to the zip file creator. This section explains and justifies yazl's interpretation and decisions regarding this flexibility. This section is probably not useful to yazl clients, but may be interesting to unzip implementors and zip file enthusiasts. ### Disk Numbers All values related to disk numbers are `0`, because yazl has no multi-disk archive support. (The exception being the Total Number of Disks field in the ZIP64 End of Central Directory Locator, which is always `1`.) ### Version Made By Always `0x033f == (3 << 8) | 63`, which means UNIX (3) and made from the spec version 6.3 (63). Note that the "UNIX" has implications in the External File Attributes. ### Version Needed to Extract Usually `20`, meaning 2.0. This allows filenames and file comments to be UTF-8 encoded. When ZIP64 format is used, some of the Version Needed to Extract values will be `45`, meaning 4.5. When this happens, there may be a mix of `20` and `45` values throughout the zipfile. ### General Purpose Bit Flag Bit `11` is always set. Filenames (and file comments) are always encoded in UTF-8, even if the result is indistinguishable from ascii. Bit `3` is usually set in the Local File Header. To support both a streaming input and streaming output api, it is impossible to know the crc32 before processing the file data. When bit `3` is set, data Descriptors are given after each file data with this information, as per the spec. But remember a complete metadata listing is still always available in the central directory record, so if unzip implementations are relying on that, like they should, none of this paragraph will matter anyway. Even so, some popular unzip implementations do not follow the spec. The Mac Archive Utility requires Data Descriptors to include the optional signature, so yazl includes the optional data descriptor signature. When bit `3` is not used, the Mac Archive Utility requires there to be no data descriptor, so yazl skips it in that case. Additionally, 7-Zip 9.20 does not seem to support bit `3` at all (see [issue #11](https://github.com/thejoshwolfe/yazl/issues/11)). All other bits are unset. ### Internal File Attributes Always `0`. The "apparently an ASCII or text file" bit is always unset meaning "apparently binary". This kind of determination is outside the scope of yazl, and is probably not significant in any modern unzip implementation. ### External File Attributes Always `stats.mode << 16`. This is apparently the convention for "version made by" = `0x03xx` (UNIX). Note that for directory entries (see `addEmptyDirectory()`), it is conventional to use the lower 8 bits for the MS-DOS directory attribute byte. However, the spec says this is only required if the Version Made By is DOS, so this library does not do that. ### Directory Entries When adding a `metadataPath` such as `"parent/file.txt"`, yazl does not add a directory entry for `"parent/"`, because file entries imply the need for their parent directories. Unzip clients seem to respect this style of pathing, and the zip file spec does not specify what is standard in this regard. In order to create empty directories, use `addEmptyDirectory()`. 
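
For example, a minimal sketch of the difference (entry names here are illustrative):

```js
var zipfile = new yazl.ZipFile();
// "parent/" is implied by this entry; no explicit "parent/" entry is written.
zipfile.addBuffer(Buffer.from("hello"), "parent/file.txt");
// an empty directory only appears in the archive if added explicitly
// (a trailing "/" is appended if missing).
zipfile.addEmptyDirectory("empty-dir");
zipfile.end();
```
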
### Size of Local File and Central Directory Entry Metadata The spec recommends that "The combined length of any directory record and [the file name, extra field, and comment fields] should not generally exceed 65,535 bytes". yazl makes no attempt to respect this recommendation. Instead, each of the fields is limited to 65,535 bytes due to the length of each being encoded as an unsigned 16 bit integer. ## Change History * 2.5.1 * Fix support for old versions of Node and add official support for Node versions 0.10, 4, 6, 8, 10. [pull #49](https://github.com/thejoshwolfe/yazl/pull/49) * 2.5.0 * Add support for `comment` and `fileComment`. [pull #44](https://github.com/thejoshwolfe/yazl/pull/44) * Avoid `new Buffer()`. [pull #43](https://github.com/thejoshwolfe/yazl/pull/43) * 2.4.3 * Clarify readme. [pull #33](https://github.com/thejoshwolfe/yazl/pull/33) * 2.4.2 * Remove octal literals to make yazl compatible with strict mode. [pull #28](https://github.com/thejoshwolfe/yazl/pull/28) * 2.4.1 * Fix Mac Archive Utility compatibility issue. [issue #24](https://github.com/thejoshwolfe/yazl/issues/24) * 2.4.0 * Add ZIP64 support. [issue #6](https://github.com/thejoshwolfe/yazl/issues/6) * 2.3.1 * Remove `.npmignore` from npm package. [pull #22](https://github.com/thejoshwolfe/yazl/pull/22) * 2.3.0 * `metadataPath` can have `\` characters now; they will be replaced with `/`. [issue #18](https://github.com/thejoshwolfe/yazl/issues/18) * 2.2.2 * Fix 7-Zip compatibility issue. [pull request #17](https://github.com/thejoshwolfe/yazl/pull/17) * 2.2.1 * Fix Mac Archive Utility compatibility issue. [issue #14](https://github.com/thejoshwolfe/yazl/issues/14) * 2.2.0 * Avoid using general purpose bit 3 for `addBuffer()` calls. [issue #13](https://github.com/thejoshwolfe/yazl/issues/13) * 2.1.3 * Fix bug when only addBuffer() and end() are called. [issue #12](https://github.com/thejoshwolfe/yazl/issues/12) * 2.1.2 * Fixed typo in parameter validation. [pull request #10](https://github.com/thejoshwolfe/yazl/pull/10) * 2.1.1 * Fixed stack overflow when using addBuffer() in certain ways. [issue #9](https://github.com/thejoshwolfe/yazl/issues/9) * 2.1.0 * Added `addEmptyDirectory()`. * `options` is now optional for `addReadStream()` and `addBuffer()`. * 2.0.0 * Initial release. yazl-2.5.1/examples/000077500000000000000000000000001340036353600143175ustar00rootroot00000000000000yazl-2.5.1/examples/zip.js000066400000000000000000000067171340036353600154720ustar00rootroot00000000000000var usage = "node " + __filename.replace(/.*[\/\\]/, "") + " " + "[FILE | --[no-]compress | {--file|--buffer|--stream} | --[no-]zip64 | --[no-]verbose]... -o OUTPUT.zip" + "\n" + "\n" + "all arguments and switches are processed in order. for example:" + "\n" + " node zip.js --compress a.txt --no-compress b.txt -o out.zip" + "\n" + "would result in compression for a.txt, but not for b.txt."; var yazl = require("../"); var fs = require("fs"); var zipfile = new yazl.ZipFile(); var options = {compress: false, forceZip64Format: false}; var addStrategy = "addFile"; var verbose = false; var args = process.argv.slice(2); if (Math.max(args.indexOf("-h"), args.indexOf("--help")) !== -1) { console.log("usage: " + usage); process.exit(1); } // this one's important if (args.indexOf("-o") === -1) throw new Error("missing -o"); if (args.indexOf("-o") + 1 >= args.length) throw new Error("missing argument after -o"); var its_the_dash_o = false; args.forEach(function(arg) { if (its_the_dash_o) { its_the_dash_o = false; var stream = arg === "-" ? 
process.stdout : fs.createWriteStream(arg); zipfile.outputStream.pipe(stream); } else if (arg === "--compress") { options.compress = true; } else if (arg === "--no-compress") { options.compress = false; } else if (arg === "--file") { addStrategy = "addFile"; } else if (arg === "--buffer") { addStrategy = "addBuffer"; } else if (arg === "--stream") { addStrategy = "addReadStream"; } else if (arg === "--no-verbose") { verbose = false; } else if (arg === "--verbose") { verbose = true; } else if (arg === "--zip64") { options.forceZip64Format = true; } else if (arg === "--no-zip64") { options.forceZip64Format = false; } else if (arg === "-o") { its_the_dash_o = true; } else if (arg === "-") { zipfile.addReadStream(process.stdin); } else { // file thing var stats = fs.statSync(arg); if (stats.isFile()) { switch (addStrategy) { case "addFile": if (verbose) console.log("addFile(" + JSON.stringify(arg) + ", " + JSON.stringify(arg) + ", " + JSON.stringify(options) + ");"); zipfile.addFile(arg, arg, options); break; case "addBuffer": if (verbose) console.log("addBuffer(fs.readFileSync(" + JSON.stringify(arg) + "), " + JSON.stringify(arg) + ", " + JSON.stringify(options) + ");"); zipfile.addBuffer(fs.readFileSync(arg), arg, options); break; case "addReadStream": if (verbose) console.log("addReadStream(fs.createReadStream(" + JSON.stringify(arg) + "), " + JSON.stringify(arg) + ", " + JSON.stringify(options) + ");"); zipfile.addReadStream(fs.createReadStream(arg), arg, options); break; default: throw new Error(); } } else if (stats.isDirectory()) { if (verbose) console.log("addEmptyDirectory(" + JSON.stringify(arg) + ", "); zipfile.addEmptyDirectory(arg); } else { throw new Error("what is this: " + arg); } } }); zipfile.end({forceZip64Format: options.forceZip64Format}, function(finalSize) { console.log("finalSize prediction: " + finalSize); }); yazl-2.5.1/index.js000066400000000000000000000715031340036353600141540ustar00rootroot00000000000000var fs = require("fs"); var Transform = require("stream").Transform; var PassThrough = require("stream").PassThrough; var zlib = require("zlib"); var util = require("util"); var EventEmitter = require("events").EventEmitter; var crc32 = require("buffer-crc32"); exports.ZipFile = ZipFile; exports.dateToDosDateTime = dateToDosDateTime; util.inherits(ZipFile, EventEmitter); function ZipFile() { this.outputStream = new PassThrough(); this.entries = []; this.outputStreamCursor = 0; this.ended = false; // .end() sets this this.allDone = false; // set when we've written the last bytes this.forceZip64Eocd = false; // configurable in .end() } ZipFile.prototype.addFile = function(realPath, metadataPath, options) { var self = this; metadataPath = validateMetadataPath(metadataPath, false); if (options == null) options = {}; var entry = new Entry(metadataPath, false, options); self.entries.push(entry); fs.stat(realPath, function(err, stats) { if (err) return self.emit("error", err); if (!stats.isFile()) return self.emit("error", new Error("not a file: " + realPath)); entry.uncompressedSize = stats.size; if (options.mtime == null) entry.setLastModDate(stats.mtime); if (options.mode == null) entry.setFileAttributesMode(stats.mode); entry.setFileDataPumpFunction(function() { var readStream = fs.createReadStream(realPath); entry.state = Entry.FILE_DATA_IN_PROGRESS; readStream.on("error", function(err) { self.emit("error", err); }); pumpFileDataReadStream(self, entry, readStream); }); pumpEntries(self); }); }; ZipFile.prototype.addReadStream = function(readStream, metadataPath, 
options) { var self = this; metadataPath = validateMetadataPath(metadataPath, false); if (options == null) options = {}; var entry = new Entry(metadataPath, false, options); self.entries.push(entry); entry.setFileDataPumpFunction(function() { entry.state = Entry.FILE_DATA_IN_PROGRESS; pumpFileDataReadStream(self, entry, readStream); }); pumpEntries(self); }; ZipFile.prototype.addBuffer = function(buffer, metadataPath, options) { var self = this; metadataPath = validateMetadataPath(metadataPath, false); if (buffer.length > 0x3fffffff) throw new Error("buffer too large: " + buffer.length + " > " + 0x3fffffff); if (options == null) options = {}; if (options.size != null) throw new Error("options.size not allowed"); var entry = new Entry(metadataPath, false, options); entry.uncompressedSize = buffer.length; entry.crc32 = crc32.unsigned(buffer); entry.crcAndFileSizeKnown = true; self.entries.push(entry); if (!entry.compress) { setCompressedBuffer(buffer); } else { zlib.deflateRaw(buffer, function(err, compressedBuffer) { setCompressedBuffer(compressedBuffer); }); } function setCompressedBuffer(compressedBuffer) { entry.compressedSize = compressedBuffer.length; entry.setFileDataPumpFunction(function() { writeToOutputStream(self, compressedBuffer); writeToOutputStream(self, entry.getDataDescriptor()); entry.state = Entry.FILE_DATA_DONE; // don't call pumpEntries() recursively. // (also, don't call process.nextTick recursively.) setImmediate(function() { pumpEntries(self); }); }); pumpEntries(self); } }; ZipFile.prototype.addEmptyDirectory = function(metadataPath, options) { var self = this; metadataPath = validateMetadataPath(metadataPath, true); if (options == null) options = {}; if (options.size != null) throw new Error("options.size not allowed"); if (options.compress != null) throw new Error("options.compress not allowed"); var entry = new Entry(metadataPath, true, options); self.entries.push(entry); entry.setFileDataPumpFunction(function() { writeToOutputStream(self, entry.getDataDescriptor()); entry.state = Entry.FILE_DATA_DONE; pumpEntries(self); }); pumpEntries(self); }; var eocdrSignatureBuffer = bufferFrom([0x50, 0x4b, 0x05, 0x06]); ZipFile.prototype.end = function(options, finalSizeCallback) { if (typeof options === "function") { finalSizeCallback = options; options = null; } if (options == null) options = {}; if (this.ended) return; this.ended = true; this.finalSizeCallback = finalSizeCallback; this.forceZip64Eocd = !!options.forceZip64Format; if (options.comment) { if (typeof options.comment === "string") { this.comment = encodeCp437(options.comment); } else { // It should be a Buffer this.comment = options.comment; } if (this.comment.length > 0xffff) throw new Error("comment is too large"); // gotta check for this, because the zipfile format is actually ambiguous. if (bufferIncludes(this.comment, eocdrSignatureBuffer)) throw new Error("comment contains end of central directory record signature"); } else { // no comment. this.comment = EMPTY_BUFFER; } pumpEntries(this); }; function writeToOutputStream(self, buffer) { self.outputStream.write(buffer); self.outputStreamCursor += buffer.length; } function pumpFileDataReadStream(self, entry, readStream) { var crc32Watcher = new Crc32Watcher(); var uncompressedSizeCounter = new ByteCounter(); var compressor = entry.compress ? 
new zlib.DeflateRaw() : new PassThrough(); var compressedSizeCounter = new ByteCounter(); readStream.pipe(crc32Watcher) .pipe(uncompressedSizeCounter) .pipe(compressor) .pipe(compressedSizeCounter) .pipe(self.outputStream, {end: false}); compressedSizeCounter.on("end", function() { entry.crc32 = crc32Watcher.crc32; if (entry.uncompressedSize == null) { entry.uncompressedSize = uncompressedSizeCounter.byteCount; } else { if (entry.uncompressedSize !== uncompressedSizeCounter.byteCount) return self.emit("error", new Error("file data stream has unexpected number of bytes")); } entry.compressedSize = compressedSizeCounter.byteCount; self.outputStreamCursor += entry.compressedSize; writeToOutputStream(self, entry.getDataDescriptor()); entry.state = Entry.FILE_DATA_DONE; pumpEntries(self); }); } function pumpEntries(self) { if (self.allDone) return; // first check if finalSize is finally known if (self.ended && self.finalSizeCallback != null) { var finalSize = calculateFinalSize(self); if (finalSize != null) { // we have an answer self.finalSizeCallback(finalSize); self.finalSizeCallback = null; } } // pump entries var entry = getFirstNotDoneEntry(); function getFirstNotDoneEntry() { for (var i = 0; i < self.entries.length; i++) { var entry = self.entries[i]; if (entry.state < Entry.FILE_DATA_DONE) return entry; } return null; } if (entry != null) { // this entry is not done yet if (entry.state < Entry.READY_TO_PUMP_FILE_DATA) return; // input file not open yet if (entry.state === Entry.FILE_DATA_IN_PROGRESS) return; // we'll get there // start with local file header entry.relativeOffsetOfLocalHeader = self.outputStreamCursor; var localFileHeader = entry.getLocalFileHeader(); writeToOutputStream(self, localFileHeader); entry.doFileDataPump(); } else { // all cought up on writing entries if (self.ended) { // head for the exit self.offsetOfStartOfCentralDirectory = self.outputStreamCursor; self.entries.forEach(function(entry) { var centralDirectoryRecord = entry.getCentralDirectoryRecord(); writeToOutputStream(self, centralDirectoryRecord); }); writeToOutputStream(self, getEndOfCentralDirectoryRecord(self)); self.outputStream.end(); self.allDone = true; } } } function calculateFinalSize(self) { var pretendOutputCursor = 0; var centralDirectorySize = 0; for (var i = 0; i < self.entries.length; i++) { var entry = self.entries[i]; // compression is too hard to predict if (entry.compress) return -1; if (entry.state >= Entry.READY_TO_PUMP_FILE_DATA) { // if addReadStream was called without providing the size, we can't predict the final size if (entry.uncompressedSize == null) return -1; } else { // if we're still waiting for fs.stat, we might learn the size someday if (entry.uncompressedSize == null) return null; } // we know this for sure, and this is important to know if we need ZIP64 format. 
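    // (The arithmetic below mirrors what pumpEntries() will eventually write:
    // a fixed-size local file header plus the UTF-8 file name, the stored
    // (uncompressed) file data, an optional data descriptor when the CRC-32
    // and sizes are not known up front, and a central directory record, plus
    // a ZIP64 extended information extra field when needed, per entry.)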
entry.relativeOffsetOfLocalHeader = pretendOutputCursor; var useZip64Format = entry.useZip64Format(); pretendOutputCursor += LOCAL_FILE_HEADER_FIXED_SIZE + entry.utf8FileName.length; pretendOutputCursor += entry.uncompressedSize; if (!entry.crcAndFileSizeKnown) { // use a data descriptor if (useZip64Format) { pretendOutputCursor += ZIP64_DATA_DESCRIPTOR_SIZE; } else { pretendOutputCursor += DATA_DESCRIPTOR_SIZE; } } centralDirectorySize += CENTRAL_DIRECTORY_RECORD_FIXED_SIZE + entry.utf8FileName.length + entry.fileComment.length; if (useZip64Format) { centralDirectorySize += ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE; } } var endOfCentralDirectorySize = 0; if (self.forceZip64Eocd || self.entries.length >= 0xffff || centralDirectorySize >= 0xffff || pretendOutputCursor >= 0xffffffff) { // use zip64 end of central directory stuff endOfCentralDirectorySize += ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE; } endOfCentralDirectorySize += END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + self.comment.length; return pretendOutputCursor + centralDirectorySize + endOfCentralDirectorySize; } var ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE = 56; var ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE = 20; var END_OF_CENTRAL_DIRECTORY_RECORD_SIZE = 22; function getEndOfCentralDirectoryRecord(self, actuallyJustTellMeHowLongItWouldBe) { var needZip64Format = false; var normalEntriesLength = self.entries.length; if (self.forceZip64Eocd || self.entries.length >= 0xffff) { normalEntriesLength = 0xffff; needZip64Format = true; } var sizeOfCentralDirectory = self.outputStreamCursor - self.offsetOfStartOfCentralDirectory; var normalSizeOfCentralDirectory = sizeOfCentralDirectory; if (self.forceZip64Eocd || sizeOfCentralDirectory >= 0xffffffff) { normalSizeOfCentralDirectory = 0xffffffff; needZip64Format = true; } var normalOffsetOfStartOfCentralDirectory = self.offsetOfStartOfCentralDirectory; if (self.forceZip64Eocd || self.offsetOfStartOfCentralDirectory >= 0xffffffff) { normalOffsetOfStartOfCentralDirectory = 0xffffffff; needZip64Format = true; } if (actuallyJustTellMeHowLongItWouldBe) { if (needZip64Format) { return ( ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE + END_OF_CENTRAL_DIRECTORY_RECORD_SIZE ); } else { return END_OF_CENTRAL_DIRECTORY_RECORD_SIZE; } } var eocdrBuffer = bufferAlloc(END_OF_CENTRAL_DIRECTORY_RECORD_SIZE + self.comment.length); // end of central dir signature 4 bytes (0x06054b50) eocdrBuffer.writeUInt32LE(0x06054b50, 0); // number of this disk 2 bytes eocdrBuffer.writeUInt16LE(0, 4); // number of the disk with the start of the central directory 2 bytes eocdrBuffer.writeUInt16LE(0, 6); // total number of entries in the central directory on this disk 2 bytes eocdrBuffer.writeUInt16LE(normalEntriesLength, 8); // total number of entries in the central directory 2 bytes eocdrBuffer.writeUInt16LE(normalEntriesLength, 10); // size of the central directory 4 bytes eocdrBuffer.writeUInt32LE(normalSizeOfCentralDirectory, 12); // offset of start of central directory with respect to the starting disk number 4 bytes eocdrBuffer.writeUInt32LE(normalOffsetOfStartOfCentralDirectory, 16); // .ZIP file comment length 2 bytes eocdrBuffer.writeUInt16LE(self.comment.length, 20); // .ZIP file comment (variable size) self.comment.copy(eocdrBuffer, 22); if (!needZip64Format) return eocdrBuffer; // ZIP64 format // ZIP64 End of Central Directory Record var zip64EocdrBuffer = bufferAlloc(ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE); // zip64 
end of central dir signature 4 bytes (0x06064b50) zip64EocdrBuffer.writeUInt32LE(0x06064b50, 0); // size of zip64 end of central directory record 8 bytes writeUInt64LE(zip64EocdrBuffer, ZIP64_END_OF_CENTRAL_DIRECTORY_RECORD_SIZE - 12, 4); // version made by 2 bytes zip64EocdrBuffer.writeUInt16LE(VERSION_MADE_BY, 12); // version needed to extract 2 bytes zip64EocdrBuffer.writeUInt16LE(VERSION_NEEDED_TO_EXTRACT_ZIP64, 14); // number of this disk 4 bytes zip64EocdrBuffer.writeUInt32LE(0, 16); // number of the disk with the start of the central directory 4 bytes zip64EocdrBuffer.writeUInt32LE(0, 20); // total number of entries in the central directory on this disk 8 bytes writeUInt64LE(zip64EocdrBuffer, self.entries.length, 24); // total number of entries in the central directory 8 bytes writeUInt64LE(zip64EocdrBuffer, self.entries.length, 32); // size of the central directory 8 bytes writeUInt64LE(zip64EocdrBuffer, sizeOfCentralDirectory, 40); // offset of start of central directory with respect to the starting disk number 8 bytes writeUInt64LE(zip64EocdrBuffer, self.offsetOfStartOfCentralDirectory, 48); // zip64 extensible data sector (variable size) // nothing in the zip64 extensible data sector // ZIP64 End of Central Directory Locator var zip64EocdlBuffer = bufferAlloc(ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE); // zip64 end of central dir locator signature 4 bytes (0x07064b50) zip64EocdlBuffer.writeUInt32LE(0x07064b50, 0); // number of the disk with the start of the zip64 end of central directory 4 bytes zip64EocdlBuffer.writeUInt32LE(0, 4); // relative offset of the zip64 end of central directory record 8 bytes writeUInt64LE(zip64EocdlBuffer, self.outputStreamCursor, 8); // total number of disks 4 bytes zip64EocdlBuffer.writeUInt32LE(1, 16); return Buffer.concat([ zip64EocdrBuffer, zip64EocdlBuffer, eocdrBuffer, ]); } function validateMetadataPath(metadataPath, isDirectory) { if (metadataPath === "") throw new Error("empty metadataPath"); metadataPath = metadataPath.replace(/\\/g, "/"); if (/^[a-zA-Z]:/.test(metadataPath) || /^\//.test(metadataPath)) throw new Error("absolute path: " + metadataPath); if (metadataPath.split("/").indexOf("..") !== -1) throw new Error("invalid relative path: " + metadataPath); var looksLikeDirectory = /\/$/.test(metadataPath); if (isDirectory) { // append a trailing '/' if necessary. if (!looksLikeDirectory) metadataPath += "/"; } else { if (looksLikeDirectory) throw new Error("file path cannot end with '/': " + metadataPath); } return metadataPath; } var EMPTY_BUFFER = bufferAlloc(0); // this class is not part of the public API function Entry(metadataPath, isDirectory, options) { this.utf8FileName = bufferFrom(metadataPath); if (this.utf8FileName.length > 0xffff) throw new Error("utf8 file name too long. " + utf8FileName.length + " > " + 0xffff); this.isDirectory = isDirectory; this.state = Entry.WAITING_FOR_METADATA; this.setLastModDate(options.mtime != null ? options.mtime : new Date()); if (options.mode != null) { this.setFileAttributesMode(options.mode); } else { this.setFileAttributesMode(isDirectory ? 
0o40775 : 0o100664); } if (isDirectory) { this.crcAndFileSizeKnown = true; this.crc32 = 0; this.uncompressedSize = 0; this.compressedSize = 0; } else { // unknown so far this.crcAndFileSizeKnown = false; this.crc32 = null; this.uncompressedSize = null; this.compressedSize = null; if (options.size != null) this.uncompressedSize = options.size; } if (isDirectory) { this.compress = false; } else { this.compress = true; // default if (options.compress != null) this.compress = !!options.compress; } this.forceZip64Format = !!options.forceZip64Format; if (options.fileComment) { if (typeof options.fileComment === "string") { this.fileComment = bufferFrom(options.fileComment, "utf-8"); } else { // It should be a Buffer this.fileComment = options.fileComment; } if (this.fileComment.length > 0xffff) throw new Error("fileComment is too large"); } else { // no comment. this.fileComment = EMPTY_BUFFER; } } Entry.WAITING_FOR_METADATA = 0; Entry.READY_TO_PUMP_FILE_DATA = 1; Entry.FILE_DATA_IN_PROGRESS = 2; Entry.FILE_DATA_DONE = 3; Entry.prototype.setLastModDate = function(date) { var dosDateTime = dateToDosDateTime(date); this.lastModFileTime = dosDateTime.time; this.lastModFileDate = dosDateTime.date; }; Entry.prototype.setFileAttributesMode = function(mode) { if ((mode & 0xffff) !== mode) throw new Error("invalid mode. expected: 0 <= " + mode + " <= " + 0xffff); // http://unix.stackexchange.com/questions/14705/the-zip-formats-external-file-attribute/14727#14727 this.externalFileAttributes = (mode << 16) >>> 0; }; // doFileDataPump() should not call pumpEntries() directly. see issue #9. Entry.prototype.setFileDataPumpFunction = function(doFileDataPump) { this.doFileDataPump = doFileDataPump; this.state = Entry.READY_TO_PUMP_FILE_DATA; }; Entry.prototype.useZip64Format = function() { return ( (this.forceZip64Format) || (this.uncompressedSize != null && this.uncompressedSize > 0xfffffffe) || (this.compressedSize != null && this.compressedSize > 0xfffffffe) || (this.relativeOffsetOfLocalHeader != null && this.relativeOffsetOfLocalHeader > 0xfffffffe) ); } var LOCAL_FILE_HEADER_FIXED_SIZE = 30; var VERSION_NEEDED_TO_EXTRACT_UTF8 = 20; var VERSION_NEEDED_TO_EXTRACT_ZIP64 = 45; // 3 = unix. 
63 = spec version 6.3 var VERSION_MADE_BY = (3 << 8) | 63; var FILE_NAME_IS_UTF8 = 1 << 11; var UNKNOWN_CRC32_AND_FILE_SIZES = 1 << 3; Entry.prototype.getLocalFileHeader = function() { var crc32 = 0; var compressedSize = 0; var uncompressedSize = 0; if (this.crcAndFileSizeKnown) { crc32 = this.crc32; compressedSize = this.compressedSize; uncompressedSize = this.uncompressedSize; } var fixedSizeStuff = bufferAlloc(LOCAL_FILE_HEADER_FIXED_SIZE); var generalPurposeBitFlag = FILE_NAME_IS_UTF8; if (!this.crcAndFileSizeKnown) generalPurposeBitFlag |= UNKNOWN_CRC32_AND_FILE_SIZES; // local file header signature 4 bytes (0x04034b50) fixedSizeStuff.writeUInt32LE(0x04034b50, 0); // version needed to extract 2 bytes fixedSizeStuff.writeUInt16LE(VERSION_NEEDED_TO_EXTRACT_UTF8, 4); // general purpose bit flag 2 bytes fixedSizeStuff.writeUInt16LE(generalPurposeBitFlag, 6); // compression method 2 bytes fixedSizeStuff.writeUInt16LE(this.getCompressionMethod(), 8); // last mod file time 2 bytes fixedSizeStuff.writeUInt16LE(this.lastModFileTime, 10); // last mod file date 2 bytes fixedSizeStuff.writeUInt16LE(this.lastModFileDate, 12); // crc-32 4 bytes fixedSizeStuff.writeUInt32LE(crc32, 14); // compressed size 4 bytes fixedSizeStuff.writeUInt32LE(compressedSize, 18); // uncompressed size 4 bytes fixedSizeStuff.writeUInt32LE(uncompressedSize, 22); // file name length 2 bytes fixedSizeStuff.writeUInt16LE(this.utf8FileName.length, 26); // extra field length 2 bytes fixedSizeStuff.writeUInt16LE(0, 28); return Buffer.concat([ fixedSizeStuff, // file name (variable size) this.utf8FileName, // extra field (variable size) // no extra fields ]); }; var DATA_DESCRIPTOR_SIZE = 16; var ZIP64_DATA_DESCRIPTOR_SIZE = 24; Entry.prototype.getDataDescriptor = function() { if (this.crcAndFileSizeKnown) { // the Mac Archive Utility requires this not be present unless we set general purpose bit 3 return EMPTY_BUFFER; } if (!this.useZip64Format()) { var buffer = bufferAlloc(DATA_DESCRIPTOR_SIZE); // optional signature (required according to Archive Utility) buffer.writeUInt32LE(0x08074b50, 0); // crc-32 4 bytes buffer.writeUInt32LE(this.crc32, 4); // compressed size 4 bytes buffer.writeUInt32LE(this.compressedSize, 8); // uncompressed size 4 bytes buffer.writeUInt32LE(this.uncompressedSize, 12); return buffer; } else { // ZIP64 format var buffer = bufferAlloc(ZIP64_DATA_DESCRIPTOR_SIZE); // optional signature (unknown if anyone cares about this) buffer.writeUInt32LE(0x08074b50, 0); // crc-32 4 bytes buffer.writeUInt32LE(this.crc32, 4); // compressed size 8 bytes writeUInt64LE(buffer, this.compressedSize, 8); // uncompressed size 8 bytes writeUInt64LE(buffer, this.uncompressedSize, 16); return buffer; } }; var CENTRAL_DIRECTORY_RECORD_FIXED_SIZE = 46; var ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE = 28; Entry.prototype.getCentralDirectoryRecord = function() { var fixedSizeStuff = bufferAlloc(CENTRAL_DIRECTORY_RECORD_FIXED_SIZE); var generalPurposeBitFlag = FILE_NAME_IS_UTF8; if (!this.crcAndFileSizeKnown) generalPurposeBitFlag |= UNKNOWN_CRC32_AND_FILE_SIZES; var normalCompressedSize = this.compressedSize; var normalUncompressedSize = this.uncompressedSize; var normalRelativeOffsetOfLocalHeader = this.relativeOffsetOfLocalHeader; var versionNeededToExtract; var zeiefBuffer; if (this.useZip64Format()) { normalCompressedSize = 0xffffffff; normalUncompressedSize = 0xffffffff; normalRelativeOffsetOfLocalHeader = 0xffffffff; versionNeededToExtract = VERSION_NEEDED_TO_EXTRACT_ZIP64; // ZIP64 extended information extra field 
zeiefBuffer = bufferAlloc(ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE); // 0x0001 2 bytes Tag for this "extra" block type zeiefBuffer.writeUInt16LE(0x0001, 0); // Size 2 bytes Size of this "extra" block zeiefBuffer.writeUInt16LE(ZIP64_EXTENDED_INFORMATION_EXTRA_FIELD_SIZE - 4, 2); // Original Size 8 bytes Original uncompressed file size writeUInt64LE(zeiefBuffer, this.uncompressedSize, 4); // Compressed Size 8 bytes Size of compressed data writeUInt64LE(zeiefBuffer, this.compressedSize, 12); // Relative Header Offset 8 bytes Offset of local header record writeUInt64LE(zeiefBuffer, this.relativeOffsetOfLocalHeader, 20); // Disk Start Number 4 bytes Number of the disk on which this file starts // (omit) } else { versionNeededToExtract = VERSION_NEEDED_TO_EXTRACT_UTF8; zeiefBuffer = EMPTY_BUFFER; } // central file header signature 4 bytes (0x02014b50) fixedSizeStuff.writeUInt32LE(0x02014b50, 0); // version made by 2 bytes fixedSizeStuff.writeUInt16LE(VERSION_MADE_BY, 4); // version needed to extract 2 bytes fixedSizeStuff.writeUInt16LE(versionNeededToExtract, 6); // general purpose bit flag 2 bytes fixedSizeStuff.writeUInt16LE(generalPurposeBitFlag, 8); // compression method 2 bytes fixedSizeStuff.writeUInt16LE(this.getCompressionMethod(), 10); // last mod file time 2 bytes fixedSizeStuff.writeUInt16LE(this.lastModFileTime, 12); // last mod file date 2 bytes fixedSizeStuff.writeUInt16LE(this.lastModFileDate, 14); // crc-32 4 bytes fixedSizeStuff.writeUInt32LE(this.crc32, 16); // compressed size 4 bytes fixedSizeStuff.writeUInt32LE(normalCompressedSize, 20); // uncompressed size 4 bytes fixedSizeStuff.writeUInt32LE(normalUncompressedSize, 24); // file name length 2 bytes fixedSizeStuff.writeUInt16LE(this.utf8FileName.length, 28); // extra field length 2 bytes fixedSizeStuff.writeUInt16LE(zeiefBuffer.length, 30); // file comment length 2 bytes fixedSizeStuff.writeUInt16LE(this.fileComment.length, 32); // disk number start 2 bytes fixedSizeStuff.writeUInt16LE(0, 34); // internal file attributes 2 bytes fixedSizeStuff.writeUInt16LE(0, 36); // external file attributes 4 bytes fixedSizeStuff.writeUInt32LE(this.externalFileAttributes, 38); // relative offset of local header 4 bytes fixedSizeStuff.writeUInt32LE(normalRelativeOffsetOfLocalHeader, 42); return Buffer.concat([ fixedSizeStuff, // file name (variable size) this.utf8FileName, // extra field (variable size) zeiefBuffer, // file comment (variable size) this.fileComment, ]); }; Entry.prototype.getCompressionMethod = function() { var NO_COMPRESSION = 0; var DEFLATE_COMPRESSION = 8; return this.compress ? DEFLATE_COMPRESSION : NO_COMPRESSION; }; function dateToDosDateTime(jsDate) { var date = 0; date |= jsDate.getDate() & 0x1f; // 1-31 date |= ((jsDate.getMonth() + 1) & 0xf) << 5; // 0-11, 1-12 date |= ((jsDate.getFullYear() - 1980) & 0x7f) << 9; // 0-128, 1980-2108 var time = 0; time |= Math.floor(jsDate.getSeconds() / 2); // 0-59, 0-29 (lose odd numbers) time |= (jsDate.getMinutes() & 0x3f) << 5; // 0-59 time |= (jsDate.getHours() & 0x1f) << 11; // 0-23 return {date: date, time: time}; } function writeUInt64LE(buffer, n, offset) { // can't use bitshift here, because JavaScript only allows bitshifting on 32-bit integers. 
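  // This floor/modulo split is exact for integers up to Number.MAX_SAFE_INTEGER
  // (2^53 - 1), which covers any offset or size yazl computes in practice.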
var high = Math.floor(n / 0x100000000); var low = n % 0x100000000; buffer.writeUInt32LE(low, offset); buffer.writeUInt32LE(high, offset + 4); } function defaultCallback(err) { if (err) throw err; } util.inherits(ByteCounter, Transform); function ByteCounter(options) { Transform.call(this, options); this.byteCount = 0; } ByteCounter.prototype._transform = function(chunk, encoding, cb) { this.byteCount += chunk.length; cb(null, chunk); }; util.inherits(Crc32Watcher, Transform); function Crc32Watcher(options) { Transform.call(this, options); this.crc32 = 0; } Crc32Watcher.prototype._transform = function(chunk, encoding, cb) { this.crc32 = crc32.unsigned(chunk, this.crc32); cb(null, chunk); }; var cp437 = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼ !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ '; if (cp437.length !== 256) throw new Error("assertion failure"); var reverseCp437 = null; function encodeCp437(string) { if (/^[\x20-\x7e]*$/.test(string)) { // CP437, ASCII, and UTF-8 overlap in this range. return bufferFrom(string, "utf-8"); } // This is the slow path. if (reverseCp437 == null) { // cache this once reverseCp437 = {}; for (var i = 0; i < cp437.length; i++) { reverseCp437[cp437[i]] = i; } } var result = bufferAlloc(string.length); for (var i = 0; i < string.length; i++) { var b = reverseCp437[string[i]]; if (b == null) throw new Error("character not encodable in CP437: " + JSON.stringify(string[i])); result[i] = b; } return result; } function bufferAlloc(size) { bufferAlloc = modern; try { return bufferAlloc(size); } catch (e) { bufferAlloc = legacy; return bufferAlloc(size); } function modern(size) { return Buffer.allocUnsafe(size); } function legacy(size) { return new Buffer(size); } } function bufferFrom(something, encoding) { bufferFrom = modern; try { return bufferFrom(something, encoding); } catch (e) { bufferFrom = legacy; return bufferFrom(something, encoding); } function modern(something, encoding) { return Buffer.from(something, encoding); } function legacy(something, encoding) { return new Buffer(something, encoding); } } function bufferIncludes(buffer, content) { bufferIncludes = modern; try { return bufferIncludes(buffer, content); } catch (e) { bufferIncludes = legacy; return bufferIncludes(buffer, content); } function modern(buffer, content) { return buffer.includes(content); } function legacy(buffer, content) { for (var i = 0; i <= buffer.length - content.length; i++) { for (var j = 0;; j++) { if (j === content.length) return true; if (buffer[i + j] !== content[j]) break; } } return false; } } yazl-2.5.1/package.json000066400000000000000000000015031340036353600147660ustar00rootroot00000000000000{ "name": "yazl", "version": "2.5.1", "description": "yet another zip library for node", "main": "index.js", "scripts": { "test": "node test/test.js", "test-cov": "istanbul cover test/test.js", "test-travis": "istanbul cover --report lcovonly test/test.js" }, "repository": { "type": "git", "url": "https://github.com/thejoshwolfe/yazl.git" }, "keywords": [ "zip", "stream", "archive", "file" ], "author": "Josh Wolfe ", "license": "MIT", "bugs": { "url": "https://github.com/thejoshwolfe/yazl/issues" }, "homepage": "https://github.com/thejoshwolfe/yazl", "dependencies": { "buffer-crc32": "~0.2.3" }, "devDependencies": { "bl": "~0.9.3", "istanbul": "^0.4.5", "yauzl": "~2.3.1" }, "files": [ "index.js" ] } 
yazl-2.5.1/test/000077500000000000000000000000001340036353600134605ustar00rootroot00000000000000yazl-2.5.1/test/test.js000066400000000000000000000205571340036353600150060ustar00rootroot00000000000000var fs = require("fs"); var yazl = require("../"); var yauzl = require("yauzl"); var BufferList = require("bl"); (function() { var fileMetadata = { mtime: new Date(), mode: 0100664, }; var zipfile = new yazl.ZipFile(); zipfile.addFile(__filename, "unicōde.txt"); zipfile.addFile(__filename, "without-compression.txt", {compress: false}); zipfile.addReadStream(fs.createReadStream(__filename), "readStream.txt", fileMetadata); var expectedContents = fs.readFileSync(__filename); zipfile.addBuffer(expectedContents, "with/directories.txt", fileMetadata); zipfile.addBuffer(expectedContents, "with\\windows-paths.txt", fileMetadata); zipfile.end(function(finalSize) { if (finalSize !== -1) throw new Error("finalSize is impossible to know before compression"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (err) throw err; yauzl.fromBuffer(data, function(err, zipfile) { if (err) throw err; zipfile.on("entry", function(entry) { zipfile.openReadStream(entry, function(err, readStream) { if (err) throw err; readStream.pipe(new BufferList(function(err, data) { if (err) throw err; if (expectedContents.toString("binary") !== data.toString("binary")) throw new Error("unexpected contents"); console.log(entry.fileName + ": pass"); })); }); }); }); })); }); })(); (function() { var zip64Combinations = [ [0, 0, 0, 0, 0], [1, 1, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 0, 1], [1, 1, 1, 1, 1], ]; zip64Combinations.forEach(function(zip64Config) { var options = { compress: false, size: null, forceZip64Format: false, }; var zipfile = new yazl.ZipFile(); options.forceZip64Format = !!zip64Config[0]; zipfile.addFile(__filename, "asdf.txt", options); options.forceZip64Format = !!zip64Config[1]; zipfile.addFile(__filename, "fdsa.txt", options); options.forceZip64Format = !!zip64Config[2]; zipfile.addBuffer(bufferFrom("buffer"), "buffer.txt", options); options.forceZip64Format = !!zip64Config[3]; options.size = "stream".length; zipfile.addReadStream(new BufferList().append("stream"), "stream.txt", options); options.size = null; zipfile.end({forceZip64Format:!!zip64Config[4]}, function(finalSize) { if (finalSize === -1) throw new Error("finalSize should be known"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (data.length !== finalSize) throw new Error("finalSize prediction is wrong. 
" + finalSize + " !== " + data.length); console.log("finalSize(" + zip64Config.join("") + "): pass"); })); }); }); })(); (function() { var zipfile = new yazl.ZipFile(); // all options parameters are optional zipfile.addFile(__filename, "a.txt"); zipfile.addBuffer(bufferFrom("buffer"), "b.txt"); zipfile.addReadStream(new BufferList().append("stream"), "c.txt"); zipfile.addEmptyDirectory("d/"); zipfile.addEmptyDirectory("e"); zipfile.end(function(finalSize) { if (finalSize !== -1) throw new Error("finalSize should be unknown"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (err) throw err; yauzl.fromBuffer(data, function(err, zipfile) { if (err) throw err; var entryNames = ["a.txt", "b.txt", "c.txt", "d/", "e/"]; zipfile.on("entry", function(entry) { var expectedName = entryNames.shift(); if (entry.fileName !== expectedName) { throw new Error("unexpected entry fileName: " + entry.fileName + ", expected: " + expectedName); } }); zipfile.on("end", function() { if (entryNames.length === 0) console.log("optional parameters and directories: pass"); }); }); })); }); })(); (function() { var zipfile = new yazl.ZipFile(); // all options parameters are optional zipfile.addBuffer(bufferFrom("hello"), "hello.txt", {compress: false}); zipfile.end(function(finalSize) { if (finalSize === -1) throw new Error("finalSize should be known"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (err) throw err; if (data.length !== finalSize) throw new Error("finalSize prediction is wrong. " + finalSize + " !== " + data.length); yauzl.fromBuffer(data, function(err, zipfile) { if (err) throw err; var entryNames = ["hello.txt"]; zipfile.on("entry", function(entry) { var expectedName = entryNames.shift(); if (entry.fileName !== expectedName) { throw new Error("unexpected entry fileName: " + entry.fileName + ", expected: " + expectedName); } }); zipfile.on("end", function() { if (entryNames.length === 0) console.log("justAddBuffer: pass"); }); }); })); }); })(); var weirdChars = '\u0000☺☻♥♦♣♠•◘○◙♂♀♪♫☼►◄↕‼¶§▬↨↑↓→←∟↔▲▼⌂ÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞φε∩≡±≥≤⌠⌡÷≈°∙·√ⁿ²■ '; (function() { var testCases = [ ["Hello World", "Hello World"], [bufferFrom("Hello"), "Hello"], [weirdChars, weirdChars], ]; testCases.forEach(function(testCase, i) { var zipfile = new yazl.ZipFile(); zipfile.end({ comment: testCase[0], }, function(finalSize) { if (finalSize === -1) throw new Error("finalSize should be known"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (err) throw err; if (data.length !== finalSize) throw new Error("finalSize prediction is wrong. " + finalSize + " !== " + data.length); yauzl.fromBuffer(data, function(err, zipfile) { if (err) throw err; if (zipfile.comment !== testCase[1]) { throw new Error("comment is wrong. 
" + JSON.stringify(zipfile.comment) + " !== " + JSON.stringify(testCase[1])); } console.log("comment(" + i + "): pass"); }); })); }); }); })(); (function() { var zipfile = new yazl.ZipFile(); try { zipfile.end({ comment: bufferFrom("01234567890123456789" + "\x50\x4b\x05\x06" + "01234567890123456789") }); } catch (e) { if (e.toString().indexOf("comment contains end of central directory record signature") !== -1) { console.log("block eocdr signature in comment: pass"); return; } } throw new Error("expected error for including eocdr signature in comment"); })(); (function() { var testCases = [ ["Hello World!", "Hello World!"], [bufferFrom("Hello!"), "Hello!"], [weirdChars, weirdChars], ]; testCases.forEach(function(testCase, i) { var zipfile = new yazl.ZipFile(); // all options parameters are optional zipfile.addBuffer(bufferFrom("hello"), "hello.txt", {compress: false, fileComment: testCase[0]}); zipfile.end(function(finalSize) { if (finalSize === -1) throw new Error("finalSize should be known"); zipfile.outputStream.pipe(new BufferList(function(err, data) { if (err) throw err; if (data.length !== finalSize) throw new Error("finalSize prediction is wrong. " + finalSize + " !== " + data.length); yauzl.fromBuffer(data, function(err, zipfile) { if (err) throw err; var entryNames = ["hello.txt"]; zipfile.on("entry", function(entry) { var expectedName = entryNames.shift(); if (entry.fileComment !== testCase[1]) { throw new Error("fileComment is wrong. " + JSON.stringify(entry.fileComment) + " !== " + JSON.stringify(testCase[1])); } }); zipfile.on("end", function() { if (entryNames.length === 0) console.log("fileComment(" + i + "): pass"); }); }); })); }); }); })(); function bufferFrom(something, encoding) { bufferFrom = modern; try { return bufferFrom(something, encoding); } catch (e) { bufferFrom = legacy; return bufferFrom(something, encoding); } function modern(something, encoding) { return Buffer.from(something, encoding); } function legacy(something, encoding) { return new Buffer(something, encoding); } }