pax_global_header 0000666 0000000 0000000 00000000064 13334162373 0014517 g ustar 00root root 0000000 0000000 52 comment=85a6940070a137009ec51e0c9bf44716dee86f48 multiparty-4.2.1/ 0000775 0000000 0000000 00000000000 13334162373 0013735 5 ustar 00root root 0000000 0000000 multiparty-4.2.1/.editorconfig 0000664 0000000 0000000 00000000263 13334162373 0016413 0 ustar 00root root 0000000 0000000 # http://editorconfig.org root = true [*] charset = utf-8 insert_final_newline = true trim_trailing_whitespace = true [{*.js,*.json,*.yml}] indent_size = 2 indent_style = space multiparty-4.2.1/.eslintignore 0000664 0000000 0000000 00000000026 13334162373 0016436 0 ustar 00root root 0000000 0000000 coverage node_modules multiparty-4.2.1/.eslintrc.yml 0000664 0000000 0000000 00000000447 13334162373 0016366 0 ustar 00root root 0000000 0000000 root: true rules: comma-dangle: error comma-style: - error - last eol-last: error indent: - error - 2 - SwitchCase: 1 no-param-reassign: error no-trailing-spaces: error no-unused-vars: - error - vars: all args: none ignoreRestSiblings: true multiparty-4.2.1/.gitignore 0000664 0000000 0000000 00000000046 13334162373 0015725 0 ustar 00root root 0000000 0000000 node_modules/ coverage/ npm-debug.log multiparty-4.2.1/.travis.yml 0000664 0000000 0000000 00000000757 13334162373 0016057 0 ustar 00root root 0000000 0000000 language: node_js node_js: - "0.10" - "0.12" - "4" - "6" - "8" - "10" sudo: false cache: directories: - node_modules before_install: # Skip updating shrinkwrap / lock - "npm config set shrinkwrap false" # Update Node.js modules - "test ! -d node_modules || npm prune" - "test ! -d node_modules || npm rebuild" script: - "npm run test-travis" - "npm run lint" after_script: - "npm install coveralls@2 && cat ./coverage/lcov.info | ./node_modules/.bin/coveralls" multiparty-4.2.1/CHANGELOG.md 0000664 0000000 0000000 00000026042 13334162373 0015552 0 ustar 00root root 0000000 0000000 ### 4.2.1 * Douglas Christopher Wilson: - Use uid-safe module to for temp file names - Update to fd-slicer 1.1.0 - Update to http-errors 1.7.0 ### 4.2.0 * Douglas Christopher Wilson: - Use http-errors for raised errors - Use random-bytes module for polyfill - perf: remove parameter reassignment * Graham Hunter - examples: update placement of response in azureblobstorage ### 4.1.4 * Douglas Christopher Wilson: - Add 8 to Travis CI - Add 10 to Travis CI - Enable strict mode - Use safe-buffer for improved API safety * Mike Samuel - Fix file extension filtering stopping on certain whitespace characters ### 4.1.3 * Douglas Christopher Wilson: - Update to fd-slicer 1.0.1 - Add 4 to Travis CI - Add 6 to Travis CI - Move repository to pillarjs/multiparty * Ivan Blazevic - Fix Azure example style * Сковорода Никита Андреевич - Use `os.tmpdir()` instead of `os.tmpDir()` ### 4.1.2 * Douglas Christopher Wilson: - Do not emit error on part prior to emitting part - Fix filename with quotes truncating from certain clients ### 4.1.1 * Douglas Christopher Wilson: - Do not clobber existing temporary files ### 4.1.0 * Douglas Christopher Wilson: - Update dependencies to latest - HTTP-related errors have a `statusCode` field * Andrew Kelley: - Refactor tests so that we can have a coverage badge ### 4.0.0 * Andrew Kelley: - 'part' events for fields no longer fire if `autoFields` is on. - 'part' events for files no longer fire if `autoFiles` is on. - 'field', 'file', 'part' events are guaranteed to emit in the correct order - the order that the user places the parts in the request. 
Each `part` 'end' event is guaranteed to emit before the next 'part' event is emitted. - Drop Node.js 0.8.x support. - Remove support for generating the hash digest of a part. If you want this, do it in your own code. - Now `part` objects emit 'error' events. This makes streaming work better since the part stream will emit an error when it is no longer streaming. - `file` objects no longer have the undocumented `ws` property. - More robust `maxFilesSize` implementation. Before it was possible for race conditions to cause more than `maxFilesSize` bytes to get written to disk. That is now fixed. - More robustly random temp file names. Now using 18 bytes of randomness instead of 8. - Better s3 example code. - Delete some unused legacy code. - Update and clarify documentation. * Douglas Christopher Wilson: - Require the close boundary. This makes multiparty more RFC-compliant and makes some invalid requests which used to work, now emit an error instead. ### 3.3.2 * Douglas Christopher Wilson: - Do not invoke callback after close - Share callback ending logic between error and close ### 3.3.1 * Andrew Kelley: - update request dev dependency to latest - remove problematic test fixtures ### 3.3.0 * Douglas Christopher Wilson: - Always emit close after all parts ended ### 3.2.10 * Douglas Christopher Wilson: - Expand form.parse in README - Remove execute bit from files - Fix callback hang in node.js 0.8 on errors * Andrew Kelley: - tests refactor * Thanasis Polychronakis: - docs: fix code error in readme ### 3.2.9 * Fix attaching error listeners directly after form.parse * Fix to not synchronously invoke callback to form.parse on error ### 3.2.8 * Fix developer accidentally corrupting data * Fix handling epilogue in a separate chunk * Fix initial check errors to use supplied callback ### 3.2.7 * Fix errors hanging responses in callback-style ### 3.2.6 * Fix maxFields to error on field after max ### 3.2.5 * Support boundary containing equal sign (thanks [garel-a]) ### 3.2.4 * Keep part.byteCount undefined in chunked encoding (thanks [dougwilson]) * Fix temp files not always cleaned up (thanks [dougwilson]) ### 3.2.3 * improve parsing boundary attribute from Content-Type (thanks [dougwilson]) ### 3.2.2 * fix error on empty payloads (thanks [dougwilson]) ### 3.2.1 * fix maxFilesSize overcalculation bug (thanks [dougwilson] and [timothysoehnlin]) ### 3.2.0 * add maxFilesSize for autoFiles (thanks [dougwilson]) ### 3.1.2 * exclude test files from npm package (thanks Dag Einar Monsen) * fix incorrectly using autoFields value for autoFiles (thanks RG72) ### 3.1.1 * fix not emitting 'close' after all part 'end' events ### 3.1.0 * support UTF8 filename in Content-Disposition (thanks baoshan) ### 3.0.0 * form.parse callback API changed in a compatibility-breaking manner. sorry, I know it sucks but the way I had it before is misleading and inconsistent. ### 2.2.0 * additional callback API to support multiple files with same field name * fix assertion crash when max field count is exceeded * fix assertion crash when client aborts an invalid request * (>=v0.10 only) unpipe the request when an error occurs to save resources. * update readable-stream to ~1.1.9 * fix assertion crash when EMFILE occurrs * (no more assertions - only 'error' events) ### 2.1.9 * relax content-type detection regex. (thanks amitaibu) ### 2.1.8 * replace deprecated Buffer.write(). (thanks hueniverse) ### 2.1.7 * add repository field to package.json ### 2.1.6 * expose `hash` as an option to `Form`. 
(thanks wookiehangover) ### 2.1.5 * fix possible 'close' event before all temp files are done ### 2.1.4 * fix crash for invalid requests ### 2.1.3 * add `file.size` ### 2.1.2 * proper backpressure support * update s3 example ### 2.1.1 * fix uploads larger than 2KB * fix both s3 and upload example * add part.byteCount and part.byteOffset ### 2.1.0 (recalled) * Complete rewrite. See README for changes and new API. ### v1.0.13 * Only update hash if update method exists (Sven Lito) * According to travis v0.10 needs to go quoted (Sven Lito) * Bumping build node versions (Sven Lito) * Additional fix for empty requests (Eugene Girshov) * Change the default to 1000, to match the new Node behaviour. (OrangeDog) * Add ability to control maxKeys in the querystring parser. (OrangeDog) * Adjust test case to work with node 0.9.x (Eugene Girshov) * Update package.json (Sven Lito) * Path adjustment according to eb4468b (Markus Ast) ### v1.0.12 * Emit error on aborted connections (Eugene Girshov) * Add support for empty requests (Eugene Girshov) * Fix name/filename handling in Content-Disposition (jesperp) * Tolerate malformed closing boundary in multipart (Eugene Girshov) * Ignore preamble in multipart messages (Eugene Girshov) * Add support for application/json (Mike Frey, Carlos Rodriguez) * Add support for Base64 encoding (Elmer Bulthuis) * Add File#toJSON (TJ Holowaychuk) * Remove support for Node.js 0.4 & 0.6 (Andrew Kelley) * Documentation improvements (Sven Lito, Andre Azevedo) * Add support for application/octet-stream (Ion Lupascu, Chris Scribner) * Use os.tmpDir() to get tmp directory (Andrew Kelley) * Improve package.json (Andrew Kelley, Sven Lito) * Fix benchmark script (Andrew Kelley) * Fix scope issue in incoming_forms (Sven Lito) * Fix file handle leak on error (OrangeDog) ### v1.0.11 * Calculate checksums for incoming files (sreuter) * Add definition parameters to "IncomingForm" as an argument (Math-) ### v1.0.10 * Make parts to be proper Streams (Matt Robenolt) ### v1.0.9 * Emit progress when content length header parsed (Tim Koschützki) * Fix Readme syntax due to GitHub changes (goob) * Replace references to old 'sys' module in Readme with 'util' (Peter Sugihara) ### v1.0.8 * Strip potentially unsafe characters when using `keepExtensions: true`. * Switch to utest / urun for testing * Add travis build ### v1.0.7 * Remove file from package that was causing problems when installing on windows. (#102) * Fix typos in Readme (Jason Davies). ### v1.0.6 * Do not default to the default to the field name for file uploads where filename="". ### v1.0.5 * Support filename="" in multipart parts * Explain unexpected end() errors in parser better **Note:** Starting with this version, formidable emits 'file' events for empty file input fields. Previously those were incorrectly emitted as regular file input fields with value = "". ### v1.0.4 * Detect a good default tmp directory regardless of platform. (#88) ### v1.0.3 * Fix problems with utf8 characters (#84) / semicolons in filenames (#58) * Small performance improvements * New test suite and fixture system ### v1.0.2 * Exclude node\_modules folder from git * Implement new `'aborted'` event * Fix files in example folder to work with recent node versions * Make gently a devDependency [See Commits](https://github.com/felixge/node-formidable/compare/v1.0.1...v1.0.2) ### v1.0.1 * Fix package.json to refer to proper main directory. 
(#68, Dean Landolt) [See Commits](https://github.com/felixge/node-formidable/compare/v1.0.0...v1.0.1) ### v1.0.0 * Add support for multipart boundaries that are quoted strings. (Jeff Craig) This marks the beginning of development on version 2.0 which will include several architectural improvements. [See Commits](https://github.com/felixge/node-formidable/compare/v0.9.11...v1.0.0) ### v0.9.11 * Emit `'progress'` event when receiving data, regardless of parsing it. (Tim Koschützki) * Use [W3C FileAPI Draft](http://dev.w3.org/2006/webapi/FileAPI/) properties for File class **Important:** The old property names of the File class will be removed in a future release. [See Commits](https://github.com/felixge/node-formidable/compare/v0.9.10...v0.9.11) ### Older releases These releases were done before starting to maintain the above Changelog: * [v0.9.10](https://github.com/felixge/node-formidable/compare/v0.9.9...v0.9.10) * [v0.9.9](https://github.com/felixge/node-formidable/compare/v0.9.8...v0.9.9) * [v0.9.8](https://github.com/felixge/node-formidable/compare/v0.9.7...v0.9.8) * [v0.9.7](https://github.com/felixge/node-formidable/compare/v0.9.6...v0.9.7) * [v0.9.6](https://github.com/felixge/node-formidable/compare/v0.9.5...v0.9.6) * [v0.9.5](https://github.com/felixge/node-formidable/compare/v0.9.4...v0.9.5) * [v0.9.4](https://github.com/felixge/node-formidable/compare/v0.9.3...v0.9.4) * [v0.9.3](https://github.com/felixge/node-formidable/compare/v0.9.2...v0.9.3) * [v0.9.2](https://github.com/felixge/node-formidable/compare/v0.9.1...v0.9.2) * [v0.9.1](https://github.com/felixge/node-formidable/compare/v0.9.0...v0.9.1) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.1.0](https://github.com/felixge/node-formidable/commits/v0.1.0) multiparty-4.2.1/LICENSE 0000664 0000000 0000000 00000002211 13334162373 0014736 0 ustar 00root root 0000000 0000000 (The MIT License) Copyright (c) 2013 Felix Geisendörfer Copyright (c) 2014 Andrew Kelley Copyright (c) 2014 Douglas Christopher Wilson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. multiparty-4.2.1/README.md 0000664 0000000 0000000 00000021115 13334162373 0015214 0 ustar 00root root 0000000 0000000 # multiparty [![NPM Version][npm-image]][npm-url] [![NPM Downloads][downloads-image]][downloads-url] [![Node.js Version][node-version-image]][node-version-url] [![Build Status][travis-image]][travis-url] [![Test Coverage][coveralls-image]][coveralls-url] Parse http requests with content-type `multipart/form-data`, also known as file uploads. See also [busboy](https://github.com/mscdex/busboy) - a [faster](https://github.com/mscdex/dicer/wiki/Benchmarks) alternative which may be worth looking into. ## Installation This is a [Node.js](https://nodejs.org/en/) module available through the [npm registry](https://www.npmjs.com/). Installation is done using the [`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): ``` npm install multiparty ``` ## Usage * See [examples](examples). Parse an incoming `multipart/form-data` request. ```js var multiparty = require('multiparty'); var http = require('http'); var util = require('util'); http.createServer(function(req, res) { if (req.url === '/upload' && req.method === 'POST') { // parse a file upload var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { res.writeHead(200, {'content-type': 'text/plain'}); res.write('received upload:\n\n'); res.end(util.inspect({fields: fields, files: files})); }); return; } // show a file upload form res.writeHead(200, {'content-type': 'text/html'}); res.end( '
<form action="/upload" enctype="multipart/form-data" method="post">'+ '<input type="text" name="title"><br>'+ '<input type="file" name="upload" multiple="multiple"><br>'+ '<input type="submit" value="Upload">'+ '</form>' ); }).listen(8080); ``` ## API ### multiparty.Form ```js var form = new multiparty.Form(options) ``` Creates a new form. Options: * `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`. * `maxFieldsSize` - Limits the amount of memory all fields (not files) can allocate in bytes. If this value is exceeded, an `error` event is emitted. The default size is 2MB. * `maxFields` - Limits the number of fields that will be parsed before emitting an `error` event. A file counts as a field in this case. Defaults to 1000. * `maxFilesSize` - Only relevant when `autoFiles` is `true`. Limits the total bytes accepted for all files combined. If this value is exceeded, an `error` event is emitted. The default is `Infinity`. * `autoFields` - Enables `field` events and disables `part` events for fields. This is automatically set to `true` if you add a `field` listener. * `autoFiles` - Enables `file` events and disables `part` events for files. This is automatically set to `true` if you add a `file` listener. * `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for placing file uploads in. You can move them later using `fs.rename()`. Defaults to `os.tmpdir()`. #### form.parse(request, [cb]) Parses an incoming node.js `request` containing form data. This will cause `form` to emit events based off the incoming request. ```js var count = 0; var form = new multiparty.Form(); // Errors may be emitted // Note that if you are listening to 'part' events, the same error may be // emitted from the `form` and the `part`. form.on('error', function(err) { console.log('Error parsing form: ' + err.stack); }); // Parts are emitted when parsing the form form.on('part', function(part) { // You *must* act on the part by reading it // NOTE: if you want to ignore it, just call "part.resume()" if (!part.filename) { // filename is not defined when this is a field and not a file console.log('got field named ' + part.name); // ignore field's content part.resume(); } if (part.filename) { // filename is defined when this is a file count++; console.log('got file named ' + part.name); // ignore file's content here part.resume(); } part.on('error', function(err) { // decide what to do }); }); // Close emitted after form parsed form.on('close', function() { console.log('Upload completed!'); res.setHeader('content-type', 'text/plain'); res.end('Received ' + count + ' files'); }); // Parse req form.parse(req); ``` If `cb` is provided, `autoFields` and `autoFiles` are set to `true` and all fields and files are collected and passed to the callback, removing the need to listen to any events on `form`. This is for convenience when you want to read everything, but be sure to write cleanup code, as this will write all uploaded files to the disk, even ones you may not be interested in. ```js form.parse(req, function(err, fields, files) { Object.keys(fields).forEach(function(name) { console.log('got field named ' + name); }); Object.keys(files).forEach(function(name) { console.log('got file named ' + name); }); console.log('Upload completed!'); res.setHeader('content-type', 'text/plain'); res.end('Received ' + Object.keys(files).length + ' files'); }); ``` `fields` is an object where the property names are field names and the values are arrays of field values. `files` is an object where the property names are field names and the values are arrays of file objects. #### form.bytesReceived The number of bytes received for this form so far. #### form.bytesExpected The expected number of bytes in this form.
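To show how these counters and the limit options fit together, here is a minimal sketch (not part of the package's examples directory) of a helper that reports upload progress and caps the total file bytes; the `parseWithProgress` name and the 20 MB limit are arbitrary choices for illustration:

```js
var multiparty = require('multiparty');

// Parse `req`, logging progress and capping total file bytes at ~20 MB.
function parseWithProgress(req, done) {
  var form = new multiparty.Form({ maxFilesSize: 20 * 1024 * 1024 });

  form.on('progress', function(bytesReceived, bytesExpected) {
    // bytesExpected may be unknown (e.g. chunked requests), so guard it
    if (!bytesExpected) return;
    console.log('received ' + bytesReceived + ' of ' + bytesExpected + ' bytes');
  });

  // Callback style: autoFields/autoFiles are enabled automatically
  form.parse(req, done);
}
```

If the limit is exceeded, the callback receives an error whose `statusCode` is 413, as described under the 'error' event below.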
### Events #### 'error' (err) Unless you supply a callback to `form.parse`, you definitely want to handle this event. Otherwise your server *will* crash when users submit bogus multipart requests! Only one 'error' event can ever be emitted, and if an 'error' event is emitted, then 'close' will not be emitted. If the error would correspond to a certain HTTP response code, the `err` object will have a `statusCode` property with the value of the suggested HTTP response code to send back. Note that an 'error' event will be emitted both from the `form` and from the current `part`. #### 'part' (part) Emitted when a part is encountered in the request. `part` is a `ReadableStream`. It also has the following properties: * `headers` - the headers for this part. For example, you may be interested in `content-type`. * `name` - the field name for this part * `filename` - only if the part is an incoming file * `byteOffset` - the byte offset of this part in the request body * `byteCount` - assuming that this is the last part in the request, this is the size of this part in bytes. You could use this, for example, to set the `Content-Length` header if uploading to S3. If the part had a `Content-Length` header then that value is used here instead. Parts for fields are not emitted when `autoFields` is on, and likewise parts for files are not emitted when `autoFiles` is on. `part` emits 'error' events! Make sure you handle them. #### 'aborted' Emitted when the request is aborted. This event will be followed shortly by an `error` event. In practice you do not need to handle this event. #### 'progress' (bytesReceived, bytesExpected) #### 'close' Emitted after all parts have been parsed and emitted. Not emitted if an `error` event is emitted. If you have `autoFiles` on, this is not fired until all the data has been flushed to disk and the file handles have been closed. This is typically when you would send your response. #### 'file' (name, file) **By default multiparty will not touch your hard drive.** But if you add this listener, multiparty automatically sets `form.autoFiles` to `true` and will stream uploads to disk for you. 
**The max bytes accepted per request can be specified with `maxFilesSize`.** * `name` - the field name for this file * `file` - an object with these properties: - `fieldName` - same as `name` - the field name for this file - `originalFilename` - the filename that the user reports for the file - `path` - the absolute path of the uploaded file on disk - `headers` - the HTTP headers that were sent along with this file - `size` - size of the file in bytes #### 'field' (name, value) * `name` - field name * `value` - string field value ## License [MIT](LICENSE) [npm-image]: https://img.shields.io/npm/v/multiparty.svg [npm-url]: https://npmjs.org/package/multiparty [node-version-image]: https://img.shields.io/node/v/multiparty.svg [node-version-url]: https://nodejs.org/en/download/ [travis-image]: https://img.shields.io/travis/pillarjs/multiparty/master.svg [travis-url]: https://travis-ci.org/pillarjs/multiparty [coveralls-image]: https://img.shields.io/coveralls/pillarjs/multiparty/master.svg [coveralls-url]: https://coveralls.io/r/pillarjs/multiparty?branch=master [downloads-image]: https://img.shields.io/npm/dm/multiparty.svg [downloads-url]: https://npmjs.org/package/multiparty multiparty-4.2.1/examples/ 0000775 0000000 0000000 00000000000 13334162373 0015553 5 ustar 00root root 0000000 0000000 multiparty-4.2.1/examples/azureblobstorage.js 0000664 0000000 0000000 00000002307 13334162373 0021465 0 ustar 00root root 0000000 0000000 var azure = require('azure') var http = require('http') var multiparty = require('../') var PORT = process.env.PORT || 27372; var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '<form action="/upload" enctype="multipart/form-data" method="post">'+ '<input type="file" name="upload"><br>'+ '<input type="submit" value="Upload">'+ '</form>' ); } else if (req.url === '/upload') { var blobService = azure.createBlobService(); var form = new multiparty.Form(); form.on('part', function(part) { if (!part.filename) return; var size = part.byteCount; var name = part.filename; var container = 'blobContainerName'; blobService.createBlockBlobFromStream(container, name, part, size, function(error) { if (error) { res.writeHead(500, {'content-type': 'text/plain'}); res.end('Error uploading file'); return; } res.writeHead(200, {'content-type': 'text/plain'}); res.end('File uploaded successfully'); }); }); form.parse(req); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:' + PORT + '/'); }); multiparty-4.2.1/examples/s3.js 0000664 0000000 0000000 00000003575 13334162373 0016440 0 ustar 00root root 0000000 0000000 if (!process.env.S3_BUCKET || !process.env.S3_KEY || !process.env.S3_SECRET) { console.log("To run this example, do this:"); console.log("npm install aws-sdk"); console.log('S3_BUCKET="(your s3 bucket)" S3_KEY="(your s3 key)" S3_SECRET="(your s3 secret)" node examples/s3.js'); process.exit(1); } var http = require('http'); var multiparty = require('../'); var AWS = require('aws-sdk'); var PORT = process.env.PORT || 27372; var bucket = process.env.S3_BUCKET; var s3Client = new AWS.S3({ accessKeyId: process.env.S3_KEY, secretAccessKey: process.env.S3_SECRET // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#constructor-property }); var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '<form action="/upload" enctype="multipart/form-data" method="post">'+ '<input type="text" name="path"><br>'+ '<input type="file" name="upload"><br>'+ '<input type="submit" value="Upload">'+ '</form>' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); var destPath; form.on('field', function(name, value) { if (name === 'path') { destPath = value; } }); form.on('part', function(part) { s3Client.putObject({ Bucket: bucket, Key: destPath, ACL: 'public-read', Body: part, ContentLength:
part.byteCount }, function(err, data) { if (err) throw err; console.log("done", data); res.end("OK"); console.log("https://s3.amazonaws.com/" + bucket + '/' + destPath); }); }); form.parse(req); } else { res.writeHead(404, {'content-type': 'text/plain'}); res.end('404'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); multiparty-4.2.1/examples/upload.js 0000664 0000000 0000000 00000002274 13334162373 0017402 0 ustar 00root root 0000000 0000000 var http = require('http') var multiparty = require('../') var util = require('util') var PORT = process.env.PORT || 27372 var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { if (err) { res.writeHead(400, {'content-type': 'text/plain'}); res.end("invalid request: " + err.message); return; } res.writeHead(200, {'content-type': 'text/plain'}); res.write('received fields:\n\n '+util.inspect(fields)); res.write('\n\n'); res.end('received files:\n\n '+util.inspect(files)); }); } else { res.writeHead(404, {'content-type': 'text/plain'}); res.end('404'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); multiparty-4.2.1/index.js 0000664 0000000 0000000 00000052114 13334162373 0015405 0 ustar 00root root 0000000 0000000 /*! * multiparty * Copyright(c) 2013 Felix Geisendörfer * Copyright(c) 2014 Andrew Kelley * Copyright(c) 2014 Douglas Christopher Wilson * MIT Licensed */ 'use strict' var createError = require('http-errors') var uid = require('uid-safe') var stream = require('stream'); var util = require('util'); var fs = require('fs'); var path = require('path'); var os = require('os'); var Buffer = require('safe-buffer').Buffer var StringDecoder = require('string_decoder').StringDecoder; var fdSlicer = require('fd-slicer'); var START = 0; var START_BOUNDARY = 1; var HEADER_FIELD_START = 2; var HEADER_FIELD = 3; var HEADER_VALUE_START = 4; var HEADER_VALUE = 5; var HEADER_VALUE_ALMOST_DONE = 6; var HEADERS_ALMOST_DONE = 7; var PART_DATA_START = 8; var PART_DATA = 9; var CLOSE_BOUNDARY = 10; var END = 11; var LF = 10; var CR = 13; var SPACE = 32; var HYPHEN = 45; var COLON = 58; var A = 97; var Z = 122; var CONTENT_TYPE_RE = /^multipart\/(?:form-data|related)(?:;|$)/i; var CONTENT_TYPE_PARAM_RE = /;\s*([^=]+)=(?:"([^"]+)"|([^;]+))/gi; var FILE_EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16})[\S\s]*/; var LAST_BOUNDARY_SUFFIX_LEN = 4; // --\r\n exports.Form = Form; util.inherits(Form, stream.Writable); function Form(options) { var opts = options || {} var self = this; stream.Writable.call(self); self.error = null; self.autoFields = !!opts.autoFields self.autoFiles = !!opts.autoFiles self.maxFields = opts.maxFields || 1000 self.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024 self.maxFilesSize = opts.maxFilesSize || Infinity self.uploadDir = opts.uploadDir || os.tmpdir() self.encoding = opts.encoding || 'utf8' self.bytesReceived = 0; self.bytesExpected = null; self.openedFiles = []; self.totalFieldSize = 0; self.totalFieldCount = 0; self.totalFileSize = 0; self.flushing = 0; self.backpressure = false; self.writeCbs = []; self.emitQueue = []; self.on('newListener', function(eventName) { if (eventName === 'file') { self.autoFiles = true; } else if (eventName === 'field') { self.autoFields = true; } }); } Form.prototype.parse = function(req, cb) { var called = false; var 
self = this; var waitend = true; if (cb) { // if the user supplies a callback, this implies autoFields and autoFiles self.autoFields = true; self.autoFiles = true; // wait for request to end before calling cb var end = function (done) { if (called) return; called = true; // wait for req events to fire process.nextTick(function() { if (waitend && req.readable) { // dump rest of request req.resume(); req.once('end', done); return; } done(); }); }; var fields = {}; var files = {}; self.on('error', function(err) { end(function() { cb(err); }); }); self.on('field', function(name, value) { var fieldsArray = fields[name] || (fields[name] = []); fieldsArray.push(value); }); self.on('file', function(name, file) { var filesArray = files[name] || (files[name] = []); filesArray.push(file); }); self.on('close', function() { end(function() { cb(null, fields, files); }); }); } self.handleError = handleError; self.bytesExpected = getBytesExpected(req.headers); req.on('end', onReqEnd); req.on('error', function(err) { waitend = false; handleError(err); }); req.on('aborted', onReqAborted); var state = req._readableState; if (req._decoder || (state && (state.encoding || state.decoder))) { // this is a binary protocol // if an encoding is set, input is likely corrupted validationError(new Error('request encoding must not be set')); return; } var contentType = req.headers['content-type']; if (!contentType) { validationError(createError(415, 'missing content-type header')); return; } var m = CONTENT_TYPE_RE.exec(contentType); if (!m) { validationError(createError(415, 'unsupported content-type')); return; } var boundary; CONTENT_TYPE_PARAM_RE.lastIndex = m.index + m[0].length - 1; while ((m = CONTENT_TYPE_PARAM_RE.exec(contentType))) { if (m[1].toLowerCase() !== 'boundary') continue; boundary = m[2] || m[3]; break; } if (!boundary) { validationError(createError(400, 'content-type missing boundary')); return; } setUpParser(self, boundary); req.pipe(self); function onReqAborted() { waitend = false; self.emit('aborted'); handleError(new Error("Request aborted")); } function onReqEnd() { waitend = false; } function handleError(err) { var first = !self.error; if (first) { self.error = err; req.removeListener('aborted', onReqAborted); req.removeListener('end', onReqEnd); if (self.destStream) { errorEventQueue(self, self.destStream, err); } } cleanupOpenFiles(self); if (first) { self.emit('error', err); } } function validationError(err) { // handle error on next tick for event listeners to attach process.nextTick(handleError.bind(null, err)) } }; Form.prototype._write = function(buffer, encoding, cb) { if (this.error) return; var self = this; var i = 0; var len = buffer.length; var prevIndex = self.index; var index = self.index; var state = self.state; var lookbehind = self.lookbehind; var boundary = self.boundary; var boundaryChars = self.boundaryChars; var boundaryLength = self.boundary.length; var boundaryEnd = boundaryLength - 1; var bufferLength = buffer.length; var c; var cl; for (i = 0; i < len; i++) { c = buffer[i]; switch (state) { case START: index = 0; state = START_BOUNDARY; /* falls through */ case START_BOUNDARY: if (index === boundaryLength - 2 && c === HYPHEN) { index = 1; state = CLOSE_BOUNDARY; break; } else if (index === boundaryLength - 2) { if (c !== CR) return self.handleError(createError(400, 'Expected CR Received ' + c)); index++; break; } else if (index === boundaryLength - 1) { if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); index = 0; 
self.onParsePartBegin(); state = HEADER_FIELD_START; break; } if (c !== boundary[index+2]) index = -2; if (c === boundary[index+2]) index++; break; case HEADER_FIELD_START: state = HEADER_FIELD; self.headerFieldMark = i; index = 0; /* falls through */ case HEADER_FIELD: if (c === CR) { self.headerFieldMark = null; state = HEADERS_ALMOST_DONE; break; } index++; if (c === HYPHEN) break; if (c === COLON) { if (index === 1) { // empty header field self.handleError(createError(400, 'Empty header field')); return; } self.onParseHeaderField(buffer.slice(self.headerFieldMark, i)); self.headerFieldMark = null; state = HEADER_VALUE_START; break; } cl = lower(c); if (cl < A || cl > Z) { self.handleError(createError(400, 'Expected alphabetic character, received ' + c)); return; } break; case HEADER_VALUE_START: if (c === SPACE) break; self.headerValueMark = i; state = HEADER_VALUE; /* falls through */ case HEADER_VALUE: if (c === CR) { self.onParseHeaderValue(buffer.slice(self.headerValueMark, i)); self.headerValueMark = null; self.onParseHeaderEnd(); state = HEADER_VALUE_ALMOST_DONE; } break; case HEADER_VALUE_ALMOST_DONE: if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); state = HEADER_FIELD_START; break; case HEADERS_ALMOST_DONE: if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); var err = self.onParseHeadersEnd(i + 1); if (err) return self.handleError(err); state = PART_DATA_START; break; case PART_DATA_START: state = PART_DATA; self.partDataMark = i; /* falls through */ case PART_DATA: prevIndex = index; if (index === 0) { // boyer-moore derrived algorithm to safely skip non-boundary data i += boundaryEnd; while (i < bufferLength && !(buffer[i] in boundaryChars)) { i += boundaryLength; } i -= boundaryEnd; c = buffer[i]; } if (index < boundaryLength) { if (boundary[index] === c) { if (index === 0) { self.onParsePartData(buffer.slice(self.partDataMark, i)); self.partDataMark = null; } index++; } else { index = 0; } } else if (index === boundaryLength) { index++; if (c === CR) { // CR = part boundary self.partBoundaryFlag = true; } else if (c === HYPHEN) { index = 1; state = CLOSE_BOUNDARY; break; } else { index = 0; } } else if (index - 1 === boundaryLength) { if (self.partBoundaryFlag) { index = 0; if (c === LF) { self.partBoundaryFlag = false; self.onParsePartEnd(); self.onParsePartBegin(); state = HEADER_FIELD_START; break; } } else { index = 0; } } if (index > 0) { // when matching a possible boundary, keep a lookbehind reference // in case it turns out to be a false lead lookbehind[index-1] = c; } else if (prevIndex > 0) { // if our boundary turned out to be rubbish, the captured lookbehind // belongs to partData self.onParsePartData(lookbehind.slice(0, prevIndex)); prevIndex = 0; self.partDataMark = i; // reconsider the current character even so it interrupted the sequence // it could be the beginning of a new sequence i--; } break; case CLOSE_BOUNDARY: if (c !== HYPHEN) return self.handleError(createError(400, 'Expected HYPHEN Received ' + c)); if (index === 1) { self.onParsePartEnd(); state = END; } else if (index > 1) { return self.handleError(new Error("Parser has invalid state.")); } index++; break; case END: break; default: self.handleError(new Error("Parser has invalid state.")); return; } } if (self.headerFieldMark != null) { self.onParseHeaderField(buffer.slice(self.headerFieldMark)); self.headerFieldMark = 0; } if (self.headerValueMark != null) { self.onParseHeaderValue(buffer.slice(self.headerValueMark)); 
self.headerValueMark = 0; } if (self.partDataMark != null) { self.onParsePartData(buffer.slice(self.partDataMark)); self.partDataMark = 0; } self.index = index; self.state = state; self.bytesReceived += buffer.length; self.emit('progress', self.bytesReceived, self.bytesExpected); if (self.backpressure) { self.writeCbs.push(cb); } else { cb(); } }; Form.prototype.onParsePartBegin = function() { clearPartVars(this); } Form.prototype.onParseHeaderField = function(b) { this.headerField += this.headerFieldDecoder.write(b); } Form.prototype.onParseHeaderValue = function(b) { this.headerValue += this.headerValueDecoder.write(b); } Form.prototype.onParseHeaderEnd = function() { this.headerField = this.headerField.toLowerCase(); this.partHeaders[this.headerField] = this.headerValue; var m; if (this.headerField === 'content-disposition') { if (m = this.headerValue.match(/\bname="([^"]+)"/i)) { this.partName = m[1]; } this.partFilename = parseFilename(this.headerValue); } else if (this.headerField === 'content-transfer-encoding') { this.partTransferEncoding = this.headerValue.toLowerCase(); } this.headerFieldDecoder = new StringDecoder(this.encoding); this.headerField = ''; this.headerValueDecoder = new StringDecoder(this.encoding); this.headerValue = ''; } Form.prototype.onParsePartData = function(b) { if (this.partTransferEncoding === 'base64') { this.backpressure = ! this.destStream.write(b.toString('ascii'), 'base64'); } else { this.backpressure = ! this.destStream.write(b); } } Form.prototype.onParsePartEnd = function() { if (this.destStream) { flushWriteCbs(this); var s = this.destStream; process.nextTick(function() { s.end(); }); } clearPartVars(this); } Form.prototype.onParseHeadersEnd = function(offset) { var self = this; switch(self.partTransferEncoding){ case 'binary': case '7bit': case '8bit': self.partTransferEncoding = 'binary'; break; case 'base64': break; default: return createError(400, 'unknown transfer-encoding: ' + self.partTransferEncoding); } self.totalFieldCount += 1; if (self.totalFieldCount > self.maxFields) { return createError(413, 'maxFields ' + self.maxFields + ' exceeded.'); } self.destStream = new stream.PassThrough(); self.destStream.on('drain', function() { flushWriteCbs(self); }); self.destStream.headers = self.partHeaders; self.destStream.name = self.partName; self.destStream.filename = self.partFilename; self.destStream.byteOffset = self.bytesReceived + offset; var partContentLength = self.destStream.headers['content-length']; self.destStream.byteCount = partContentLength ? parseInt(partContentLength, 10) : self.bytesExpected ? (self.bytesExpected - self.destStream.byteOffset - self.boundary.length - LAST_BOUNDARY_SUFFIX_LEN) : undefined; if (self.destStream.filename == null && self.autoFields) { handleField(self, self.destStream); } else if (self.destStream.filename != null && self.autoFiles) { handleFile(self, self.destStream); } else { handlePart(self, self.destStream); } } function flushWriteCbs(self) { self.writeCbs.forEach(function(cb) { process.nextTick(cb); }); self.writeCbs = []; self.backpressure = false; } function getBytesExpected(headers) { var contentLength = headers['content-length']; if (contentLength) { return parseInt(contentLength, 10); } else if (headers['transfer-encoding'] == null) { return 0; } else { return null; } } function beginFlush(self) { self.flushing += 1; } function endFlush(self) { self.flushing -= 1; if (self.flushing < 0) { // if this happens this is a critical bug in multiparty and this stack trace // will help us figure it out. 
self.handleError(new Error("unexpected endFlush")); return; } maybeClose(self); } function maybeClose(self) { if (self.flushing > 0 || self.error) return; // go through the emit queue in case any field, file, or part events are // waiting to be emitted holdEmitQueue(self)(function() { // nextTick because the user is listening to part 'end' events and we are // using part 'end' events to decide when to emit 'close'. we add our 'end' // handler before the user gets a chance to add theirs. So we make sure // their 'end' event fires before we emit the 'close' event. // this is covered by test/standalone/test-issue-36 process.nextTick(function() { self.emit('close'); }); }); } function cleanupOpenFiles(self) { self.openedFiles.forEach(function(internalFile) { // since fd slicer autoClose is true, destroying the only write stream // is guaranteed by the API to close the fd internalFile.ws.destroy(); fs.unlink(internalFile.publicFile.path, function(err) { if (err) self.handleError(err); }); }); self.openedFiles = []; } function holdEmitQueue(self, eventEmitter) { var item = {cb: null, ee: eventEmitter, err: null}; self.emitQueue.push(item); return function(cb) { item.cb = cb; flushEmitQueue(self); }; } function errorEventQueue(self, eventEmitter, err) { var items = self.emitQueue.filter(function (item) { return item.ee === eventEmitter; }); if (items.length === 0) { eventEmitter.emit('error', err); return; } items.forEach(function (item) { item.err = err; }); } function flushEmitQueue(self) { while (self.emitQueue.length > 0 && self.emitQueue[0].cb) { var item = self.emitQueue.shift(); // invoke the callback item.cb(); if (item.err) { // emit the delayed error item.ee.emit('error', item.err); } } } function handlePart(self, partStream) { beginFlush(self); var emitAndReleaseHold = holdEmitQueue(self, partStream); partStream.on('end', function() { endFlush(self); }); emitAndReleaseHold(function() { self.emit('part', partStream); }); } function handleFile(self, fileStream) { if (self.error) return; var publicFile = { fieldName: fileStream.name, originalFilename: fileStream.filename, path: uploadPath(self.uploadDir, fileStream.filename), headers: fileStream.headers, size: 0 }; var internalFile = { publicFile: publicFile, ws: null }; beginFlush(self); // flush to write stream var emitAndReleaseHold = holdEmitQueue(self, fileStream); fileStream.on('error', function(err) { self.handleError(err); }); fs.open(publicFile.path, 'wx', function(err, fd) { if (err) return self.handleError(err); var slicer = fdSlicer.createFromFd(fd, {autoClose: true}); // end option here guarantees that no more than that amount will be written // or else an error will be emitted internalFile.ws = slicer.createWriteStream({end: self.maxFilesSize - self.totalFileSize}); // if an error ocurred while we were waiting for fs.open we handle that // cleanup now self.openedFiles.push(internalFile); if (self.error) return cleanupOpenFiles(self); var prevByteCount = 0; internalFile.ws.on('error', function(err) { self.handleError(err.code === 'ETOOBIG' ? 
createError(413, err.message, { code: err.code }) : err) }); internalFile.ws.on('progress', function() { publicFile.size = internalFile.ws.bytesWritten; var delta = publicFile.size - prevByteCount; self.totalFileSize += delta; prevByteCount = publicFile.size; }); slicer.on('close', function() { if (self.error) return; emitAndReleaseHold(function() { self.emit('file', fileStream.name, publicFile); }); endFlush(self); }); fileStream.pipe(internalFile.ws); }); } function handleField(self, fieldStream) { var value = ''; var decoder = new StringDecoder(self.encoding); beginFlush(self); var emitAndReleaseHold = holdEmitQueue(self, fieldStream); fieldStream.on('error', function(err) { self.handleError(err); }); fieldStream.on('readable', function() { var buffer = fieldStream.read(); if (!buffer) return; self.totalFieldSize += buffer.length; if (self.totalFieldSize > self.maxFieldsSize) { self.handleError(createError(413, 'maxFieldsSize ' + self.maxFieldsSize + ' exceeded')); return; } value += decoder.write(buffer); }); fieldStream.on('end', function() { emitAndReleaseHold(function() { self.emit('field', fieldStream.name, value); }); endFlush(self); }); } function clearPartVars(self) { self.partHeaders = {}; self.partName = null; self.partFilename = null; self.partTransferEncoding = 'binary'; self.destStream = null; self.headerFieldDecoder = new StringDecoder(self.encoding); self.headerField = ""; self.headerValueDecoder = new StringDecoder(self.encoding); self.headerValue = ""; } function setUpParser(self, boundary) { self.boundary = Buffer.alloc(boundary.length + 4) self.boundary.write('\r\n--', 0, boundary.length + 4, 'ascii'); self.boundary.write(boundary, 4, boundary.length, 'ascii'); self.lookbehind = Buffer.alloc(self.boundary.length + 8) self.state = START; self.boundaryChars = {}; for (var i = 0; i < self.boundary.length; i++) { self.boundaryChars[self.boundary[i]] = true; } self.index = null; self.partBoundaryFlag = false; beginFlush(self); self.on('finish', function() { if (self.state !== END) { self.handleError(createError(400, 'stream ended unexpectedly')); } endFlush(self); }); } function uploadPath(baseDir, filename) { var ext = path.extname(filename).replace(FILE_EXT_RE, '$1'); var name = uid.sync(18) + ext return path.join(baseDir, name); } function parseFilename(headerValue) { var m = headerValue.match(/\bfilename="(.*?)"($|; )/i); if (!m) { m = headerValue.match(/\bfilename\*=utf-8\'\'(.*?)($|; )/i); if (m) { m[1] = decodeURI(m[1]); } else { return; } } var filename = m[1]; filename = filename.replace(/%22|\\"/g, '"'); filename = filename.replace(/([\d]{4});/g, function(m, code) { return String.fromCharCode(code); }); return filename.substr(filename.lastIndexOf('\\') + 1); } function lower(c) { return c | 0x20; } multiparty-4.2.1/package.json 0000664 0000000 0000000 00000002417 13334162373 0016227 0 ustar 00root root 0000000 0000000 { "name": "multiparty", "description": "multipart/form-data parser which supports streaming", "version": "4.2.1", "author": "Andrew Kelley