pax_global_header 0000666 0000000 0000000 00000000064 12420125301 0014500 g ustar 00root root 0000000 0000000 52 comment=78d2aee1f6d1e6aad71c9654371c84fdcd3c8255 node-multiparty-4.0.0/ 0000775 0000000 0000000 00000000000 12420125301 0014636 5 ustar 00root root 0000000 0000000 node-multiparty-4.0.0/.gitignore 0000664 0000000 0000000 00000000016 12420125301 0016623 0 ustar 00root root 0000000 0000000 /node_modules node-multiparty-4.0.0/.jshintrc 0000664 0000000 0000000 00000007011 12420125301 0016462 0 ustar 00root root 0000000 0000000 { // Settings "passfail" : false, // Stop on first error. "maxerr" : 100, // Maximum errors before stopping. // Predefined globals whom JSHint will ignore. "browser" : false, // Standard browser globals e.g. `window`, `document`. "node" : true, "rhino" : false, "couch" : false, "wsh" : false, // Windows Scripting Host. "jquery" : false, "prototypejs" : false, "mootools" : false, "dojo" : false, "predef" : [ "describe", "it", "before", "after" ], // Development. "debug" : true, // Allow debugger statements e.g. browser breakpoints. "devel" : true, // Allow development statements e.g. `console.log();`. // EcmaScript 5. "es5" : true, // Allow EcmaScript 5 syntax. "strict" : false, // Require `use strict` pragma in every file. "globalstrict" : true, // Allow global "use strict" (also enables 'strict'). // The Good Parts. "asi" : true, // Tolerate Automatic Semicolon Insertion (no semicolons). "laxbreak" : false, // Tolerate unsafe line breaks e.g. `return [\n] x` without semicolons. "laxcomma" : true, "bitwise" : false, // Prohibit bitwise operators (&, |, ^, etc.). "boss" : true, // Tolerate assignments inside if, for & while. Usually conditions & loops are for comparison, not assignments. "curly" : false, // Require {} for every new block or scope. "eqeqeq" : true, // Require triple equals i.e. `===`. "eqnull" : true, // Tolerate use of `== null`. "evil" : false, // Tolerate use of `eval`. 
"expr" : false, // Tolerate `ExpressionStatement` as Programs. "forin" : false, // Prohibt `for in` loops without `hasOwnProperty`. "immed" : true, // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );` "latedef" : false, // Prohibit variable use before definition. "loopfunc" : false, // Allow functions to be defined within loops. "noarg" : true, // Prohibit use of `arguments.caller` and `arguments.callee`. "regexp" : false, // Prohibit `.` and `[^...]` in regular expressions. "regexdash" : false, // Tolerate unescaped last dash i.e. `[-...]`. "scripturl" : false, // Tolerate script-targeted URLs. "shadow" : false, // Allows re-define variables later in code e.g. `var x=1; x=2;`. "supernew" : false, // Tolerate `new function () { ... };` and `new Object;`. "undef" : true, // Require all non-global variables be declared before they are used. // Persone styling prefrences. "newcap" : true, // Require capitalization of all constructor functions e.g. `new F()`. "noempty" : true, // Prohibit use of empty blocks. "nonew" : true, // Prohibit use of constructors for side-effects. "nomen" : false, // Prohibit use of initial or trailing underbars in names. "onevar" : false, // Allow only one `var` statement per function. "plusplus" : false, // Prohibit use of `++` & `--`. "sub" : false, // Tolerate all forms of subscript notation besides dot notation e.g. `dict['key']` instead of `dict.key`. "trailing" : true, // Prohibit trailing whitespaces. "white" : false // Check against strict whitespace and indentation rules. 
} node-multiparty-4.0.0/.npmignore 0000664 0000000 0000000 00000000021 12420125301 0016626 0 ustar 00root root 0000000 0000000 test/ examples/ node-multiparty-4.0.0/.travis.yml 0000664 0000000 0000000 00000000046 12420125301 0016747 0 ustar 00root root 0000000 0000000 language: node_js node_js: - "0.10" node-multiparty-4.0.0/CHANGELOG.md 0000664 0000000 0000000 00000023172 12420125301 0016454 0 ustar 00root root 0000000 0000000 ### 4.0.0 * Andrew Kelley: - 'part' events for fields no longer fire if `autoFields` is on. - 'part' events for files no longer fire if `autoFiles` is on. - 'field', 'file', 'part' events are guaranteed to emit in the correct order - the order that the user places the parts in the request. Each `part` 'end' event is guaranteed to emit before the next 'part' event is emitted. - Drop Node.js 0.8.x support. - Remove support for generating the hash digest of a part. If you want this, do it in your own code. - Now `part` objects emit 'error' events. This makes streaming work better since the part stream will emit an error when it is no longer streaming. - `file` objects no longer have the undocumented `ws` property. - More robust `maxFilesSize` implementation. Before it was possible for race conditions to cause more than `maxFilesSize` bytes to get written to disk. That is now fixed. - More robustly random temp file names. Now using 18 bytes of randomness instead of 8. - Better s3 example code. - Delete some unused legacy code. - Update and clarify documentation. * Douglas Christopher Wilson: - Require the close boundary. This makes multiparty more RFC-compliant and makes some invalid requests which used to work, now emit an error instead. 
### 3.3.2 * Douglas Christopher Wilson: - Do not invoke callback after close - Share callback ending logic between error and close ### 3.3.1 * Andrew Kelley: - update request dev dependency to latest - remove problematic test fixtures ### 3.3.0 * Douglas Christopher Wilson: - Always emit close after all parts ended ### 3.2.10 * Douglas Christopher Wilson: - Expand form.parse in README - Remove execute bit from files - Fix callback hang in node.js 0.8 on errors * Andrew Kelley: - tests refactor * Thanasis Polychronakis: - docs: fix code error in readme ### 3.2.9 * Fix attaching error listeners directly after form.parse * Fix to not synchronously invoke callback to form.parse on error ### 3.2.8 * Fix developer accidentally corrupting data * Fix handling epilogue in a separate chunk * Fix initial check errors to use supplied callback ### 3.2.7 * Fix errors hanging responses in callback-style ### 3.2.6 * Fix maxFields to error on field after max ### 3.2.5 * Support boundary containing equal sign (thanks [garel-a]) ### 3.2.4 * Keep part.byteCount undefined in chunked encoding (thanks [dougwilson]) * Fix temp files not always cleaned up (thanks [dougwilson]) ### 3.2.3 * improve parsing boundary attribute from Content-Type (thanks [dougwilson]) ### 3.2.2 * fix error on empty payloads (thanks [dougwilson]) ### 3.2.1 * fix maxFilesSize overcalculation bug (thanks [dougwilson] and [timothysoehnlin]) ### 3.2.0 * add maxFilesSize for autoFiles (thanks [dougwilson]) ### 3.1.2 * exclude test files from npm package (thanks Dag Einar Monsen) * fix incorrectly using autoFields value for autoFiles (thanks RG72) ### 3.1.1 * fix not emitting 'close' after all part 'end' events ### 3.1.0 * support UTF8 filename in Content-Disposition (thanks baoshan) ### 3.0.0 * form.parse callback API changed in a compatibility-breaking manner. sorry, I know it sucks but the way I had it before is misleading and inconsistent. 
### 2.2.0 * additional callback API to support multiple files with same field name * fix assertion crash when max field count is exceeded * fix assertion crash when client aborts an invalid request * (>=v0.10 only) unpipe the request when an error occurs to save resources. * update readable-stream to ~1.1.9 * fix assertion crash when EMFILE occurrs * (no more assertions - only 'error' events) ### 2.1.9 * relax content-type detection regex. (thanks amitaibu) ### 2.1.8 * replace deprecated Buffer.write(). (thanks hueniverse) ### 2.1.7 * add repository field to package.json ### 2.1.6 * expose `hash` as an option to `Form`. (thanks wookiehangover) ### 2.1.5 * fix possible 'close' event before all temp files are done ### 2.1.4 * fix crash for invalid requests ### 2.1.3 * add `file.size` ### 2.1.2 * proper backpressure support * update s3 example ### 2.1.1 * fix uploads larger than 2KB * fix both s3 and upload example * add part.byteCount and part.byteOffset ### 2.1.0 (recalled) * Complete rewrite. See README for changes and new API. ### v1.0.13 * Only update hash if update method exists (Sven Lito) * According to travis v0.10 needs to go quoted (Sven Lito) * Bumping build node versions (Sven Lito) * Additional fix for empty requests (Eugene Girshov) * Change the default to 1000, to match the new Node behaviour. (OrangeDog) * Add ability to control maxKeys in the querystring parser. 
(OrangeDog) * Adjust test case to work with node 0.9.x (Eugene Girshov) * Update package.json (Sven Lito) * Path adjustment according to eb4468b (Markus Ast) ### v1.0.12 * Emit error on aborted connections (Eugene Girshov) * Add support for empty requests (Eugene Girshov) * Fix name/filename handling in Content-Disposition (jesperp) * Tolerate malformed closing boundary in multipart (Eugene Girshov) * Ignore preamble in multipart messages (Eugene Girshov) * Add support for application/json (Mike Frey, Carlos Rodriguez) * Add support for Base64 encoding (Elmer Bulthuis) * Add File#toJSON (TJ Holowaychuk) * Remove support for Node.js 0.4 & 0.6 (Andrew Kelley) * Documentation improvements (Sven Lito, Andre Azevedo) * Add support for application/octet-stream (Ion Lupascu, Chris Scribner) * Use os.tmpDir() to get tmp directory (Andrew Kelley) * Improve package.json (Andrew Kelley, Sven Lito) * Fix benchmark script (Andrew Kelley) * Fix scope issue in incoming_forms (Sven Lito) * Fix file handle leak on error (OrangeDog) ### v1.0.11 * Calculate checksums for incoming files (sreuter) * Add definition parameters to "IncomingForm" as an argument (Math-) ### v1.0.10 * Make parts to be proper Streams (Matt Robenolt) ### v1.0.9 * Emit progress when content length header parsed (Tim Koschützki) * Fix Readme syntax due to GitHub changes (goob) * Replace references to old 'sys' module in Readme with 'util' (Peter Sugihara) ### v1.0.8 * Strip potentially unsafe characters when using `keepExtensions: true`. * Switch to utest / urun for testing * Add travis build ### v1.0.7 * Remove file from package that was causing problems when installing on windows. (#102) * Fix typos in Readme (Jason Davies). ### v1.0.6 * Do not default to the default to the field name for file uploads where filename="". 
### v1.0.5 * Support filename="" in multipart parts * Explain unexpected end() errors in parser better **Note:** Starting with this version, formidable emits 'file' events for empty file input fields. Previously those were incorrectly emitted as regular file input fields with value = "". ### v1.0.4 * Detect a good default tmp directory regardless of platform. (#88) ### v1.0.3 * Fix problems with utf8 characters (#84) / semicolons in filenames (#58) * Small performance improvements * New test suite and fixture system ### v1.0.2 * Exclude node\_modules folder from git * Implement new `'aborted'` event * Fix files in example folder to work with recent node versions * Make gently a devDependency [See Commits](https://github.com/felixge/node-formidable/compare/v1.0.1...v1.0.2) ### v1.0.1 * Fix package.json to refer to proper main directory. (#68, Dean Landolt) [See Commits](https://github.com/felixge/node-formidable/compare/v1.0.0...v1.0.1) ### v1.0.0 * Add support for multipart boundaries that are quoted strings. (Jeff Craig) This marks the beginning of development on version 2.0 which will include several architectural improvements. [See Commits](https://github.com/felixge/node-formidable/compare/v0.9.11...v1.0.0) ### v0.9.11 * Emit `'progress'` event when receiving data, regardless of parsing it. (Tim Koschützki) * Use [W3C FileAPI Draft](http://dev.w3.org/2006/webapi/FileAPI/) properties for File class **Important:** The old property names of the File class will be removed in a future release. 
[See Commits](https://github.com/felixge/node-formidable/compare/v0.9.10...v0.9.11) ### Older releases These releases were done before starting to maintain the above Changelog: * [v0.9.10](https://github.com/felixge/node-formidable/compare/v0.9.9...v0.9.10) * [v0.9.9](https://github.com/felixge/node-formidable/compare/v0.9.8...v0.9.9) * [v0.9.8](https://github.com/felixge/node-formidable/compare/v0.9.7...v0.9.8) * [v0.9.7](https://github.com/felixge/node-formidable/compare/v0.9.6...v0.9.7) * [v0.9.6](https://github.com/felixge/node-formidable/compare/v0.9.5...v0.9.6) * [v0.9.5](https://github.com/felixge/node-formidable/compare/v0.9.4...v0.9.5) * [v0.9.4](https://github.com/felixge/node-formidable/compare/v0.9.3...v0.9.4) * [v0.9.3](https://github.com/felixge/node-formidable/compare/v0.9.2...v0.9.3) * [v0.9.2](https://github.com/felixge/node-formidable/compare/v0.9.1...v0.9.2) * [v0.9.1](https://github.com/felixge/node-formidable/compare/v0.9.0...v0.9.1) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0) * [v0.1.0](https://github.com/felixge/node-formidable/commits/v0.1.0) node-multiparty-4.0.0/LICENSE 0000664 0000000 0000000 00000002217 12420125301 0015645 0 ustar 00root root 0000000 0000000 The MIT License (Expat) Copyright (c) 2014 Andrew Kelley Copyright (c) 2014 Douglas Christopher Wilson Copyright (c) 2013 Felix Geisendörfer Permission is hereby granted, free of charge, to any person 
obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. node-multiparty-4.0.0/README.md 0000664 0000000 0000000 00000020026 12420125301 0016115 0 ustar 00root root 0000000 0000000 # multiparty [](https://travis-ci.org/andrewrk/node-multiparty) [](http://badge.fury.io/js/multiparty) Parse http requests with content-type `multipart/form-data`, also known as file uploads. See also [busboy](https://github.com/mscdex/busboy) - a [faster](https://github.com/mscdex/dicer/wiki/Benchmarks) alternative which may be worth looking into. ### Why the fork? * This module uses the Node.js v0.10 streams properly * It will not create a temp file for you unless you want it to. * Counts bytes and does math to help you figure out the `Content-Length` of the final part. * You can stream uploads to s3 with [aws-sdk](https://github.com/aws/aws-sdk-js), for [example](examples/s3.js). * Less bugs. This code is simpler, has all deprecated functionality removed, has cleaner tests, and does not try to do anything beyond multipart stream parsing. 
## Installation ``` npm install multiparty ``` ## Usage * See [examples](examples). Parse an incoming `multipart/form-data` request. ```js var multiparty = require('multiparty') , http = require('http') , util = require('util') http.createServer(function(req, res) { if (req.url === '/upload' && req.method === 'POST') { // parse a file upload var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { res.writeHead(200, {'content-type': 'text/plain'}); res.write('received upload:\n\n'); res.end(util.inspect({fields: fields, files: files})); }); return; } // show a file upload form res.writeHead(200, {'content-type': 'text/html'}); res.end( '
' ); }).listen(8080); ``` ## API ### multiparty.Form ```js var form = new multiparty.Form(options) ``` Creates a new form. Options: * `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`. * `maxFieldsSize` - Limits the amount of memory all fields (not files) can allocate in bytes. If this value is exceeded, an `error` event is emitted. The default size is 2MB. * `maxFields` - Limits the number of fields that will be parsed before emitting an `error` event. A file counts as a field in this case. Defaults to 1000. * `maxFilesSize` - Only relevant when `autoFiles` is `true`. Limits the total bytes accepted for all files combined. If this value is exceeded, an `error` event is emitted. The default is `Infinity`. * `autoFields` - Enables `field` events and disables `part` events for fields. This is automatically set to `true` if you add a `field` listener. * `autoFiles` - Enables `file` events and disables `part` events for files. This is automatically set to `true` if you add a `file` listener. * `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for placing file uploads in. You can move them later using `fs.rename()`. Defaults to `os.tmpDir()`. #### form.parse(request, [cb]) Parses an incoming node.js `request` containing form data.This will cause `form` to emit events based off the incoming request. ```js var count = 0; var form = new multiparty.Form(); // Errors may be emitted // Note that if you are listening to 'part' events, the same error may be // emitted from the `form` and the `part`. 
form.on('error', function(err) { console.log('Error parsing form: ' + err.stack); }); // Parts are emitted when parsing the form form.on('part', function(part) { // You *must* act on the part by reading it // NOTE: if you want to ignore it, just call "part.resume()" if (part.filename === null) { // filename is "null" when this is a field and not a file console.log('got field named ' + part.name); // ignore field's content part.resume(); } if (part.filename !== null) { // filename is not "null" when this is a file count++; console.log('got file named ' + part.name); // ignore file's content here part.resume(); } part.on('error', function(err) { // decide what to do }); }); // Close emitted after form parsed form.on('close', function() { console.log('Upload completed!'); res.setHeader('text/plain'); res.end('Received ' + count + ' files'); }); // Parse req form.parse(req); ``` If `cb` is provided, `autoFields` and `autoFiles` are set to `true` and all fields and files are collected and passed to the callback, removing the need to listen to any events on `form`. This is for convenience when you want to read everything, but be sure to write cleanup code, as this will write all uploaded files to the disk, even ones you may not be interested in. ```js form.parse(req, function(err, fields, files) { Object.keys(fields).forEach(function(name) { console.log('got field named ' + name); }); Object.keys(files).forEach(function(name) { console.log('got file named ' + name); }); console.log('Upload completed!'); res.setHeader('text/plain'); res.end('Received ' + files.length + ' files'); }); ``` `fields` is an object where the property names are field names and the values are arrays of field values. `files` is an object where the property names are field names and the values are arrays of file objects. #### form.bytesReceived The amount of bytes received for this form so far. #### form.bytesExpected The expected number of bytes in this form. 
### Events #### 'error' (err) Unless you supply a callback to `form.parse`, you definitely want to handle this event. Otherwise your server *will* crash when users submit bogus multipart requests! Only one 'error' event can ever be emitted, and if an 'error' event is emitted, then 'close' will not be emitted. Note that an 'error' event will be emitted both from the `form` and from the current `part`. #### 'part' (part) Emitted when a part is encountered in the request. `part` is a `ReadableStream`. It also has the following properties: * `headers` - the headers for this part. For example, you may be interested in `content-type`. * `name` - the field name for this part * `filename` - only if the part is an incoming file * `byteOffset` - the byte offset of this part in the request body * `byteCount` - assuming that this is the last part in the request, this is the size of this part in bytes. You could use this, for example, to set the `Content-Length` header if uploading to S3. If the part had a `Content-Length` header then that value is used here instead. Parts for fields are not emitted when `autoFields` is on, and likewise parts for files are not emitted when `autoFiles` is on. `part` emits 'error' events! Make sure you handle them. #### 'aborted' Emitted when the request is aborted. This event will be followed shortly by an `error` event. In practice you do not need to handle this event. #### 'progress' (bytesReceived, bytesExpected) #### 'close' Emitted after all parts have been parsed and emitted. Not emitted if an `error` event is emitted. If you have `autoFiles` on, this is not fired until all the data has been flushed to disk and the file handles have been closed. This is typically when you would send your response. #### 'file' (name, file) **By default multiparty will not touch your hard drive.** But if you add this listener, multiparty automatically sets `form.autoFiles` to `true` and will stream uploads to disk for you. 
**The max bytes accepted per request can be specified with `maxFilesSize`.** * `name` - the field name for this file * `file` - an object with these properties: - `fieldName` - same as `name` - the field name for this file - `originalFilename` - the filename that the user reports for the file - `path` - the absolute path of the uploaded file on disk - `headers` - the HTTP headers that were sent along with this file - `size` - size of the file in bytes #### 'field' (name, value) * `name` - field name * `value` - string field value node-multiparty-4.0.0/examples/ 0000775 0000000 0000000 00000000000 12420125301 0016454 5 ustar 00root root 0000000 0000000 node-multiparty-4.0.0/examples/azureblobstorage.js 0000664 0000000 0000000 00000002204 12420125301 0022362 0 ustar 00root root 0000000 0000000 var http = require('http') , util = require('util') , multiparty = require('../') , azure = require('azure') , PORT = process.env.PORT || 27372 var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '' ); } else if (req.url === '/upload') { var blobService = azure.createBlobService(); var form = new multiparty.Form(); form.on('part', function(part) { if (!part.filename) return; var size = part.byteCount - part.byteOffset; var name = part.filename; var container = 'blobContainerName'; blobService.createBlockBlobFromStream(container, name, part, size, function(error) { if (error) { // error handling } }); }); form.parse(req); res.send('File uploaded successfully'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); node-multiparty-4.0.0/examples/s3.js 0000664 0000000 0000000 00000003742 12420125301 0017345 0 ustar 00root root 0000000 0000000 if (!process.env.S3_BUCKET || !process.env.S3_KEY || !process.env.S3_SECRET) { console.log("To run this example, do this:"); console.log("npm install aws-sdk"); console.log('S3_BUCKET="(your s3 bucket)" S3_KEY="(your 
s3 key)" S3_SECRET="(your s3 secret) node examples/s3.js"'); process.exit(1); } var http = require('http'); var util = require('util'); var multiparty = require('../'); var AWS = require('aws-sdk'); var PORT = process.env.PORT || 27372; var bucket = process.env.S3_BUCKET; var s3Client = new AWS.S3({ accessKeyId: process.env.S3_KEY, secretAccessKey: process.env.S3_SECRET, // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#constructor-property }); var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); var destPath; form.on('field', function(name, value) { if (name === 'path') { destPath = value; } }); form.on('part', function(part) { s3Client.putObject({ Bucket: bucket, Key: destPath, ACL: 'public-read', Body: part, ContentLength: part.byteCount, }, function(err, data) { if (err) throw err; console.log("done", data); res.end("OK"); console.log("https://s3.amazonaws.com/" + bucket + '/' + destPath); }); }); form.parse(req); } else { res.writeHead(404, {'content-type': 'text/plain'}); res.end('404'); } function onEnd() { throw new Error("no uploaded file"); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); node-multiparty-4.0.0/examples/upload.js 0000664 0000000 0000000 00000002273 12420125301 0020302 0 ustar 00root root 0000000 0000000 var http = require('http') , util = require('util') , multiparty = require('../') , PORT = process.env.PORT || 27372 var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, {'content-type': 'text/html'}); res.end( '' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { if (err) { res.writeHead(400, {'content-type': 'text/plain'}); res.end("invalid request: " + err.message); return; } res.writeHead(200, 
{'content-type': 'text/plain'}); res.write('received fields:\n\n '+util.inspect(fields)); res.write('\n\n'); res.end('received files:\n\n '+util.inspect(files)); }); } else { res.writeHead(404, {'content-type': 'text/plain'}); res.end('404'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); node-multiparty-4.0.0/index.js 0000664 0000000 0000000 00000051057 12420125301 0016313 0 ustar 00root root 0000000 0000000 var stream = require('stream'); var util = require('util'); var fs = require('fs'); var crypto = require('crypto'); var path = require('path'); var os = require('os'); var StringDecoder = require('string_decoder').StringDecoder; var FdSlicer = require('fd-slicer'); var START = 0; var START_BOUNDARY = 1; var HEADER_FIELD_START = 2; var HEADER_FIELD = 3; var HEADER_VALUE_START = 4; var HEADER_VALUE = 5; var HEADER_VALUE_ALMOST_DONE = 6; var HEADERS_ALMOST_DONE = 7; var PART_DATA_START = 8; var PART_DATA = 9; var PART_END = 10; var CLOSE_BOUNDARY = 11; var END = 12; var LF = 10; var CR = 13; var SPACE = 32; var HYPHEN = 45; var COLON = 58; var A = 97; var Z = 122; var CONTENT_TYPE_RE = /^multipart\/(?:form-data|related)(?:;|$)/i; var CONTENT_TYPE_PARAM_RE = /;\s*([^=]+)=(?:"([^"]+)"|([^;]+))/gi; var FILE_EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16}).*/; var LAST_BOUNDARY_SUFFIX_LEN = 4; // --\r\n // replace base64 characters with safe-for-filename characters var b64Safe = {'/': '_', '+': '-'}; exports.Form = Form; util.inherits(Form, stream.Writable); function Form(options) { var self = this; stream.Writable.call(self); options = options || {}; self.error = null; self.autoFields = !!options.autoFields; self.autoFiles = !!options.autoFiles; self.maxFields = options.maxFields || 1000; self.maxFieldsSize = options.maxFieldsSize || 2 * 1024 * 1024; self.maxFilesSize = options.maxFilesSize || Infinity; self.uploadDir = options.uploadDir || os.tmpDir(); self.encoding = options.encoding || 'utf8'; self.bytesReceived = 0; 
self.bytesExpected = null; self.openedFiles = []; self.totalFieldSize = 0; self.totalFieldCount = 0; self.totalFileSize = 0; self.flushing = 0; self.backpressure = false; self.writeCbs = []; self.emitQueue = []; self.on('newListener', function(eventName) { if (eventName === 'file') { self.autoFiles = true; } else if (eventName === 'field') { self.autoFields = true; } }); } Form.prototype.parse = function(req, cb) { var called = false; var self = this; var waitend = true; if (cb) { // if the user supplies a callback, this implies autoFields and autoFiles self.autoFields = true; self.autoFiles = true; // wait for request to end before calling cb var end = function (done) { if (called) return; called = true; // wait for req events to fire process.nextTick(function() { if (waitend && req.readable) { // dump rest of request req.resume(); req.once('end', done); return; } done(); }); }; var fields = {}; var files = {}; self.on('error', function(err) { end(function() { cb(err); }); }); self.on('field', function(name, value) { var fieldsArray = fields[name] || (fields[name] = []); fieldsArray.push(value); }); self.on('file', function(name, file) { var filesArray = files[name] || (files[name] = []); filesArray.push(file); }); self.on('close', function() { end(function() { cb(null, fields, files); }); }); } self.handleError = handleError; self.bytesExpected = getBytesExpected(req.headers); req.on('end', onReqEnd); req.on('error', function(err) { waitend = false; handleError(err); }); req.on('aborted', onReqAborted); var state = req._readableState; if (req._decoder || (state && (state.encoding || state.decoder))) { // this is a binary protocol // if an encoding is set, input is likely corrupted validationError(new Error('request encoding must not be set')); return; } var contentType = req.headers['content-type']; if (!contentType) { validationError(new Error('missing content-type header')); return; } var m = CONTENT_TYPE_RE.exec(contentType); if (!m) { validationError(new 
Error('unrecognized content-type: ' + contentType)); return; } var boundary; CONTENT_TYPE_PARAM_RE.lastIndex = m.index + m[0].length - 1; while ((m = CONTENT_TYPE_PARAM_RE.exec(contentType))) { if (m[1].toLowerCase() !== 'boundary') continue; boundary = m[2] || m[3]; break; } if (!boundary) { validationError(new Error('content-type missing boundary: ' + require('util').inspect(m))); return; } setUpParser(self, boundary); req.pipe(self); function onReqAborted() { waitend = false; self.emit('aborted'); handleError(new Error("Request aborted")); } function onReqEnd() { waitend = false; } function handleError(err) { var first = !self.error; if (first) { self.error = err; req.removeListener('aborted', onReqAborted); req.removeListener('end', onReqEnd); if (self.destStream) { self.destStream.emit('error', err); } } cleanupOpenFiles(self); if (first) { self.emit('error', err); } } function validationError(err) { // handle error on next tick for event listeners to attach process.nextTick(handleError.bind(null, err)) } }; Form.prototype._write = function(buffer, encoding, cb) { if (this.error) return; var self = this; var i = 0; var len = buffer.length; var prevIndex = self.index; var index = self.index; var state = self.state; var lookbehind = self.lookbehind; var boundary = self.boundary; var boundaryChars = self.boundaryChars; var boundaryLength = self.boundary.length; var boundaryEnd = boundaryLength - 1; var bufferLength = buffer.length; var c; var cl; for (i = 0; i < len; i++) { c = buffer[i]; switch (state) { case START: index = 0; state = START_BOUNDARY; /* falls through */ case START_BOUNDARY: if (index === boundaryLength - 2 && c === HYPHEN) { index = 1; state = CLOSE_BOUNDARY; break; } else if (index === boundaryLength - 2) { if (c !== CR) return self.handleError(new Error("Expected CR Received " + c)); index++; break; } else if (index === boundaryLength - 1) { if (c !== LF) return self.handleError(new Error("Expected LF Received " + c)); index = 0; 
// --- continuation of Form.prototype.write: the per-byte multipart parser ---
// (the start of this function, including the loop header and the initial
// boundary-matching states, lies above this chunk)
self.onParsePartBegin();
state = HEADER_FIELD_START;
break;
}
// still matching the stored boundary buffer ("\r\n--boundary"); the +2
// offset skips its leading CRLF while scanning the opening delimiter
if (c !== boundary[index+2]) index = -2;
if (c === boundary[index+2]) index++;
break;
case HEADER_FIELD_START:
state = HEADER_FIELD;
self.headerFieldMark = i; // start offset of the header name in `buffer`
index = 0;
/* falls through */
case HEADER_FIELD:
if (c === CR) {
// CR with no header name: blank line, headers are done (expect LF next)
self.headerFieldMark = null;
state = HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) break; // hyphens are legal inside header names
if (c === COLON) {
if (index === 1) {
// empty header field
self.handleError(new Error("Empty header field"));
return;
}
self.onParseHeaderField(buffer.slice(self.headerFieldMark, i));
self.headerFieldMark = null;
state = HEADER_VALUE_START;
break;
}
// header-name bytes must otherwise be alphabetic (case-insensitive)
cl = lower(c);
if (cl < A || cl > Z) {
self.handleError(new Error("Expected alphabetic character, received " + c));
return;
}
break;
case HEADER_VALUE_START:
if (c === SPACE) break; // skip leading whitespace after the colon
self.headerValueMark = i; // start offset of the header value
state = HEADER_VALUE;
/* falls through */
case HEADER_VALUE:
if (c === CR) {
self.onParseHeaderValue(buffer.slice(self.headerValueMark, i));
self.headerValueMark = null;
self.onParseHeaderEnd();
state = HEADER_VALUE_ALMOST_DONE;
}
break;
case HEADER_VALUE_ALMOST_DONE:
if (c !== LF) return self.handleError(new Error("Expected LF Received " + c));
state = HEADER_FIELD_START;
break;
case HEADERS_ALMOST_DONE:
if (c !== LF) return self.handleError(new Error("Expected LF Received " + c));
// headers complete; onParseHeadersEnd returns an Error on bad input
// (unknown transfer-encoding, maxFields exceeded) instead of throwing
var err = self.onParseHeadersEnd(i + 1);
if (err) return self.handleError(err);
state = PART_DATA_START;
break;
case PART_DATA_START:
state = PART_DATA;
self.partDataMark = i; // start offset of the part body in `buffer`
/* falls through */
case PART_DATA:
prevIndex = index;
if (index === 0) {
// boyer-moore derived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(buffer[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = buffer[i];
}
if (index < boundaryLength) {
if (boundary[index] === c) {
if (index === 0) {
// possible boundary start: flush the part data gathered so far
self.onParsePartData(buffer.slice(self.partDataMark, i));
self.partDataMark = null;
}
index++;
} else {
index = 0;
}
} else if (index === boundaryLength) {
index++;
if (c === CR) {
// CR = part boundary
self.partBoundaryFlag = true;
} else if (c === HYPHEN) {
// HYPHEN after the boundary: possibly the final "--" terminator
index = 1;
state = CLOSE_BOUNDARY;
break;
} else {
index = 0;
}
} else if (index - 1 === boundaryLength) {
if (self.partBoundaryFlag) {
index = 0;
if (c === LF) {
// CRLF after the boundary confirms it: end this part, begin the next
self.partBoundaryFlag = false;
self.onParsePartEnd();
self.onParsePartBegin();
state = HEADER_FIELD_START;
break;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index-1] = c;
} else if (prevIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
self.onParsePartData(lookbehind.slice(0, prevIndex));
prevIndex = 0;
self.partDataMark = i;
// reconsider the current character even so it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case CLOSE_BOUNDARY:
if (c !== HYPHEN) return self.handleError(new Error("Expected HYPHEN Received " + c));
if (index === 1) {
// second hyphen of the closing "--": the whole form is finished
self.onParsePartEnd();
state = END;
} else if (index > 1) {
return self.handleError(new Error("Parser has invalid state."));
}
index++;
break;
case END:
break;
default:
self.handleError(new Error("Parser has invalid state."));
return;
}
}
// buffer exhausted: flush any still-open marks so their data is emitted
// now; marks are reset to 0 so capture resumes at the start of the next
// buffer passed to write()
if (self.headerFieldMark != null) {
self.onParseHeaderField(buffer.slice(self.headerFieldMark));
self.headerFieldMark = 0;
}
if (self.headerValueMark != null) {
self.onParseHeaderValue(buffer.slice(self.headerValueMark));
self.headerValueMark = 0;
}
if (self.partDataMark != null) {
self.onParsePartData(buffer.slice(self.partDataMark));
self.partDataMark = 0;
}
// persist loop state for the next write() call and report progress
self.index = index;
self.state = state;
self.bytesReceived += buffer.length;
self.emit('progress', self.bytesReceived, self.bytesExpected);
// if the destination stream reported backpressure, park the write callback
// until 'drain' (see flushWriteCbs); otherwise acknowledge immediately
if (self.backpressure) {
self.writeCbs.push(cb);
} else {
cb();
}
};

// A new part is starting: reset all per-part state.
Form.prototype.onParsePartBegin = function() {
clearPartVars(this);
}

// Accumulate header-name bytes, decoded with the form's encoding.
Form.prototype.onParseHeaderField = function(b) {
this.headerField
// (continuation of Form.prototype.onParseHeaderField, begun above)
+= this.headerFieldDecoder.write(b);
}

// Accumulate header-value bytes, decoded with the form's encoding.
Form.prototype.onParseHeaderValue = function(b) {
this.headerValue += this.headerValueDecoder.write(b);
}

// One complete "Name: value" header has been read: record it in partHeaders
// (name lowercased), capture the part name/filename from content-disposition
// and the transfer encoding, then reset the accumulators for the next header.
Form.prototype.onParseHeaderEnd = function() {
this.headerField = this.headerField.toLowerCase();
this.partHeaders[this.headerField] = this.headerValue;
var m;
if (this.headerField === 'content-disposition') {
// assignment inside `if` is intentional: m receives the regex match
if (m = this.headerValue.match(/\bname="([^"]+)"/i)) {
this.partName = m[1];
}
this.partFilename = parseFilename(this.headerValue);
} else if (this.headerField === 'content-transfer-encoding') {
this.partTransferEncoding = this.headerValue.toLowerCase();
}
this.headerFieldDecoder = new StringDecoder(this.encoding);
this.headerField = '';
this.headerValueDecoder = new StringDecoder(this.encoding);
this.headerValue = '';
}

// Forward a chunk of part body to the part's destination stream, decoding
// base64 parts on the fly. Records the stream's backpressure signal so
// write() can delay its callback until 'drain'.
Form.prototype.onParsePartData = function(b) {
if (this.partTransferEncoding === 'base64') {
this.backpressure = ! this.destStream.write(b.toString('ascii'), 'base64');
} else {
this.backpressure = ! this.destStream.write(b);
}
}

// A part's body ended at a boundary: release any parked write callbacks,
// end the destination stream on the next tick, and reset per-part state.
Form.prototype.onParsePartEnd = function() {
if (this.destStream) {
flushWriteCbs(this);
var s = this.destStream;
process.nextTick(function() {
s.end();
});
}
clearPartVars(this);
}

// All headers of a part have been parsed. Validates the transfer encoding
// and the field count, then creates the PassThrough stream that will carry
// the part body and routes it to field/file/part handling.
// `offset` is the buffer offset just past the header block (used for
// byteOffset). Returns an Error on invalid input instead of throwing.
Form.prototype.onParseHeadersEnd = function(offset) {
var self = this;
switch(self.partTransferEncoding){
// 7bit/8bit are equivalent to raw binary for our purposes
case 'binary':
case '7bit':
case '8bit':
self.partTransferEncoding = 'binary';
break;
case 'base64': break;
default:
return new Error("unknown transfer-encoding: " + self.partTransferEncoding);
}
self.totalFieldCount += 1;
if (self.totalFieldCount > self.maxFields) {
return new Error("maxFields " + self.maxFields + " exceeded.");
}
self.destStream = new stream.PassThrough();
self.destStream.on('drain', function() {
// the part stream drained: release write callbacks held by backpressure
flushWriteCbs(self);
});
// decorate the part stream with metadata consumers rely on
self.destStream.headers = self.partHeaders;
self.destStream.name = self.partName;
self.destStream.filename = self.partFilename;
self.destStream.byteOffset = self.bytesReceived + offset;
var partContentLength = self.destStream.headers['content-length'];
// (continuation of Form.prototype.onParseHeadersEnd, begun above)
// best-effort size of this part: prefer an explicit content-length header,
// otherwise estimate from the request's expected total minus the boundary
// overhead; undefined when neither is known
self.destStream.byteCount = partContentLength ?
parseInt(partContentLength, 10) :
self.bytesExpected ?
(self.bytesExpected - self.destStream.byteOffset - self.boundary.length - LAST_BOUNDARY_SUFFIX_LEN) :
undefined;
// route the part: no filename => field, filename => file, else raw part
if (self.destStream.filename == null && self.autoFields) {
handleField(self, self.destStream);
} else if (self.destStream.filename != null && self.autoFiles) {
handleFile(self, self.destStream);
} else {
handlePart(self, self.destStream);
}
}

// Invoke (on next tick) every write callback that was parked because of
// backpressure, then clear the backpressure flag.
function flushWriteCbs(self) {
self.writeCbs.forEach(function(cb) {
process.nextTick(cb);
});
self.writeCbs = [];
self.backpressure = false;
}

// Determine how many bytes to expect from the request headers: a number
// when content-length is present, 0 when neither content-length nor
// transfer-encoding exists, or null (unknown) for chunked requests.
function getBytesExpected(headers) {
var contentLength = headers['content-length'];
if (contentLength) {
return parseInt(contentLength, 10);
} else if (headers['transfer-encoding'] == null) {
return 0;
} else {
return null;
}
}

// Count one more in-flight async operation that must finish before 'close'.
function beginFlush(self) {
self.flushing += 1;
}

// Mark one in-flight operation finished; once the count reaches zero,
// maybeClose() may emit 'close'. A negative count means unbalanced calls.
function endFlush(self) {
self.flushing -= 1;
if (self.flushing < 0) {
// if this happens this is a critical bug in multiparty and this stack trace
// will help us figure it out.
self.handleError(new Error("unexpected endFlush"));
return;
}
maybeClose(self);
}

// Emit 'close' once nothing is in flight and no error occurred, routed
// through the emit queue so it fires after pending part/field/file events.
function maybeClose(self) {
if (self.flushing > 0 || self.error) return;
// go through the emit queue in case any field, file, or part events are
// waiting to be emitted
holdEmitQueue(self)(function() {
// nextTick because the user is listening to part 'end' events and we are
// using part 'end' events to decide when to emit 'close'. we add our 'end'
// handler before the user gets a chance to add theirs. So we make sure
// their 'end' event fires before we emit the 'close' event.
// this is covered by test/standalone/test-issue-36 process.nextTick(function() { self.emit('close'); }); }); } function cleanupOpenFiles(self) { self.openedFiles.forEach(function(internalFile) { // since fd slicer autoClose is true, destroying the only write stream // is guaranteed by the API to close the fd internalFile.ws.destroy(); fs.unlink(internalFile.publicFile.path, function(err) { if (err) self.handleError(err); }); }); self.openedFiles = []; } function holdEmitQueue(self) { var o = {cb: null}; self.emitQueue.push(o); return function(cb) { o.cb = cb; flushEmitQueue(self); }; } function flushEmitQueue(self) { while (self.emitQueue.length > 0 && self.emitQueue[0].cb) { self.emitQueue.shift().cb(); } } function handlePart(self, partStream) { beginFlush(self); var emitAndReleaseHold = holdEmitQueue(self); partStream.on('end', function() { endFlush(self); }); emitAndReleaseHold(function() { self.emit('part', partStream); }); } function handleFile(self, fileStream) { if (self.error) return; var publicFile = { fieldName: fileStream.name, originalFilename: fileStream.filename, path: uploadPath(self.uploadDir, fileStream.filename), headers: fileStream.headers, size: 0, }; var internalFile = { publicFile: publicFile, ws: null, }; beginFlush(self); // flush to write stream var emitAndReleaseHold = holdEmitQueue(self); fileStream.on('error', function(err) { self.handleError(err); }); fs.open(publicFile.path, 'w', function(err, fd) { if (err) return self.handleError(err); var fdSlicer = new FdSlicer(fd, {autoClose: true}); // end option here guarantees that no more than that amount will be written // or else an error will be emitted internalFile.ws = fdSlicer.createWriteStream({end: self.maxFilesSize - self.totalFileSize}); // if an error ocurred while we were waiting for fs.open we handle that // cleanup now self.openedFiles.push(internalFile); if (self.error) return cleanupOpenFiles(self); var prevByteCount = 0; internalFile.ws.on('error', function(err) { 
// (continuation of handleFile's fs.open callback, begun above)
self.handleError(err);
});
// keep publicFile.size and the global totalFileSize tally up to date as
// bytes land on disk
internalFile.ws.on('progress', function() {
publicFile.size = internalFile.ws.bytesWritten;
var delta = publicFile.size - prevByteCount;
self.totalFileSize += delta;
prevByteCount = publicFile.size;
});
fdSlicer.on('close', function() {
if (self.error) return;
// fd is closed: the file is fully flushed, emit it in queue order
emitAndReleaseHold(function() {
self.emit('file', fileStream.name, publicFile);
});
endFlush(self);
});
fileStream.pipe(internalFile.ws);
});
}

// Buffer a field part entirely into a string (bounded by maxFieldsSize)
// and emit a 'field' event when the part ends.
function handleField(self, fieldStream) {
var value = '';
var decoder = new StringDecoder(self.encoding);
beginFlush(self);
var emitAndReleaseHold = holdEmitQueue(self);
fieldStream.on('error', function(err) {
self.handleError(err);
});
fieldStream.on('readable', function() {
var buffer = fieldStream.read();
if (!buffer) return;
// enforce the cumulative cap across all fields of the form
self.totalFieldSize += buffer.length;
if (self.totalFieldSize > self.maxFieldsSize) {
self.handleError(new Error("maxFieldsSize " + self.maxFieldsSize + " exceeded"));
return;
}
value += decoder.write(buffer);
});
fieldStream.on('end', function() {
emitAndReleaseHold(function() {
self.emit('field', fieldStream.name, value);
});
endFlush(self);
});
}

// Reset all per-part parsing state (headers, name, filename, transfer
// encoding, destination stream, header decoders/accumulators).
function clearPartVars(self) {
self.partHeaders = {};
self.partName = null;
self.partFilename = null;
self.partTransferEncoding = 'binary';
self.destStream = null;
self.headerFieldDecoder = new StringDecoder(self.encoding);
self.headerField = "";
self.headerValueDecoder = new StringDecoder(self.encoding);
self.headerValue = "";
}

// Initialize parser state for the given boundary string: builds the
// "\r\n--boundary" needle, the lookbehind buffer, and the lookup table of
// boundary bytes used by the skip loop in write().
function setUpParser(self, boundary) {
self.boundary = new Buffer(boundary.length + 4);
self.boundary.write('\r\n--', 0, boundary.length + 4, 'ascii');
self.boundary.write(boundary, 4, boundary.length, 'ascii');
self.lookbehind = new Buffer(self.boundary.length + 8);
self.state = START;
self.boundaryChars = {};
for (var i = 0; i < self.boundary.length; i++) {
self.boundaryChars[self.boundary[i]] = true;
}
self.index = null;
self.partBoundaryFlag = false;
// balanced by the 'finish' handler below, so 'close' cannot fire before
// the request stream has fully ended
beginFlush(self);
self.on('finish', function() {
if (self.state !== END) {
// (continuation of setUpParser's 'finish' handler, begun above)
self.handleError(new Error('stream ended unexpectedly'));
}
endFlush(self);
});
}

// Build the destination path for an uploaded file: a random base name
// inside baseDir, preserving the (sanitized) original extension.
function uploadPath(baseDir, filename) {
var ext = path.extname(filename).replace(FILE_EXT_RE, '$1');
var name = randoString(18) + ext;
return path.join(baseDir, name);
}

// Random base64 string from `size` random bytes, with '/' and '+' mapped
// through b64Safe so the result is safe for use in file names.
function randoString(size) {
return rando(size).toString('base64').replace(/[\/\+]/g, function(x) {
return b64Safe[x];
});
}

// Random bytes; falls back to pseudoRandomBytes when randomBytes throws
// (e.g. insufficient entropy).
function rando(size) {
try {
return crypto.randomBytes(size);
} catch (err) {
return crypto.pseudoRandomBytes(size);
}
}

// Extract the original filename from a Content-Disposition header value.
// Supports both filename="..." and the RFC 5987 filename*=utf-8''... form;
// returns undefined when no filename parameter is present.
function parseFilename(headerValue) {
var m = headerValue.match(/\bfilename="(.*?)"($|; )/i);
if (!m) {
m = headerValue.match(/\bfilename\*=utf-8\'\'(.*?)($|; )/i);
if (m) {
m[1] = decodeURI(m[1]);
} else {
return;
}
}
// strip any Windows-style path prefix some browsers include
var filename = m[1].substr(m[1].lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
// NOTE(review): decodes bare 4-digit numeric runs ending in ';' as char
// codes -- presumably reversing "&#NNNN;" escaping, though the "&#" prefix
// is not matched here; confirm intent against browser behavior
filename = filename.replace(/([\d]{4});/g, function(m, code) {
return String.fromCharCode(code);
});
return filename;
}

// ASCII lowercase by setting bit 0x20 (only meaningful for letter bytes).
function lower(c) {
return c | 0x20;
}
node-multiparty-4.0.0/package.json 0000664 0000000 0000000 00000001522 12420125301 0017124 0 ustar 00root root 0000000 0000000 { "name": "multiparty", "version": "4.0.0", "description": "multipart/form-data parser which supports streaming", "repository": { "type": "git", "url": "git@github.com:andrewrk/node-multiparty.git" }, "keywords": [ "file", "upload", "formidable", "stream", "s3" ], "devDependencies": { "mkdirp": "~0.5.0", "pend": "~1.1.3", "rimraf": "~2.2.8", "superagent": "~0.20.0", "findit2": "~2.2.2" }, "scripts": { "test": "node test/test.js" }, "engines": { "node": ">=0.10.0" }, "license": "MIT", "dependencies": { "fd-slicer": "~0.3.2" }, "bugs": { "url": "https://github.com/andrewrk/node-multiparty/issues" }, "main": "index.js", "directories": { "example": "examples", "test": "test" }, "author": "Andrew Kelley