node-multiparty-2.2.0/.gitignore

/node_modules

node-multiparty-2.2.0/.jshintrc

{
  // Settings
  "passfail"      : false,  // Stop on first error.
  "maxerr"        : 100,    // Maximum errors before stopping.

  // Predefined globals that JSHint will ignore.
  "browser"       : false,  // Standard browser globals e.g. `window`, `document`.
  "node"          : true,
  "rhino"         : false,
  "couch"         : false,
  "wsh"           : false,  // Windows Scripting Host.
  "jquery"        : false,
  "prototypejs"   : false,
  "mootools"      : false,
  "dojo"          : false,
  "predef"        : [
    "describe",
    "it",
    "before",
    "after"
  ],

  // Development.
  "debug"         : true,   // Allow debugger statements e.g. browser breakpoints.
  "devel"         : true,   // Allow development statements e.g. `console.log();`.

  // EcmaScript 5.
  "es5"           : true,   // Allow EcmaScript 5 syntax.
  "strict"        : false,  // Require `use strict` pragma in every file.
  "globalstrict"  : true,   // Allow global "use strict" (also enables 'strict').

  // The Good Parts.
  "asi"           : true,   // Tolerate Automatic Semicolon Insertion (no semicolons).
  "laxbreak"      : false,  // Tolerate unsafe line breaks e.g. `return [\n] x` without semicolons.
  "laxcomma"      : true,
  "bitwise"       : false,  // Prohibit bitwise operators (&, |, ^, etc.).
  "boss"          : true,   // Tolerate assignments inside if, for & while. Usually conditions & loops are for comparison, not assignments.
  "curly"         : false,  // Require {} for every new block or scope.
  "eqeqeq"        : true,   // Require triple equals i.e. `===`.
  "eqnull"        : true,   // Tolerate use of `== null`.
  "evil"          : false,  // Tolerate use of `eval`.
  "expr"          : false,  // Tolerate `ExpressionStatement` as Programs.
  "forin"         : false,  // Prohibit `for in` loops without `hasOwnProperty`.
  "immed"         : true,   // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );`
  "latedef"       : false,  // Prohibit variable use before definition.
  "loopfunc"      : false,  // Allow functions to be defined within loops.
  "noarg"         : true,   // Prohibit use of `arguments.caller` and `arguments.callee`.
  "regexp"        : false,  // Prohibit `.` and `[^...]` in regular expressions.
  "regexdash"     : false,  // Tolerate unescaped last dash i.e. `[-...]`.
  "scripturl"     : false,  // Tolerate script-targeted URLs.
  "shadow"        : false,  // Allow re-defining variables later in code e.g. `var x=1; x=2;`.
  "supernew"      : false,  // Tolerate `new function () { ... };` and `new Object;`.
  "undef"         : true,   // Require all non-global variables be declared before they are used.

  // Personal styling preferences.
  "newcap"        : true,   // Require capitalization of all constructor functions e.g. `new F()`.
  "noempty"       : true,   // Prohibit use of empty blocks.
  "nonew"         : true,   // Prohibit use of constructors for side-effects.
  "nomen"         : false,  // Prohibit use of initial or trailing underbars in names.
  "onevar"        : false,  // Allow only one `var` statement per function.
  "plusplus"      : false,  // Prohibit use of `++` & `--`.
  "sub"           : false,  // Tolerate all forms of subscript notation besides dot notation e.g. `dict['key']` instead of `dict.key`.
  "trailing"      : true,   // Prohibit trailing whitespaces.
  "white"         : false   // Check against strict whitespace and indentation rules.
}

node-multiparty-2.2.0/.travis.yml

language: node_js
node_js:
  - "0.8"
  - "0.10"
before_script:
  - ulimit -n 500

node-multiparty-2.2.0/CHANGELOG.md

### 2.2.0

* additional callback API to support multiple files with same field name
* fix assertion crash when max field count is exceeded
* fix assertion crash when client aborts an invalid request
* (>=v0.10 only) unpipe the request when an error occurs to save resources.
* update readable-stream to ~1.1.9
* fix assertion crash when EMFILE occurs
* (no more assertions - only 'error' events)

### 2.1.9

* relax content-type detection regex. (thanks amitaibu)

### 2.1.8

* replace deprecated Buffer.write(). (thanks hueniverse)

### 2.1.7

* add repository field to package.json

### 2.1.6

* expose `hash` as an option to `Form`. (thanks wookiehangover)

### 2.1.5

* fix possible 'close' event before all temp files are done

### 2.1.4

* fix crash for invalid requests

### 2.1.3

* add `file.size`

### 2.1.2

* proper backpressure support
* update s3 example

### 2.1.1

* fix uploads larger than 2KB
* fix both s3 and upload example
* add part.byteCount and part.byteOffset

### 2.1.0 (recalled)

* Complete rewrite. See README for changes and new API.

### v1.0.13

* Only update hash if update method exists (Sven Lito)
* According to travis v0.10 needs to go quoted (Sven Lito)
* Bumping build node versions (Sven Lito)
* Additional fix for empty requests (Eugene Girshov)
* Change the default to 1000, to match the new Node behaviour. (OrangeDog)
* Add ability to control maxKeys in the querystring parser. (OrangeDog)
* Adjust test case to work with node 0.9.x (Eugene Girshov)
* Update package.json (Sven Lito)
* Path adjustment according to eb4468b (Markus Ast)

### v1.0.12

* Emit error on aborted connections (Eugene Girshov)
* Add support for empty requests (Eugene Girshov)
* Fix name/filename handling in Content-Disposition (jesperp)
* Tolerate malformed closing boundary in multipart (Eugene Girshov)
* Ignore preamble in multipart messages (Eugene Girshov)
* Add support for application/json (Mike Frey, Carlos Rodriguez)
* Add support for Base64 encoding (Elmer Bulthuis)
* Add File#toJSON (TJ Holowaychuk)
* Remove support for Node.js 0.4 & 0.6 (Andrew Kelley)
* Documentation improvements (Sven Lito, Andre Azevedo)
* Add support for application/octet-stream (Ion Lupascu, Chris Scribner)
* Use os.tmpDir() to get tmp directory (Andrew Kelley)
* Improve package.json (Andrew Kelley, Sven Lito)
* Fix benchmark script (Andrew Kelley)
* Fix scope issue in incoming_forms (Sven Lito)
* Fix file handle leak on error (OrangeDog)

### v1.0.11

* Calculate checksums for incoming files (sreuter)
* Add definition parameters to "IncomingForm" as an argument (Math-)

### v1.0.10

* Make parts to be proper Streams (Matt Robenolt)

### v1.0.9

* Emit progress when content length header parsed (Tim Koschützki)
* Fix Readme syntax due to GitHub changes (goob)
* Replace references to old 'sys' module in Readme with 'util' (Peter Sugihara)

### v1.0.8

* Strip potentially unsafe characters when using `keepExtensions: true`.
* Switch to utest / urun for testing
* Add travis build

### v1.0.7

* Remove file from package that was causing problems when installing on windows. (#102)
* Fix typos in Readme (Jason Davies).

### v1.0.6

* Do not default to the field name for file uploads where filename="".
### v1.0.5

* Support filename="" in multipart parts
* Explain unexpected end() errors in parser better

**Note:** Starting with this version, formidable emits 'file' events for empty
file input fields. Previously those were incorrectly emitted as regular file
input fields with value = "".

### v1.0.4

* Detect a good default tmp directory regardless of platform. (#88)

### v1.0.3

* Fix problems with utf8 characters (#84) / semicolons in filenames (#58)
* Small performance improvements
* New test suite and fixture system

### v1.0.2

* Exclude node\_modules folder from git
* Implement new `'aborted'` event
* Fix files in example folder to work with recent node versions
* Make gently a devDependency

[See Commits](https://github.com/felixge/node-formidable/compare/v1.0.1...v1.0.2)

### v1.0.1

* Fix package.json to refer to proper main directory. (#68, Dean Landolt)

[See Commits](https://github.com/felixge/node-formidable/compare/v1.0.0...v1.0.1)

### v1.0.0

* Add support for multipart boundaries that are quoted strings. (Jeff Craig)

This marks the beginning of development on version 2.0 which will include
several architectural improvements.

[See Commits](https://github.com/felixge/node-formidable/compare/v0.9.11...v1.0.0)

### v0.9.11

* Emit `'progress'` event when receiving data, regardless of parsing it. (Tim Koschützki)
* Use [W3C FileAPI Draft](http://dev.w3.org/2006/webapi/FileAPI/) properties for File class

**Important:** The old property names of the File class will be removed in a
future release.

[See Commits](https://github.com/felixge/node-formidable/compare/v0.9.10...v0.9.11)

### Older releases

These releases were done before starting to maintain the above Changelog:

* [v0.9.10](https://github.com/felixge/node-formidable/compare/v0.9.9...v0.9.10)
* [v0.9.9](https://github.com/felixge/node-formidable/compare/v0.9.8...v0.9.9)
* [v0.9.8](https://github.com/felixge/node-formidable/compare/v0.9.7...v0.9.8)
* [v0.9.7](https://github.com/felixge/node-formidable/compare/v0.9.6...v0.9.7)
* [v0.9.6](https://github.com/felixge/node-formidable/compare/v0.9.5...v0.9.6)
* [v0.9.5](https://github.com/felixge/node-formidable/compare/v0.9.4...v0.9.5)
* [v0.9.4](https://github.com/felixge/node-formidable/compare/v0.9.3...v0.9.4)
* [v0.9.3](https://github.com/felixge/node-formidable/compare/v0.9.2...v0.9.3)
* [v0.9.2](https://github.com/felixge/node-formidable/compare/v0.9.1...v0.9.2)
* [v0.9.1](https://github.com/felixge/node-formidable/compare/v0.9.0...v0.9.1)
* [v0.9.0](https://github.com/felixge/node-formidable/compare/v0.8.0...v0.9.0)
* [v0.1.0](https://github.com/felixge/node-formidable/commits/v0.1.0)

node-multiparty-2.2.0/LICENSE

Copyright (C) 2011-2013 Felix Geisendörfer, Andrew Kelley

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without
restriction, including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

node-multiparty-2.2.0/README.md

[Build Status](https://travis-ci.org/superjoe30/node-multiparty)

# multiparty

Parse HTTP requests with content-type `multipart/form-data`, also known as file uploads.

See also [busboy](https://github.com/mscdex/busboy) - a
[faster](https://github.com/mscdex/dicer/wiki/Benchmarks) alternative which may be
worth looking into.

### Why the fork?

* This module uses the Node.js v0.10 streams properly, *even in Node.js v0.8*
* It will not create a temp file for you unless you want it to.
* Counts bytes and does math to help you figure out the `Content-Length` of each part.
* You can easily stream uploads to s3 with [knox](https://github.com/LearnBoost/knox), for [example](examples/s3.js).
* Fewer bugs. This code is simpler, has all deprecated functionality removed, has cleaner tests, and does not try to do anything beyond multipart stream parsing.

## Installation

```
npm install multiparty
```

## Usage

* See [examples](examples).

Parse an incoming `multipart/form-data` request.

```js
var multiparty = require('multiparty')
  , http = require('http')
  , util = require('util')

http.createServer(function(req, res) {
  if (req.url === '/upload' && req.method === 'POST') {
    // parse a file upload
    var form = new multiparty.Form();

    form.parse(req, function(err, fields, files) {
      res.writeHead(200, {'content-type': 'text/plain'});
      res.write('received upload:\n\n');
      res.end(util.inspect({fields: fields, files: files}));
    });

    return;
  }

  // show a file upload form
  res.writeHead(200, {'content-type': 'text/html'});
  res.end(
    '<form action="/upload" enctype="multipart/form-data" method="post">'+
    '<input type="text" name="title"><br>'+
    '<input type="file" name="upload" multiple="multiple"><br>'+
    '<input type="submit" value="Upload">'+
    '</form>'
  );
}).listen(8080);
```

## API

### multiparty.Form

```js
var form = new multiparty.Form(options)
```

Creates a new form. Options:

* `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`.
* `maxFieldsSize` - Limits the amount of memory a field (not a file) can allocate in bytes. If this value is exceeded, an `error` event is emitted. The default size is 2MB.
* `maxFields` - Limits the number of fields that will be parsed before emitting an `error` event. A file counts as a field in this case. Defaults to 1000.
* `autoFields` - Enables `field` events. This is automatically set to `true` if you add a `field` listener.
* `autoFiles` - Enables `file` events. This is automatically set to `true` if you add a `file` listener.
* `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for placing file uploads in. You can move them later using `fs.rename()`. Defaults to `os.tmpDir()`.
* `hash` - Only relevant when `autoFiles` is `true`. If you want checksums calculated for incoming files, set this to either `sha1` or `md5`. Defaults to off.

#### form.parse(request, [cb])

Parses an incoming node.js `request` containing form data. If `cb` is provided, `autoFields` and `autoFiles` are set to `true` and all fields and files are collected and passed to the callback:

```js
form.parse(req, function(err, fieldsObject, filesObject, fieldsList, filesList) {
  // ...
});
```

It is often convenient to access a field or file by name. In this situation, use `fieldsObject` or `filesObject`. However sometimes, as in the case of an `<input type="file" multiple="multiple">`, the multipart stream will contain multiple files of the same input name, and you are interested in all of them. In this case, use `filesList`.

Another example is when you do not care what the field name of a file is; you are merely interested in a single upload. In this case, set `maxFields` to 1 (assuming no other fields are expected besides the file) and use `filesList[0]`.
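As a minimal sketch of the multiple-files case (the `photos` field name, route, and port below are illustrative assumptions, not part of the API):

```js
var multiparty = require('multiparty');
var http = require('http');

http.createServer(function(req, res) {
  // a client posts e.g. <input type="file" name="photos" multiple> to this server
  var form = new multiparty.Form();
  form.parse(req, function(err, fieldsObject, filesObject, fieldsList, filesList) {
    if (err) {
      res.writeHead(400, {'content-type': 'text/plain'});
      res.end('invalid request: ' + err.message);
      return;
    }
    // filesList contains every upload, even when several share one field name
    var lines = filesList.map(function(file) {
      return file.originalFilename + ' (' + file.size + ' bytes) saved to ' + file.path;
    });
    res.writeHead(200, {'content-type': 'text/plain'});
    res.end(lines.join('\n'));
  });
}).listen(8080);
```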
#### form.bytesReceived

The number of bytes received for this form so far.

#### form.bytesExpected

The expected number of bytes in this form.

### Events

#### 'error' (err)

You definitely want to handle this event. If you do not, your server *will* crash when users submit bogus multipart requests!

#### 'part' (part)

Emitted when a part is encountered in the request. `part` is a `ReadableStream`. It also has the following properties:

* `headers` - the headers for this part. For example, you may be interested in `content-type`.
* `name` - the field name for this part
* `filename` - only if the part is an incoming file
* `byteOffset` - the byte offset of this part in the request body
* `byteCount` - assuming that this is the last part in the request, this is the size of this part in bytes. You could use this, for example, to set the `Content-Length` header if uploading to S3. If the part had a `Content-Length` header then that value is used here instead.

#### 'aborted'

Emitted when the request is aborted. This event will be followed shortly by an `error` event. In practice you do not need to handle this event.

#### 'progress' (bytesReceived, bytesExpected)

#### 'close'

Emitted after all parts have been parsed and emitted. Not emitted if an `error` event is emitted. This is typically when you would send your response.

#### 'file' (name, file)

**By default multiparty will not touch your hard drive.** But if you add this listener, multiparty automatically sets `form.autoFiles` to `true` and will stream uploads to disk for you.

* `name` - the field name for this file
* `file` - an object with these properties:
  - `fieldName` - same as `name` - the field name for this file
  - `originalFilename` - the filename that the user reports for the file
  - `path` - the absolute path of the uploaded file on disk
  - `headers` - the HTTP headers that were sent along with this file
  - `size` - size of the file in bytes

If you set the `form.hash` option, then `file` will also contain a `hash` property which is the checksum of the file.
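A rough sketch of using this event to move uploads out of the temp directory (the `/var/uploads` destination and the `sha1` hash choice are assumptions for illustration; note that `fs.rename` only works within a single filesystem):

```js
var fs = require('fs');
var path = require('path');
var multiparty = require('multiparty');

var form = new multiparty.Form({hash: 'sha1'});

form.on('file', function(name, file) {
  // move the temp file somewhere permanent; the destination directory is made up
  var dest = path.join('/var/uploads', path.basename(file.path));
  fs.rename(file.path, dest, function(err) {
    if (err) return console.error('could not move upload:', err);
    console.log('saved %s as %s (sha1 %s)', file.originalFilename, dest, file.hash);
  });
});
form.on('error', function(err) {
  console.error('form error:', err.message);
});
// ... later, inside an http request handler: form.parse(req)
```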
#### 'field' (name, value)

* `name` - field name
* `value` - string field value

node-multiparty-2.2.0/examples/azureblobstorage.js

var http = require('http')
  , util = require('util')
  , multiparty = require('../')
  , azure = require('azure')
  , PORT = process.env.PORT || 27372

var server = http.createServer(function(req, res) {
  if (req.url === '/') {
    res.writeHead(200, {'content-type': 'text/html'});
    res.end(
      '<form action="/upload" enctype="multipart/form-data" method="post">'+
      '<input type="file" name="file"><br>'+
      '<input type="submit" value="Upload">'+
      '</form>'
    );
  } else if (req.url === '/upload') {
    var blobService = azure.createBlobService();
    var form = new multiparty.Form();
    form.on('part', function(part) {
      if (!part.filename) return;

      var size = part.byteCount;
      var name = part.filename;
      var container = 'blobContainerName';

      blobService.createBlockBlobFromStream(container, name, part, size, function(error) {
        if (error) {
          // error handling
        }
      });
    });
    form.parse(req);
    res.end('File uploaded successfully');
  }
});
server.listen(PORT, function() {
  console.info('listening on http://0.0.0.0:'+PORT+'/');
});

node-multiparty-2.2.0/examples/s3.js

var http = require('http')
  , util = require('util')
  , multiparty = require('../')
  , knox = require('knox')
  , Batch = require('batch')
  , PORT = process.env.PORT || 27372

var s3Client = knox.createClient({
  secure: false,
  key: process.env.S3_KEY,
  secret: process.env.S3_SECRET,
  bucket: process.env.S3_BUCKET,
});

var server = http.createServer(function(req, res) {
  if (req.url === '/') {
    res.writeHead(200, {'content-type': 'text/html'});
    res.end(
      '<form action="/upload" enctype="multipart/form-data" method="post">'+
      '<input type="text" name="path"><br>'+
      '<input type="file" name="upload"><br>'+
      '<input type="submit" value="Upload">'+
      '</form>'
    );
  } else if (req.url === '/upload') {
    var headers = {
      'x-amz-acl': 'public-read',
    };
    var form = new multiparty.Form();
    var batch = new Batch();
    batch.push(function(cb) {
      form.on('field', function(name, value) {
        if (name === 'path') {
          var destPath = value;
          if (destPath[0] !== '/') destPath = '/' + destPath;
          cb(null, destPath);
        }
      });
    });
    batch.push(function(cb) {
      form.on('part', function(part) {
        if (!part.filename) return;
        cb(null, part);
      });
    });
    batch.end(function(err, results) {
      if (err) throw err;
      form.removeListener('close', onEnd);
      var destPath = results[0]
        , part = results[1];

      headers['Content-Length'] = part.byteCount;
      s3Client.putStream(part, destPath, headers, function(err, s3Response) {
        if (err) throw err;
        res.statusCode = s3Response.statusCode;
        s3Response.pipe(res);
        console.log("https://s3.amazonaws.com/" + process.env.S3_BUCKET + destPath);
      });
    });
    form.on('close', onEnd);
    form.parse(req);
  } else {
    res.writeHead(404, {'content-type': 'text/plain'});
    res.end('404');
  }

  function onEnd() {
    throw new Error("no uploaded file");
  }
});
server.listen(PORT, function() {
  console.info('listening on http://0.0.0.0:'+PORT+'/');
});

node-multiparty-2.2.0/examples/upload.js

var http = require('http')
  , util = require('util')
  , multiparty = require('../')
  , PORT = process.env.PORT || 27372

var server = http.createServer(function(req, res) {
  if (req.url === '/') {
    res.writeHead(200, {'content-type': 'text/html'});
    res.end(
      '<form action="/upload" enctype="multipart/form-data" method="post">'+
      '<input type="text" name="title"><br>'+
      '<input type="file" name="upload"><br>'+
      '<input type="submit" value="Upload">'+
      '</form>'
    );
  } else if (req.url === '/upload') {
    var form = new multiparty.Form();
    form.parse(req, function(err, fields, files) {
      if (err) {
        res.writeHead(400, {'content-type': 'text/plain'});
        res.end("invalid request: " + err.message);
        return;
      }
      res.writeHead(200, {'content-type': 'text/plain'});
      res.write('received fields:\n\n '+util.inspect(fields));
      res.write('\n\n');
      res.end('received files:\n\n '+util.inspect(files));
    });
  } else {
    res.writeHead(404, {'content-type': 'text/plain'});
    res.end('404');
  }
});
server.listen(PORT, function() {
  console.info('listening on http://0.0.0.0:'+PORT+'/');
});

node-multiparty-2.2.0/index.js

exports.Form = Form;

var stream = require('readable-stream')
  , util = require('util')
  , fs = require('fs')
  , crypto = require('crypto')
  , path = require('path')
  , os = require('os')
  , StringDecoder = require('string_decoder').StringDecoder
  , StreamCounter = require('stream-counter')

var START = 0
  , START_BOUNDARY = 1
  , HEADER_FIELD_START = 2
  , HEADER_FIELD = 3
  , HEADER_VALUE_START = 4
  , HEADER_VALUE = 5
  , HEADER_VALUE_ALMOST_DONE = 6
  , HEADERS_ALMOST_DONE = 7
  , PART_DATA_START = 8
  , PART_DATA = 9
  , PART_END = 10
  , END = 11

  , LF = 10
  , CR = 13
  , SPACE = 32
  , HYPHEN = 45
  , COLON = 58
  , A = 97
  , Z = 122

var CONTENT_TYPE_RE = /^multipart\/(form-data|related);\s*boundary=(?:"([^"]+)"|([^;]+))$/i;
var FILE_EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16}).*/;
var LAST_BOUNDARY_SUFFIX_LEN = 4; // --\r\n

util.inherits(Form, stream.Writable);
function Form(options) {
  var self = this;
  stream.Writable.call(self);

  options = options || {};

  self.error = null;
  self.finished = false;

  self.autoFields = !!options.autoFields;
  self.autoFiles = !!options.autoFiles;

  self.maxFields = options.maxFields || 1000;
  self.maxFieldsSize = options.maxFieldsSize || 2 * 1024 * 1024;
  self.uploadDir = options.uploadDir || os.tmpDir();
  self.encoding = options.encoding || 'utf8';
  self.hash = options.hash || false;

  self.bytesReceived = 0;
  self.bytesExpected = null;

  self.openedFiles = [];
  self.totalFieldSize = 0;
  self.totalFieldCount = 0;
  self.flushing = 0;

  self.backpressure = false;
  self.writeCbs = [];

  if (options.boundary) setUpParser(self, options.boundary);

  self.on('newListener', function(eventName) {
    if (eventName === 'file') {
      self.autoFiles = true;
    } else if (eventName === 'field') {
      self.autoFields = true;
    }
  });
}
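// parse() wires an incoming request into this writable stream: it validates
// the content-type header, extracts the multipart boundary, and pipes the
// request into the parser. When a callback is supplied, autoFields/autoFiles
// are enabled and all fields/files are buffered and handed to the callback on
// 'close' (or on the first 'error').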
Form.prototype.parse = function(req, cb) {
  var self = this;

  // if the user supplies a callback, this implies autoFields and autoFiles
  if (cb) {
    self.autoFields = true;
    self.autoFiles = true;
  }

  self.handleError = handleError;
  self.bytesExpected = getBytesExpected(req.headers);

  req.on('error', handleError);
  req.on('aborted', onReqAborted);

  var contentType = req.headers['content-type'];
  if (!contentType) {
    handleError(new Error('missing content-type header'));
    return;
  }

  var m = contentType.match(CONTENT_TYPE_RE);
  if (!m) {
    handleError(new Error('unrecognized content-type: ' + contentType));
    return;
  }
  var boundary = m[2] || m[3];
  setUpParser(self, boundary);
  req.pipe(self);

  if (cb) {
    var fieldsTable = {};
    var filesTable = {};
    var fieldsList = [];
    var filesList = [];
    self.on('error', function(err) {
      cb(err);
    });
    self.on('field', function(name, value) {
      fieldsTable[name] = value;
      fieldsList.push({name: name, value: value});
    });
    self.on('file', function(name, file) {
      filesTable[name] = file;
      filesList.push(file);
    });
    self.on('close', function() {
      cb(null, fieldsTable, filesTable, fieldsList, filesList);
    });
  }

  function onReqAborted() {
    self.emit('aborted');
    handleError(new Error("Request aborted"));
  }

  function handleError(err) {
    var first = !self.error;
    if (first) {
      self.error = err;
      req.removeListener('aborted', onReqAborted);

      // welp. 0.8 doesn't support unpipe, too bad so sad.
      // let's drop support for 0.8 soon.
      if (req.unpipe) {
        req.unpipe(self);
      }
    }

    self.openedFiles.forEach(function(file) {
      file.ws.destroy();
      fs.unlink(file.path, function(err) {
        // this is already an error condition, ignore 2nd error
      });
    });
    self.openedFiles = [];

    if (first) {
      self.emit('error', err);
    }
  }
};
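// _write() implements the incremental multipart parser as a byte-by-byte
// state machine (START_BOUNDARY -> headers -> PART_DATA -> END). Marks such as
// headerFieldMark/headerValueMark/partDataMark remember where the current
// token started within this chunk so it can be sliced out without copying;
// any token still open at the end of the chunk is flushed and resumed on the
// next _write() call.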
Form.prototype._write = function(buffer, encoding, cb) {
  var self = this
    , i = 0
    , len = buffer.length
    , prevIndex = self.index
    , index = self.index
    , state = self.state
    , lookbehind = self.lookbehind
    , boundary = self.boundary
    , boundaryChars = self.boundaryChars
    , boundaryLength = self.boundary.length
    , boundaryEnd = boundaryLength - 1
    , bufferLength = buffer.length
    , c
    , cl

  for (i = 0; i < len; i++) {
    c = buffer[i];
    switch (state) {
      case START:
        index = 0;
        state = START_BOUNDARY;
        /* falls through */
      case START_BOUNDARY:
        if (index === boundaryLength - 2) {
          if (c !== CR) return self.handleError(new Error("Expected CR Received " + c));
          index++;
          break;
        } else if (index === boundaryLength - 1) {
          if (c !== LF) return self.handleError(new Error("Expected LF Received " + c));
          index = 0;
          self.onParsePartBegin();
          state = HEADER_FIELD_START;
          break;
        }

        if (c !== boundary[index+2]) index = -2;
        if (c === boundary[index+2]) index++;
        break;
      case HEADER_FIELD_START:
        state = HEADER_FIELD;
        self.headerFieldMark = i;
        index = 0;
        /* falls through */
      case HEADER_FIELD:
        if (c === CR) {
          self.headerFieldMark = null;
          state = HEADERS_ALMOST_DONE;
          break;
        }

        index++;
        if (c === HYPHEN) break;

        if (c === COLON) {
          if (index === 1) {
            // empty header field
            self.handleError(new Error("Empty header field"));
            return;
          }
          self.onParseHeaderField(buffer.slice(self.headerFieldMark, i));
          self.headerFieldMark = null;
          state = HEADER_VALUE_START;
          break;
        }

        cl = lower(c);
        if (cl < A || cl > Z) {
          self.handleError(new Error("Expected alphabetic character, received " + c));
          return;
        }
        break;
      case HEADER_VALUE_START:
        if (c === SPACE) break;

        self.headerValueMark = i;
        state = HEADER_VALUE;
        /* falls through */
      case HEADER_VALUE:
        if (c === CR) {
          self.onParseHeaderValue(buffer.slice(self.headerValueMark, i));
          self.headerValueMark = null;
          self.onParseHeaderEnd();
          state = HEADER_VALUE_ALMOST_DONE;
        }
        break;
      case HEADER_VALUE_ALMOST_DONE:
        if (c !== LF) return self.handleError(new Error("Expected LF Received " + c));
        state = HEADER_FIELD_START;
        break;
      case HEADERS_ALMOST_DONE:
        if (c !== LF) return self.handleError(new Error("Expected LF Received " + c));
        var err = self.onParseHeadersEnd(i + 1);
        if (err) return self.handleError(err);
        state = PART_DATA_START;
        break;
      case PART_DATA_START:
        state = PART_DATA;
        self.partDataMark = i;
        /* falls through */
      case PART_DATA:
        prevIndex = index;

        if (index === 0) {
          // boyer-moore derived algorithm to safely skip non-boundary data
          i += boundaryEnd;
          while (i < bufferLength && !(buffer[i] in boundaryChars)) {
            i += boundaryLength;
          }
          i -= boundaryEnd;
          c = buffer[i];
        }

        if (index < boundaryLength) {
          if (boundary[index] === c) {
            if (index === 0) {
              self.onParsePartData(buffer.slice(self.partDataMark, i));
              self.partDataMark = null;
            }
            index++;
          } else {
            index = 0;
          }
        } else if (index === boundaryLength) {
          index++;
          if (c === CR) {
            // CR = part boundary
            self.partBoundaryFlag = true;
          } else if (c === HYPHEN) {
            // HYPHEN = end boundary
            self.lastBoundaryFlag = true;
          } else {
            index = 0;
          }
        } else if (index - 1 === boundaryLength) {
          if (self.partBoundaryFlag) {
            index = 0;
            if (c === LF) {
              self.partBoundaryFlag = false;
              self.onParsePartEnd();
              self.onParsePartBegin();
              state = HEADER_FIELD_START;
              break;
            }
          } else if (self.lastBoundaryFlag) {
            if (c === HYPHEN) {
              self.onParsePartEnd();
              self.end();
              state = END;
            } else {
              index = 0;
            }
          } else {
            index = 0;
          }
        }

        if (index > 0) {
          // when matching a possible boundary, keep a lookbehind reference
          // in case it turns out to be a false lead
          lookbehind[index-1] = c;
        } else if (prevIndex > 0) {
          // if our boundary turned out to be rubbish, the captured lookbehind
          // belongs to partData
          self.onParsePartData(lookbehind.slice(0, prevIndex));
          prevIndex = 0;
          self.partDataMark = i;

          // reconsider the current character even though it interrupted the
          // sequence; it could be the beginning of a new sequence
          i--;
        }

        break;
      case END:
        break;
      default:
        self.handleError(new Error("Parser has invalid state."));
        return;
    }
  }

  if (self.headerFieldMark != null) {
    self.onParseHeaderField(buffer.slice(self.headerFieldMark));
    self.headerFieldMark = 0;
  }
  if (self.headerValueMark != null) {
    self.onParseHeaderValue(buffer.slice(self.headerValueMark));
    self.headerValueMark = 0;
  }
  if (self.partDataMark != null) {
    self.onParsePartData(buffer.slice(self.partDataMark));
    self.partDataMark = 0;
  }

  self.index = index;
  self.state = state;

  self.bytesReceived += buffer.length;
  self.emit('progress', self.bytesReceived, self.bytesExpected);

  if (self.backpressure) {
    self.writeCbs.push(cb);
  } else {
    cb();
  }
};
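// The onParse* methods below are the parser's event hooks. Header bytes are
// accumulated through StringDecoders so multi-byte characters split across
// chunks survive; onParseHeadersEnd() turns the collected headers into a
// PassThrough stream for the part and emits the 'part' event.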
Form.prototype.onParsePartBegin = function() {
  clearPartVars(this);
}

Form.prototype.onParseHeaderField = function(b) {
  this.headerField += this.headerFieldDecoder.write(b);
}

Form.prototype.onParseHeaderValue = function(b) {
  this.headerValue += this.headerValueDecoder.write(b);
}

Form.prototype.onParseHeaderEnd = function() {
  this.headerField = this.headerField.toLowerCase();
  this.partHeaders[this.headerField] = this.headerValue;

  var m;
  if (this.headerField === 'content-disposition') {
    if (m = this.headerValue.match(/\bname="([^"]+)"/i)) {
      this.partName = m[1];
    }
    this.partFilename = parseFilename(this.headerValue);
  } else if (this.headerField === 'content-transfer-encoding') {
    this.partTransferEncoding = this.headerValue.toLowerCase();
  }

  this.headerFieldDecoder = new StringDecoder(this.encoding);
  this.headerField = '';
  this.headerValueDecoder = new StringDecoder(this.encoding);
  this.headerValue = '';
}

Form.prototype.onParsePartData = function(b) {
  if (this.partTransferEncoding === 'base64') {
    this.backpressure = !this.destStream.write(b.toString('ascii'), 'base64');
  } else {
    this.backpressure = !this.destStream.write(b);
  }
}

Form.prototype.onParsePartEnd = function() {
  if (this.destStream) {
    flushWriteCbs(this);
    var s = this.destStream;
    process.nextTick(function() {
      s.end();
    });
  }
  clearPartVars(this);
}

Form.prototype.onParseHeadersEnd = function(offset) {
  var self = this;
  switch (self.partTransferEncoding) {
    case 'binary':
    case '7bit':
    case '8bit':
      self.partTransferEncoding = 'binary';
      break;
    case 'base64':
      break;
    default:
      return new Error("unknown transfer-encoding: " + self.partTransferEncoding);
  }

  self.totalFieldCount += 1;
  if (self.totalFieldCount >= self.maxFields) {
    return new Error("maxFields " + self.maxFields + " exceeded.");
  }

  self.destStream = new stream.PassThrough();
  self.destStream.on('drain', function() {
    flushWriteCbs(self);
  });
  self.destStream.headers = self.partHeaders;
  self.destStream.name = self.partName;
  self.destStream.filename = self.partFilename;
  self.destStream.byteOffset = self.bytesReceived + offset;
  var partContentLength = self.destStream.headers['content-length'];
  self.destStream.byteCount = partContentLength ?
    parseInt(partContentLength, 10) :
    (self.bytesExpected - self.destStream.byteOffset -
      self.boundary.length - LAST_BOUNDARY_SUFFIX_LEN);

  self.emit('part', self.destStream);
  if (self.destStream.filename == null && self.autoFields) {
    handleField(self, self.destStream);
  } else if (self.destStream.filename != null && self.autoFiles) {
    handleFile(self, self.destStream);
  }
}

function flushWriteCbs(self) {
  self.writeCbs.forEach(function(cb) {
    process.nextTick(cb);
  });
  self.writeCbs = [];
  self.backpressure = false;
}

function getBytesExpected(headers) {
  var contentLength = headers['content-length'];
  if (contentLength) {
    return parseInt(contentLength, 10);
  } else if (headers['transfer-encoding'] == null) {
    return 0;
  } else {
    return null;
  }
}

function beginFlush(self) {
  self.flushing += 1;
}

function endFlush(self) {
  self.flushing -= 1;
  maybeClose(self);
}

function maybeClose(self) {
  if (!self.flushing && self.finished && !self.error) {
    self.emit('close');
  }
}
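// beginFlush()/endFlush() above count outstanding work so that 'close' is only
// emitted once every autoFiles temp file and autoFields field has been fully
// written out. handleFile streams a part into a uniquely named file in
// uploadDir, optionally hashing it as it goes.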
function handleFile(self, fileStream) {
  beginFlush(self);
  var file = {
    fieldName: fileStream.name,
    originalFilename: fileStream.filename,
    path: uploadPath(self.uploadDir, fileStream.filename),
    headers: fileStream.headers,
  };
  file.ws = fs.createWriteStream(file.path);
  self.openedFiles.push(file);
  fileStream.pipe(file.ws);
  var counter = new StreamCounter();
  fileStream.pipe(counter);
  var hashWorkaroundStream
    , hash = null;
  if (self.hash) {
    // workaround stream because https://github.com/joyent/node/issues/5216
    hashWorkaroundStream = stream.Writable();
    hash = crypto.createHash(self.hash);
    hashWorkaroundStream._write = function(buffer, encoding, callback) {
      hash.update(buffer);
      callback();
    };
    fileStream.pipe(hashWorkaroundStream);
  }
  file.ws.on('error', function(err) {
    if (!self.error) self.handleError(err);
  });
  file.ws.on('close', function() {
    if (hash) file.hash = hash.digest('hex');
    file.size = counter.bytes;
    self.emit('file', fileStream.name, file);
    endFlush(self);
  });
}

function handleField(self, fieldStream) {
  var value = '';
  var decoder = new StringDecoder(self.encoding);

  beginFlush(self);
  fieldStream.on('readable', function() {
    var buffer = fieldStream.read();
    if (!buffer) return;

    self.totalFieldSize += buffer.length;
    if (self.totalFieldSize > self.maxFieldsSize) {
      self.handleError(new Error("maxFieldsSize " + self.maxFieldsSize + " exceeded"));
      return;
    }
    value += decoder.write(buffer);
  });

  fieldStream.on('end', function() {
    self.emit('field', fieldStream.name, value);
    endFlush(self);
  });
}

function clearPartVars(self) {
  self.partHeaders = {};
  self.partName = null;
  self.partFilename = null;
  self.partTransferEncoding = 'binary';
  self.destStream = null;

  self.headerFieldDecoder = new StringDecoder(self.encoding);
  self.headerField = "";
  self.headerValueDecoder = new StringDecoder(self.encoding);
  self.headerValue = "";
}

function setUpParser(self, boundary) {
  self.boundary = new Buffer(boundary.length + 4);
  self.boundary.write('\r\n--', 0, boundary.length + 4, 'ascii');
  self.boundary.write(boundary, 4, boundary.length, 'ascii');
  self.lookbehind = new Buffer(self.boundary.length + 8);
  self.state = START;
  self.boundaryChars = {};
  for (var i = 0; i < self.boundary.length; i++) {
    self.boundaryChars[self.boundary[i]] = true;
  }

  self.index = null;
  self.partBoundaryFlag = false;
  self.lastBoundaryFlag = false;

  self.on('finish', function() {
    if ((self.state === HEADER_FIELD_START && self.index === 0) ||
        (self.state === PART_DATA && self.index === self.boundary.length)) {
      self.onParsePartEnd();
    } else if (self.state !== END) {
      self.handleError(new Error('stream ended unexpectedly'));
    }
    self.finished = true;
    maybeClose(self);
  });
}

function uploadPath(baseDir, filename) {
  var ext = path.extname(filename).replace(FILE_EXT_RE, '$1');
  var name = process.pid + '-' +
    (Math.random() * 0x100000000 + 1).toString(36) + ext;
  return path.join(baseDir, name);
}

function parseFilename(headerValue) {
  var m = headerValue.match(/\bfilename="(.*?)"($|; )/i);
  if (!m) return;

  var filename = m[1].substr(m[1].lastIndexOf('\\') + 1);
  filename = filename.replace(/%22/g, '"');
  filename = filename.replace(/&#([\d]{4});/g, function(m, code) {
    return String.fromCharCode(code);
  });
  return filename;
}

function lower(c) {
  return c | 0x20;
}

node-multiparty-2.2.0/package.json

{
  "name": "multiparty",
  "version": "2.2.0",
  "description": "multipart/form-data parser which supports streaming",
  "repository": {
    "type": "git",
    "url": "git@github.com:superjoe30/node-multiparty.git"
  },
  "keywords": [
    "file",
    "upload",
    "formidable",
    "stream",
    "s3"
  ],
  "devDependencies": {
    "findit": "0.1.1",
    "hashish": "0.0.4",
    "mocha": "~1.8.2",
    "request": "~2.16.6",
    "mkdirp": "~0.3.5",
    "superagent": "~0.14.1"
  },
  "scripts": {
    "test": "ulimit -n 500 && mocha --timeout 4000 --reporter spec --recursive test/test.js"
  },
  "engines": {
    "node": ">=0.8.0"
  },
  "license": "MIT",
  "dependencies": {
    "readable-stream": "~1.1.9",
    "stream-counter": "~0.2.0"
  }
}

node-multiparty-2.2.0/test/bench-multipart-parser.js

var assert = require('assert')
  , Form = require('../').Form
  , boundary = '-----------------------------168072824752491622650073'
  , mb = 100
  , buffer = createMultipartBuffer(boundary, mb * 1024 * 1024)

var callbacks = {
  partBegin: -1,
  partEnd: -1,
  headerField: -1,
  headerValue: -1,
  partData: -1,
  end: -1,
};

var form = new Form({ boundary: boundary });

hijack('onParseHeaderField', function() {
  callbacks.headerField++;
});
hijack('onParseHeaderValue', function() {
  callbacks.headerValue++;
});
hijack('onParsePartBegin', function() {
  callbacks.partBegin++;
});
hijack('onParsePartData', function() {
  callbacks.partData++;
});
hijack('onParsePartEnd', function() {
  callbacks.partEnd++;
});

form.on('finish', function() {
  callbacks.end++;
});

var start = new Date();
form.write(buffer, function(err) {
  var duration = new Date() - start;
  assert.ifError(err);
  var mbPerSec = (mb / (duration / 1000)).toFixed(2);
  console.log(mbPerSec+' mb/sec');
});

process.on('exit', function() {
  for (var k in callbacks) {
    assert.equal(0, callbacks[k], k+' count off by '+callbacks[k]);
  }
});

function createMultipartBuffer(boundary, size) {
  var head = '--'+boundary+'\r\n'
      + 'content-disposition: form-data; name="field1"\r\n'
      + '\r\n'
    , tail = '\r\n--'+boundary+'--\r\n'
    , buffer = new Buffer(size);
  buffer.write(head, 'ascii', 0);
  buffer.write(tail, 'ascii', buffer.length - tail.length);
  return buffer;
}

function hijack(name, fn) {
  var oldFn = form[name];
  form[name] = function() {
    fn();
    return oldFn.apply(this, arguments);
  };
}

node-multiparty-2.2.0/test/fixture/file/beta-sticker-1.png

[binary PNG fixture; image data omitted]