pax_global_header 0000666 0000000 0000000 00000000064 13707700713 0014517 g ustar 00root root 0000000 0000000 52 comment=684b6ea2327f0446bc08ce62a06aab31e589923d multiparty-4.2.2/ 0000775 0000000 0000000 00000000000 13707700713 0013736 5 ustar 00root root 0000000 0000000 multiparty-4.2.2/.editorconfig 0000664 0000000 0000000 00000000263 13707700713 0016414 0 ustar 00root root 0000000 0000000 # http://editorconfig.org root = true [*] charset = utf-8 insert_final_newline = true trim_trailing_whitespace = true [{*.js,*.json,*.yml}] indent_size = 2 indent_style = space multiparty-4.2.2/.eslintignore 0000664 0000000 0000000 00000000026 13707700713 0016437 0 ustar 00root root 0000000 0000000 coverage node_modules multiparty-4.2.2/.eslintrc.yml 0000664 0000000 0000000 00000001210 13707700713 0016354 0 ustar 00root root 0000000 0000000 root: true rules: brace-style: - error - 1tbs - allowSingleLine: true comma-dangle: error comma-style: - error - last eol-last: error indent: - error - 2 - SwitchCase: 1 no-multi-spaces: error no-param-reassign: error no-trailing-spaces: error no-unused-vars: - error - vars: all args: none ignoreRestSiblings: true no-useless-escape: error object-curly-spacing: - error - always padded-blocks: - error - blocks: never switches: never classes: never quotes: - error - single - avoidEscape: true allowTemplateLiterals: false multiparty-4.2.2/.gitignore 0000664 0000000 0000000 00000000105 13707700713 0015722 0 ustar 00root root 0000000 0000000 .nyc_output/ node_modules/ coverage/ npm-debug.log package-lock.json multiparty-4.2.2/.travis.yml 0000664 0000000 0000000 00000004630 13707700713 0016052 0 ustar 00root root 0000000 0000000 language: node_js node_js: - "0.10" - "0.12" - "4.9" - "6.17" - "8.17" - "10.22" - "12.18" - "14.6" cache: directories: - node_modules before_install: - | # Setup utility functions function node_version_lt () { [[ "$(v "$TRAVIS_NODE_VERSION")" -lt "$(v "${1}")" ]] } function npm_module_installed () { npm -lsp ls | grep -Fq 
"$(pwd)/node_modules/${1}:${1}@" } function npm_remove_module_re () { node -e ' fs = require("fs"); p = JSON.parse(fs.readFileSync("package.json", "utf8")); r = RegExp(process.argv[1]); for (k in p.devDependencies) { if (r.test(k)) delete p.devDependencies[k]; } fs.writeFileSync("package.json", JSON.stringify(p, null, 2) + "\n"); ' "$@" } function npm_use_module () { node -e ' fs = require("fs"); p = JSON.parse(fs.readFileSync("package.json", "utf8")); p.devDependencies[process.argv[1]] = process.argv[2]; fs.writeFileSync("package.json", JSON.stringify(p, null, 2) + "\n"); ' "$@" } function v () { tr '.' '\n' <<< "${1}" \ | awk '{ printf "%03d", $0 }' \ | sed 's/^0*//' } # Configure npm - | # Skip updating shrinkwrap / lock npm config set shrinkwrap false # Setup Node.js version-specific dependencies - | # Configure eslint for linting if node_version_lt '10.0'; then npm_remove_module_re '^eslint(-|$)' fi - | # Configure mocha for testing if node_version_lt '4.0'; then npm_use_module 'mocha' '3.5.3' elif node_version_lt '6.0'; then npm_use_module 'mocha' '5.2.0' elif node_version_lt '8.0'; then npm_use_module 'mocha' '6.2.2' fi - | # Configure nyc for testing if node_version_lt '4.0'; then npm_use_module 'nyc' '10.3.2' elif node_version_lt '6.0'; then npm_use_module 'nyc' '11.9.0' elif node_version_lt '8.0'; then npm_use_module 'nyc' '14.1.1' fi # Update Node.js modules - | # Prune & rebuild node_modules if [[ -d node_modules ]]; then npm prune npm rebuild fi script: - | # Run test script npm run-script test-ci - | # Run linting, depending on eslint install if npm_module_installed 'eslint'; then npm run-script lint fi after_script: - | # Upload coverage to coveralls npm install --save-dev coveralls@2 nyc report --reporter=text-lcov | coveralls multiparty-4.2.2/HISTORY.md 0000664 0000000 0000000 00000013230 13707700713 0015420 0 ustar 00root root 0000000 0000000 4.2.2 / 2020-07-27 ================== * Fix empty files on Node.js 14.x * Fix form emitting aborted error 
after close * Replace `fd-slicer` module with internal transform stream * deps: http-errors@~1.8.0 - Fix error creating objects in some environments - deps: inherits@2.0.4 - deps: setprototypeof@1.2.0 * deps: safe-buffer@5.2.1 4.2.1 / 2018-08-12 ================== * Use `uid-safe` module to for temp file names * deps: fd-slicer@1.1.0 * deps: http-errors@~1.7.0 4.2.0 / 2018-07-30 ================== * Use `http-errors` for raised errors * Use `random-bytes` module for polyfill * perf: remove parameter reassignment 4.1.4 / 2018-05-11 ================== * Fix file extension filtering stopping on certain whitespace characters * Use `safe-buffer` for improved API safety * perf: enable strict mode 4.1.3 / 2017-01-22 ================== * Use `os.tmpdir()` instead of `os.tmpDir()` * deps: fd-slicer@1.0.1 4.1.2 / 2015-05-09 ================== * Do not emit error on part prior to emitting part * Fix filename with quotes truncating from certain clients 4.1.1 / 2015-01-18 ================== * Do not clobber existing temporary files 4.1.0 / 2014-12-04 ================== * Add `statusCode` field to HTTP-related errors * deps: fd-slicer@1.0.0 4.0.0 / 2014-10-14 ================== * `part` events for fields no longer fire if `autoFields` is on * `part` events for files no longer fire if `autoFiles` is on * `field`, `file`, and `part` events are guaranteed to emit in the correct order - the order that the user places the parts in the request. Each `part` `end` event is guaranteed to emit before the next `part` event is emitted. * Drop Node.js 0.8.x support * Improve random temp file names - Now using 18 bytes of randomness instead of 8. * More robust `maxFilesSize` implementation - Before it was possible for race conditions to cause more than `maxFilesSize` bytes to get written to disk. That is now fixed. * Now `part` objects emit `error` events - This makes streaming work better since the part stream will emit an error when it is no longer streaming. 
* Remove support for generating the hash digest of a part - If you want this, do it in your own code. * Remove undocumented `ws` property from `file` objects * Require the close boundary - This makes multiparty more RFC-compliant and makes some invalid requests which used to work, now emit an error instead. 3.3.2 / 2014-08-07 ================== * Do not invoke callback after close * Share callback ending logic between error and close 3.3.1 / 2014-07-22 ================== * Remove problematic test fixtures 3.3.0 / 2014-07-03 ================== * Always emit close after all parts ended 3.2.10 / 2014-07-03 =================== * Fix callback hang in node.js 0.8 on errors * Remove execute bit from files 3.2.9 / 2014-06-16 ================== * Fix attaching error listeners directly after form.parse * Fix to not synchronously invoke callback to form.parse on error 3.2.8 / 2014-06-01 ================== * Fix developer accidentally corrupting data * Fix handling epilogue in a separate chunk * Fix initial check errors to use supplied callback 3.2.7 / 2014-05-26 ================== * Fix errors hanging responses in callback-style 3.2.6 / 2014-05-13 ================== * Fix `maxFields` to error on field after max 3.2.5 / 2014-05-11 ================== * Support boundary containing equal sign 3.2.4 / 2014-03-26 ================== * Keep `part.byteCount` undefined in chunked encoding * Fix temp files not always cleaned up 3.2.3 / 2014-02-20 ================== * Improve parsing boundary attribute from `Content-Type` 3.2.2 / 2014-01-29 ================== * Fix error on empty payloads 3.2.1 / 2014-01-27 ================== * Fix `maxFilesSize` overcalculation bug 3.2.0 / 2014-01-17 ================== * Add `maxFilesSize` for `autoFiles` 3.1.2 / 2014-01-13 ================== * Fix incorrectly using `autoFields` value for `autoFiles` 3.1.1 / 2013-12-13 ================== * Fix not emitting `close` after all part `end` events 3.1.0 / 2013-11-10 ================== * Support UTF-8 filename 
in `Content-Disposition` 3.0.0 / 2013-10-25 ================== * `form.parse` callback API changed in a compatibility-breaking manner 2.2.0 / 2013-10-15 ================== * Add callback API to support multiple files with same field name * Fix assertion crash when max field count is exceeded * Fix assertion crash when client aborts an invalid request * Fix assertion crash when `EMFILE` occurrs * Switch from assertions to only `error` events * Unpipe the request when an error occurs to save resources * Update readable-stream to ~1.1.9 2.1.9 / 2013-10-06 ================== * relax `Content-Type` detection regex 2.1.8 / 2013-08-26 ================== * Replace deprecated `Buffer.write()` 2.1.7 / 2013-05-23 ================== * Add repository field to package.json 2.1.6 / 2013-04-30 ================== * Expose `hash` as an option to `Form` 2.1.5 / 2013-04-10 ================== * Fix possible `close` event before all temp files are done 2.1.4 / 2013-04-09 ================== * Fix crash for invalid requests 2.1.3 / 2013-04-09 ================== * Add `file.size` 2.1.2 / 2013-04-08 ================== * Add proper backpressure support 2.1.1 / 2013-04-05 ================== * Add `part.byteCount` and `part.byteOffset` * Fix uploads larger than 2KB 2.1.0 / 2013-04-04 ================== * Complete rewrite. See README for changes and new API. 
2.0.0 / 2013-04-02 ================== * Fork and rewrite from `formidable` multiparty-4.2.2/LICENSE 0000664 0000000 0000000 00000002211 13707700713 0014737 0 ustar 00root root 0000000 0000000 (The MIT License) Copyright (c) 2013 Felix Geisendörfer Copyright (c) 2014 Andrew Kelley Copyright (c) 2014 Douglas Christopher Wilson Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. multiparty-4.2.2/README.md 0000664 0000000 0000000 00000021451 13707700713 0015220 0 ustar 00root root 0000000 0000000 # multiparty [![NPM Version][npm-version-image]][npm-url] [![NPM Downloads][npm-downloads-image]][npm-url] [![Node.js Version][node-version-image]][node-version-url] [![Build Status][travis-image]][travis-url] [![Test Coverage][coveralls-image]][coveralls-url] Parse http requests with content-type `multipart/form-data`, also known as file uploads. See also [busboy](https://github.com/mscdex/busboy) - a [faster](https://github.com/mscdex/dicer/wiki/Benchmarks) alternative which may be worth looking into. 
## Installation This is a [Node.js](https://nodejs.org/en/) module available through the [npm registry](https://www.npmjs.com/). Installation is done using the [`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): ``` npm install multiparty ``` ## Usage * See [examples](examples). Parse an incoming `multipart/form-data` request. ```js var multiparty = require('multiparty'); var http = require('http'); var util = require('util'); http.createServer(function(req, res) { if (req.url === '/upload' && req.method === 'POST') { // parse a file upload var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { res.writeHead(200, { 'content-type': 'text/plain' }); res.write('received upload:\n\n'); res.end(util.inspect({ fields: fields, files: files })); }); return; } // show a file upload form res.writeHead(200, { 'content-type': 'text/html' }); res.end( '
' ); }).listen(8080); ``` ## API ### multiparty.Form ```js var form = new multiparty.Form(options) ``` Creates a new form. Options: * `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`. * `maxFieldsSize` - Limits the amount of memory all fields (not files) can allocate in bytes. If this value is exceeded, an `error` event is emitted. The default size is 2MB. * `maxFields` - Limits the number of fields that will be parsed before emitting an `error` event. A file counts as a field in this case. Defaults to 1000. * `maxFilesSize` - Only relevant when `autoFiles` is `true`. Limits the total bytes accepted for all files combined. If this value is exceeded, an `error` event is emitted. The default is `Infinity`. * `autoFields` - Enables `field` events and disables `part` events for fields. This is automatically set to `true` if you add a `field` listener. * `autoFiles` - Enables `file` events and disables `part` events for files. This is automatically set to `true` if you add a `file` listener. * `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for placing file uploads in. You can move them later using `fs.rename()`. Defaults to `os.tmpdir()`. #### form.parse(request, [cb]) Parses an incoming node.js `request` containing form data.This will cause `form` to emit events based off the incoming request. ```js var count = 0; var form = new multiparty.Form(); // Errors may be emitted // Note that if you are listening to 'part' events, the same error may be // emitted from the `form` and the `part`. 
form.on('error', function(err) { console.log('Error parsing form: ' + err.stack); }); // Parts are emitted when parsing the form form.on('part', function(part) { // You *must* act on the part by reading it // NOTE: if you want to ignore it, just call "part.resume()" if (!part.filename) { // filename is not defined when this is a field and not a file console.log('got field named ' + part.name); // ignore field's content part.resume(); } if (part.filename) { // filename is defined when this is a file count++; console.log('got file named ' + part.name); // ignore file's content here part.resume(); } part.on('error', function(err) { // decide what to do }); }); // Close emitted after form parsed form.on('close', function() { console.log('Upload completed!'); res.setHeader('text/plain'); res.end('Received ' + count + ' files'); }); // Parse req form.parse(req); ``` If `cb` is provided, `autoFields` and `autoFiles` are set to `true` and all fields and files are collected and passed to the callback, removing the need to listen to any events on `form`. This is for convenience when you want to read everything, but be sure to write cleanup code, as this will write all uploaded files to the disk, even ones you may not be interested in. ```js form.parse(req, function(err, fields, files) { Object.keys(fields).forEach(function(name) { console.log('got field named ' + name); }); Object.keys(files).forEach(function(name) { console.log('got file named ' + name); }); console.log('Upload completed!'); res.setHeader('text/plain'); res.end('Received ' + files.length + ' files'); }); ``` `fields` is an object where the property names are field names and the values are arrays of field values. `files` is an object where the property names are field names and the values are arrays of file objects. #### form.bytesReceived The amount of bytes received for this form so far. #### form.bytesExpected The expected number of bytes in this form. 
### Events #### 'error' (err) Unless you supply a callback to `form.parse`, you definitely want to handle this event. Otherwise your server *will* crash when users submit bogus multipart requests! Only one 'error' event can ever be emitted, and if an 'error' event is emitted, then 'close' will not be emitted. If the error would correspond to a certain HTTP response code, the `err` object will have a `statusCode` property with the value of the suggested HTTP response code to send back. Note that an 'error' event will be emitted both from the `form` and from the current `part`. #### 'part' (part) Emitted when a part is encountered in the request. `part` is a `ReadableStream`. It also has the following properties: * `headers` - the headers for this part. For example, you may be interested in `content-type`. * `name` - the field name for this part * `filename` - only if the part is an incoming file * `byteOffset` - the byte offset of this part in the request body * `byteCount` - assuming that this is the last part in the request, this is the size of this part in bytes. You could use this, for example, to set the `Content-Length` header if uploading to S3. If the part had a `Content-Length` header then that value is used here instead. Parts for fields are not emitted when `autoFields` is on, and likewise parts for files are not emitted when `autoFiles` is on. `part` emits 'error' events! Make sure you handle them. #### 'aborted' Emitted when the request is aborted. This event will be followed shortly by an `error` event. In practice you do not need to handle this event. #### 'progress' (bytesReceived, bytesExpected) Emitted when a chunk of data is received for the form. The `bytesReceived` argument contains the total count of bytes received for this form so far. The `bytesExpected` argument contains the total expected bytes if known, otherwise `null`. #### 'close' Emitted after all parts have been parsed and emitted. Not emitted if an `error` event is emitted. 
If you have `autoFiles` on, this is not fired until all the data has been flushed to disk and the file handles have been closed. This is typically when you would send your response. #### 'file' (name, file) **By default multiparty will not touch your hard drive.** But if you add this listener, multiparty automatically sets `form.autoFiles` to `true` and will stream uploads to disk for you. **The max bytes accepted per request can be specified with `maxFilesSize`.** * `name` - the field name for this file * `file` - an object with these properties: - `fieldName` - same as `name` - the field name for this file - `originalFilename` - the filename that the user reports for the file - `path` - the absolute path of the uploaded file on disk - `headers` - the HTTP headers that were sent along with this file - `size` - size of the file in bytes #### 'field' (name, value) * `name` - field name * `value` - string field value ## License [MIT](LICENSE) [coveralls-image]: https://badgen.net/coveralls/c/github/pillarjs/multiparty/master [coveralls-url]: https://coveralls.io/r/pillarjs/multiparty?branch=master [node-version-image]: https://badgen.net/npm/node/multiparty [node-version-url]: https://nodejs.org/en/download [npm-downloads-image]: https://badgen.net/npm/dm/multiparty [npm-url]: https://npmjs.org/package/multiparty [npm-version-image]: https://badgen.net/npm/v/multiparty [travis-image]: https://badgen.net/travis/pillarjs/multiparty/master [travis-url]: https://travis-ci.org/pillarjs/multiparty multiparty-4.2.2/examples/ 0000775 0000000 0000000 00000000000 13707700713 0015554 5 ustar 00root root 0000000 0000000 multiparty-4.2.2/examples/azureblobstorage.js 0000664 0000000 0000000 00000002306 13707700713 0021465 0 ustar 00root root 0000000 0000000 var azure = require('azure') var http = require('http') var multiparty = require('../') var PORT = process.env.PORT || 27372; var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, { 
'content-type': 'text/html' }) res.end( '' ); } else if (req.url === '/upload') { var blobService = azure.createBlobService(); var form = new multiparty.Form(); form.on('part', function(part) { if (!part.filename) return; var size = part.byteCount; var name = part.filename; var container = 'blobContainerName'; blobService.createBlockBlobFromStream(container, name, part, size, function(error) { if (error) { // error handling res.status(500).send('Error uploading file'); } res.send('File uploaded successfully'); }); }); form.parse(req); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:' + PORT + '/'); }); multiparty-4.2.2/examples/progress.js 0000664 0000000 0000000 00000002736 13707700713 0017766 0 ustar 00root root 0000000 0000000 var http = require('http') var multiparty = require('../') var util = require('util') var PORT = process.env.PORT || 8080 var server = http.createServer(function (req, res) { if (req.url === '/') { res.writeHead(200, { 'content-type': 'text/html' }) res.end( '' ) } else if (req.url === '/upload') { var form = new multiparty.Form() form.on('progress', function (bytesReceived, bytesExpected) { if (bytesExpected === null) { return } var percentComplete = (bytesReceived / bytesExpected) * 100 console.log('the form is ' + Math.floor(percentComplete) + '%' + ' complete') }) form.parse(req, function (err, fields, files) { if (err) { res.writeHead(400, { 'content-type': 'text/plain' }) res.end('invalid request: ' + err.message) return } res.writeHead(200, { 'content-type': 'text/plain' }) res.write('received fields:\n\n ' + util.inspect(fields)) res.write('\n\n') res.end('received files:\n\n ' + util.inspect(files)) }) } else { res.writeHead(404, { 'content-type': 'text/plain' }) res.end('404') } }) server.listen(PORT, function () { console.info('listening on http://0.0.0.0:' + PORT + '/') }) multiparty-4.2.2/examples/s3.js 0000664 0000000 0000000 00000003571 13707700713 0016445 0 ustar 00root root 0000000 0000000 
if (!process.env.S3_BUCKET || !process.env.S3_KEY || !process.env.S3_SECRET) { console.log('To run this example, do this:') console.log('npm install aws-sdk') console.log('S3_BUCKET="(your s3 bucket)" S3_KEY="(your s3 key)" S3_SECRET="(your s3 secret) node examples/s3.js"'); process.exit(1); } var http = require('http'); var multiparty = require('../'); var AWS = require('aws-sdk'); var PORT = process.env.PORT || 27372; var bucket = process.env.S3_BUCKET; var s3Client = new AWS.S3({ accessKeyId: process.env.S3_KEY, secretAccessKey: process.env.S3_SECRET // See: http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#constructor-property }); var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, { 'content-type': 'text/html' }) res.end( '' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); var destPath; form.on('field', function(name, value) { if (name === 'path') { destPath = value; } }); form.on('part', function(part) { s3Client.putObject({ Bucket: bucket, Key: destPath, ACL: 'public-read', Body: part, ContentLength: part.byteCount }, function(err, data) { if (err) throw err; console.log('done', data) res.end('OK') console.log('https://s3.amazonaws.com/' + bucket + '/' + destPath) }); }); form.parse(req); } else { res.writeHead(404, { 'content-type': 'text/plain' }) res.end('404'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); multiparty-4.2.2/examples/upload.js 0000664 0000000 0000000 00000002277 13707700713 0017406 0 ustar 00root root 0000000 0000000 var http = require('http') var multiparty = require('../') var util = require('util') var PORT = process.env.PORT || 27372 var server = http.createServer(function(req, res) { if (req.url === '/') { res.writeHead(200, { 'content-type': 'text/html' }) res.end( '' ); } else if (req.url === '/upload') { var form = new multiparty.Form(); form.parse(req, function(err, fields, files) { if (err) { 
res.writeHead(400, { 'content-type': 'text/plain' }) res.end('invalid request: ' + err.message) return; } res.writeHead(200, { 'content-type': 'text/plain' }) res.write('received fields:\n\n '+util.inspect(fields)); res.write('\n\n'); res.end('received files:\n\n '+util.inspect(files)); }); } else { res.writeHead(404, { 'content-type': 'text/plain' }) res.end('404'); } }); server.listen(PORT, function() { console.info('listening on http://0.0.0.0:'+PORT+'/'); }); multiparty-4.2.2/index.js 0000664 0000000 0000000 00000053043 13707700713 0015410 0 ustar 00root root 0000000 0000000 /*! * multiparty * Copyright(c) 2013 Felix Geisendörfer * Copyright(c) 2014 Andrew Kelley * Copyright(c) 2014 Douglas Christopher Wilson * MIT Licensed */ 'use strict' var createError = require('http-errors') var uid = require('uid-safe') var stream = require('stream'); var util = require('util'); var fs = require('fs'); var path = require('path'); var os = require('os'); var Buffer = require('safe-buffer').Buffer var StringDecoder = require('string_decoder').StringDecoder; var START = 0; var START_BOUNDARY = 1; var HEADER_FIELD_START = 2; var HEADER_FIELD = 3; var HEADER_VALUE_START = 4; var HEADER_VALUE = 5; var HEADER_VALUE_ALMOST_DONE = 6; var HEADERS_ALMOST_DONE = 7; var PART_DATA_START = 8; var PART_DATA = 9; var CLOSE_BOUNDARY = 10; var END = 11; var LF = 10; var CR = 13; var SPACE = 32; var HYPHEN = 45; var COLON = 58; var A = 97; var Z = 122; var CONTENT_TYPE_RE = /^multipart\/(?:form-data|related)(?:;|$)/i; var CONTENT_TYPE_PARAM_RE = /;\s*([^=]+)=(?:"([^"]+)"|([^;]+))/gi; var FILE_EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16})[\S\s]*/; var LAST_BOUNDARY_SUFFIX_LEN = 4; // --\r\n exports.Form = Form; util.inherits(Form, stream.Writable); function Form(options) { var opts = options || {} var self = this; stream.Writable.call(self, { emitClose: false }) self.error = null; self.autoFields = !!opts.autoFields self.autoFiles = !!opts.autoFiles self.maxFields = opts.maxFields || 1000 
self.maxFieldsSize = opts.maxFieldsSize || 2 * 1024 * 1024 self.maxFilesSize = opts.maxFilesSize || Infinity self.uploadDir = opts.uploadDir || os.tmpdir() self.encoding = opts.encoding || 'utf8' self.bytesReceived = 0; self.bytesExpected = null; self.openedFiles = []; self.totalFieldSize = 0; self.totalFieldCount = 0; self.totalFileSize = 0; self.flushing = 0; self.backpressure = false; self.writeCbs = []; self.emitQueue = []; self.on('newListener', function(eventName) { if (eventName === 'file') { self.autoFiles = true; } else if (eventName === 'field') { self.autoFields = true; } }); } Form.prototype.parse = function(req, cb) { var called = false; var self = this; var waitend = true; self.on('close', onClosed) if (cb) { // if the user supplies a callback, this implies autoFields and autoFiles self.autoFields = true; self.autoFiles = true; // wait for request to end before calling cb var end = function (done) { if (called) return; called = true; // wait for req events to fire process.nextTick(function() { if (waitend && req.readable) { // dump rest of request req.resume(); req.once('end', done); return; } done(); }); }; var fields = {}; var files = {}; self.on('error', function(err) { end(function() { cb(err); }); }); self.on('field', function(name, value) { var fieldsArray = fields[name] || (fields[name] = []); fieldsArray.push(value); }); self.on('file', function(name, file) { var filesArray = files[name] || (files[name] = []); filesArray.push(file); }); self.on('close', function() { end(function() { cb(null, fields, files); }); }); } self.handleError = handleError; self.bytesExpected = getBytesExpected(req.headers); req.on('end', onReqEnd); req.on('error', function(err) { waitend = false; handleError(err); }); req.on('aborted', onReqAborted); var state = req._readableState; if (req._decoder || (state && (state.encoding || state.decoder))) { // this is a binary protocol // if an encoding is set, input is likely corrupted validationError(new Error('request 
encoding must not be set')); return; } var contentType = req.headers['content-type']; if (!contentType) { validationError(createError(415, 'missing content-type header')); return; } var m = CONTENT_TYPE_RE.exec(contentType); if (!m) { validationError(createError(415, 'unsupported content-type')); return; } var boundary; CONTENT_TYPE_PARAM_RE.lastIndex = m.index + m[0].length - 1; while ((m = CONTENT_TYPE_PARAM_RE.exec(contentType))) { if (m[1].toLowerCase() !== 'boundary') continue; boundary = m[2] || m[3]; break; } if (!boundary) { validationError(createError(400, 'content-type missing boundary')); return; } setUpParser(self, boundary); req.pipe(self); function onClosed () { req.removeListener('aborted', onReqAborted) } function onReqAborted() { waitend = false; self.emit('aborted'); handleError(new Error('Request aborted')) } function onReqEnd() { waitend = false; } function handleError(err) { var first = !self.error; if (first) { self.error = err; req.removeListener('aborted', onReqAborted); req.removeListener('end', onReqEnd); if (self.destStream) { errorEventQueue(self, self.destStream, err); } } cleanupOpenFiles(self); if (first) { self.emit('error', err); } } function validationError(err) { // handle error on next tick for event listeners to attach process.nextTick(handleError.bind(null, err)) } }; Form.prototype._write = function(buffer, encoding, cb) { if (this.error) return; var self = this; var i = 0; var len = buffer.length; var prevIndex = self.index; var index = self.index; var state = self.state; var lookbehind = self.lookbehind; var boundary = self.boundary; var boundaryChars = self.boundaryChars; var boundaryLength = self.boundary.length; var boundaryEnd = boundaryLength - 1; var bufferLength = buffer.length; var c; var cl; for (i = 0; i < len; i++) { c = buffer[i]; switch (state) { case START: index = 0; state = START_BOUNDARY; /* falls through */ case START_BOUNDARY: if (index === boundaryLength - 2 && c === HYPHEN) { index = 1; state = 
CLOSE_BOUNDARY; break; } else if (index === boundaryLength - 2) { if (c !== CR) return self.handleError(createError(400, 'Expected CR Received ' + c)); index++; break; } else if (index === boundaryLength - 1) { if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); index = 0; self.onParsePartBegin(); state = HEADER_FIELD_START; break; } if (c !== boundary[index+2]) index = -2; if (c === boundary[index+2]) index++; break; case HEADER_FIELD_START: state = HEADER_FIELD; self.headerFieldMark = i; index = 0; /* falls through */ case HEADER_FIELD: if (c === CR) { self.headerFieldMark = null; state = HEADERS_ALMOST_DONE; break; } index++; if (c === HYPHEN) break; if (c === COLON) { if (index === 1) { // empty header field self.handleError(createError(400, 'Empty header field')); return; } self.onParseHeaderField(buffer.slice(self.headerFieldMark, i)); self.headerFieldMark = null; state = HEADER_VALUE_START; break; } cl = lower(c); if (cl < A || cl > Z) { self.handleError(createError(400, 'Expected alphabetic character, received ' + c)); return; } break; case HEADER_VALUE_START: if (c === SPACE) break; self.headerValueMark = i; state = HEADER_VALUE; /* falls through */ case HEADER_VALUE: if (c === CR) { self.onParseHeaderValue(buffer.slice(self.headerValueMark, i)); self.headerValueMark = null; self.onParseHeaderEnd(); state = HEADER_VALUE_ALMOST_DONE; } break; case HEADER_VALUE_ALMOST_DONE: if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); state = HEADER_FIELD_START; break; case HEADERS_ALMOST_DONE: if (c !== LF) return self.handleError(createError(400, 'Expected LF Received ' + c)); var err = self.onParseHeadersEnd(i + 1); if (err) return self.handleError(err); state = PART_DATA_START; break; case PART_DATA_START: state = PART_DATA; self.partDataMark = i; /* falls through */ case PART_DATA: prevIndex = index; if (index === 0) { // boyer-moore derrived algorithm to safely skip non-boundary data i += 
boundaryEnd; while (i < bufferLength && !(buffer[i] in boundaryChars)) { i += boundaryLength; } i -= boundaryEnd; c = buffer[i]; } if (index < boundaryLength) { if (boundary[index] === c) { if (index === 0) { self.onParsePartData(buffer.slice(self.partDataMark, i)); self.partDataMark = null; } index++; } else { index = 0; } } else if (index === boundaryLength) { index++; if (c === CR) { // CR = part boundary self.partBoundaryFlag = true; } else if (c === HYPHEN) { index = 1; state = CLOSE_BOUNDARY; break; } else { index = 0; } } else if (index - 1 === boundaryLength) { if (self.partBoundaryFlag) { index = 0; if (c === LF) { self.partBoundaryFlag = false; self.onParsePartEnd(); self.onParsePartBegin(); state = HEADER_FIELD_START; break; } } else { index = 0; } } if (index > 0) { // when matching a possible boundary, keep a lookbehind reference // in case it turns out to be a false lead lookbehind[index-1] = c; } else if (prevIndex > 0) { // if our boundary turned out to be rubbish, the captured lookbehind // belongs to partData self.onParsePartData(lookbehind.slice(0, prevIndex)); prevIndex = 0; self.partDataMark = i; // reconsider the current character even so it interrupted the sequence // it could be the beginning of a new sequence i--; } break; case CLOSE_BOUNDARY: if (c !== HYPHEN) return self.handleError(createError(400, 'Expected HYPHEN Received ' + c)); if (index === 1) { self.onParsePartEnd(); state = END; } else if (index > 1) { return self.handleError(new Error('Parser has invalid state.')) } index++; break; case END: break; default: self.handleError(new Error('Parser has invalid state.')) return; } } if (self.headerFieldMark != null) { self.onParseHeaderField(buffer.slice(self.headerFieldMark)); self.headerFieldMark = 0; } if (self.headerValueMark != null) { self.onParseHeaderValue(buffer.slice(self.headerValueMark)); self.headerValueMark = 0; } if (self.partDataMark != null) { self.onParsePartData(buffer.slice(self.partDataMark)); self.partDataMark = 0; 
}
// Persist the parser position for the next chunk and report progress.
self.index = index;
self.state = state;
self.bytesReceived += buffer.length;
self.emit('progress', self.bytesReceived, self.bytesExpected);
// If a destination stream signalled backpressure, park the write callback
// until a 'drain' flushes it (see flushWriteCbs); otherwise ack this
// chunk immediately.
if (self.backpressure) {
  self.writeCbs.push(cb);
} else {
  cb();
}
};

// Reset all per-part state at the start of each new multipart part.
Form.prototype.onParsePartBegin = function() {
  clearPartVars(this);
}

// Accumulate header-name bytes, decoded with the form's encoding.
Form.prototype.onParseHeaderField = function(b) {
  this.headerField += this.headerFieldDecoder.write(b);
}

// Accumulate header-value bytes, decoded with the form's encoding.
Form.prototype.onParseHeaderValue = function(b) {
  this.headerValue += this.headerValueDecoder.write(b);
}

// A complete "Name: value" part header has been parsed. Record it under
// its lowercased name in partHeaders, pull the part name / filename out of
// Content-Disposition and the encoding out of Content-Transfer-Encoding,
// then reset the decoders and accumulators for the next header.
Form.prototype.onParseHeaderEnd = function() {
  this.headerField = this.headerField.toLowerCase();
  this.partHeaders[this.headerField] = this.headerValue;
  var m;
  if (this.headerField === 'content-disposition') {
    if (m = this.headerValue.match(/\bname="([^"]+)"/i)) {
      this.partName = m[1];
    }
    this.partFilename = parseFilename(this.headerValue);
  } else if (this.headerField === 'content-transfer-encoding') {
    this.partTransferEncoding = this.headerValue.toLowerCase();
  }
  this.headerFieldDecoder = new StringDecoder(this.encoding);
  this.headerField = '';
  this.headerValueDecoder = new StringDecoder(this.encoding);
  this.headerValue = '';
}

// Forward a slice of part-body bytes to the current destination stream,
// decoding base64 on the fly when the part declared that transfer
// encoding. Records backpressure so the parser can delay acking writes.
Form.prototype.onParsePartData = function(b) {
  if (this.partTransferEncoding === 'base64') {
    this.backpressure = ! this.destStream.write(b.toString('ascii'), 'base64');
  } else {
    this.backpressure = !
this.destStream.write(b);
  }
}

// Finish the current part: flush parked write callbacks and end the
// destination stream on the next tick, then reset per-part state.
Form.prototype.onParsePartEnd = function() {
  if (this.destStream) {
    flushWriteCbs(this);
    var s = this.destStream;
    process.nextTick(function() {
      s.end();
    });
  }
  clearPartVars(this);
}

// All headers of a part have been parsed. Validate the transfer encoding
// and the field count, create a PassThrough destination stream for the
// part body, annotate it with metadata (headers, name, filename, byte
// offset/count), and route it to the field / file / part handler.
// Returns an error object (rather than throwing) on invalid input.
Form.prototype.onParseHeadersEnd = function(offset) {
  var self = this;
  switch(self.partTransferEncoding){
    case 'binary':
    case '7bit':
    case '8bit':
      // all three are passed through as-is
      self.partTransferEncoding = 'binary';
      break;
    case 'base64': break;
    default:
      return createError(400, 'unknown transfer-encoding: ' + self.partTransferEncoding);
  }
  self.totalFieldCount += 1;
  if (self.totalFieldCount > self.maxFields) {
    return createError(413, 'maxFields ' + self.maxFields + ' exceeded.');
  }
  self.destStream = new stream.PassThrough();
  self.destStream.on('drain', function() {
    // destination caught up; release any parked write callbacks
    flushWriteCbs(self);
  });
  self.destStream.headers = self.partHeaders;
  self.destStream.name = self.partName;
  self.destStream.filename = self.partFilename;
  self.destStream.byteOffset = self.bytesReceived + offset;
  var partContentLength = self.destStream.headers['content-length'];
  // Prefer the part's own Content-Length; otherwise estimate from the
  // request's expected total minus the trailing boundary overhead, or
  // leave undefined when the total is unknown.
  self.destStream.byteCount = partContentLength ?
    parseInt(partContentLength, 10) :
    self.bytesExpected ?
      (self.bytesExpected - self.destStream.byteOffset -
        self.boundary.length - LAST_BOUNDARY_SUFFIX_LEN) :
      undefined;
  if (self.destStream.filename == null && self.autoFields) {
    handleField(self, self.destStream);
  } else if (self.destStream.filename != null && self.autoFiles) {
    handleFile(self, self.destStream);
  } else {
    handlePart(self, self.destStream);
  }
}

util.inherits(LimitStream, stream.Transform)

// Transform stream that passes bytes through until `limit` total bytes
// have been seen, then errors with code 'ETOOBIG'.
function LimitStream (limit) {
  stream.Transform.call(this)
  this.bytes = 0
  this.limit = limit
}

LimitStream.prototype._transform = function _transform (chunk, encoding, callback) {
  var length = !Buffer.isBuffer(chunk) ?
Buffer.byteLength(chunk, encoding) : chunk.length
  this.bytes += length
  if (this.bytes > this.limit) {
    var err = new Error('maximum file length exceeded')
    err.code = 'ETOOBIG'
    callback(err)
  } else {
    this.push(chunk)
    // progress args: (total bytes so far, bytes in this chunk)
    this.emit('progress', this.bytes, length)
    callback()
  }
}

// Invoke (on the next tick) every write callback parked by backpressure
// and clear the backpressure flag.
function flushWriteCbs(self) {
  self.writeCbs.forEach(function(cb) {
    process.nextTick(cb);
  });
  self.writeCbs = [];
  self.backpressure = false;
}

// Number of request-body bytes to expect: Content-Length when present,
// 0 when there is neither Content-Length nor Transfer-Encoding, and null
// (unknown) when a transfer encoding is declared.
function getBytesExpected(headers) {
  var contentLength = headers['content-length'];
  if (contentLength) {
    return parseInt(contentLength, 10);
  } else if (headers['transfer-encoding'] == null) {
    return 0;
  } else {
    return null;
  }
}

// Count one more outstanding async operation that must finish before the
// form may emit 'close'.
function beginFlush(self) {
  self.flushing += 1;
}

// Mark one outstanding operation finished; 'close' is emitted via
// maybeClose once the count reaches zero.
function endFlush(self) {
  self.flushing -= 1;
  if (self.flushing < 0) {
    // if this happens this is a critical bug in multiparty and this stack trace
    // will help us figure it out.
    self.handleError(new Error('unexpected endFlush'))
    return;
  }
  maybeClose(self);
}

// Emit 'close' once nothing is in flight and no error occurred, after the
// ordered emit queue has drained.
function maybeClose(self) {
  if (self.flushing > 0 || self.error) return;
  // go through the emit queue in case any field, file, or part events are
  // waiting to be emitted
  holdEmitQueue(self)(function() {
    // nextTick because the user is listening to part 'end' events and we are
    // using part 'end' events to decide when to emit 'close'. we add our 'end'
    // handler before the user gets a chance to add theirs. So we make sure
    // their 'end' event fires before we emit the 'close' event.
// this is covered by test/standalone/test-issue-36 process.nextTick(function() { self.emit('close'); }); }); } function cleanupOpenFiles(self) { self.openedFiles.forEach(function(internalFile) { // since fd slicer autoClose is true, destroying the only write stream // is guaranteed by the API to close the fd internalFile.ws.destroy(); fs.unlink(internalFile.publicFile.path, function(err) { if (err) self.handleError(err); }); }); self.openedFiles = []; } function holdEmitQueue(self, eventEmitter) { var item = { cb: null, ee: eventEmitter, err: null } self.emitQueue.push(item); return function(cb) { item.cb = cb; flushEmitQueue(self); }; } function errorEventQueue(self, eventEmitter, err) { var items = self.emitQueue.filter(function (item) { return item.ee === eventEmitter; }); if (items.length === 0) { eventEmitter.emit('error', err); return; } items.forEach(function (item) { item.err = err; }); } function flushEmitQueue(self) { while (self.emitQueue.length > 0 && self.emitQueue[0].cb) { var item = self.emitQueue.shift(); // invoke the callback item.cb(); if (item.err) { // emit the delayed error item.ee.emit('error', item.err); } } } function handlePart(self, partStream) { beginFlush(self); var emitAndReleaseHold = holdEmitQueue(self, partStream); partStream.on('end', function() { endFlush(self); }); emitAndReleaseHold(function() { self.emit('part', partStream); }); } function handleFile(self, fileStream) { if (self.error) return; var publicFile = { fieldName: fileStream.name, originalFilename: fileStream.filename, path: uploadPath(self.uploadDir, fileStream.filename), headers: fileStream.headers, size: 0 }; var internalFile = { publicFile: publicFile, ls: null, ws: fs.createWriteStream(publicFile.path, { flags: 'wx' }) }; self.openedFiles.push(internalFile) beginFlush(self); // flush to write stream var emitAndReleaseHold = holdEmitQueue(self, fileStream); fileStream.on('error', function(err) { self.handleError(err); }); internalFile.ws.on('error', function (err) 
{
    self.handleError(err)
  })
  internalFile.ws.on('open', function () {
    // end option here guarantees that no more than that amount will be written
    // or else an error will be emitted
    internalFile.ls = new LimitStream(self.maxFilesSize - self.totalFileSize)
    internalFile.ls.pipe(internalFile.ws)
    internalFile.ls.on('error', function (err) {
      // translate the size-limit error into an HTTP 413; pass others through
      self.handleError(err.code === 'ETOOBIG' ? createError(413, err.message, { code: err.code }) : err)
    });
    internalFile.ls.on('progress', function (totalBytes, chunkBytes) {
      publicFile.size = totalBytes
      self.totalFileSize += chunkBytes
    });
    internalFile.ws.on('close', function () {
      if (self.error) return;
      emitAndReleaseHold(function() {
        self.emit('file', fileStream.name, publicFile);
      });
      endFlush(self);
    });
    fileStream.pipe(internalFile.ls)
  });
}

// autoFields mode: buffer a field part into a decoded string (enforcing
// maxFieldsSize across all fields) and emit a 'field' event when the part
// stream ends.
function handleField(self, fieldStream) {
  var value = '';
  var decoder = new StringDecoder(self.encoding);
  beginFlush(self);
  var emitAndReleaseHold = holdEmitQueue(self, fieldStream);
  fieldStream.on('error', function(err) {
    self.handleError(err);
  });
  fieldStream.on('readable', function() {
    var buffer = fieldStream.read();
    if (!buffer) return;
    self.totalFieldSize += buffer.length;
    if (self.totalFieldSize > self.maxFieldsSize) {
      self.handleError(createError(413, 'maxFieldsSize ' + self.maxFieldsSize + ' exceeded'));
      return;
    }
    value += decoder.write(buffer);
  });
  fieldStream.on('end', function() {
    emitAndReleaseHold(function() {
      self.emit('field', fieldStream.name, value);
    });
    endFlush(self);
  });
}

// Reset all per-part parser state to its defaults.
function clearPartVars(self) {
  self.partHeaders = {};
  self.partName = null;
  self.partFilename = null;
  self.partTransferEncoding = 'binary';
  self.destStream = null;
  self.headerFieldDecoder = new StringDecoder(self.encoding);
  self.headerField = ''
  self.headerValueDecoder = new StringDecoder(self.encoding);
  self.headerValue = ''
}

// Prepare the parser for a given MIME boundary: precompute the
// "\r\n--"-prefixed boundary buffer, the lookbehind buffer, and the set
// of byte values appearing in the boundary (used for fast skipping).
function setUpParser(self, boundary) {
  self.boundary = Buffer.alloc(boundary.length + 4)
  self.boundary.write('\r\n--', 0, boundary.length + 4, 'ascii');
  self.boundary.write(boundary,
4, boundary.length, 'ascii');
  self.lookbehind = Buffer.alloc(self.boundary.length + 8)
  self.state = START;
  self.boundaryChars = {};
  for (var i = 0; i < self.boundary.length; i++) {
    self.boundaryChars[self.boundary[i]] = true;
  }
  self.index = null;
  self.partBoundaryFlag = false;
  // Hold the form open until the request stream finishes; error if the
  // stream ends before the closing boundary was seen.
  beginFlush(self);
  self.on('finish', function() {
    if (self.state !== END) {
      self.handleError(createError(400, 'stream ended unexpectedly'));
    }
    endFlush(self);
  });
}

// Build a unique destination path inside baseDir: random id plus the
// (FILE_EXT_RE-sanitized) extension of the original filename.
function uploadPath(baseDir, filename) {
  var ext = path.extname(filename).replace(FILE_EXT_RE, '$1');
  var name = uid.sync(18) + ext
  return path.join(baseDir, name);
}

// Extract the filename from a Content-Disposition header value. Handles
// both the quoted filename="..." form and the extended
// filename*=utf-8''... form (URI-decoded), unescapes quotes and 4-digit
// numeric escapes, and strips any backslash-delimited path prefix.
// Returns undefined when no filename parameter is present.
function parseFilename(headerValue) {
  var m = headerValue.match(/\bfilename="(.*?)"($|; )/i);
  if (!m) {
    m = headerValue.match(/\bfilename\*=utf-8''(.*?)($|; )/i)
    if (m) {
      m[1] = decodeURI(m[1]);
    } else {
      return;
    }
  }
  var filename = m[1];
  filename = filename.replace(/%22|\\"/g, '"');
  filename = filename.replace(/([\d]{4});/g, function(m, code) {
    return String.fromCharCode(code);
  });
  return filename.substr(filename.lastIndexOf('\\') + 1);
}

// ASCII lowercase of a byte by setting the 0x20 bit.
function lower(c) {
  return c | 0x20;
}
multiparty-4.2.2/package.json 0000664 0000000 0000000 00000002242 13707700713 0016224 0 ustar 00root root 0000000 0000000 { "name": "multiparty", "description": "multipart/form-data parser which supports streaming", "version": "4.2.2", "author": "Andrew Kelley