raw-body-1.2.0/.gitignore

# Compiled source #
###################
*.com
*.class
*.dll
*.exe
*.o
*.so

# Packages #
############
# it's better to unpack these files and commit the raw source
# git has its own built in compression methods
*.7z
*.dmg
*.gz
*.iso
*.jar
*.rar
*.tar
*.zip

# Logs and databases #
######################
*.log
*.sql
*.sqlite

# OS generated files #
######################
.DS_Store*
ehthumbs.db
Icon?
Thumbs.db

# Node.js #
###########
lib-cov
*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
pids
logs
results
coverage
node_modules
npm-debug.log

# Components #
##############
/build
/components
/vendors
*.orig

raw-body-1.2.0/.npmignore

coverage/
test/
.travis.yml

raw-body-1.2.0/.travis.yml

node_js:
  - "0.8"
  - "0.10"
  - "0.11"
language: node_js
script: "npm run-script test-travis"
after_script: "npm install coveralls@2.10.0 && cat ./coverage/lcov.info | coveralls"

raw-body-1.2.0/HISTORY.md

1.2.0 / 2014-06-13
==================

* Passing string as `options` interpreted as encoding
* Support all encodings from `iconv-lite`

1.1.7 / 2014-06-12
==================

* use `string_decoder` module from npm

1.1.6 / 2014-05-27
==================

* check encoding for old streams1
* support node.js < 0.10.6

1.1.5 / 2014-05-14
==================

* bump bytes

1.1.4 / 2014-04-19
==================

* allow true as an option
* bump bytes

1.1.3 / 2014-03-02
==================

* fix case when length=null

1.1.2 / 2013-12-01
==================

* be less strict on state.encoding check

1.1.1 / 2013-11-27
==================

* add engines

1.1.0 / 2013-11-27
==================

* add err.statusCode and err.type
* allow for encoding option to be true
* pause the stream instead of dumping on error
* throw if the stream's encoding is set

1.0.1 / 2013-11-19
==================

* dont support streams1, throw if dev set encoding

1.0.0 / 2013-11-17
==================

* rename `expected` option to `length`

0.2.0 / 2013-11-15
==================

* republish

0.1.1 / 2013-11-15
==================

* use bytes

0.1.0 / 2013-11-11
==================

* generator support

0.0.3 / 2013-10-10
==================

* update repo

0.0.2 / 2013-09-14
==================

* dump stream on bad headers
* listen to events after defining received and buffers

0.0.1 / 2013-09-14
==================

* Initial release

raw-body-1.2.0/README.md

# raw-body

[![NPM version](https://badge.fury.io/js/raw-body.svg)](http://badge.fury.io/js/raw-body)
[![Build Status](https://travis-ci.org/stream-utils/raw-body.svg?branch=master)](https://travis-ci.org/stream-utils/raw-body)
[![Coverage Status](https://img.shields.io/coveralls/stream-utils/raw-body.svg?branch=master)](https://coveralls.io/r/stream-utils/raw-body)

Gets the entire buffer of a stream either as a `Buffer` or a string.
Validates the stream's length against an expected length and maximum limit.
Ideal for parsing request bodies.
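
A minimal sketch of standalone use, reading a file stream into a UTF-8 string
with a 1mb cap (the file path here is only a placeholder):

```js
var fs = require('fs')
var getRawBody = require('raw-body')

getRawBody(fs.createReadStream('package.json'), {
  limit: '1mb',
  encoding: 'utf8'
}, function (err, str) {
  if (err) throw err
  console.log('read %d characters', str.length)
})
```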

## API

```js
var getRawBody = require('raw-body')
var typer = require('media-typer')

app.use(function (req, res, next) {
  getRawBody(req, {
    length: req.headers['content-length'],
    limit: '1mb',
    encoding: typer.parse(req.headers['content-type']).parameters.charset
  }, function (err, string) {
    if (err) return next(err)

    req.text = string
    next()
  })
})
```

or in a Koa generator:

```js
app.use(function* (next) {
  var string = yield getRawBody(this.req, {
    length: this.length,
    limit: '1mb',
    encoding: this.charset
  })
})
```

### getRawBody(stream, [options], [callback])

Returns a thunk for yielding with generators.

Options:

- `length` - The expected length of the stream. If the contents of the stream do not add up to this length, a `400` error code is returned.
- `limit` - The byte limit of the body. If the body ends up being larger than this limit, a `413` error code is returned.
- `encoding` - The requested encoding. By default, a `Buffer` instance will be returned. Most likely, you want `utf8`. You can use any type of encoding supported by [iconv-lite](https://www.npmjs.org/package/iconv-lite#readme).

You can also pass a string in place of options to just specify the encoding.

`callback(err, res)`:

- `err` - the following attributes will be defined if applicable:
  - `limit` - the limit in bytes
  - `length` and `expected` - the expected length of the stream
  - `received` - the received bytes
  - `status` and `statusCode` - the corresponding status code for the error
  - `type` - either `entity.too.large`, `request.size.invalid`, or `stream.encoding.set`
- `res` - the result, either as a `String` if an encoding was set or a `Buffer` otherwise.

If an error occurs, the stream will be paused, and you are responsible for correctly disposing of the stream.
For HTTP requests, no handling is required if you send a response.
For streams that use file descriptors, you should `stream.destroy()` or `stream.close()` to prevent leaks.

## License

The MIT License (MIT)

Copyright (c) 2013 Jonathan Ong me@jongleberry.com

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

raw-body-1.2.0/index.js

var bytes = require('bytes')
var iconv = require('iconv-lite')

module.exports = function (stream, options, done) {
  if (options === true || typeof options === 'string') {
    // short cut for encoding
    options = {
      encoding: options
    }
  }

  options = options || {}

  if (typeof options === 'function') {
    done = options
    options = {}
  }

  // get encoding
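  // note: `encoding: true` (or `options === true`, normalized above) means
  // utf-8; any other truthy encoding is passed to iconv-lite as-is, and no
  // encoding at all means the raw Buffer is returned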
  var encoding = options.encoding !== true
    ? options.encoding
    : 'utf-8'

  // convert the limit to an integer
  var limit = null
  if (typeof options.limit === 'number') limit = options.limit
  if (typeof options.limit === 'string') limit = bytes(options.limit)

  // convert the expected length to an integer
  var length = null
  if (options.length != null && !isNaN(options.length)) length = parseInt(options.length, 10)

  // check the length and limit options.
  // note: we intentionally leave the stream paused,
  // so users should handle the stream themselves.
  if (limit !== null && length !== null && length > limit) {
    if (typeof stream.pause === 'function') stream.pause()

    process.nextTick(function () {
      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.length = err.expected = length
      err.limit = limit
      done(err)
    })

    return defer
  }

  // streams1: assert request encoding is buffer.
  // streams2+: assert the stream encoding is buffer.
  //   stream._decoder: streams1
  //   state.encoding: streams2
  //   state.decoder: streams2, specifically < 0.10.6
  var state = stream._readableState
  if (stream._decoder || (state && (state.encoding || state.decoder))) {
    if (typeof stream.pause === 'function') stream.pause()

    process.nextTick(function () {
      var err = makeError('stream encoding should not be set', 'stream.encoding.set')
      // developer error
      err.status = err.statusCode = 500
      done(err)
    })

    return defer
  }

  var received = 0
  var decoder = getDecoder(encoding)
  var buffer = decoder ? '' : []

  stream.on('data', onData)
  stream.once('end', onEnd)
  stream.once('error', onEnd)
  stream.once('close', cleanup)

  return defer

  // yieldable support
  function defer(fn) {
    done = fn
  }

  function onData(chunk) {
    received += chunk.length
    decoder
      ? buffer += decoder.write(chunk)
      : buffer.push(chunk)

    if (limit !== null && received > limit) {
      if (typeof stream.pause === 'function') stream.pause()

      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.received = received
      err.limit = limit
      done(err)
      cleanup()
    }
  }

  function onEnd(err) {
    if (err) {
      if (typeof stream.pause === 'function') stream.pause()

      done(err)
    } else if (length !== null && received !== length) {
      err = makeError('request size did not match content length', 'request.size.invalid')
      err.status = err.statusCode = 400
      err.received = received
      err.length = err.expected = length
      done(err)
    } else {
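      // success: flush anything the streaming decoder is still buffering
      // (decoder.end() may return the tail of a split multi-byte character)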
      done(null, decoder
        ? buffer + (decoder.end() || '')
        : Buffer.concat(buffer)
      )
    }

    cleanup()
  }

  function cleanup() {
    received = buffer = null

    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onEnd)
    stream.removeListener('close', cleanup)
  }
}

function getDecoder(encoding) {
  if (!encoding) return null

  try {
    return iconv.getCodec(encoding).decoder()
  } catch (e) {
    var err = makeError('specified encoding unsupported', 'encoding.unsupported')
    err.status = err.statusCode = 415
    err.encoding = encoding
    throw err
  }
}

// to create serializable errors you must re-set message so
// that it is enumerable and you must reconfigure the type
// property so that it is writable and enumerable
function makeError(message, type) {
  var error = new Error()
  error.message = message

  Object.defineProperty(error, 'type', {
    value: type,
    enumerable: true,
    writable: true,
    configurable: true
  })

  return error
}

raw-body-1.2.0/package.json

{
  "name": "raw-body",
  "description": "Get and validate the raw body of a readable stream.",
  "version": "1.2.0",
  "author": "Jonathan Ong (http://jongleberry.com)",
  "license": "MIT",
  "repository": "stream-utils/raw-body",
  "dependencies": {
    "bytes": "1",
    "iconv-lite": "0.4.2"
  },
  "devDependencies": {
    "istanbul": "0.2.10",
    "mocha": "~1.20.1",
    "readable-stream": "~1.0.17",
    "request": ">= 2.36.0 < 3",
    "through2": "~0.4.1"
  },
  "engines": {
    "node": ">= 0.8.0"
  },
  "scripts": {
    "test": "mocha --reporter spec --bail test/",
    "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot test/",
    "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --reporter spec test/"
  }
}

raw-body-1.2.0/test/flowing.js

var assert = require('assert')
var Readable = require('readable-stream').Readable
var getRawBody = require('../')

var defaultLimit = 1024 * 1024

describe('stream flowing', function () {
  describe('when limit is lower than length', function () {
    it('should stop the stream flow', function (done) {
      var stream = createInfiniteStream()

      getRawBody(stream, {
        limit: defaultLimit,
        length: defaultLimit * 2
      }, function (err, body) {
        assert.ok(err)
        assert.equal(err.type, 'entity.too.large')
        assert.equal(err.message, 'request entity too large')
        assert.equal(err.statusCode, 413)
        assert.equal(err.length, defaultLimit * 2)
        assert.equal(err.limit, defaultLimit)
        assert.equal(body, undefined)
        assert.ok(!stream.flowing)

        done()
      })
    })
  })

  describe('when stream has encoding set', function () {
    it('should stop the stream flow', function (done) {
      var stream = createInfiniteStream()
      stream.setEncoding('utf8')

      getRawBody(stream, {
        limit: defaultLimit
      }, function (err, body) {
        assert.ok(err)
        assert.equal(err.type, 'stream.encoding.set')
        assert.equal(err.message, 'stream encoding should not be set')
        assert.equal(err.statusCode, 500)
        assert.ok(!stream.flowing)

        done()
      })
    })
  })

  describe('when stream has limit', function () {
    it('should stop the stream flow', function (done) {
      var stream = createInfiniteStream()

      getRawBody(stream, {
        limit: defaultLimit
      }, function (err, body) {
        assert.ok(err)
        assert.equal(err.type, 'entity.too.large')
        assert.equal(err.statusCode, 413)
        assert.ok(err.received > defaultLimit)
        assert.equal(err.limit, defaultLimit)
        assert.ok(!stream.flowing)

        done()
      })
    })
  })
  describe('when stream has an error', function () {
    it('should stop the stream flow', function (done) {
      var stream = createInfiniteStream()

      getRawBody(stream, function (err, body) {
        assert.ok(err)
        assert.equal(err.message, 'BOOM')
        assert.ok(!stream.flowing)

        done()
      })

      setTimeout(function () {
        stream.emit('error', new Error('BOOM'))
      }, 500)
    })
  })
})

function createChunk() {
  var base = Math.random().toString(32)
  var KB_4 = 32 * 4
  var KB_8 = KB_4 * 2
  var KB_16 = KB_8 * 2
  var KB_64 = KB_16 * 4

  var rand = Math.random()
  if (rand < 0.25) {
    return repeat(base, KB_4)
  } else if (rand < 0.5) {
    return repeat(base, KB_8)
  } else if (rand < 0.75) {
    return repeat(base, KB_16)
  } else {
    return repeat(base, KB_64)
  }

  function repeat(str, num) {
    return new Array(num + 1).join(str)
  }
}

function createInfiniteStream() {
  var stream = new Readable()
  stream._read = function () {
    var rand = 2 + Math.floor(Math.random() * 10)
    setTimeout(function () {
      for (var i = 0; i < rand; i++) {
        stream.push(createChunk())
      }
    }, 100)
  }

  // immediately put the stream in flowing mode
  stream.resume()

  return stream
}

raw-body-1.2.0/test/index.js

var assert = require('assert')
var fs = require('fs')
var path = require('path')
var http = require('http')

var through = require('through2')
var request = require('request')
var Readable = require('readable-stream').Readable

var getRawBody = require('../')

var file = path.join(__dirname, 'index.js')
var length = fs.statSync(file).size
var string = fs.readFileSync(file, 'utf8')

describe('Raw Body', function () {
  it('should work without any options', function (done) {
    getRawBody(createStream(), function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work with `true` as an option', function (done) {
    getRawBody(createStream(), true, function (err, buf) {
      assert.ifError(err)
      assert.equal(typeof buf, 'string')
      done()
    })
  })

  it('should work as a thunk', function (done) {
    var thunk = getRawBody(createStream())
    thunk(function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work with length', function (done) {
    getRawBody(createStream(), {
      length: length
    }, function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work when length=0', function (done) {
    var t = through()
    t.end()

    getRawBody(t, {
      length: 0,
      encoding: true
    }, function (err, str) {
      assert.ifError(err)
      assert.equal(str, '')
      done()
    })
  })

  it('should work with limit', function (done) {
    getRawBody(createStream(), {
      limit: length + 1
    }, function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work with limit as a string', function (done) {
    getRawBody(createStream(), {
      limit: '1gb'
    }, function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work with limit and length', function (done) {
    getRawBody(createStream(), {
      length: length,
      limit: length + 1
    }, function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should check options for limit and length', function (done) {
    getRawBody(createStream(), {
      length: length,
      limit: length - 1
    }, function (err, buf) {
      assert.equal(err.status, 413)
      assert.equal(err.statusCode, 413)
      assert.equal(err.expected, length)
      assert.equal(err.length, length)
      assert.equal(err.limit, length - 1)
      assert.equal(err.type, 'entity.too.large')
      assert.equal(err.message, 'request entity too large')
      assert.deepEqual(err, {
        type: 'entity.too.large',
        message: 'request entity too large',
        statusCode: 413,
        status: 413,
        expected: length,
        length: length,
        limit: length - 1
      })

      done()
    })
  })

  it('should work as a thunk when length > limit', function (done) {
    var thunk = getRawBody(createStream(), {
      length: length,
      limit: length - 1
    })
    thunk(function (err, buf) {
      assert.equal(err.status, 413)
      done()
    })
  })

  it('should work with an empty stream', function (done) {
    var stream = new Readable()
    stream.push(null)

    getRawBody(stream, {
      length: 0,
      limit: 1
    }, function (err, buf) {
      assert.ifError(err)
      assert.equal(buf.length, 0)
      done()
    })

    stream.emit('end')
  })

  it('should throw on empty string and incorrect length', function (done) {
    var stream = new Readable()
    stream.push(null)

    getRawBody(stream, {
      length: 1,
      limit: 2
    }, function (err, buf) {
      assert.equal(err.status, 400)
      done()
    })

    stream.emit('end')
  })

  it('should throw if length > limit', function (done) {
    getRawBody(createStream(), {
      limit: length - 1
    }, function (err, buf) {
      assert.equal(err.status, 413)
      done()
    })
  })

  it('should throw if incorrect length supplied', function (done) {
    getRawBody(createStream(), {
      length: length - 1
    }, function (err, buf) {
      assert.equal(err.status, 400)
      done()
    })
  })

  it('should work if length is null', function (done) {
    getRawBody(createStream(), {
      length: null,
      limit: length + 1
    }, function (err, buf) {
      assert.ifError(err)
      checkBuffer(buf)
      done()
    })
  })

  it('should work with {"test":"å"}', function (done) {
    // https://github.com/visionmedia/express/issues/1816
    var stream = new Readable()
    stream.push('{"test":"å"}')
    stream.push(null)

    getRawBody(stream, {
      length: 13
    }, function (err, buf) {
      assert.ok(buf)
      assert.equal(buf.length, 13)
      done()
    })
  })

  it('should throw if stream encoding is set', function (done) {
    var stream = new Readable()
    stream.push('akl;sdjfklajsdfkljasdf')
    stream.push(null)
    stream.setEncoding('utf8')

    getRawBody(stream, function (err, buf) {
      assert.equal(err.status, 500)
      done()
    })
  })

  it('should throw when given an invalid encoding', function () {
    var err

    try {
      getRawBody(new Readable(), {
        encoding: 'akljsdflkajsdf'
      }, function () {})
    } catch (e) {
      err = e
    }

    assert.ok(err)
    assert.ok(/encoding/.test(err.message))
    assert.equal(err.status, 415)
  })

  describe('when an encoding is set', function () {
    it('should return a string', function (done) {
      getRawBody(createStream(), {
        encoding: 'utf8'
      }, function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should handle encoding true', function (done) {
      getRawBody(createStream(), {
        encoding: true
      }, function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should handle encoding as options string', function (done) {
      getRawBody(createStream(), 'utf8', function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should decode codepage string', function (done) {
      var stream = createStream(new Buffer('bf43f36d6f20657374e1733f', 'hex'))
      var string = '¿Cómo estás?'

      getRawBody(stream, 'iso-8859-1', function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should decode UTF-8 string', function (done) {
      var stream = createStream(new Buffer('c2bf43c3b36d6f20657374c3a1733f', 'hex'))
      var string = '¿Cómo estás?'

      getRawBody(stream, 'utf-8', function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should decode UTF-16LE string', function (done) {
      // UTF-16LE is different from UTF-16 due to BOM behavior
      var stream = createStream(new Buffer('bf004300f3006d006f002000650073007400e10073003f00', 'hex'))
      var string = '¿Cómo estás?'
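
      // the hex above is the string encoded as UTF-16LE: two bytes per code
      // unit, least-significant byte first, and no byte-order mark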
      getRawBody(stream, 'utf-16le', function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should correctly calculate the expected length', function (done) {
      var stream = createStream(new Buffer('{"test":"å"}'))

      getRawBody(stream, {
        encoding: 'utf8',
        length: 13
      }, done)
    })
  })

  it('should work on streams1 stream', function (done) {
    var stream = through()
    stream.pause()
    stream.write('foobar')
    stream.write('foobaz')
    stream.write('yay!!')
    stream.end()

    getRawBody(stream, {
      encoding: true,
      length: 17
    }, function (err, value) {
      assert.ifError(err)
      done()
    })

    // you have to call resume() for through streams
    stream.resume()
  })

  describe('when using with http server', function () {
    var PORT = 10000 + Math.floor(Math.random() * 20000)
    var uri = 'http://localhost:' + PORT
    var server = http.createServer()

    before(function (done) {
      server.on('request', function (req, res) {
        if (req.headers['x-req-encoding']) {
          req.setEncoding(req.headers['x-req-encoding'])
        }

        getRawBody(req, {
          length: req.headers['content-length']
        }, function (err, body) {
          if (err) {
            res.statusCode = 500
            return res.end(err.message)
          }

          res.end(body)
        })
      })

      server.listen(PORT, done)
    })

    it('should echo data', function (done) {
      var resp = createStream().pipe(request({
        uri: uri,
        method: 'POST'
      }))

      getRawBody(resp, {
        encoding: true
      }, function (err, str) {
        assert.ifError(err)
        assert.equal(str, string)
        done()
      })
    })

    it('should throw if stream encoding is set', function (done) {
      var resp = createStream().pipe(request({
        uri: uri,
        method: 'POST',
        headers: {
          'x-req-encoding': 'utf8'
        }
      }))

      getRawBody(resp, {
        encoding: true
      }, function (err, str) {
        assert.ifError(err)
        assert.equal(str, 'stream encoding should not be set')
        done()
      })
    })

    after(function (done) {
      server.close(done)
    })
  })
})

function checkBuffer(buf) {
  assert.ok(Buffer.isBuffer(buf))
  assert.equal(buf.length, length)
  assert.equal(buf.toString('utf8'), string)
}

function createStream(buf) {
  if (!buf) return fs.createReadStream(file)

  var stream = new Readable()
  stream._read = function () {
    stream.push(buf)
    stream.push(null)
  }

  return stream
}