pax_global_header00006660000000000000000000000064144674367160014534gustar00rootroot0000000000000052 comment=d4c9f72fe03e2d3baca352c0e505bbea4ce873a8 get-stream-8.0.1/000077500000000000000000000000001446743671600136125ustar00rootroot00000000000000get-stream-8.0.1/.editorconfig000066400000000000000000000002571446743671600162730ustar00rootroot00000000000000root = true [*] indent_style = tab end_of_line = lf charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true [*.yml] indent_style = space indent_size = 2 get-stream-8.0.1/.gitattributes000066400000000000000000000000231446743671600165000ustar00rootroot00000000000000* text=auto eol=lf get-stream-8.0.1/.github/000077500000000000000000000000001446743671600151525ustar00rootroot00000000000000get-stream-8.0.1/.github/funding.yml000066400000000000000000000001641446743671600173300ustar00rootroot00000000000000github: sindresorhus open_collective: sindresorhus tidelift: npm/get-stream custom: https://sindresorhus.com/donate get-stream-8.0.1/.github/security.md000066400000000000000000000002631446743671600173440ustar00rootroot00000000000000# Security Policy To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. get-stream-8.0.1/.github/workflows/000077500000000000000000000000001446743671600172075ustar00rootroot00000000000000get-stream-8.0.1/.github/workflows/main.yml000066400000000000000000000006641446743671600206640ustar00rootroot00000000000000name: CI on: - push - pull_request jobs: test: name: Node.js ${{ matrix.node-version }} runs-on: ubuntu-latest strategy: fail-fast: false matrix: node-version: - 20 - 18 - 16 steps: - uses: actions/checkout@v3 - uses: actions/setup-node@v3 with: node-version: ${{ matrix.node-version }} - run: npm install - run: npm test get-stream-8.0.1/.gitignore000066400000000000000000000000271446743671600156010ustar00rootroot00000000000000node_modules yarn.lock get-stream-8.0.1/.npmrc000066400000000000000000000000231446743671600147250ustar00rootroot00000000000000package-lock=false get-stream-8.0.1/benchmarks/000077500000000000000000000000001446743671600157275ustar00rootroot00000000000000get-stream-8.0.1/benchmarks/fixture.js000066400000000000000000000006431446743671600177560ustar00rootroot00000000000000import {writeFile, rm} from 'node:fs/promises'; // Create and delete a big fixture file export const createFixture = async () => { await writeFile(FIXTURE_FILE, '.'.repeat(FIXTURE_BYTE_SIZE)); }; export const deleteFixture = async () => { await rm(FIXTURE_FILE); }; export const FIXTURE_FILE = 'benchmark_fixture'; const FIXTURE_BYTE_SIZE = 1e8; export const FIXTURE_HUMAN_SIZE = `${FIXTURE_BYTE_SIZE / 1e6} MB`; get-stream-8.0.1/benchmarks/index.js000066400000000000000000000035171446743671600174020ustar00rootroot00000000000000import {text, buffer, arrayBuffer} from 'node:stream/consumers'; import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray} from '../source/index.js'; import {createFixture, deleteFixture, FIXTURE_HUMAN_SIZE} from './fixture.js'; import {createNodeStreamBinary, createNodeStreamText, createWebStreamBinary, createWebStreamText} from './stream.js'; import {measureTask} from './measure.js'; const runBenchmarks = async () => { await createFixture(); try { await benchmarkNodeStreams(createNodeStreamBinary, `Node.js stream (${FIXTURE_HUMAN_SIZE}, binary)`); await benchmarkNodeStreams(createNodeStreamText, `Node.js stream (${FIXTURE_HUMAN_SIZE}, text)`); await 
benchmarkStreams(createWebStreamBinary, `Web ReadableStream (${FIXTURE_HUMAN_SIZE}, binary)`); await benchmarkStreams(createWebStreamText, `Web ReadableStream (${FIXTURE_HUMAN_SIZE}, text)`); } finally { await deleteFixture(); } }; const benchmarkNodeStreams = async (createStream, header) => { await benchmarkStreams(createStream, header); await logResult('stream.toArray', createStream, stream => stream.toArray()); }; const benchmarkStreams = async (createStream, header) => { logHeader(header); await logResult('getStream', createStream, getStream); await logResult('text', createStream, text); await logResult('getStreamAsBuffer', createStream, getStreamAsBuffer); await logResult('buffer', createStream, buffer); await logResult('getStreamAsArrayBuffer', createStream, getStreamAsArrayBuffer); await logResult('arrayBuffer', createStream, arrayBuffer); await logResult('getStreamAsArray', createStream, getStreamAsArray); }; const logHeader = header => { console.log(`\n### ${header}\n`); }; const logResult = async (name, createStream, task) => { console.log(`- \`${name}()\`: ${await measureTask(createStream, task)}ms`); }; await runBenchmarks(); get-stream-8.0.1/benchmarks/measure.js000066400000000000000000000012601446743671600177250ustar00rootroot00000000000000import now from 'precise-now'; // Return how many ms running `task()` takes export const measureTask = async ({start, stop}, task) => { const taskInputs = await Promise.all(Array.from({length: MAX_LOOPS + 1}, start)); // Pre-warm await task(taskInputs[0].stream); const startTimestamp = now(); for (let index = 1; index <= MAX_LOOPS; index += 1) { // eslint-disable-next-line no-await-in-loop await task(taskInputs[index].stream); } const duration = Math.round((now() - startTimestamp) / (MAX_LOOPS * NANOSECS_TO_MILLESECS)); await Promise.all(taskInputs.map(taskInput => stop(taskInput))); return duration; }; const MAX_LOOPS = 10; const NANOSECS_TO_MILLESECS = 1e6; get-stream-8.0.1/benchmarks/stream.js000066400000000000000000000022101446743671600175530ustar00rootroot00000000000000import {open} from 'node:fs/promises'; import {createReadStream} from 'node:fs'; import {FIXTURE_FILE} from './fixture.js'; const createNodeStream = encoding => ({ start: () => ({stream: createReadStream(FIXTURE_FILE, encoding)}), stop() {}, }); export const createNodeStreamBinary = createNodeStream(undefined); export const createNodeStreamText = createNodeStream('utf8'); const createWebStream = type => ({ async start() { const fileHandle = await open(FIXTURE_FILE); const stream = fileHandle.readableWebStream({type}); return {fileHandle, stream}; }, async stop({fileHandle}) { await fileHandle.close(); }, }); export const createWebStreamBinary = createWebStream('bytes'); // `Text` is somewhat of a misnomer here: // - `fs.readableWebStream({ type: 'bytes' })` creates a `ReadableStream` with a "bytes controller" and `Uint8Array` chunks // - `fs.readableWebStream({ type: undefined })` creates a `ReadableStream` with a "default controller" and `ArrayBuffer` chunks. // Node.js currently does not allow creating a file-based `ReadableStream` with string chunks. 
export const createWebStreamText = createWebStream(undefined); get-stream-8.0.1/fixture000066400000000000000000000000101446743671600152120ustar00rootroot00000000000000unicorn get-stream-8.0.1/license000066400000000000000000000021351446743671600151600ustar00rootroot00000000000000MIT License Copyright (c) Sindre Sorhus (https://sindresorhus.com) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. get-stream-8.0.1/package.json000066400000000000000000000020221446743671600160740ustar00rootroot00000000000000{ "name": "get-stream", "version": "8.0.1", "description": "Get a stream as a string, Buffer, ArrayBuffer or array", "license": "MIT", "repository": "sindresorhus/get-stream", "funding": "https://github.com/sponsors/sindresorhus", "author": { "name": "Sindre Sorhus", "email": "sindresorhus@gmail.com", "url": "https://sindresorhus.com" }, "type": "module", "exports": { "types": "./source/index.d.ts", "default": "./source/index.js" }, "engines": { "node": ">=16" }, "scripts": { "benchmark": "node benchmarks/index.js", "test": "xo && ava && tsd --typings=source/index.d.ts --files=source/index.test-d.ts" }, "files": [ "source", "!*.test-d.ts" ], "keywords": [ "get", "stream", "promise", "concat", "string", "text", "buffer", "read", "data", "consume", "readable", "readablestream", "object", "concat" ], "devDependencies": { "@types/node": "^20.5.0", "ava": "^5.3.1", "precise-now": "^2.0.0", "stream-json": "^1.8.0", "tsd": "^0.28.1", "xo": "^0.56.0" } } get-stream-8.0.1/readme.md000066400000000000000000000226461446743671600154030ustar00rootroot00000000000000# get-stream > Get a stream as a string, Buffer, ArrayBuffer or array ## Features - Works in any JavaScript environment ([Node.js](#nodejs-streams), [browsers](#web-streams), etc.). - Supports [text streams](#getstreamstream-options), [binary streams](#getstreamasbufferstream-options) and [object streams](#getstreamasarraystream-options). - Supports [async iterables](#async-iterables). - Can set a [maximum stream size](#maxbuffer). - Returns [partially read data](#errors) when the stream errors. - [Fast](#benchmarks). ## Install ```sh npm install get-stream ``` ## Usage ### Node.js streams ```js import fs from 'node:fs'; import getStream from 'get-stream'; const stream = fs.createReadStream('unicorn.txt'); console.log(await getStream(stream)); /* ,,))))))));, __)))))))))))))), \|/ -\(((((''''((((((((. -*-==//////(('' . `)))))), /|\ ))| o ;-. '((((( ,(, ( `| / ) ;))))' ,_))^;(~ | | | ,))((((_ _____------~~~-. 
%,;(;(>';'~ o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ ; ''''```` `: `:::|\,__,%% );`'; ~ | _ ) / `:|`----' `-' ______/\/~ | / / /~;;.____/;;' / ___--,-( `;;;/ / // _;______;'------~~~~~ /;;/\ / // | | / ; \;;,\ (<_ | ; /',/-----' _> \_| ||_ //~;~~~~~~~~~ `\_| (,~~ \~\ ~~ */ ``` ### Web streams ```js import getStream from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStream(readableStream)); ``` ### Async iterables ```js import {opendir} from 'node:fs/promises'; import {getStreamAsArray} from 'get-stream'; const asyncIterable = await opendir(directory); console.log(await getStreamAsArray(asyncIterable)); ``` ## API The following methods read the stream's contents and return it as a promise. ### getStream(stream, options?) `stream`: [`stream.Readable`](https://nodejs.org/api/stream.html#class-streamreadable), [`ReadableStream`](https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream), or [`AsyncIterable`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols#the_async_iterator_and_async_iterable_protocols)\ `options`: [`Options`](#options) Get the given `stream` as a string. ### getStreamAsBuffer(stream, options?) Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer). ```js import {getStreamAsBuffer} from 'get-stream'; const stream = fs.createReadStream('unicorn.png'); console.log(await getStreamAsBuffer(stream)); ``` ### getStreamAsArrayBuffer(stream, options?) Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). ```js import {getStreamAsArrayBuffer} from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStreamAsArrayBuffer(readableStream)); ``` ### getStreamAsArray(stream, options?) Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode). ```js import {getStreamAsArray} from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStreamAsArray(readableStream)); ``` #### options Type: `object` ##### maxBuffer Type: `number`\ Default: `Infinity` Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`. Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length). ## Errors If the stream errors, the returned promise will be rejected with the `error`. Any contents already read from the stream will be set to `error.bufferedData`, which is a `string`, a `Buffer`, an `ArrayBuffer` or an array depending on the [method used](#api). ```js import getStream from 'get-stream'; try { await getStream(streamThatErrorsAtTheEnd('unicorn')); } catch (error) { console.log(error.bufferedData); //=> 'unicorn' } ``` ## Tips ### Alternatives If you do not need the [`maxBuffer`](#maxbuffer) option, [`error.bufferedData`](#errors), nor browser support, you can use the following methods instead of this package. 
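For context, here is roughly what those first two features look like in use — a minimal sketch, assuming a hypothetical `someStream` readable and an arbitrary 1 MB limit (both are illustrative, not part of this package's API beyond the documented `maxBuffer` option and `MaxBufferError` export):

```js
import getStream, {MaxBufferError} from 'get-stream';

try {
	// Rejects with `MaxBufferError` once more than `maxBuffer` characters have been read.
	console.log(await getStream(someStream, {maxBuffer: 1e6}));
} catch (error) {
	if (error instanceof MaxBufferError) {
		// The truncated contents read before the limit was hit.
		console.log(error.bufferedData);
	} else {
		throw error;
	}
}
```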
#### [`streamConsumers.text()`](https://nodejs.org/api/webstreams.html#streamconsumerstextstream) ```js import fs from 'node:fs'; import {text} from 'node:stream/consumers'; const stream = fs.createReadStream('unicorn.txt', {encoding: 'utf8'}); console.log(await text(stream)) ``` #### [`streamConsumers.buffer()`](https://nodejs.org/api/webstreams.html#streamconsumersbufferstream) ```js import {buffer} from 'node:stream/consumers'; console.log(await buffer(stream)) ``` #### [`streamConsumers.arrayBuffer()`](https://nodejs.org/api/webstreams.html#streamconsumersarraybufferstream) ```js import {arrayBuffer} from 'node:stream/consumers'; console.log(await arrayBuffer(stream)) ``` #### [`readable.toArray()`](https://nodejs.org/api/stream.html#readabletoarrayoptions) ```js console.log(await stream.toArray()) ``` #### [`Array.fromAsync()`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/fromAsync) If your [environment supports it](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/fromAsync#browser_compatibility): ```js console.log(await Array.fromAsync(stream)) ``` ### Non-UTF-8 encoding When all of the following conditions apply: - [`getStream()`](#getstreamstream-options) is used (as opposed to [`getStreamAsBuffer()`](#getstreamasbufferstream-options) or [`getStreamAsArrayBuffer()`](#getstreamasarraybufferstream-options)) - The stream is binary (not text) - The stream's encoding is not UTF-8 (for example, it is UTF-16, hexadecimal, or Base64) Then the stream must be decoded using a transform stream like [`TextDecoderStream`](https://developer.mozilla.org/en-US/docs/Web/API/TextDecoderStream) or [`b64`](https://github.com/hapijs/b64). ```js import getStream from 'get-stream'; const textDecoderStream = new TextDecoderStream('utf-16le'); const {body: readableStream} = await fetch('https://example.com'); console.log(await getStream(readableStream.pipeThrough(textDecoderStream))); ``` ### Blobs [`getStreamAsArrayBuffer()`](#getstreamasarraybufferstream-options) can be used to create [Blobs](https://developer.mozilla.org/en-US/docs/Web/API/Blob). ```js import {getStreamAsArrayBuffer} from 'get-stream'; const stream = fs.createReadStream('unicorn.txt'); console.log(new Blob([await getStreamAsArrayBuffer(stream)])); ``` ### JSON streaming [`getStreamAsArray()`](#getstreamasarraystream-options) can be combined with JSON streaming utilities to parse JSON incrementally. 
```js import fs from 'node:fs'; import {compose as composeStreams} from 'node:stream'; import {getStreamAsArray} from 'get-stream'; import streamJson from 'stream-json'; import streamJsonArray from 'stream-json/streamers/StreamArray.js'; const stream = fs.createReadStream('big-array-of-objects.json'); console.log(await getStreamAsArray( composeStreams(stream, streamJson.parser(), streamJsonArray.streamArray()), )); ``` ## Benchmarks ### Node.js stream (100 MB, binary) - `getStream()`: 142ms - `text()`: 139ms - `getStreamAsBuffer()`: 106ms - `buffer()`: 83ms - `getStreamAsArrayBuffer()`: 105ms - `arrayBuffer()`: 81ms - `getStreamAsArray()`: 24ms - `stream.toArray()`: 21ms ### Node.js stream (100 MB, text) - `getStream()`: 90ms - `text()`: 89ms - `getStreamAsBuffer()`: 127ms - `buffer()`: 192ms - `getStreamAsArrayBuffer()`: 129ms - `arrayBuffer()`: 195ms - `getStreamAsArray()`: 89ms - `stream.toArray()`: 90ms ### Web ReadableStream (100 MB, binary) - `getStream()`: 223ms - `text()`: 221ms - `getStreamAsBuffer()`: 182ms - `buffer()`: 153ms - `getStreamAsArrayBuffer()`: 171ms - `arrayBuffer()`: 155ms - `getStreamAsArray()`: 83ms ### Web ReadableStream (100 MB, text) - `getStream()`: 141ms - `text()`: 139ms - `getStreamAsBuffer()`: 91ms - `buffer()`: 80ms - `getStreamAsArrayBuffer()`: 89ms - `arrayBuffer()`: 81ms - `getStreamAsArray()`: 21ms [Benchmarks' source file](benchmarks/index.js). ## FAQ ### How is this different from [`concat-stream`](https://github.com/maxogden/concat-stream)? This module accepts a stream instead of being one and returns a promise instead of using a callback. The API is simpler and it only supports returning a string, `Buffer`, an `ArrayBuffer` or an array. It doesn't have a fragile type inference. You explicitly choose what you want. And it doesn't depend on the huge `readable-stream` package. ## Related - [get-stdin](https://github.com/sindresorhus/get-stdin) - Get stdin as a string or buffer - [into-stream](https://github.com/sindresorhus/into-stream) - The opposite of this package get-stream-8.0.1/source/000077500000000000000000000000001446743671600151125ustar00rootroot00000000000000get-stream-8.0.1/source/array-buffer.js000066400000000000000000000063751446743671600200500ustar00rootroot00000000000000import {getStreamContents} from './contents.js'; import {noop, throwObjectStream, getLengthProp} from './utils.js'; export async function getStreamAsArrayBuffer(stream, options) { return getStreamContents(stream, arrayBufferMethods, options); } const initArrayBuffer = () => ({contents: new ArrayBuffer(0)}); const useTextEncoder = chunk => textEncoder.encode(chunk); const textEncoder = new TextEncoder(); const useUint8Array = chunk => new Uint8Array(chunk); const useUint8ArrayWithOffset = chunk => new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); const truncateArrayBufferChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize); // `contents` is an increasingly growing `Uint8Array`. const addArrayBufferChunk = (convertedChunk, {contents, length: previousLength}, length) => { const newContents = hasArrayBufferResize() ? resizeArrayBuffer(contents, length) : resizeArrayBufferSlow(contents, length); new Uint8Array(newContents).set(convertedChunk, previousLength); return newContents; }; // Without `ArrayBuffer.resize()`, `contents` size is always a power of 2. // This means its last bytes are zeroes (not stream data), which need to be // trimmed at the end with `ArrayBuffer.slice()`. 
const resizeArrayBufferSlow = (contents, length) => { if (length <= contents.byteLength) { return contents; } const arrayBuffer = new ArrayBuffer(getNewContentsLength(length)); new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0); return arrayBuffer; }; // With `ArrayBuffer.resize()`, `contents` size matches exactly the size of // the stream data. It does not include extraneous zeroes to trim at the end. // The underlying `ArrayBuffer` does allocate a number of bytes that is a power // of 2, but those bytes are only visible after calling `ArrayBuffer.resize()`. const resizeArrayBuffer = (contents, length) => { if (length <= contents.maxByteLength) { contents.resize(length); return contents; } const arrayBuffer = new ArrayBuffer(length, {maxByteLength: getNewContentsLength(length)}); new Uint8Array(arrayBuffer).set(new Uint8Array(contents), 0); return arrayBuffer; }; // Retrieve the closest `length` that is both >= and a power of 2 const getNewContentsLength = length => SCALE_FACTOR ** Math.ceil(Math.log(length) / Math.log(SCALE_FACTOR)); const SCALE_FACTOR = 2; const finalizeArrayBuffer = ({contents, length}) => hasArrayBufferResize() ? contents : contents.slice(0, length); // `ArrayBuffer.slice()` is slow. When `ArrayBuffer.resize()` is available // (Node >=20.0.0, Safari >=16.4 and Chrome), we can use it instead. // eslint-disable-next-line no-warning-comments // TODO: remove after dropping support for Node 20. // eslint-disable-next-line no-warning-comments // TODO: use `ArrayBuffer.transferToFixedLength()` instead once it is available const hasArrayBufferResize = () => 'resize' in ArrayBuffer.prototype; const arrayBufferMethods = { init: initArrayBuffer, convertChunk: { string: useTextEncoder, buffer: useUint8Array, arrayBuffer: useUint8Array, dataView: useUint8ArrayWithOffset, typedArray: useUint8ArrayWithOffset, others: throwObjectStream, }, getSize: getLengthProp, truncateChunk: truncateArrayBufferChunk, addChunk: addArrayBufferChunk, getFinalChunk: noop, finalize: finalizeArrayBuffer, }; get-stream-8.0.1/source/array.js000066400000000000000000000013251446743671600165670ustar00rootroot00000000000000import {getStreamContents} from './contents.js'; import {identity, noop, getContentsProp} from './utils.js'; export async function getStreamAsArray(stream, options) { return getStreamContents(stream, arrayMethods, options); } const initArray = () => ({contents: []}); const increment = () => 1; const addArrayChunk = (convertedChunk, {contents}) => { contents.push(convertedChunk); return contents; }; const arrayMethods = { init: initArray, convertChunk: { string: identity, buffer: identity, arrayBuffer: identity, dataView: identity, typedArray: identity, others: identity, }, getSize: increment, truncateChunk: noop, addChunk: addArrayChunk, getFinalChunk: noop, finalize: getContentsProp, }; get-stream-8.0.1/source/buffer.js000066400000000000000000000011361446743671600167220ustar00rootroot00000000000000import {getStreamAsArrayBuffer} from './array-buffer.js'; export async function getStreamAsBuffer(stream, options) { if (!('Buffer' in globalThis)) { throw new Error('getStreamAsBuffer() is only supported in Node.js'); } try { return arrayBufferToNodeBuffer(await getStreamAsArrayBuffer(stream, options)); } catch (error) { if (error.bufferedData !== undefined) { error.bufferedData = arrayBufferToNodeBuffer(error.bufferedData); } throw error; } } // eslint-disable-next-line n/prefer-global/buffer const arrayBufferToNodeBuffer = arrayBuffer => globalThis.Buffer.from(arrayBuffer); 
get-stream-8.0.1/source/contents.js000066400000000000000000000053451446743671600173140ustar00rootroot00000000000000export const getStreamContents = async (stream, {init, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, finalize}, {maxBuffer = Number.POSITIVE_INFINITY} = {}) => { if (!isAsyncIterable(stream)) { throw new Error('The first argument must be a Readable, a ReadableStream, or an async iterable.'); } const state = init(); state.length = 0; try { for await (const chunk of stream) { const chunkType = getChunkType(chunk); const convertedChunk = convertChunk[chunkType](chunk, state); appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}); } appendFinalChunk({state, convertChunk, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}); return finalize(state); } catch (error) { error.bufferedData = finalize(state); throw error; } }; const appendFinalChunk = ({state, getSize, truncateChunk, addChunk, getFinalChunk, maxBuffer}) => { const convertedChunk = getFinalChunk(state); if (convertedChunk !== undefined) { appendChunk({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}); } }; const appendChunk = ({convertedChunk, state, getSize, truncateChunk, addChunk, maxBuffer}) => { const chunkSize = getSize(convertedChunk); const newLength = state.length + chunkSize; if (newLength <= maxBuffer) { addNewChunk(convertedChunk, state, addChunk, newLength); return; } const truncatedChunk = truncateChunk(convertedChunk, maxBuffer - state.length); if (truncatedChunk !== undefined) { addNewChunk(truncatedChunk, state, addChunk, maxBuffer); } throw new MaxBufferError(); }; const addNewChunk = (convertedChunk, state, addChunk, newLength) => { state.contents = addChunk(convertedChunk, state, newLength); state.length = newLength; }; const isAsyncIterable = stream => typeof stream === 'object' && stream !== null && typeof stream[Symbol.asyncIterator] === 'function'; const getChunkType = chunk => { const typeOfChunk = typeof chunk; if (typeOfChunk === 'string') { return 'string'; } if (typeOfChunk !== 'object' || chunk === null) { return 'others'; } // eslint-disable-next-line n/prefer-global/buffer if (globalThis.Buffer?.isBuffer(chunk)) { return 'buffer'; } const prototypeName = objectToString.call(chunk); if (prototypeName === '[object ArrayBuffer]') { return 'arrayBuffer'; } if (prototypeName === '[object DataView]') { return 'dataView'; } if ( Number.isInteger(chunk.byteLength) && Number.isInteger(chunk.byteOffset) && objectToString.call(chunk.buffer) === '[object ArrayBuffer]' ) { return 'typedArray'; } return 'others'; }; const {toString: objectToString} = Object.prototype; export class MaxBufferError extends Error { name = 'MaxBufferError'; constructor() { super('maxBuffer exceeded'); } } get-stream-8.0.1/source/index.d.ts000066400000000000000000000101251446743671600170120ustar00rootroot00000000000000import {type Readable} from 'node:stream'; import {type Buffer} from 'node:buffer'; export class MaxBufferError extends Error { readonly name: 'MaxBufferError'; constructor(); } type TextStreamItem = string | Buffer | ArrayBuffer | ArrayBufferView; export type AnyStream = Readable | ReadableStream | AsyncIterable; export type Options = { /** Maximum length of the stream. If exceeded, the promise will be rejected with a `MaxBufferError`. 
Depending on the [method](#api), the length is measured with [`string.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/length), [`buffer.length`](https://nodejs.org/api/buffer.html#buflength), [`arrayBuffer.byteLength`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer/byteLength) or [`array.length`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/length). @default Infinity */ readonly maxBuffer?: number; }; /** Get the given `stream` as a string. @returns The stream's contents as a promise. @example ``` import fs from 'node:fs'; import getStream from 'get-stream'; const stream = fs.createReadStream('unicorn.txt'); console.log(await getStream(stream)); // ,,))))))));, // __)))))))))))))), // \|/ -\(((((''''((((((((. // -*-==//////(('' . `)))))), // /|\ ))| o ;-. '((((( ,(, // ( `| / ) ;))))' ,_))^;(~ // | | | ,))((((_ _____------~~~-. %,;(;(>';'~ // o_); ; )))(((` ~---~ `:: \ %%~~)(v;(`('~ // ; ''''```` `: `:::|\,__,%% );`'; ~ // | _ ) / `:|`----' `-' // ______/\/~ | / / // /~;;.____/;;' / ___--,-( `;;;/ // / // _;______;'------~~~~~ /;;/\ / // // | | / ; \;;,\ // (<_ | ; /',/-----' _> // \_| ||_ //~;~~~~~~~~~ // `\_| (,~~ // \~\ // ~~ ``` @example ``` import getStream from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStream(readableStream)); ``` @example ``` import {opendir} from 'node:fs/promises'; import {getStreamAsArray} from 'get-stream'; const asyncIterable = await opendir(directory); console.log(await getStreamAsArray(asyncIterable)); ``` */ export default function getStream(stream: AnyStream, options?: Options): Promise; /** Get the given `stream` as a Node.js [`Buffer`](https://nodejs.org/api/buffer.html#class-buffer). @returns The stream's contents as a promise. @example ``` import {getStreamAsBuffer} from 'get-stream'; const stream = fs.createReadStream('unicorn.png'); console.log(await getStreamAsBuffer(stream)); ``` */ export function getStreamAsBuffer(stream: AnyStream, options?: Options): Promise; /** Get the given `stream` as an [`ArrayBuffer`](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer). @returns The stream's contents as a promise. @example ``` import {getStreamAsArrayBuffer} from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStreamAsArrayBuffer(readableStream)); ``` */ export function getStreamAsArrayBuffer(stream: AnyStream, options?: Options): Promise; /** Get the given `stream` as an array. Unlike [other methods](#api), this supports [streams of objects](https://nodejs.org/api/stream.html#object-mode). @returns The stream's contents as a promise. 
@example ``` import {getStreamAsArray} from 'get-stream'; const {body: readableStream} = await fetch('https://example.com'); console.log(await getStreamAsArray(readableStream)); ``` */ export function getStreamAsArray(stream: AnyStream, options?: Options): Promise; get-stream-8.0.1/source/index.js000066400000000000000000000003761446743671600165650ustar00rootroot00000000000000export {getStreamAsArray} from './array.js'; export {getStreamAsArrayBuffer} from './array-buffer.js'; export {getStreamAsBuffer} from './buffer.js'; export {getStreamAsString as default} from './string.js'; export {MaxBufferError} from './contents.js'; get-stream-8.0.1/source/index.test-d.ts000066400000000000000000000123611446743671600177730ustar00rootroot00000000000000import {Buffer} from 'node:buffer'; import {open} from 'node:fs/promises'; import {type Readable} from 'node:stream'; import fs from 'node:fs'; import {expectType, expectError, expectAssignable, expectNotAssignable} from 'tsd'; import getStream, {getStreamAsBuffer, getStreamAsArrayBuffer, getStreamAsArray, MaxBufferError, type Options, type AnyStream} from './index.js'; const nodeStream = fs.createReadStream('foo') as Readable; const fileHandle = await open('test'); const readableStream = fileHandle.readableWebStream(); const asyncIterable = (value: T): AsyncGenerator => (async function * () { yield value; })(); const stringAsyncIterable = asyncIterable(''); const bufferAsyncIterable = asyncIterable(Buffer.from('')); const arrayBufferAsyncIterable = asyncIterable(new ArrayBuffer(0)); const dataViewAsyncIterable = asyncIterable(new DataView(new ArrayBuffer(0))); const typedArrayAsyncIterable = asyncIterable(new Uint8Array([])); const objectItem = {test: true}; const objectAsyncIterable = asyncIterable(objectItem); expectType(await getStream(nodeStream)); expectType(await getStream(nodeStream, {maxBuffer: 10})); expectType(await getStream(readableStream)); expectType(await getStream(stringAsyncIterable)); expectType(await getStream(bufferAsyncIterable)); expectType(await getStream(arrayBufferAsyncIterable)); expectType(await getStream(dataViewAsyncIterable)); expectType(await getStream(typedArrayAsyncIterable)); expectError(await getStream(objectAsyncIterable)); expectError(await getStream({})); expectError(await getStream(nodeStream, {maxBuffer: '10'})); expectError(await getStream(nodeStream, {unknownOption: 10})); expectError(await getStream(nodeStream, {maxBuffer: 10}, {})); expectType(await getStreamAsBuffer(nodeStream)); expectType(await getStreamAsBuffer(nodeStream, {maxBuffer: 10})); expectType(await getStreamAsBuffer(readableStream)); expectType(await getStreamAsBuffer(stringAsyncIterable)); expectType(await getStreamAsBuffer(bufferAsyncIterable)); expectType(await getStreamAsBuffer(arrayBufferAsyncIterable)); expectType(await getStreamAsBuffer(dataViewAsyncIterable)); expectType(await getStreamAsBuffer(typedArrayAsyncIterable)); expectError(await getStreamAsBuffer(objectAsyncIterable)); expectError(await getStreamAsBuffer({})); expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: '10'})); expectError(await getStreamAsBuffer(nodeStream, {unknownOption: 10})); expectError(await getStreamAsBuffer(nodeStream, {maxBuffer: 10}, {})); expectType(await getStreamAsArrayBuffer(nodeStream)); expectType(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10})); expectType(await getStreamAsArrayBuffer(readableStream)); expectType(await getStreamAsArrayBuffer(stringAsyncIterable)); expectType(await getStreamAsArrayBuffer(bufferAsyncIterable)); 
expectType(await getStreamAsArrayBuffer(arrayBufferAsyncIterable)); expectType(await getStreamAsArrayBuffer(dataViewAsyncIterable)); expectType(await getStreamAsArrayBuffer(typedArrayAsyncIterable)); expectError(await getStreamAsArrayBuffer(objectAsyncIterable)); expectError(await getStreamAsArrayBuffer({})); expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: '10'})); expectError(await getStreamAsArrayBuffer(nodeStream, {unknownOption: 10})); expectError(await getStreamAsArrayBuffer(nodeStream, {maxBuffer: 10}, {})); expectType(await getStreamAsArray(nodeStream)); expectType(await getStreamAsArray(nodeStream, {maxBuffer: 10})); expectType(await getStreamAsArray(readableStream)); expectType(await getStreamAsArray(readableStream as ReadableStream)); expectType(await getStreamAsArray(stringAsyncIterable)); expectType(await getStreamAsArray(bufferAsyncIterable)); expectType(await getStreamAsArray(arrayBufferAsyncIterable)); expectType(await getStreamAsArray(dataViewAsyncIterable)); expectType(await getStreamAsArray(typedArrayAsyncIterable)); expectType>(await getStreamAsArray(objectAsyncIterable)); expectError(await getStreamAsArray({})); expectError(await getStreamAsArray(nodeStream, {maxBuffer: '10'})); expectError(await getStreamAsArray(nodeStream, {unknownOption: 10})); expectError(await getStreamAsArray(nodeStream, {maxBuffer: 10}, {})); expectAssignable(nodeStream); expectAssignable(readableStream); expectAssignable(stringAsyncIterable); expectAssignable(bufferAsyncIterable); expectAssignable(arrayBufferAsyncIterable); expectAssignable(dataViewAsyncIterable); expectAssignable(typedArrayAsyncIterable); expectAssignable>(objectAsyncIterable); expectNotAssignable(objectAsyncIterable); expectAssignable>(stringAsyncIterable); expectNotAssignable>(bufferAsyncIterable); expectNotAssignable({}); expectAssignable({maxBuffer: 10}); expectNotAssignable({maxBuffer: '10'}); expectNotAssignable({unknownOption: 10}); expectType(new MaxBufferError()); get-stream-8.0.1/source/string.js000066400000000000000000000021631446743671600167600ustar00rootroot00000000000000import {getStreamContents} from './contents.js'; import {identity, getContentsProp, throwObjectStream, getLengthProp} from './utils.js'; export async function getStreamAsString(stream, options) { return getStreamContents(stream, stringMethods, options); } const initString = () => ({contents: '', textDecoder: new TextDecoder()}); const useTextDecoder = (chunk, {textDecoder}) => textDecoder.decode(chunk, {stream: true}); const addStringChunk = (convertedChunk, {contents}) => contents + convertedChunk; const truncateStringChunk = (convertedChunk, chunkSize) => convertedChunk.slice(0, chunkSize); const getFinalStringChunk = ({textDecoder}) => { const finalChunk = textDecoder.decode(); return finalChunk === '' ? 
undefined : finalChunk; }; const stringMethods = { init: initString, convertChunk: { string: identity, buffer: useTextDecoder, arrayBuffer: useTextDecoder, dataView: useTextDecoder, typedArray: useTextDecoder, others: throwObjectStream, }, getSize: getLengthProp, truncateChunk: truncateStringChunk, addChunk: addStringChunk, getFinalChunk: getFinalStringChunk, finalize: getContentsProp, }; get-stream-8.0.1/source/utils.js000066400000000000000000000005171446743671600166130ustar00rootroot00000000000000export const identity = value => value; export const noop = () => undefined; export const getContentsProp = ({contents}) => contents; export const throwObjectStream = chunk => { throw new Error(`Streams in object mode are not supported: ${String(chunk)}`); }; export const getLengthProp = convertedChunk => convertedChunk.length; get-stream-8.0.1/test/000077500000000000000000000000001446743671600145715ustar00rootroot00000000000000get-stream-8.0.1/test/array-buffer.js000066400000000000000000000147271446743671600175270ustar00rootroot00000000000000import {Buffer, constants as BufferConstants, Blob} from 'node:buffer'; import {arrayBuffer, blob} from 'node:stream/consumers'; import test from 'ava'; import {getStreamAsArrayBuffer, MaxBufferError} from '../source/index.js'; import {createStream, BIG_TEST_DURATION} from './helpers/index.js'; import { fixtureString, fixtureLength, fixtureBuffer, fixtureTypedArray, fixtureArrayBuffer, fixtureUint16Array, fixtureDataView, fixtureMultiString, fixtureMultiBuffer, fixtureMultiTypedArray, fixtureMultiArrayBuffer, fixtureMultiUint16Array, fixtureMultiDataView, fixtureTypedArrayWithOffset, fixtureUint16ArrayWithOffset, fixtureDataViewWithOffset, longString, bigArray, } from './fixtures/index.js'; const longTypedArray = new TextEncoder().encode(longString); const longArrayBuffer = longTypedArray.buffer; const longUint16Array = new Uint16Array(longArrayBuffer); const longDataView = new DataView(longArrayBuffer); const fixtureMultibyteUint16Array = new Uint16Array([0, 0]); const longMultibyteUint16Array = new Uint16Array([0, 0, 0]); const bigArrayBuffer = new Uint8Array(bigArray).buffer; const setupArrayBuffer = (streamDef, options) => getStreamAsArrayBuffer(createStream(streamDef), options); const getStreamToArrayBuffer = async (t, fixtureValue) => { const result = await setupArrayBuffer(fixtureValue); t.true(result instanceof ArrayBuffer); t.true(Buffer.from(result).equals(fixtureBuffer)); }; test('get stream from string to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureString]); test('get stream from buffer to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureBuffer]); test('get stream from arrayBuffer to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureArrayBuffer]); test('get stream from typedArray to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureTypedArray]); test('get stream from typedArray with offset to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureTypedArrayWithOffset]); test('get stream from uint16Array to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureUint16Array]); test('get stream from uint16Array with offset to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureUint16ArrayWithOffset]); test('get stream from dataView to arrayBuffer, with a single chunk', getStreamToArrayBuffer, [fixtureDataView]); test('get stream from dataView with offset to arrayBuffer, with a single chunk', getStreamToArrayBuffer, 
[fixtureDataViewWithOffset]); test('get stream from string to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiString); test('get stream from buffer to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiBuffer); test('get stream from arrayBuffer to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiArrayBuffer); test('get stream from typedArray to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiTypedArray); test('get stream from uint16Array to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiUint16Array); test('get stream from dataView to arrayBuffer, with multiple chunks', getStreamToArrayBuffer, fixtureMultiDataView); const throwOnInvalidChunkType = async (t, fixtureValue) => { await t.throwsAsync(setupArrayBuffer([fixtureValue]), {message: /not supported/}); }; test('get stream from bigint to arrayBuffer', throwOnInvalidChunkType, 0n); test('get stream from number to arrayBuffer', throwOnInvalidChunkType, 0); test('get stream from array to arrayBuffer', throwOnInvalidChunkType, []); test('get stream from object to arrayBuffer', throwOnInvalidChunkType, {}); test('get stream from boolean to arrayBuffer', throwOnInvalidChunkType, false); test('get stream from undefined to arrayBuffer', throwOnInvalidChunkType, undefined); test('get stream from symbol to arrayBuffer', throwOnInvalidChunkType, Symbol('test')); const checkMaxBuffer = async (t, longValue, shortValue, maxBuffer) => { await t.throwsAsync(setupArrayBuffer([longValue], {maxBuffer}), {instanceOf: MaxBufferError}); await t.notThrowsAsync(setupArrayBuffer([shortValue], {maxBuffer})); }; test('maxBuffer throws when size is exceeded with an arrayBuffer', checkMaxBuffer, longArrayBuffer, fixtureArrayBuffer, fixtureLength); test('maxBuffer throws when size is exceeded with a typedArray', checkMaxBuffer, longTypedArray, fixtureTypedArray, fixtureLength); test('maxBuffer throws when size is exceeded with an uint16Array', checkMaxBuffer, longUint16Array, fixtureUint16Array, fixtureLength); test('maxBuffer throws when size is exceeded with a dataView', checkMaxBuffer, longDataView, fixtureDataView, fixtureLength); test('maxBuffer unit is bytes with getStreamAsArrayBuffer()', checkMaxBuffer, longMultibyteUint16Array, fixtureMultibyteUint16Array, fixtureMultibyteUint16Array.byteLength); const checkBufferedData = async (t, fixtureValue, expectedResult) => { const maxBuffer = expectedResult.byteLength; const {bufferedData} = await t.throwsAsync(setupArrayBuffer(fixtureValue, {maxBuffer}), {instanceOf: MaxBufferError}); t.is(bufferedData.byteLength, maxBuffer); t.deepEqual(expectedResult, bufferedData); }; test( 'set error.bufferedData when `maxBuffer` is hit, with a single chunk', checkBufferedData, [fixtureArrayBuffer], new Uint8Array(Buffer.from(fixtureString[0])).buffer, ); test( 'set error.bufferedData when `maxBuffer` is hit, with multiple chunks', checkBufferedData, [fixtureArrayBuffer, fixtureArrayBuffer], new Uint8Array(Buffer.from(`${fixtureString}${fixtureString[0]}`)).buffer, ); test('handles streams larger than arrayBuffer max length', async t => { t.timeout(BIG_TEST_DURATION); const chunkCount = Math.floor(BufferConstants.MAX_LENGTH / CHUNK_SIZE * 2); const chunk = Buffer.alloc(CHUNK_SIZE); const maxBufferChunks = Array.from({length: chunkCount}, () => chunk); const {bufferedData} = await t.throwsAsync(setupArrayBuffer(maxBufferChunks)); t.is(new Uint8Array(bufferedData)[0], 0); }); const CHUNK_SIZE = 2 ** 16; 
test('getStreamAsArrayBuffer() behaves like arrayBuffer()', async t => { const [nativeResult, customResult] = await Promise.all([ arrayBuffer(createStream([bigArrayBuffer])), setupArrayBuffer([bigArrayBuffer]), ]); t.deepEqual(nativeResult, customResult); }); test('getStreamAsArrayBuffer() can behave like blob()', async t => { const [nativeResult, customResult] = await Promise.all([ blob(createStream([bigArrayBuffer])), setupArrayBuffer([bigArrayBuffer]), ]); t.deepEqual(nativeResult, new Blob([customResult])); }); get-stream-8.0.1/test/array.js000066400000000000000000000113111446743671600162420ustar00rootroot00000000000000import {compose} from 'node:stream'; import test from 'ava'; import streamJson from 'stream-json'; import streamJsonArray from 'stream-json/streamers/StreamArray.js'; import {getStreamAsArray, MaxBufferError} from '../source/index.js'; import {createStream, BIG_TEST_DURATION} from './helpers/index.js'; import { fixtureString, fixtureBuffer, fixtureTypedArray, fixtureArrayBuffer, fixtureUint16Array, fixtureDataView, fixtureMultiString, fixtureMultiBuffer, fixtureMultiTypedArray, fixtureMultiArrayBuffer, fixtureMultiUint16Array, fixtureMultiDataView, fixtureTypedArrayWithOffset, fixtureUint16ArrayWithOffset, fixtureDataViewWithOffset, bigArray, } from './fixtures/index.js'; const fixtureArray = [{}, {}]; const setupArray = (streamDef, options) => getStreamAsArray(createStream(streamDef), options); const getStreamToArray = async (t, fixtureValue) => { const result = await setupArray(fixtureValue); t.deepEqual(result, fixtureValue); }; test('get stream from string to array, with a single chunk', getStreamToArray, [fixtureString]); test('get stream from buffer to array, with a single chunk', getStreamToArray, [fixtureBuffer]); test('get stream from arrayBuffer to array, with a single chunk', getStreamToArray, [fixtureArrayBuffer]); test('get stream from typedArray to array, with a single chunk', getStreamToArray, [fixtureTypedArray]); test('get stream from typedArray with offset to array, with a single chunk', getStreamToArray, [fixtureTypedArrayWithOffset]); test('get stream from uint16Array to array, with a single chunk', getStreamToArray, [fixtureUint16Array]); test('get stream from uint16Array with offset to array, with a single chunk', getStreamToArray, [fixtureUint16ArrayWithOffset]); test('get stream from dataView to array, with a single chunk', getStreamToArray, [fixtureDataView]); test('get stream from dataView with offset to array, with a single chunk', getStreamToArray, [fixtureDataViewWithOffset]); test('get stream from string to array, with multiple chunks', getStreamToArray, fixtureMultiString); test('get stream from buffer to array, with multiple chunks', getStreamToArray, fixtureMultiBuffer); test('get stream from arrayBuffer to array, with multiple chunks', getStreamToArray, fixtureMultiArrayBuffer); test('get stream from typedArray to array, with multiple chunks', getStreamToArray, fixtureMultiTypedArray); test('get stream from uint16Array to array, with multiple chunks', getStreamToArray, fixtureMultiUint16Array); test('get stream from dataView to array, with multiple chunks', getStreamToArray, fixtureMultiDataView); const allowsAnyChunkType = async (t, fixtureValue) => { await t.notThrowsAsync(setupArray([fixtureValue])); }; test('get stream from object to array', allowsAnyChunkType, {}); test('get stream from array to array', allowsAnyChunkType, []); test('get stream from boolean to array', allowsAnyChunkType, false); test('get stream from number to 
array', allowsAnyChunkType, 0); test('get stream from bigint to array', allowsAnyChunkType, 0n); test('get stream from undefined to array', allowsAnyChunkType, undefined); test('get stream from symbol to array', allowsAnyChunkType, Symbol('test')); test('maxBuffer unit is each array element with getStreamAsArray()', async t => { const maxBuffer = fixtureArray.length; await t.throwsAsync(setupArray([...fixtureArray, ...fixtureArray], {maxBuffer}), {instanceOf: MaxBufferError}); await t.notThrowsAsync(setupArray(fixtureArray, {maxBuffer})); }); const checkBufferedData = async (t, fixtureValue, expectedResult) => { const maxBuffer = expectedResult.length; const {bufferedData} = await t.throwsAsync(setupArray(fixtureValue, {maxBuffer}), {instanceOf: MaxBufferError}); t.is(bufferedData.length, maxBuffer); t.deepEqual(expectedResult, bufferedData); }; test( 'set error.bufferedData when `maxBuffer` is hit, with a single chunk', checkBufferedData, fixtureArray, fixtureArray.slice(0, 1), ); test( 'set error.bufferedData when `maxBuffer` is hit, with multiple chunks', checkBufferedData, [...fixtureArray, ...fixtureArray], [...fixtureArray, ...fixtureArray.slice(0, 1)], ); test('getStreamAsArray() behaves like readable.toArray()', async t => { const [nativeResult, customResult] = await Promise.all([ createStream([bigArray]).toArray(), setupArray([bigArray]), ]); t.deepEqual(nativeResult, customResult); }); test('getStreamAsArray() can stream JSON', async t => { t.timeout(BIG_TEST_DURATION); const bigJson = bigArray.map(byte => ({byte})); const bigJsonString = JSON.stringify(bigJson); const result = await getStreamAsArray(compose( createStream([bigJsonString]), streamJson.parser(), streamJsonArray.streamArray(), )); t.is(result.length, bigJson.length); t.deepEqual(result.at(-1).value, bigJson.at(-1)); }); get-stream-8.0.1/test/buffer.js000066400000000000000000000122351446743671600164030ustar00rootroot00000000000000import {Buffer} from 'node:buffer'; import {buffer} from 'node:stream/consumers'; import test from 'ava'; import {getStreamAsBuffer, MaxBufferError} from '../source/index.js'; import {createStream} from './helpers/index.js'; import { fixtureString, fixtureLength, fixtureBuffer, fixtureTypedArray, fixtureArrayBuffer, fixtureUint16Array, fixtureDataView, fixtureMultiString, fixtureMultiBuffer, fixtureMultiTypedArray, fixtureMultiArrayBuffer, fixtureMultiUint16Array, fixtureMultiDataView, fixtureTypedArrayWithOffset, fixtureUint16ArrayWithOffset, fixtureDataViewWithOffset, longString, fixtureMultibyteString, longMultibyteString, bigArray, } from './fixtures/index.js'; const longBuffer = Buffer.from(longString); const fixtureMultibyteBuffer = Buffer.from(fixtureMultibyteString); const longMultibyteBuffer = Buffer.from(longMultibyteString); const bigBuffer = Buffer.from(bigArray); const setupBuffer = (streamDef, options) => getStreamAsBuffer(createStream(streamDef), options); const getStreamToBuffer = async (t, fixtureValue) => { const result = await setupBuffer(fixtureValue); t.true(Buffer.isBuffer(result)); t.true(result.equals(fixtureBuffer)); }; test('get stream from string to buffer, with a single chunk', getStreamToBuffer, [fixtureString]); test('get stream from buffer to buffer, with a single chunk', getStreamToBuffer, [fixtureBuffer]); test('get stream from arrayBuffer to buffer, with a single chunk', getStreamToBuffer, [fixtureArrayBuffer]); test('get stream from typedArray to buffer, with a single chunk', getStreamToBuffer, [fixtureTypedArray]); test('get stream from typedArray with 
offset to buffer, with a single chunk', getStreamToBuffer, [fixtureTypedArrayWithOffset]); test('get stream from uint16Array to buffer, with a single chunk', getStreamToBuffer, [fixtureUint16Array]); test('get stream from uint16Array with offset to buffer, with a single chunk', getStreamToBuffer, [fixtureUint16ArrayWithOffset]); test('get stream from dataView to buffer, with a single chunk', getStreamToBuffer, [fixtureDataView]); test('get stream from dataView with offset to buffer, with a single chunk', getStreamToBuffer, [fixtureDataViewWithOffset]); test('get stream from string to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiString); test('get stream from buffer to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiBuffer); test('get stream from arrayBuffer to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiArrayBuffer); test('get stream from typedArray to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiTypedArray); test('get stream from uint16Array to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiUint16Array); test('get stream from dataView to buffer, with multiple chunks', getStreamToBuffer, fixtureMultiDataView); const throwOnInvalidChunkType = async (t, fixtureValue) => { await t.throwsAsync(setupBuffer([fixtureValue]), {message: /not supported/}); }; test('get stream from object to buffer', throwOnInvalidChunkType, {}); test('get stream from array to buffer', throwOnInvalidChunkType, []); test('get stream from boolean to buffer', throwOnInvalidChunkType, false); test('get stream from number to buffer', throwOnInvalidChunkType, 0); test('get stream from bigint to buffer', throwOnInvalidChunkType, 0n); test('get stream from undefined to buffer', throwOnInvalidChunkType, undefined); test('get stream from symbol to buffer', throwOnInvalidChunkType, Symbol('test')); const checkMaxBuffer = async (t, longValue, shortValue, maxBuffer) => { await t.throwsAsync(setupBuffer([longValue], {maxBuffer}), {instanceOf: MaxBufferError}); await t.notThrowsAsync(setupBuffer([shortValue], {maxBuffer})); }; test('maxBuffer throws when size is exceeded with a buffer', checkMaxBuffer, longBuffer, fixtureBuffer, fixtureLength); test('maxBuffer unit is bytes with getStreamAsBuffer()', checkMaxBuffer, longMultibyteBuffer, fixtureMultibyteBuffer, fixtureMultibyteBuffer.byteLength); const checkBufferedData = async (t, fixtureValue, expectedResult) => { const maxBuffer = expectedResult.length; const {bufferedData} = await t.throwsAsync(setupBuffer(fixtureValue, {maxBuffer}), {instanceOf: MaxBufferError}); t.is(bufferedData.length, maxBuffer); t.deepEqual(expectedResult, bufferedData); }; test( 'set error.bufferedData when `maxBuffer` is hit, with a single chunk', checkBufferedData, [fixtureBuffer], fixtureBuffer.slice(0, 1), ); test( 'set error.bufferedData when `maxBuffer` is hit, with multiple chunks', checkBufferedData, [fixtureBuffer, fixtureBuffer], Buffer.from([...fixtureBuffer, ...fixtureBuffer.slice(0, 1)]), ); test('getStreamAsBuffer() behaves like buffer()', async t => { const [nativeResult, customResult] = await Promise.all([ buffer(createStream([bigBuffer])), setupBuffer([bigBuffer]), ]); t.deepEqual(nativeResult, customResult); }); /* eslint-disable n/prefer-global/buffer */ test('getStreamAsBuffer() only works in Node', async t => { const {Buffer} = globalThis; delete globalThis.Buffer; try { await t.throwsAsync(setupBuffer([fixtureString]), {message: /only supported in Node/}); } finally { globalThis.Buffer = Buffer; } }); /* 
eslint-enable n/prefer-global/buffer */ get-stream-8.0.1/test/contents.js000066400000000000000000000042541446743671600167710ustar00rootroot00000000000000import {setTimeout} from 'node:timers/promises'; import test from 'ava'; import getStream, {MaxBufferError} from '../source/index.js'; import {createStream} from './helpers/index.js'; import { fixtureString, fixtureBuffer, fixtureTypedArray, fixtureArrayBuffer, fixtureUint16Array, fixtureDataView, } from './fixtures/index.js'; const setupString = (streamDef, options) => getStream(createStream(streamDef), options); const generator = async function * () { yield 'a'; await setTimeout(0); yield 'b'; }; test('works with async iterable', async t => { const result = await getStream(generator()); t.is(result, 'ab'); }); test('get stream with mixed chunk types', async t => { const fixtures = [fixtureString, fixtureBuffer, fixtureArrayBuffer, fixtureTypedArray, fixtureUint16Array, fixtureDataView]; const result = await setupString(fixtures); t.is(result, fixtureString.repeat(fixtures.length)); }); test('getStream should not affect additional listeners attached to the stream', async t => { t.plan(3); const fixture = createStream(['foo', 'bar']); fixture.on('data', chunk => t.true(typeof chunk === 'string')); t.is(await getStream(fixture), 'foobar'); }); const errorStream = async function * () { yield fixtureString; await setTimeout(0); throw new Error('test'); }; test('set error.bufferedData when stream errors', async t => { const {bufferedData} = await t.throwsAsync(setupString(errorStream)); t.is(bufferedData, fixtureString); }); const infiniteIteration = async function * () { while (true) { // eslint-disable-next-line no-await-in-loop await setTimeout(0); yield '.'; } }; test('handles infinite stream', async t => { await t.throwsAsync(setupString(infiniteIteration, {maxBuffer: 1}), {instanceOf: MaxBufferError}); }); const firstArgumentCheck = async (t, firstArgument) => { await t.throwsAsync(getStream(firstArgument), {message: /first argument/}); }; test('Throws if the first argument is undefined', firstArgumentCheck, undefined); test('Throws if the first argument is null', firstArgumentCheck, null); test('Throws if the first argument is a string', firstArgumentCheck, ''); test('Throws if the first argument is an array', firstArgumentCheck, []); get-stream-8.0.1/test/fixtures/000077500000000000000000000000001446743671600164425ustar00rootroot00000000000000get-stream-8.0.1/test/fixtures/index.js000066400000000000000000000037001446743671600201070ustar00rootroot00000000000000import {Buffer} from 'node:buffer'; export const fixtureString = 'unicorn\n'; export const fixtureLength = fixtureString.length; export const fixtureBuffer = Buffer.from(fixtureString); export const fixtureTypedArray = new TextEncoder().encode(fixtureString); export const fixtureArrayBuffer = fixtureTypedArray.buffer; export const fixtureUint16Array = new Uint16Array(fixtureArrayBuffer); export const fixtureDataView = new DataView(fixtureArrayBuffer); export const fixtureUtf16 = Buffer.from(fixtureString, 'utf-16le'); export const fixtureMultiString = [...fixtureString]; const fixtureMultiBytes = [...fixtureBuffer]; export const fixtureMultiBuffer = fixtureMultiBytes.map(byte => Buffer.from([byte])); export const fixtureMultiTypedArray = fixtureMultiBytes.map(byte => new Uint8Array([byte])); export const fixtureMultiArrayBuffer = fixtureMultiTypedArray.map(({buffer}) => buffer); export const fixtureMultiUint16Array = Array.from({length: fixtureMultiBytes.length / 2}, (_, index) => 
get-stream-8.0.1/test/fixtures/index.js

import {Buffer} from 'node:buffer';

export const fixtureString = 'unicorn\n';
export const fixtureLength = fixtureString.length;
export const fixtureBuffer = Buffer.from(fixtureString);
export const fixtureTypedArray = new TextEncoder().encode(fixtureString);
export const fixtureArrayBuffer = fixtureTypedArray.buffer;
export const fixtureUint16Array = new Uint16Array(fixtureArrayBuffer);
export const fixtureDataView = new DataView(fixtureArrayBuffer);
export const fixtureUtf16 = Buffer.from(fixtureString, 'utf-16le');

export const fixtureMultiString = [...fixtureString];
const fixtureMultiBytes = [...fixtureBuffer];
export const fixtureMultiBuffer = fixtureMultiBytes.map(byte => Buffer.from([byte]));
export const fixtureMultiTypedArray = fixtureMultiBytes.map(byte => new Uint8Array([byte]));
export const fixtureMultiArrayBuffer = fixtureMultiTypedArray.map(({buffer}) => buffer);
export const fixtureMultiUint16Array = Array.from({length: fixtureMultiBytes.length / 2}, (_, index) =>
	new Uint16Array([((2 ** 8) * fixtureMultiBytes[(index * 2) + 1]) + fixtureMultiBytes[index * 2]]),
);
export const fixtureMultiDataView = fixtureMultiArrayBuffer.map(arrayBuffer => new DataView(arrayBuffer));

const fixtureStringWide = `  ${fixtureString}  `;
const fixtureTypedArrayWide = new TextEncoder().encode(fixtureStringWide);
const fixtureArrayBufferWide = fixtureTypedArrayWide.buffer;
export const fixtureTypedArrayWithOffset = new Uint8Array(fixtureArrayBufferWide, 2, fixtureString.length);
export const fixtureUint16ArrayWithOffset = new Uint16Array(fixtureArrayBufferWide, 2, fixtureString.length / 2);
export const fixtureDataViewWithOffset = new DataView(fixtureArrayBufferWide, 2, fixtureString.length);

export const longString = `${fixtureString}..`;
export const fixtureMultibyteString = '\u1000';
export const longMultibyteString = `${fixtureMultibyteString}\u1000`;

export const bigArray = Array.from({length: 1e6}, () => Math.floor(Math.random() * (2 ** 8)));

get-stream-8.0.1/test/helpers/index.js

import {Duplex} from 'node:stream';

export const createStream = streamDef => {
	const generator = typeof streamDef === 'function' ? streamDef : function * () {
		yield * streamDef;
	};
	return Duplex.from(generator);
};

// Tests related to big buffers/strings can be slow. We run them serially and
// with a higher timeout to ensure they do not randomly fail.
export const BIG_TEST_DURATION = '2m';
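
// A minimal sketch of the pattern `createStream()` implements above, assuming the
// published `get-stream` package: `Duplex.from()` accepts either a generator
// function or an iterable, so a plain chunk array and an async generator can both
// be turned into a stream for `getStream()`. The chunk values are hypothetical.
import {Duplex} from 'node:stream';
import getStream from 'get-stream';

// From a plain array of chunks, wrapped in a generator like the helper does
const fromArray = Duplex.from(function * () {
	yield * ['uni', 'corn'];
});

// From an async generator function, passed through as-is
const fromGenerator = Duplex.from(async function * () {
	yield 'uni';
	yield 'corn';
});

console.log(await getStream(fromArray)); // 'unicorn'
console.log(await getStream(fromGenerator)); // 'unicorn'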
get-stream-8.0.1/test/integration.js

import {spawn} from 'node:child_process';
import {createReadStream} from 'node:fs';
import {open, opendir} from 'node:fs/promises';
import {version as nodeVersion} from 'node:process';
import {Duplex} from 'node:stream';
import test from 'ava';
import getStream, {getStreamAsBuffer, getStreamAsArray} from '../source/index.js';
import {fixtureString, fixtureBuffer, fixtureUtf16} from './fixtures/index.js';

const TEST_URL = 'https://nodejs.org/dist/index.json';

const createReadableStream = streamDef => Duplex.toWeb(Duplex.from(streamDef)).readable;

test('works with opendir()', async t => {
	const directoryFiles = await opendir('.');
	const entries = await getStreamAsArray(directoryFiles);
	t.true(entries.some(({name}) => name === 'package.json'));
});

test('works with createReadStream() and buffers', async t => {
	const result = await getStreamAsBuffer(createReadStream('fixture'));
	t.true(result.equals(fixtureBuffer));
});

test('works with createReadStream() and utf8', async t => {
	const result = await getStream(createReadStream('fixture', 'utf8'));
	t.is(result, fixtureString);
});

test('works with child_process.spawn()', async t => {
	const {stdout} = spawn('node', ['--version'], {stdio: ['ignore', 'pipe', 'ignore']});
	const result = await getStream(stdout);
	t.is(result.trim(), nodeVersion);
});

// @todo: remove this condition after dropping support for Node 16.
// `ReadableStream` was added in Node 16.5.0.
// `Duplex.toWeb()` and `fileHandle.readableWebStream()` were added in Node 17.0.0.
// `fetch()` without an experimental flag was added in Node 18.0.0.
// However, `get-stream`'s implementation does not refer to any of those
// variables and functions. Instead, it only supports specific chunk types
// (`TypedArray`, `DataView`, `ArrayBuffer`) for any async iterable.
// Doing so automatically works with `ReadableStream`s, regardless of whether
// the environment supports them.
if (!nodeVersion.startsWith('v16.')) {
	test('works with ReadableStream', async t => {
		const result = await getStream(createReadableStream(fixtureString));
		t.is(result, fixtureString);
	});

	const readableWebStream = async (t, type) => {
		const fileHandle = await open('fixture');

		try {
			const result = await getStream(fileHandle.readableWebStream({type}));
			t.is(result, fixtureString);
		} finally {
			await fileHandle.close();
		}
	};

	test('works with readableWebStream({ type: undefined })', readableWebStream, undefined);
	test('works with readableWebStream({ type: "bytes" })', readableWebStream, 'bytes');

	test('works with fetch()', async t => {
		const {body} = await fetch(TEST_URL);
		const result = await getStream(body);
		const parsedResult = JSON.parse(result);
		t.true(Array.isArray(parsedResult));
	});

	test('can use TextDecoderStream', async t => {
		const textDecoderStream = new TextDecoderStream('utf-16le');
		const result = await getStream(
			createReadableStream(fixtureUtf16).pipeThrough(textDecoderStream),
		);
		t.is(result, fixtureString);
	});
}
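
// A minimal sketch of the point made in the comment above, assuming the published
// `get-stream` package: the library keys off chunk types (strings, `TypedArray`s,
// `DataView`s, `ArrayBuffer`s) rather than a specific stream class, so any async
// iterable of such chunks can be consumed, web `ReadableStream`s included. The
// chunk values are hypothetical.
import getStream from 'get-stream';

const chunks = async function * () {
	yield new TextEncoder().encode('uni');
	yield new TextEncoder().encode('corn');
};

console.log(await getStream(chunks())); // 'unicorn'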
get-stream-8.0.1/test/string.js

import {Buffer, constants as BufferConstants} from 'node:buffer';
import {text} from 'node:stream/consumers';
import test from 'ava';
import getStream, {MaxBufferError} from '../source/index.js';
import {createStream, BIG_TEST_DURATION} from './helpers/index.js';
import {
	fixtureString,
	fixtureLength,
	fixtureBuffer,
	fixtureTypedArray,
	fixtureArrayBuffer,
	fixtureUint16Array,
	fixtureDataView,
	fixtureMultiString,
	fixtureMultiBuffer,
	fixtureMultiTypedArray,
	fixtureMultiArrayBuffer,
	fixtureMultiUint16Array,
	fixtureMultiDataView,
	fixtureTypedArrayWithOffset,
	fixtureUint16ArrayWithOffset,
	fixtureDataViewWithOffset,
	longString,
	fixtureMultibyteString,
	longMultibyteString,
	bigArray,
} from './fixtures/index.js';

const bigString = Buffer.from(bigArray).toString();
const multiByteString = 'a\u1000';
const multiByteUint8Array = new TextEncoder().encode(multiByteString);
const multiByteBuffer = [...multiByteUint8Array].map(byte => Buffer.from([byte]));
const INVALID_UTF8_MARKER = '\uFFFD';

const setupString = (streamDef, options) => getStream(createStream(streamDef), options);

const getStreamToString = async (t, fixtureValue) => {
	const result = await setupString(fixtureValue);
	t.is(typeof result, 'string');
	t.is(result, fixtureString);
};

test('get stream from string to string, with a single chunk', getStreamToString, [fixtureString]);
test('get stream from buffer to string, with a single chunk', getStreamToString, [fixtureBuffer]);
test('get stream from arrayBuffer to string, with a single chunk', getStreamToString, [fixtureArrayBuffer]);
test('get stream from typedArray to string, with a single chunk', getStreamToString, [fixtureTypedArray]);
test('get stream from typedArray with offset to string, with a single chunk', getStreamToString, [fixtureTypedArrayWithOffset]);
test('get stream from uint16Array to string, with a single chunk', getStreamToString, [fixtureUint16Array]);
test('get stream from uint16Array with offset to string, with a single chunk', getStreamToString, [fixtureUint16ArrayWithOffset]);
test('get stream from dataView to string, with a single chunk', getStreamToString, [fixtureDataView]);
test('get stream from dataView with offset to string, with a single chunk', getStreamToString, [fixtureDataViewWithOffset]);

test('get stream from string to string, with multiple chunks', getStreamToString, fixtureMultiString);
test('get stream from buffer to string, with multiple chunks', getStreamToString, fixtureMultiBuffer);
test('get stream from arrayBuffer to string, with multiple chunks', getStreamToString, fixtureMultiArrayBuffer);
test('get stream from typedArray to string, with multiple chunks', getStreamToString, fixtureMultiTypedArray);
test('get stream from uint16Array to string, with multiple chunks', getStreamToString, fixtureMultiUint16Array);
test('get stream from dataView to string, with multiple chunks', getStreamToString, fixtureMultiDataView);

const throwOnInvalidChunkType = async (t, setupFunction, fixtureValue) => {
	await t.throwsAsync(setupFunction([fixtureValue]), {message: /not supported/});
};

test('get stream from object to string', throwOnInvalidChunkType, setupString, {});
test('get stream from array to string', throwOnInvalidChunkType, setupString, []);
test('get stream from boolean to string', throwOnInvalidChunkType, setupString, false);
test('get stream from number to string', throwOnInvalidChunkType, setupString, 0);
test('get stream from bigint to string', throwOnInvalidChunkType, setupString, 0n);
test('get stream from undefined to string', throwOnInvalidChunkType, setupString, undefined);
test('get stream from symbol to string', throwOnInvalidChunkType, setupString, Symbol('test'));

const checkMaxBuffer = async (t, longValue, shortValue, maxBuffer) => {
	await t.throwsAsync(setupString([longValue], {maxBuffer}), {instanceOf: MaxBufferError});
	await t.notThrowsAsync(setupString([shortValue], {maxBuffer}));
};

test('maxBuffer throws when size is exceeded with a string', checkMaxBuffer, longString, fixtureString, fixtureLength);
test('maxBuffer unit is characters with getStream()', checkMaxBuffer, longMultibyteString, fixtureMultibyteString, fixtureMultibyteString.length);

const checkBufferedData = async (t, fixtureValue, expectedResult) => {
	const maxBuffer = expectedResult.length;
	const {bufferedData} = await t.throwsAsync(setupString(fixtureValue, {maxBuffer}), {instanceOf: MaxBufferError});
	t.is(bufferedData.length, maxBuffer);
	t.is(expectedResult, bufferedData);
};

test(
	'set error.bufferedData when `maxBuffer` is hit, with a single chunk',
	checkBufferedData,
	[fixtureString],
	fixtureString[0],
);

test(
	'set error.bufferedData when `maxBuffer` is hit, with multiple chunks',
	checkBufferedData,
	[fixtureString, fixtureString],
	`${fixtureString}${fixtureString[0]}`,
);

test('handles streams larger than string max length', async t => {
	t.timeout(BIG_TEST_DURATION);

	const chunkCount = Math.floor(BufferConstants.MAX_STRING_LENGTH / CHUNK_SIZE * 2);
	const chunk = '.'.repeat(CHUNK_SIZE);
	const maxStringChunks = Array.from({length: chunkCount}, () => chunk);
	const {bufferedData} = await t.throwsAsync(setupString(maxStringChunks));
	t.is(bufferedData[0], '.');
});

const CHUNK_SIZE = 2 ** 16;

test('handles streams with a single chunk larger than string max length', async t => {
	const chunks = [Buffer.alloc(BufferConstants.MAX_STRING_LENGTH + 1)];
	const {bufferedData} = await t.throwsAsync(setupString(chunks));
	t.is(bufferedData, '');
});

test('getStream() behaves like text()', async t => {
	const [nativeResult, customResult] = await Promise.all([
		text(createStream([bigString])),
		setupString([bigString]),
	]);
	t.is(nativeResult, customResult);
});

test('get stream with partial UTF-8 sequences', async t => {
	const result = await setupString(multiByteBuffer);
	t.is(result, multiByteString);
});

test('get stream with truncated UTF-8 sequences', async t => {
	const result = await setupString(multiByteBuffer.slice(0, -1));
	t.is(result, `${multiByteString.slice(0, -1)}${INVALID_UTF8_MARKER}`);
});

test('handles truncated UTF-8 sequences over maxBuffer', async t => {
	const maxBuffer = multiByteString.length - 1;
	await t.throwsAsync(setupString(multiByteBuffer.slice(0, -1), {maxBuffer}), {instanceOf: MaxBufferError});
});

test('get stream with invalid UTF-8 sequences', async t => {
	const result = await setupString(multiByteBuffer.slice(1, 2));
	t.is(result, INVALID_UTF8_MARKER);
});
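
// A minimal sketch of the `maxBuffer` unit tested above, assuming the published
// `get-stream` package: for `getStream()` the limit counts characters of the
// decoded string, so a multibyte character such as '\u1000' uses a single unit
// even though it occupies three bytes in UTF-8. The chunk value is hypothetical.
import {Duplex} from 'node:stream';
import getStream from 'get-stream';

// One character, three UTF-8 bytes: still fits within `maxBuffer: 1`
const result = await getStream(Duplex.from(['\u1000']), {maxBuffer: 1});
console.log(result === '\u1000'); // true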