pax_global_header 0000666 0000000 0000000 00000000064 14261340256 0014515 g ustar 00root root 0000000 0000000 52 comment=2acec9c0dc3538c32da2825ba990dda983c09d6a
fetch-blob-3.2.0/ 0000775 0000000 0000000 00000000000 14261340256 0013524 5 ustar 00root root 0000000 0000000 fetch-blob-3.2.0/.codecov.yml 0000664 0000000 0000000 00000000133 14261340256 0015744 0 ustar 00root root 0000000 0000000 coverage:
round: up
precision: 2
parsers:
javascript:
enable_partials: yes
fetch-blob-3.2.0/.github/ 0000775 0000000 0000000 00000000000 14261340256 0015064 5 ustar 00root root 0000000 0000000 fetch-blob-3.2.0/.github/PULL_REQUEST_TEMPLATE.md 0000664 0000000 0000000 00000001410 14261340256 0020661 0 ustar 00root root 0000000 0000000
## The purpose of this PR is:
...
## This is what had to change:
...
## This is what I'd like reviewers to know:
...
-------------------------------------------------------------------------------------------------
- [ ] I prefixed the PR-title with `docs: `, `fix(area): `, `feat(area): ` or `breaking(area): `
- [ ] I updated ./CHANGELOG.md with a link to this PR or Issue
- [ ] I updated the README.md
- [ ] I Added unit test(s)
-------------------------------------------------------------------------------------------------
- fix #000
fetch-blob-3.2.0/.github/dependabot.yml 0000664 0000000 0000000 00000000620 14261340256 0017712 0 ustar 00root root 0000000 0000000 version: 2
updates:
- package-ecosystem: npm
directory: "/"
schedule:
interval: daily
open-pull-requests-limit: 10
ignore:
- dependency-name: get-stream
versions:
- 6.0.0
- 6.0.1
- dependency-name: typescript
versions:
- 4.1.3
- 4.1.4
- 4.1.5
- 4.2.2
- 4.2.3
- 4.2.4
- dependency-name: xo
versions:
- 0.37.1
- 0.38.1
- 0.38.2
fetch-blob-3.2.0/.github/workflows/ 0000775 0000000 0000000 00000000000 14261340256 0017121 5 ustar 00root root 0000000 0000000 fetch-blob-3.2.0/.github/workflows/ci.yml 0000664 0000000 0000000 00000001221 14261340256 0020233 0 ustar 00root root 0000000 0000000 name: CI
on:
push:
branches: [master]
pull_request:
paths:
- "**.js"
- "package.json"
- ".github/workflows/ci.yml"
jobs:
test:
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
node: ["17.3"]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: ${{ matrix.node }}
- run: npm install
- run: npm test
- run: npm run report -- --colors
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
file: ./coverage/coverage-final.json
fetch-blob-3.2.0/.gitignore 0000664 0000000 0000000 00000001655 14261340256 0015523 0 ustar 00root root 0000000 0000000 # Lock files
package-lock.json
yarn.lock
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional cache
.cache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
*.d.ts
*.d.cts
.DS_Store fetch-blob-3.2.0/.npmrc 0000664 0000000 0000000 00000000044 14261340256 0014642 0 ustar 00root root 0000000 0000000 package-lock=false
save-exact=false
fetch-blob-3.2.0/CHANGELOG.md 0000664 0000000 0000000 00000006152 14261340256 0015341 0 ustar 00root root 0000000 0000000 # Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/) and this
project adheres to [Semantic Versioning](http://semver.org/).
## v3.1.3
- Allow usage of iterable object in Blob constructor. [#108]
- Run test WPT test against our impl [#109]
- File name are now casted to string [#109]
- Slicing in the middle of multiple parts added more bytes than what it should have [#109]
- Prefixed `stream/web` import with `node:` to allow easier static analysis detection of Node built-ins [#122]
- Added `node:` prefix in `from.js` as well [#114]
- Suppress warning when importing `stream/web` [#114]
## v3.1.2
- Improved typing
- Fixed a bug where position in iterator did not increase
## v3.1.0
- started to use real whatwg streams
- degraded fs/promise to fs.promise to support node v12
- degraded optional changing to support node v12
## v3.0.0
- Changed WeakMap for private field (require node 12)
- Switch to ESM
- blob.stream() return a subset of whatwg stream which is the async iterable part
(it no longer return a node stream)
- Reduced the dependency of Buffer by changing to global TextEncoder/Decoder (require node 11)
- Disabled xo since it couldn't understand private fields (#)
- No longer transform the type to lowercase (https://github.com/w3c/FileAPI/issues/43)
This is more loose than strict, keys should be lowercased, but values should not.
It would require a more proper mime type parser - so we just made it loose.
- index.js and file.js can now be imported by browser & deno since it no longer depends on any
core node features (but why would you?)
- Implemented a File class
## v2.1.2
- Fixed a bug where `start` in BlobDataItem was undefined (#85)
## v2.1.1
- Add nullish values checking in Symbol.hasInstance (#82)
- Add generated typings for from.js file (#80)
- Updated dev dependencies
## v2.1.0
- Fix: .slice has an implementation bug (#54).
- Added blob backed up by filesystem (#55)
## v2.0.1
- Fix: remove upper bound for node engine semver (#49).
## v2.0.0
> Note: This release was previously published as `1.0.7`, but as it contains breaking changes, we renamed it to `2.0.0`.
- **Breaking:** minimum supported Node.js version is now 10.17.
- **Breaking:** `buffer` option has been removed.
- Enhance: create TypeScript declarations from JSDoc (#45).
- Enhance: operate on blob parts (byte sequence) (#44).
- Enhance: use a `WeakMap` for private properties (#42) .
- Other: update formatting.
## v1.0.6
- Enhance: use upstream Blob directly in typings (#38)
- Other: update dependencies
## v1.0.5
- Other: no change to code, update dev dependency to address vulnerability reports
## v1.0.4
- Other: general code rewrite to pass linting, prepare for `node-fetch` release v3
## v1.0.3
- Fix: package.json export `blob.js` properly now
## v1.0.2
- Other: fix test integration
## v1.0.1
- Other: readme update
## v1.0.0
- Major: initial release
[#108]: https://github.com/node-fetch/fetch-blob/pull/108
[#109]: https://github.com/node-fetch/fetch-blob/pull/109
[#114]: https://github.com/node-fetch/fetch-blob/pull/114
[#122]: https://github.com/node-fetch/fetch-blob/pull/122
fetch-blob-3.2.0/LICENSE 0000664 0000000 0000000 00000002054 14261340256 0014532 0 ustar 00root root 0000000 0000000 MIT License
Copyright (c) 2019 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
fetch-blob-3.2.0/README.md 0000664 0000000 0000000 00000015012 14261340256 0015002 0 ustar 00root root 0000000 0000000 # fetch-blob
[![npm version][npm-image]][npm-url]
[![build status][ci-image]][ci-url]
[![coverage status][codecov-image]][codecov-url]
[![install size][install-size-image]][install-size-url]
A Blob implementation in Node.js, originally from [node-fetch](https://github.com/node-fetch/node-fetch).
## Installation
```sh
npm install fetch-blob
```
Upgrading from 2x to 3x
Updating from 2 to 3 should be a breeze since there are not many changes to the blob specification.
The major cause of a major release is coding standards.
- internal WeakMaps was replaced with private fields
- internal Buffer.from was replaced with TextEncoder/Decoder
- internal buffers was replaced with Uint8Arrays
- CommonJS was replaced with ESM
- The node stream returned by calling `blob.stream()` was replaced with whatwg streams
- (Read "Differences from other blobs" for more info.)
Differences from other Blobs
- Unlike NodeJS `buffer.Blob` (Added in: v15.7.0) and browser native Blob this polyfilled version can't be sent via PostMessage
- This blob version is more arbitrary: it can be constructed with blob parts that aren't an instance of itself;
they only have to look and behave like a blob to be accepted as a blob part.
- The benefit of this is that you can create other types of blobs that don't contain any internal data that has to be read in other ways, such as the `BlobDataItem` created in `from.js` that wraps a file path into a blob-like item and read lazily (nodejs plans to [implement this][fs-blobs] as well)
- The `blob.stream()` is the most noticeable differences. It returns a WHATWG stream now. to keep it as a node stream you would have to do:
```js
import {Readable} from 'stream'
const stream = Readable.from(blob.stream())
```
## Usage
```js
// Ways to import
// (PS it's dependency free ESM package so regular http-import from CDN works too)
import Blob from 'fetch-blob'
import File from 'fetch-blob/file.js'
import {Blob} from 'fetch-blob'
import {File} from 'fetch-blob/file.js'
const {Blob} = await import('fetch-blob')
// Ways to read the blob:
const blob = new Blob(['hello, world'])
await blob.text()
await blob.arrayBuffer()
for await (let chunk of blob.stream()) { ... }
blob.stream().getReader().read()
blob.stream().getReader({mode: 'byob'}).read(view)
```
### Blob part backed up by filesystem
`fetch-blob/from.js` comes packed with tools to convert any filepath into either a Blob or a File
It will not read the content into memory. It will only stat the file for last modified date and file size.
```js
// The default export is sync and use fs.stat to retrieve size & last modified as a blob
import blobFromSync from 'fetch-blob/from.js'
import {File, Blob, blobFrom, blobFromSync, fileFrom, fileFromSync} from 'fetch-blob/from.js'
const fsFile = fileFromSync('./2-GiB-file.bin', 'application/octet-stream')
const fsBlob = await blobFrom('./2-GiB-file.mp4')
// Not a 4 GiB memory snapshot, just holds references
// points to where data is located on the disk
const blob = new Blob([fsFile, fsBlob, 'memory', new Uint8Array(10)])
console.log(blob.size) // ~4 GiB
```
`blobFrom|blobFromSync|fileFrom|fileFromSync(path, [mimetype])`
### Creating a temporary file on the disk
(requires [FinalizationRegistry] - node v14.6)
When using both `createTemporaryBlob` and `createTemporaryFile`
then you will write data to the temporary folder in their respective OS.
The arguments can be anything that [fsPromises.writeFile] supports. NodeJS
v14.17.0+ also supports writing (async)Iterable streams and passing in an
AbortSignal, so both NodeJS streams and whatwg streams are supported. When the
file has been written it will return a Blob/File handle with a reference to
this temporary location on the disk. When you no longer have a reference to
this Blob/File and it has been GC'd then it will automatically be deleted.
These files are also unlinked upon exiting the process.
```js
import { createTemporaryBlob, createTemporaryFile } from 'fetch-blob/from.js'
const req = new Request('https://httpbin.org/image/png')
const res = await fetch(req)
const type = res.headers.get('content-type')
const signal = req.signal
let blob = await createTemporaryBlob(res.body, { type, signal })
// const file = createTemporaryBlob(res.body, 'img.png', { type, signal })
blob = undefined // losing references will delete the file from disk
```
`createTemporaryBlob(data, { type, signal })`
`createTemporaryFile(data, FileName, { type, signal, lastModified })`
### Creating Blobs backed up by other async sources
Our Blob & File classes are more generic than other polyfills in the way that they can accept any blob look-a-like item
An example of this is that our blob implementation can be constructed with parts coming from [BlobDataItem](https://github.com/node-fetch/fetch-blob/blob/8ef89adad40d255a3bbd55cf38b88597c1cd5480/from.js#L32) (aka a filepath) or from [buffer.Blob](https://nodejs.org/api/buffer.html#buffer_new_buffer_blob_sources_options), it does not have to implement all the methods - just enough that it can be read/understood by our Blob implementation. The minimum requirement is that it has `Symbol.toStringTag`, `size`, `slice()` and either a `stream()` or an `arrayBuffer()` method. If you then wrap it in our Blob or File `new Blob([blobDataItem])` then you get all of the other methods that should be implemented in a blob or file
An example of this could be to create a file or blob like item coming from a remote HTTP request. Or from a DataBase
See the [MDN documentation](https://developer.mozilla.org/en-US/docs/Web/API/Blob) and [tests](https://github.com/node-fetch/fetch-blob/blob/master/test.js) for more details of how to use the Blob.
[npm-image]: https://flat.badgen.net/npm/v/fetch-blob
[npm-url]: https://www.npmjs.com/package/fetch-blob
[ci-image]: https://github.com/node-fetch/fetch-blob/workflows/CI/badge.svg
[ci-url]: https://github.com/node-fetch/fetch-blob/actions
[codecov-image]: https://flat.badgen.net/codecov/c/github/node-fetch/fetch-blob/master
[codecov-url]: https://codecov.io/gh/node-fetch/fetch-blob
[install-size-image]: https://flat.badgen.net/packagephobia/install/fetch-blob
[install-size-url]: https://packagephobia.now.sh/result?p=fetch-blob
[fs-blobs]: https://github.com/nodejs/node/issues/37340
[fsPromises.writeFile]: https://nodejs.org/dist/latest-v18.x/docs/api/fs.html#fspromiseswritefilefile-data-options
[FinalizationRegistry]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry fetch-blob-3.2.0/file.js 0000664 0000000 0000000 00000002314 14261340256 0015001 0 ustar 00root root 0000000 0000000 import Blob from './index.js'
const _File = class File extends Blob {
  #lastModified = 0
  #name = ''

  /**
   * WHATWG-style File: a Blob that additionally carries a file name and a
   * last-modified timestamp.
   *
   * @param {*[]} fileBits blob parts, forwarded verbatim to the Blob constructor
   * @param {string} fileName cast to string and exposed via the `name` getter
   * @param {{lastModified?: number, type?: string}} options
   */// @ts-ignore
  constructor (fileBits, fileName, options = {}) {
    if (arguments.length < 2) {
      throw new TypeError(`Failed to construct 'File': 2 arguments required, but only ${arguments.length} present.`)
    }
    super(fileBits, options)

    if (options === null) options = {}

    // WebIDL-style casting: undefined -> "now", everything else -> Number().
    // A NaN result (e.g. lastModified: 'abc') leaves the field at its 0 default.
    const ts = options.lastModified === undefined
      ? Date.now()
      : Number(options.lastModified)
    if (!Number.isNaN(ts)) {
      this.#lastModified = ts
    }

    this.#name = String(fileName)
  }

  get name () {
    return this.#name
  }

  get lastModified () {
    return this.#lastModified
  }

  get [Symbol.toStringTag] () {
    return 'File'
  }

  static [Symbol.hasInstance] (object) {
    // Brand check rather than prototype check: any Blob whose toStringTag is
    // exactly 'File' qualifies, so file-likes from other realms pass too.
    return !!object && object instanceof Blob &&
      /^(File)$/.test(object[Symbol.toStringTag])
  }
}

/** @type {typeof globalThis.File} */// @ts-ignore
export const File = _File
export default File
fetch-blob-3.2.0/from.js 0000664 0000000 0000000 00000010521 14261340256 0015024 0 ustar 00root root 0000000 0000000 import {
realpathSync,
statSync,
rmdirSync,
createReadStream,
promises as fs
} from 'node:fs'
import { basename, sep, join } from 'node:path'
import { tmpdir } from 'node:os'
import process from 'node:process'
import DOMException from 'node-domexception'
import File from './file.js'
import Blob from './index.js'
const { stat, mkdtemp } = fs

// Module state: counter for unique temp-file names, the lazily created
// temp directory, and the FinalizationRegistry driving GC-based cleanup.
let i = 0
let tempDir
let registry

/**
 * Synchronously create a Blob backed by a file on disk.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const blobFromSync = (path, type) => {
  return fromBlob(statSync(path), path, type)
}

/**
 * Asynchronously create a Blob backed by a file on disk.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise}
 */
const blobFrom = async (path, type) => {
  return fromBlob(await stat(path), path, type)
}

/**
 * Asynchronously create a File backed by a file on disk.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 * @returns {Promise}
 */
const fileFrom = async (path, type) => {
  return fromFile(await stat(path), path, type)
}

/**
 * Synchronously create a File backed by a file on disk.
 * @param {string} path filepath on the disk
 * @param {string} [type] mimetype to use
 */
const fileFromSync = (path, type) => {
  return fromFile(statSync(path), path, type)
}

// Wrap the stat metadata in a lazily-read BlobDataItem part.
// @ts-ignore
const fromBlob = (stat, path, type = '') => {
  const part = new BlobDataItem({
    path,
    size: stat.size,
    lastModified: stat.mtimeMs,
    start: 0
  })
  return new Blob([part], { type })
}

// Same as fromBlob, but wrapped in a File carrying the basename and mtime.
// @ts-ignore
const fromFile = (stat, path, type = '') => {
  const part = new BlobDataItem({
    path,
    size: stat.size,
    lastModified: stat.mtimeMs,
    start: 0
  })
  return new File([part], basename(path), { type, lastModified: stat.mtimeMs })
}
/**
 * Creates a temporary blob backed by the filesystem.
 * NOTE: requires node.js v14 or higher to use FinalizationRegistry
 *
 * @param {*} data Same as fs.writeFile data
 * @param {BlobPropertyBag & {signal?: AbortSignal}} [options] `signal` cancels
 *   the underlying write; `type` becomes the resulting blob's mimetype
 * @returns {Promise<Blob>}
 */
const createTemporaryBlob = async (data, {signal, type} = {}) => {
  // One-time lazy setup: the registry unlinks each backing file once its
  // Blob is garbage collected; the temp dir is created on first use.
  registry = registry || new FinalizationRegistry(fs.unlink)
  tempDir = tempDir || await mkdtemp(realpathSync(tmpdir()) + sep)
  const id = `${i++}`
  const destination = join(tempDir, id)
  // fs.writeFile does not accept a raw ArrayBuffer, so wrap it in a view.
  if (data instanceof ArrayBuffer) data = new Uint8Array(data)
  await fs.writeFile(destination, data, { signal })
  const blob = await blobFrom(destination, type)
  // Register AFTER creation so this exact Blob's collection triggers unlink.
  registry.register(blob, destination)
  return blob
}
/**
 * Creates a temporary File backed by the filesystem.
 * Pretty much the same as constructing a new File(data, name, options)
 *
 * NOTE: requires node.js v14 or higher to use FinalizationRegistry
 * @param {*} data Same as fs.writeFile data
 * @param {string} name filename exposed on the resulting File
 * @param {FilePropertyBag & {signal?: AbortSignal}} [opts]
 * @returns {Promise<File>}
 */
const createTemporaryFile = async (data, name, opts = {}) => {
  // Forward the abort signal so the underlying write can actually be
  // cancelled — previously it was dropped here, even though the README
  // documents `createTemporaryFile(data, FileName, { type, signal, ... })`.
  const blob = await createTemporaryBlob(data, { signal: opts.signal })
  // `opts` still goes to the File constructor for type/lastModified.
  return new File([blob], name, opts)
}
/**
* This is a blob backed up by a file on the disk
 * with minimum requirements. It's wrapped around a Blob as a blobPart
* so you have no direct access to this.
*
* @private
*/
class BlobDataItem {
  #path
  #start

  // options: { path, start, size, lastModified, [originalSize] }
  constructor (options) {
    this.#path = options.path
    this.#start = options.start
    this.size = options.size
    this.lastModified = options.lastModified
    // originalSize tracks the full on-disk size even after slicing, so
    // stream() can detect the file changing underneath us.
    this.originalSize = options.originalSize === undefined
      ? options.size
      : options.originalSize
  }

  /**
   * Slicing arguments is first validated and formatted
   * to not be out of range by Blob.prototype.slice
   */
  slice (start, end) {
    return new BlobDataItem({
      path: this.#path,
      lastModified: this.lastModified,
      originalSize: this.originalSize,
      size: end - start,
      // Offsets accumulate: slicing a slice shifts the window further in.
      start: this.#start + start
    })
  }

  async * stream () {
    // Re-stat before reading: if the file was modified or resized since this
    // item was created, refuse to stream stale/garbled bytes.
    const { mtimeMs, size } = await stat(this.#path)
    if (mtimeMs > this.lastModified || this.originalSize !== size) {
      throw new DOMException('The requested file could not be read, typically due to permission problems that have occurred after a reference to a file was acquired.', 'NotReadableError')
    }
    // `end` is inclusive in createReadStream, hence the -1.
    yield * createReadStream(this.#path, {
      start: this.#start,
      end: this.#start + this.size - 1
    })
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }
}
// Best-effort cleanup: remove the temp directory (and any files still in it)
// synchronously when the process exits normally.
process.once('exit', () => {
  tempDir && rmdirSync(tempDir, { recursive: true })
})
export default blobFromSync
export {
Blob,
blobFrom,
blobFromSync,
createTemporaryBlob,
File,
fileFrom,
fileFromSync,
createTemporaryFile
} fetch-blob-3.2.0/index.js 0000664 0000000 0000000 00000016602 14261340256 0015176 0 ustar 00root root 0000000 0000000 /*! fetch-blob. MIT License. Jimmy Wärting */
// TODO (jimmywarting): in the feature use conditional loading with top level await (requires 14.x)
// Node has recently added whatwg stream into core
import './streams.cjs'
// 64 KiB (same size chrome slice theirs blob into Uint8array's)
const POOL_SIZE = 65536
/**
 * Lazily yields the bytes of every part as Uint8Array chunks of at most
 * POOL_SIZE bytes.
 *
 * @param {(Blob | Uint8Array)[]} parts
 * @param {boolean} [clone] when true, view-backed parts are copied so the
 *   consumer can never observe mutation of a shared underlying buffer
 */
async function * toIterator (parts, clone = true) {
  for (const part of parts) {
    if ('stream' in part) {
      // Blob-like parts (including BlobDataItem) stream themselves.
      yield * (/** @type {AsyncIterableIterator} */ (part.stream()))
    } else if (ArrayBuffer.isView(part)) {
      if (clone) {
        // Copy out of the view's exact byte range, POOL_SIZE at a time.
        let position = part.byteOffset
        const end = part.byteOffset + part.byteLength
        while (position !== end) {
          const size = Math.min(end - position, POOL_SIZE)
          const chunk = part.buffer.slice(position, position + size)
          position += chunk.byteLength
          yield new Uint8Array(chunk)
        }
      } else {
        yield part
      }
      /* c8 ignore next 10 */
    } else {
      // For blobs that have arrayBuffer but no stream method (nodes buffer.Blob)
      let position = 0, b = (/** @type {Blob} */ (part))
      while (position !== b.size) {
        const chunk = b.slice(position, Math.min(b.size, position + POOL_SIZE))
        const buffer = await chunk.arrayBuffer()
        position += buffer.byteLength
        yield new Uint8Array(buffer)
      }
    }
  }
}
const _Blob = class Blob {
  /** @type {Array.<(Blob|Uint8Array)>} */
  #parts = []
  #type = ''
  #size = 0
  #endings = 'transparent'

  /**
   * The Blob() constructor returns a new Blob object. The content
   * of the blob consists of the concatenation of the values given
   * in the parameter array.
   *
   * @param {*} blobParts
   * @param {{ type?: string, endings?: string }} [options]
   */
  constructor (blobParts = [], options = {}) {
    if (typeof blobParts !== 'object' || blobParts === null) {
      throw new TypeError('Failed to construct \'Blob\': The provided value cannot be converted to a sequence.')
    }

    if (typeof blobParts[Symbol.iterator] !== 'function') {
      throw new TypeError('Failed to construct \'Blob\': The object must have a callable @@iterator property.')
    }

    if (typeof options !== 'object' && typeof options !== 'function') {
      throw new TypeError('Failed to construct \'Blob\': parameter 2 cannot convert to dictionary.')
    }

    if (options === null) options = {}

    const encoder = new TextEncoder()
    for (const element of blobParts) {
      let part
      if (ArrayBuffer.isView(element)) {
        // Copy only the viewed byte range so later mutation of the source
        // buffer can never change this Blob's contents.
        part = new Uint8Array(element.buffer.slice(element.byteOffset, element.byteOffset + element.byteLength))
      } else if (element instanceof ArrayBuffer) {
        part = new Uint8Array(element.slice(0))
      } else if (element instanceof Blob) {
        part = element
      } else {
        // Anything else is stringified and UTF-8 encoded.
        part = encoder.encode(`${element}`)
      }

      const size = ArrayBuffer.isView(part) ? part.byteLength : part.size
      // Avoid pushing empty parts into the array to better GC them
      if (size) {
        this.#size += size
        this.#parts.push(part)
      }
    }

    // `endings` is stored for dictionary completeness; no line-ending
    // conversion is applied anywhere in this implementation.
    this.#endings = `${options.endings === undefined ? 'transparent' : options.endings}`
    const type = options.type === undefined ? '' : String(options.type)
    // A type containing characters outside U+0020..U+007E is discarded.
    this.#type = /^[\x20-\x7E]*$/.test(type) ? type : ''
  }

  /**
   * The Blob interface's size property returns the
   * size of the Blob in bytes.
   */
  get size () {
    return this.#size
  }

  /**
   * The type property of a Blob object returns the MIME type of the file.
   */
  get type () {
    return this.#type
  }

  /**
   * The text() method in the Blob interface returns a Promise
   * that resolves with a string containing the contents of
   * the blob, interpreted as UTF-8.
   *
   * @return {Promise}
   */
  async text () {
    // More optimized than using this.arrayBuffer()
    // that requires twice as much ram
    const decoder = new TextDecoder()
    let str = ''
    // clone=false: chunks are consumed immediately, so no defensive copy needed.
    for await (const part of toIterator(this.#parts, false)) {
      str += decoder.decode(part, { stream: true })
    }
    // Remaining
    str += decoder.decode()
    return str
  }

  /**
   * The arrayBuffer() method in the Blob interface returns a
   * Promise that resolves with the contents of the blob as
   * binary data contained in an ArrayBuffer.
   *
   * @return {Promise}
   */
  async arrayBuffer () {
    // Easier way... Just a unnecessary overhead
    // const view = new Uint8Array(this.size);
    // await this.stream().getReader({mode: 'byob'}).read(view);
    // return view.buffer;

    const data = new Uint8Array(this.size)
    let offset = 0
    for await (const chunk of toIterator(this.#parts, false)) {
      data.set(chunk, offset)
      offset += chunk.length
    }

    return data.buffer
  }

  stream () {
    const it = toIterator(this.#parts, true)

    return new globalThis.ReadableStream({
      // @ts-ignore
      type: 'bytes',
      // Each pull forwards exactly one chunk from the iterator.
      async pull (ctrl) {
        const chunk = await it.next()
        chunk.done ? ctrl.close() : ctrl.enqueue(chunk.value)
      },

      // Let the generator run its cleanup when the consumer cancels.
      async cancel () {
        await it.return()
      }
    })
  }

  /**
   * The Blob interface's slice() method creates and returns a
   * new Blob object which contains data from a subset of the
   * blob on which it's called.
   *
   * @param {number} [start]
   * @param {number} [end]
   * @param {string} [type]
   */
  slice (start = 0, end = this.size, type = '') {
    const { size } = this

    // Clamp negative/overshooting indices into [0, size], per spec.
    let relativeStart = start < 0 ? Math.max(size + start, 0) : Math.min(start, size)
    let relativeEnd = end < 0 ? Math.max(size + end, 0) : Math.min(end, size)

    const span = Math.max(relativeEnd - relativeStart, 0)
    const parts = this.#parts
    const blobParts = []
    let added = 0

    for (const part of parts) {
      // don't add the overflow to new blobParts
      if (added >= span) {
        break
      }

      const size = ArrayBuffer.isView(part) ? part.byteLength : part.size
      if (relativeStart && size <= relativeStart) {
        // Skip the beginning and change the relative
        // start & end position as we skip the unwanted parts
        relativeStart -= size
        relativeEnd -= size
      } else {
        let chunk
        if (ArrayBuffer.isView(part)) {
          // subarray shares the underlying buffer — no copy here.
          chunk = part.subarray(relativeStart, Math.min(size, relativeEnd))
          added += chunk.byteLength
        } else {
          chunk = part.slice(relativeStart, Math.min(size, relativeEnd))
          added += chunk.size
        }
        relativeEnd -= size
        blobParts.push(chunk)
        relativeStart = 0 // All next sequential parts should start at 0
      }
    }

    // Build an empty Blob, then install the collected parts directly via
    // the private fields (avoids re-copying every part).
    const blob = new Blob([], { type: String(type).toLowerCase() })
    blob.#size = span
    blob.#parts = blobParts

    return blob
  }

  get [Symbol.toStringTag] () {
    return 'Blob'
  }

  static [Symbol.hasInstance] (object) {
    // Duck-typed brand check so cross-realm blobs and file-likes pass.
    return (
      object &&
      typeof object === 'object' &&
      typeof object.constructor === 'function' &&
      (
        typeof object.stream === 'function' ||
        typeof object.arrayBuffer === 'function'
      ) &&
      /^(Blob|File)$/.test(object[Symbol.toStringTag])
    )
  }
}
// Make the public accessors/methods enumerable, mirroring how the native
// Blob exposes its interface members.
Object.defineProperties(_Blob.prototype, {
  size: { enumerable: true },
  type: { enumerable: true },
  slice: { enumerable: true }
})
/** @type {typeof globalThis.Blob} */
export const Blob = _Blob
export default Blob
fetch-blob-3.2.0/package.json 0000664 0000000 0000000 00000003156 14261340256 0016017 0 ustar 00root root 0000000 0000000 {
"name": "fetch-blob",
"version": "3.1.5",
"description": "Blob & File implementation in Node.js, originally from node-fetch.",
"main": "index.js",
"type": "module",
"files": [
"from.js",
"file.js",
"file.d.ts",
"index.js",
"index.d.ts",
"from.d.ts",
"streams.cjs"
],
"scripts": {
"test": "node --experimental-loader ./test/http-loader.js ./test/test-wpt-in-node.js",
"report": "c8 --reporter json --reporter text npm run test",
"coverage": "npm run report && codecov -f coverage/coverage-final.json",
"prepublishOnly": "tsc --declaration --emitDeclarationOnly --allowJs index.js from.js"
},
"repository": "https://github.com/node-fetch/fetch-blob.git",
"keywords": [
"blob",
"file",
"node-fetch"
],
"engines": {
"node": "^12.20 || >= 14.13"
},
"author": "Jimmy Wärting (https://jimmy.warting.se)",
"license": "MIT",
"bugs": {
"url": "https://github.com/node-fetch/fetch-blob/issues"
},
"homepage": "https://github.com/node-fetch/fetch-blob#readme",
"devDependencies": {
"@types/node": "^18.0.2",
"c8": "^7.11.0",
"typescript": "^4.5.4"
},
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/jimmywarting"
},
{
"type": "paypal",
"url": "https://paypal.me/jimmywarting"
}
],
"dependencies": {
"node-domexception": "^1.0.0",
"web-streams-polyfill": "^3.0.3"
}
}
fetch-blob-3.2.0/streams.cjs 0000664 0000000 0000000 00000003104 14261340256 0015701 0 ustar 00root root 0000000 0000000 /* c8 ignore start */
// 64 KiB (same size chrome slice theirs blob into Uint8array's)
const POOL_SIZE = 65536

// Ensure whatwg streams exist on globalThis before index.js uses them.
if (!globalThis.ReadableStream) {
  // `node:stream/web` got introduced in v16.5.0 as experimental
  // and it's preferred over the polyfilled version. So we also
  // suppress the warning that gets emitted by NodeJS for using it.
  try {
    const process = require('node:process')
    const { emitWarning } = process
    try {
      // Temporarily silence the experimental warning; restore the original
      // emitWarning whether or not the require succeeds.
      process.emitWarning = () => {}
      Object.assign(globalThis, require('node:stream/web'))
      process.emitWarning = emitWarning
    } catch (error) {
      process.emitWarning = emitWarning
      throw error
    }
  } catch (error) {
    // fallback to polyfill implementation
    Object.assign(globalThis, require('web-streams-polyfill/dist/ponyfill.es2018.js'))
  }
}

try {
  // Don't use node: prefix for this, require+node: is not supported until node v14.14
  // Only `import()` can use prefix in 12.20 and later
  const { Blob } = require('buffer')
  if (Blob && !Blob.prototype.stream) {
    // Patch a minimal whatwg-stream reader onto buffer.Blob for Node
    // versions where it lacks a stream() method.
    Blob.prototype.stream = function name (params) {
      let position = 0
      const blob = this

      return new ReadableStream({
        type: 'bytes',
        // Read the blob POOL_SIZE bytes per pull; close at the end.
        async pull (ctrl) {
          const chunk = blob.slice(position, Math.min(blob.size, position + POOL_SIZE))
          const buffer = await chunk.arrayBuffer()
          position += buffer.byteLength
          ctrl.enqueue(new Uint8Array(buffer))

          if (position === blob.size) {
            ctrl.close()
          }
        }
      })
    }
  }
} catch (error) {}
/* c8 ignore end */
fetch-blob-3.2.0/test/ 0000775 0000000 0000000 00000000000 14261340256 0014503 5 ustar 00root root 0000000 0000000 fetch-blob-3.2.0/test/http-loader.js 0000664 0000000 0000000 00000003671 14261340256 0017273 0 ustar 00root root 0000000 0000000 import { Buffer } from 'node:buffer'
import fs from 'node:fs'
import { get } from 'node:https'
// Minimal https GET helper: resolves with the raw IncomingMessage stream.
const fetch = url => new Promise(rs => get(url, rs))
// On-disk cache directory for downloaded modules, resolved next to this file.
const cache = new URL('./.cache/', import.meta.url)
/**
* @param {string} specifier
* @param {{
* conditions: !Array,
* parentURL: !(string | undefined),
* }} context
* @param {Function} defaultResolve
* @returns {Promise<{ url: string }>}
*/
/**
 * ESM resolve hook that lets https:// specifiers through so the matching
 * load hook can fetch them over the network.
 */
export async function resolve (specifier, context, defaultResolve) {
  const { parentURL = null } = context

  // Absolute https:// specifiers pass through untouched — Node.js would
  // otherwise reject the scheme before our load hook ever runs.
  if (specifier.startsWith('https://')) {
    return { url: specifier }
  }

  // Relative specifiers imported from a remote module resolve against
  // that module's URL.
  if (parentURL && parentURL.startsWith('https://')) {
    return { url: new URL(specifier, parentURL).href }
  }

  // Everything else falls back to Node.js' own resolution.
  return defaultResolve(specifier, context, defaultResolve)
}
// ESM load hook: downloads https:// module sources, caching them on disk
// for one hour keyed by the hex-encoded URL.
export async function load (url, context, defaultLoad) {
  // For JavaScript to be loaded over the network, we need to fetch and
  // return it.
  if (url.startsWith('https://')) {
    const uuid = Buffer.from(url).toString('hex')
    const cachedFile = new URL(uuid, cache)
    let data = ''

    // cache remote files for 1h
    if (fs.existsSync(cachedFile) && fs.statSync(cachedFile).mtimeMs > Date.now() - 1000 * 60 * 60) {
      data = fs.readFileSync(cachedFile, 'utf8')
    } else {
      // NOTE(review): a failed request resolves to the Error object here and
      // is then iterated like a response stream — presumably the for-await
      // throws on non-iterable errors; confirm this is the intended surface.
      const res = await fetch(url).catch(err => err)
      for await (const chunk of res) data += chunk
      fs.mkdirSync(cache, { recursive: true })
      fs.writeFileSync(cachedFile, data)
    }

    return {
      // This example assumes all network-provided JavaScript is ES module
      // code.
      format: 'module',
      source: data
    }
  }

  // Let Node.js handle all other URLs.
  return defaultLoad(url, context, defaultLoad)
}
fetch-blob-3.2.0/test/own-misc-test.js 0000664 0000000 0000000 00000022770 14261340256 0017562 0 ustar 00root root 0000000 0000000 // @ts-nocheck
// @ts-ignore
import fs from 'node:fs'
import buffer from 'node:buffer'
import syncBlob, {
blobFromSync,
blobFrom,
fileFromSync,
fileFrom,
createTemporaryBlob,
createTemporaryFile
} from '../from.js'
// Reference contents of the LICENSE file; the filesystem-backed tests below
// compare blob contents against this buffer.
const license = fs.readFileSync('./LICENSE')
// Blob constructor accepts mixed part types (strings, TypedArrays,
// ArrayBuffers, Buffers, nested Blobs, plain objects and URLSearchParams)
// and concatenates their string/byte representations in order.
test_blob(() => new Blob([
  'a',
  new Uint8Array([98]),
  new Uint16Array([25699]),
  new Uint8Array([101]).buffer,
  Buffer.from('f'),
  new Blob(['g']),
  {},
  new URLSearchParams('foo')
]), {
  desc: 'Blob ctor parts',
  expected: 'abcdefg[object Object]foo=',
  type: '',
  length: 26
})
promise_test(async () => {
  assert_equals(fileFromSync('./LICENSE').name, 'LICENSE')
  assert_equals((await fileFrom('./LICENSE')).name, 'LICENSE')
}, 'file from returns the same name')
// Could not find similar test on WPT
test(() => {
  const now = new Date()
  // lastModified is coerced to a number (Date -> ms since epoch,
  // string/boolean -> Number), as the assertions below demonstrate
  assert_equals(new File([], '', { lastModified: now }).lastModified, +now)
  assert_equals(new File([], '', { lastModified: +now }).lastModified, +now)
  assert_equals(new File([], '', { lastModified: 100 }).lastModified, 100)
  assert_equals(new File([], '', { lastModified: '200' }).lastModified, 200)
  assert_equals(new File([], '', { lastModified: true }).lastModified, 1)
  assert_equals(new File([], '', { lastModified: false }).lastModified, 0)
  // When omitted (or explicitly undefined) lastModified defaults to "now"
  assert_approx_equals(new File([], '').lastModified, Date.now(), 0.020)
  assert_approx_equals(new File([], '', { lastModified: undefined }).lastModified, Date.now(), 0.020)
}, 'File sets current time')
// Could not find similar test on WPT
test(() => {
  const values = ['Not a Number', [], {}, null]
  // I can't really see anything about this in the spec,
  // but this is how browsers handle type casting for this option...
  for (const lastModified of values) {
    const file = new File([], '', { lastModified })
    assert_equals(file.lastModified, 0)
  }
}, 'Interpretes NaN value in lastModified option as 0')
test(() => {
  assert_equals(blobFromSync, syncBlob)
}, 'default export is named exported blobFromSync')
promise_test(async () => {
  const { Blob, default: def } = await import('../index.js')
  assert_equals(Blob, def)
}, 'Can use named import - as well as default')
// This was necessary to avoid large ArrayBuffer clones (slice)
promise_test(async t => {
  const buf = new Uint8Array(65590)
  const blob = new Blob([buf])
  let i = 0
  // eslint-disable-next-line no-unused-vars
  for await (const chunk of blob.stream()) {
    i++
  }
  // 65590 bytes is streamed as exactly two chunks
  assert_equals(i, 2)
}, 'Large chunks are divided into smaller chunks')
// Couldn't find a test for this on WPT
promise_test(async () => {
  const buf = new Uint8Array([97])
  const blob = new Blob([buf])
  const chunk = await blob.stream().getReader().read()
  assert_equals(chunk.value[0], 97)
  // Mutating the chunk handed back by the reader must not affect the blob
  chunk.value[0] = 98
  assert_equals(await blob.text(), 'a')
}, 'Blobs are immutable')
/**
 * Deviation from WPT: it's important to keep boundary value
 * so we don't lowercase the type
 * @see https://github.com/w3c/FileAPI/issues/43
 */
test(() => {
  const type = 'multipart/form-data; boundary=----WebKitFormBoundaryTKqdrVt01qOBltBd'
  assert_equals(new Blob([], { type }).type, type)
  assert_equals(new File([], '', { type }).type, type)
}, 'Dose not lowercase the type')
test( // Because we have symbol.hasInstance it's best to test it...
  () => (assert_false(null instanceof Blob), assert_false(null instanceof File)),
  'Instanceof check returns false for nullish values'
)
test( // Because browser normally never tries things taken for granted
  () => assert_equals(new Blob().toString(), '[object Blob]'),
  'blob.toString() returns [object Blob]'
)
test( // Because browser normally never tries things taken for granted
  () => assert_equals(new File([], '').toString(), '[object File]'),
  'file.toString() returns [object File]'
)
// fetch-blob uniques is that it supports arbitrary blobs too
test(() => {
  class File {
    stream () {}
    get [Symbol.toStringTag] () { return 'File' }
  }
  assert_true(new File() instanceof Blob)
}, 'Blob-ish class is an instance of Blob')
// fetch-blob uniques is that it supports arbitrary blobs too
// (node:buffer only exposes Blob on newer Node.js versions, hence the guard)
if (buffer.Blob) {
  test_blob(() => new Blob([new buffer.Blob(['blob part'])]), {
    desc: 'Can wrap buffer.Blob to a fetch-blob',
    expected: 'blob part',
    type: '',
  })
}
/**
 * Test if Blob can be constructed with BOM and keep it when casted to string
 * Test if blob.text() can correctly remove BOM - `buffer.toString()` is bad
 */
promise_test(async () => {
  const text = '{"foo": "bar"}'
  const blob = new Blob([`\uFEFF${text}`])
  // 14 chars + the 3-byte UTF-8 BOM
  assert_equals(blob.size, 17)
  // text() strips the BOM...
  assert_equals(await blob.text(), text)
  const ab = await blob.slice(0, 3).arrayBuffer()
  // ...but the raw bytes keep it
  assert_equals_typed_array(new Uint8Array(ab), new Uint8Array([0xEF, 0xBB, 0xBF]))
}, 'Can wrap buffer.Blob to a fetch-blob')
// Here to make sure our `toIterator` is working as intended
promise_test(async () => {
  const stream = new Blob(['Some content']).stream()
  // Cancel the stream before start reading, or this will throw an error
  await stream.cancel()
  const reader = stream.getReader()
  const { done, value: chunk } = await reader.read()
  assert_true(done)
  assert_equals(chunk, undefined)
}, 'Blob stream() can be cancelled')
/******************************************************************************/
/*                                                                            */
/*                 Test Blobs backed up by the filesystem                     */
/*                                                                            */
/******************************************************************************/
promise_test(async () => {
  // The optional 2nd argument of the from-utilities sets the mime type;
  // it defaults to ''
  assert_equals(fileFromSync('./LICENSE', 'text/plain').type, 'text/plain')
  assert_equals(fileFromSync('./LICENSE').type, '')
  assert_equals(blobFromSync('./LICENSE', 'text/plain').type, 'text/plain')
  assert_equals(blobFromSync('./LICENSE').type, '')
  assert_equals((await fileFrom('./LICENSE', 'text/plain')).type, 'text/plain')
  assert_equals((await fileFrom('./LICENSE')).type, '')
  assert_equals((await blobFrom('./LICENSE', 'text/plain')).type, 'text/plain')
  assert_equals((await blobFrom('./LICENSE')).type, '')
}, 'from utilities sets correct type')
promise_test(async () => {
  assert_equals(await blobFromSync('./LICENSE').text(), license.toString())
  assert_equals(await fileFromSync('./LICENSE').text(), license.toString())
  assert_equals(await (await blobFrom('./LICENSE')).text(), license.toString())
  assert_equals(await (await fileFrom('./LICENSE')).text(), license.toString())
}, 'blob part backed up by filesystem can be read')
// NOTE(review): this body duplicates the previous test but its description
// claims to test slicing — the slice assertions live in the last test of this
// file; confirm whether this block was meant to differ.
promise_test(async () => {
  assert_equals(await blobFromSync('./LICENSE').text(), license.toString())
  assert_equals(await fileFromSync('./LICENSE').text(), license.toString())
  assert_equals(await (await blobFrom('./LICENSE')).text(), license.toString())
  assert_equals(await (await fileFrom('./LICENSE')).text(), license.toString())
}, 'blob part backed up by filesystem slice correctly')
promise_test(async () => {
  let blob
  // Can construct a temporary blob from a string
  blob = await createTemporaryBlob(license.toString())
  assert_equals(await blob.text(), license.toString())
  // Can construct a temporary blob from a async iterator
  blob = await createTemporaryBlob(blob.stream())
  assert_equals(await blob.text(), license.toString())
  // Can construct a temporary file from a arrayBuffer
  blob = await createTemporaryBlob(await blob.arrayBuffer())
  assert_equals(await blob.text(), license.toString())
  // Can construct a temporary file from a arrayBufferView
  blob = await createTemporaryBlob(await blob.arrayBuffer().then(ab => new Uint8Array(ab)))
  assert_equals(await blob.text(), license.toString())
  // Can specify a mime type
  blob = await createTemporaryBlob('abc', { type: 'text/plain' })
  assert_equals(blob.type, 'text/plain')
  // Can create files too
  let file = await createTemporaryFile('abc', 'abc.txt', {
    type: 'text/plain',
    lastModified: 123
  })
  assert_equals(file.name, 'abc.txt')
  assert_equals(file.size, 3)
  assert_equals(file.lastModified, 123)
}, 'creating temporary blob/file backed up by filesystem')
promise_test(async () => {
  fs.writeFileSync('temp', '')
  await blobFromSync('./temp').text()
  fs.unlinkSync('./temp')
}, 'can read empty files')
// NOTE(review): an async callback is passed to test() (not promise_test) —
// verify the harness actually awaits it before reporting this test done.
test(async () => {
  const blob = blobFromSync('./LICENSE')
  await new Promise(resolve => setTimeout(resolve, 2000))
  const now = new Date()
  // Change modified time
  fs.utimesSync('./LICENSE', now, now)
  // Reading a filesystem-backed blob after the file changed must reject with
  // a DOMException named NotReadableError
  const error = await blob.text().then(assert_unreached, e => e)
  assert_equals(error.constructor.name, 'DOMException')
  assert_equals(error instanceof Error, true)
  assert_equals(error.name, 'NotReadableError')
  const file = fileFromSync('./LICENSE')
  // Above test updates the last modified date to now
  assert_equals(typeof file.lastModified, 'number')
  // The lastModifiedDate is deprecated and removed from spec
  assert_false('lastModifiedDate' in file)
  assert_approx_equals(file.lastModified, +now, 1000)
}, 'Reading after modified should fail')
promise_test(async () => {
  // slice() must honour empty, positive, negative and nested ranges,
  // matching Buffer#slice on the same data
  assert_equals(await blobFromSync('./LICENSE').slice(0, 0).text(), '')
  assert_equals(await blobFromSync('./LICENSE').slice(0, 3).text(), license.slice(0, 3).toString())
  assert_equals(await blobFromSync('./LICENSE').slice(4, 11).text(), license.slice(4, 11).toString())
  assert_equals(await blobFromSync('./LICENSE').slice(-11).text(), license.slice(-11).toString())
  assert_equals(await blobFromSync('./LICENSE').slice(4, 11).slice(2, 5).text(), license.slice(4, 11).slice(2, 5).toString())
}, 'slicing blobs backed up by filesystem returns correct string')
fetch-blob-3.2.0/test/test-wpt-in-node.js 0000664 0000000 0000000 00000011047 14261340256 0020162 0 ustar 00root root 0000000 0000000 // Don't want to use the FileReader, don't want to lowerCase the type either
// import from 'https://wpt.live/resources/testharnessreport.js'
import { File, Blob } from '../from.js'
// Set once any WPT test fails; checked at the bottom of this file
let hasFailed
// The WPT harness expects a browser-like `self` global
globalThis.self = globalThis
await import('https://wpt.live/resources/testharness.js')
setup({
  explicit_timeout: true,
  explicit_done: true
})
/**
 * Register a WPT-style async test asserting that the Blob produced by `fn`
 * is a plain Blob (not a File) with the expected type and text content.
 * @param {() => Blob} fn - factory that builds the Blob under test
 * @param {{ desc: string, expected: string, type: string }} expectations
 */
function test_blob (fn, expectations) {
  // Note: the unused `length` expectation some callers pass is ignored here
  const { desc, expected, type } = expectations
  const t = async_test(desc)
  t.step(async function () {
    const blob = fn()
    assert_true(blob instanceof Blob)
    assert_false(blob instanceof File)
    assert_equals(blob.type.toLowerCase(), type)
    assert_equals(await blob.text(), expected)
    t.done()
  })
}
/**
 * Register a WPT-style async test asserting that the Blob produced by `fn`
 * is a plain Blob (not a File) with the expected type and exact byte content.
 * @param {() => Blob} fn - factory that builds the Blob under test
 * @param {{ desc: string, expected: ArrayLike<number>, type: string }} expectations
 */
function test_blob_binary (fn, expectations) {
  const runner = async_test(expectations.desc)
  runner.step(async () => {
    const blob = fn()
    assert_true(blob instanceof Blob)
    assert_false(blob instanceof File)
    assert_equals(blob.type.toLowerCase(), expectations.type)
    const bytes = await blob.arrayBuffer()
    assert_true(bytes instanceof ArrayBuffer, 'Result should be an ArrayBuffer')
    assert_array_equals(new Uint8Array(bytes), expectations.expected)
    runner.done()
  })
}
// Assert that two TypedArray objects have the same byte values
globalThis.assert_equals_typed_array = (array1, array2) => {
  // Wrap a typed array in a DataView over exactly the bytes it covers
  const toView = (array) => {
    assert_true(array.buffer instanceof ArrayBuffer,
      'Expect input ArrayBuffers to contain field `buffer`')
    return new DataView(array.buffer, array.byteOffset, array.byteLength)
  }
  const view1 = toView(array1)
  const view2 = toView(array2)
  assert_equals(view1.byteLength, view2.byteLength,
    'Expect both arrays to be of the same byte length')
  // Compare byte by byte so a mismatch reports its exact position
  for (let i = 0; i < view1.byteLength; ++i) {
    assert_equals(view1.getUint8(i), view2.getUint8(i),
      `Expect byte at buffer position ${i} to be equal`)
  }
}
// Console reporter for WPT results: prints a pass/fail line per test and
// records failures in `hasFailed` so the process can exit non-zero at the
// end of this file. A few known-deviating WPT tests are deliberately skipped.
globalThis.add_result_callback(test => {
  const skippedTests = [
    'Blob with type "A"',
    'Blob with type "TEXT/HTML"',
    'Getters and value conversions should happen in order until an exception is thrown.',
    'Using type in File constructor: TEXT/PLAIN',
    'Using type in File constructor: text/plain;charset=UTF-8'
  ]
  if (skippedTests.includes(test.name)) return
  const INDENT_SIZE = 2
  const indent = (string, times) => {
    const prefix = ' '.repeat(times)
    return string.split('\n').map(l => prefix + l).join('\n')
  }
  const pass = message => console.log(indent('√ ' + message, INDENT_SIZE))
  const fail = message => console.log(indent('\u00D7 ' + message, INDENT_SIZE))
  const reportStack = stack => console.log(indent(stack, INDENT_SIZE * 2))
  if (test.status === 0) {
    pass(test.name)
    return
  }
  hasFailed = true
  // WPT statuses: 1 = fail, 2 = timeout, 3 = incomplete, 4 = precondition failed
  const suffixes = { 1: '', 2: ' (timeout)', 3: ' (incomplete)', 4: ' (precondition failed)' }
  const suffix = suffixes[test.status]
  if (suffix === undefined) {
    fail(`unknown test status: ${test.status}`)
    return
  }
  fail(`${test.name}${suffix}\n`)
  reportStack(`${test.message}\n${test.stack}`)
})
// Expose our implementations plus the helpers the WPT test files expect
// as globals (they run as plain scripts, not modules)
globalThis.File = File
globalThis.Blob = Blob
globalThis.garbageCollect = () => {}
globalThis.test_blob = test_blob
globalThis.test_blob_binary = test_blob_binary
// Cuz WPT don't clean up after itself
// NOTE(review): auto-closing both ports shortly after construction —
// presumably so leftover channels don't keep the Node.js process alive;
// confirm against the WPT suites that create MessageChannels.
globalThis.MessageChannel = class extends MessageChannel {
  constructor () {
    super()
    setTimeout(() => {
      this.port1.close()
      this.port2.close()
      this.port1.onmessage = this.port2.onmessage = null
    }, 100)
  }
}
// Pull in the WPT FileAPI suites (served from wpt.live) plus our own tests.
// These imports must be awaited: as bare floating promises, the final
// `hasFailed` check below ran before any suite had even loaded, so a failing
// run could never reach process.exit(1).
await import('https://wpt.live/FileAPI/file/File-constructor.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-constructor.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-array-buffer.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-slice-overflow.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-slice.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-stream.any.js')
await import('https://wpt.live/FileAPI/blob/Blob-text.any.js')
await import('./own-misc-test.js')
// Exit non-zero so CI notices failures recorded by the result callback above
hasFailed && process.exit(1)