readdirp-3.3.0/.github/workflows/lint.yml

name: Lint
on: [push, pull_request]
env:
  CI: true
jobs:
  dtslint:
    runs-on: ubuntu-latest
    steps:
      - name: Clone repository
        uses: actions/checkout@v1
      - name: Set up Node.js
        uses: actions/setup-node@v1
        with:
          node-version: "12"
      - run: npm install
      - name: Run dtslint
        run: npm run dtslint
  eslint:
    runs-on: ubuntu-latest
    steps:
      - name: Clone repository
        uses: actions/checkout@v1
      - name: Set up Node.js
        uses: actions/setup-node@v1
        with:
          node-version: "12"
      - run: npm install
      - name: Run ESLint
        run: npm run lint

readdirp-3.3.0/.github/workflows/nodejs.yml

name: Node CI
on: [push, pull_request]
env:
  CI: true
jobs:
  build:
    name: node ${{ matrix.node }} @ ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        node: [8, 10, 12]
        os: [ubuntu-latest, windows-latest, macOS-latest]
    steps:
      - uses: actions/checkout@v1
      - name: Use Node.js ${{ matrix.node }}
        uses: actions/setup-node@v1
        with:
          node-version: ${{ matrix.node }}
      - run: npm install
      - name: Run mocha tests
        run: npm run nyc -- npm run mocha

readdirp-3.3.0/.gitignore

*.seed
*.log
*.csv
*.dat
*.out
*.pid
*.gz
/.nyc_output/
/coverage/
/lib-cov/
/node_modules/
/test-fixtures/
pids
logs
results
npm-debug.log

readdirp-3.3.0/.npmrc

package-lock=false

readdirp-3.3.0/LICENSE

MIT License

Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com)

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
readdirp-3.3.0/README.md

# readdirp [![Weekly downloads](https://img.shields.io/npm/dw/readdirp.svg)](https://github.com/paulmillr/readdirp)

> Recursive version of [fs.readdir](https://nodejs.org/api/fs.html#fs_fs_readdir_path_options_callback). Exposes a **stream API** and a **promise API**.

[![NPM](https://nodei.co/npm/readdirp.png)](https://www.npmjs.com/package/readdirp)

```sh
npm install readdirp
```

```javascript
const readdirp = require('readdirp');

// Use streams to achieve small RAM & CPU footprint.
// 1) Streams example with for-await.
for await (const entry of readdirp('.')) {
  const {path} = entry;
  console.log(`${JSON.stringify({path})}`);
}

// 2) Streams example, non for-await.
// Print out all JS files along with their size within the current folder & subfolders.
readdirp('.', {fileFilter: '*.js', alwaysStat: true})
  .on('data', (entry) => {
    const {path, stats: {size}} = entry;
    console.log(`${JSON.stringify({path, size})}`);
  })
  // Optionally call stream.destroy() in `warn()` in order to abort and cause 'close' to be emitted
  .on('warn', error => console.error('non-fatal error', error))
  .on('error', error => console.error('fatal error', error))
  .on('end', () => console.log('done'));

// 3) Promise example. More RAM and CPU than streams / for-await.
const files = await readdirp.promise('.');
console.log(files.map(file => file.path));

// Other options.
readdirp('test', {
  fileFilter: '*.js',
  directoryFilter: ['!.git', '!*modules'],
  // directoryFilter: (di) => di.basename.length === 9
  type: 'files_directories',
  depth: 1
});
```

For more examples, check out the `examples` directory.

## API

`const stream = readdirp(root[, options])` — **Stream API**

- Reads the given root recursively and returns a `stream` of [entry infos](#entryinfo).
- Can optionally be used like `for await (const entry of stream)` with node.js 10+ (`asyncIterator`).
- `on('data', (entry) => {})` [entry info](#entryinfo) for every file / dir.
- `on('warn', (error) => {})` non-fatal `Error` that prevents a file / dir from being processed. Example: inaccessible to the user.
- `on('error', (error) => {})` fatal `Error` which also ends the stream. Example: illegal options were passed.
- `on('end')` — we are done. Called when all entries were found and no more will be emitted.
- `on('close')` — stream is destroyed via `stream.destroy()`. Could be useful if you want to manually abort even on a non-fatal error. At that point the stream is no longer `readable` and no more entries, warnings or errors are emitted.
- To learn more about streams, consult the very detailed [nodejs streams documentation](https://nodejs.org/api/stream.html) or the [stream-handbook](https://github.com/substack/stream-handbook).

`const entries = await readdirp.promise(root[, options])` — **Promise API**. Returns a list of [entry infos](#entryinfo).

The first argument is always `root`, the path in which to start reading and recursing into subdirectories.

### options

- `fileFilter: ["*.js"]`: filter to include or exclude files. A `Function`, Glob string or Array of glob strings.
    - **Function**: a function that takes an entry info as a parameter and returns true to include or false to exclude the entry.
    - **Glob string**: a string (e.g., `*.js`) which is matched using [picomatch](https://github.com/micromatch/picomatch), so go there for more information. Globstars (`**`) are not supported since specifying a recursive pattern for an already recursive function doesn't make sense. Negated globs (as explained in the minimatch documentation) are allowed, e.g., `!*.txt` matches everything but text files.
    - **Array of glob strings**: either need to be all inclusive or all exclusive (negated) patterns otherwise an error is thrown. `['*.json', '*.js']` includes all JavaScript and JSON files. `['!.git', '!node_modules']` includes all directories except '.git' and 'node_modules'.
    - Directories that do not pass a filter will not be recursed into.
- `directoryFilter: ['!.git']`: filter to include/exclude directories found and to recurse into. Directories that do not pass a filter will not be recursed into.
- `depth: 5`: depth at which to stop recursing even if more subdirectories are found.
- `type: 'files'`: determines if data events on the stream should be emitted for `'files'` (default), `'directories'`, `'files_directories'`, or `'all'`. Setting to `'all'` will also include entries for other types of file descriptors like character devices, unix sockets and named pipes.
- `alwaysStat: false`: always return `stats` property for every file. Setting it to `true` can double readdir execution time - use it only when you need file `size`, `mtime` etc. Cannot be enabled on node <10.10.0.
- `lstat: false`: include symlink entries in the stream along with files. When `true`, `fs.lstat` will be used instead of `fs.stat`.

### `EntryInfo`

Has the following properties:

- `path: 'assets/javascripts/react.js'`: path to the file/directory (relative to given root)
- `fullPath: '/Users/dev/projects/app/assets/javascripts/react.js'`: full path to the file/directory found
- `basename: 'react.js'`: name of the file/directory
- `dirent: fs.Dirent`: built-in [dir entry object](https://nodejs.org/api/fs.html#fs_class_fs_dirent) - only with `alwaysStat: false`
- `stats: fs.Stats`: built-in [stat object](https://nodejs.org/api/fs.html#fs_class_fs_stats) - only with `alwaysStat: true`

## Changelog

- 3.3 (Dec 6, 2019) stabilizes RAM consumption and enables perf management with `highWaterMark` option. Fixes race conditions related to `for-await` looping.
- 3.2 (Oct 14, 2019) improves performance by 250% and makes streams implementation more idiomatic.
- 3.1 (Jul 7, 2019) brings `bigint` support to `stat` output on Windows. This is backwards-incompatible for some cases. Be careful. If you use it incorrectly, you'll see "TypeError: Cannot mix BigInt and other types, use explicit conversions".
- 3.0 brings huge performance improvements and stream backpressure support.
- Upgrading 2.x to 3.x:
    - Signature changed from `readdirp(options)` to `readdirp(root, options)`
    - Replaced callback API with promise API.
    - Renamed `entryType` option to `type`
    - Renamed `entryType: 'both'` to `'files_directories'`
    - `EntryInfo`
        - Renamed `stat` to `stats`
            - Emitted only when `alwaysStat: true`
            - `dirent` is emitted instead of `stats` by default with `alwaysStat: false`
        - Renamed `name` to `basename`
        - Removed `parentDir` and `fullParentDir` properties
- Supported node.js versions:
    - 3.x: node 8+
    - 2.x: node 0.6+

## License

Copyright (c) 2012-2019 Thorsten Lorenz, Paul Miller (https://paulmillr.com)

MIT License, see [LICENSE](LICENSE) file.
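Addendum (not part of the original README): the `warn` / `close` behaviour described in the API section can be combined to abort a traversal early, as the inline comment in the usage example suggests. A minimal sketch, using only the documented events:

const readdirp = require('readdirp');

// Stop walking on the first non-fatal problem (e.g. an unreadable directory)
// instead of merely logging it: destroy() ends readability and emits 'close'.
const stream = readdirp('.', {type: 'files'});
stream
  .on('data', entry => console.log(entry.path))
  .on('warn', error => {
    console.error('non-fatal error, aborting:', error.message);
    stream.destroy();
  })
  .on('error', error => console.error('fatal error', error))
  .on('close', () => console.log('stream closed, no more entries'));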
readdirp-3.3.0/examples/bench.js

/* eslint-disable no-unused-vars */
'use strict';
const readdirp = require('..');

function logMem(i) {
  const vals = Object.entries(process.memoryUsage()).map(([k, v]) => {
    return `${k}=${(`${(v / 1e6).toFixed(1)}M`).padEnd(7)}`;
  });
  console.log(String(i).padStart(6), ...vals);
}

const read = async (directory) => {
  const stream = readdirp(directory, {type: 'all'});
  let i = 0;
  const start = Date.now();
  let lap = 0;
  for await (const chunk of stream) {
    if (i % 1000 === 0) {
      const now = Date.now();
      if (now - lap > 500) {
        lap = now;
        logMem(i);
      }
    }
    i++;
  }
  logMem(i);
  console.log(`Processed ${i} files in ${Date.now() - start} msecs`);
};

read('../..');

readdirp-3.3.0/examples/grep.js

'use strict';
const {createReadStream} = require('fs');
const es = require('event-stream');
const readdirp = require('..');

const findLinesMatching = (searchTerm) => {
  return es.through(function (entry) {
    let lineno = 0;
    const matchingLines = [];
    const fileStream = this;
    createReadStream(entry.fullPath, {encoding: 'utf-8'})
      // handle file contents line by line
      .pipe(es.split('\n'))
      // filter, keep only the lines that matched the term
      .pipe(es.mapSync((line) => {
        lineno++;
        return ~line.indexOf(searchTerm) ? `${lineno}: ${line}` : undefined;
      }))
      // aggregate matching lines and delegate control back to the file stream
      .pipe(es.through(
        (data) => { matchingLines.push(data); },
        () => {
          // drop files that had no matches
          if (matchingLines.length) {
            const result = {file: entry, lines: matchingLines};
            fileStream.emit('data', result); // pass result on to file stream
          }
          this.emit('end');
        }));
  });
};

// create a stream of all javascript files found in this and all sub directories
// find all lines matching the term
// for each file (if none found, that file is ignored)
readdirp(__dirname, {fileFilter: '*.js'})
  .pipe(findLinesMatching('arguments'))
  .pipe(es.mapSync((res) => {
    // format the results and output
    return `\n\n${res.file.path}\n\t${res.lines.join('\n\t')}`;
  }))
  .pipe(process.stdout);

readdirp-3.3.0/examples/list.js

/* eslint-disable no-unused-vars */
'use strict';
const readdirp = require('..');

const read = async (directory) => {
  const stream = readdirp(directory, {type: 'all'});
  let i = 0;
  const start = Date.now();
  for await (const chunk of stream) {
    i++;
    // Check memory usage with this line. It should be 10MB or so.
    // Comment it out if you simply want to list files.
    // await new Promise(resolve => setTimeout(resolve, 500));
    // if (i % 100000 === 0) console.log(`${i}`, chunk);
  }
  console.log('finished', i, 'files in', Date.now() - start, 'ms');
  // const entries = await readdirp.promise(directory, {alwaysStat: false});
  // console.log('Promise done', entries.length);
};

read('../..');

readdirp-3.3.0/examples/types.ts

import readdirp from 'readdirp';

const read = async (directory: string) => {
  const stream = readdirp(directory, { type: 'all' });
  let i = 0;
  for await (const chunk of stream) {
    // Check memory usage with this line. It should be 10MB or so.
    // Comment it out if you simply want to list files.
    await new Promise(resolve => setTimeout(resolve, 500));
    console.log(`${++i}: ${chunk.path}`);
  }
  console.log('Stream done', i);

  const entries = await readdirp.promise(directory);
  console.log('Promise done', entries.map(e => e.path));
};

read(__dirname);

readdirp-3.3.0/index.d.ts

// TypeScript Version: 3.2

/// <reference types="node" />

import * as fs from 'fs';
import { Readable } from 'stream';

declare namespace readdir {
  interface EntryInfo {
    path: string;
    fullPath: string;
    basename: string;
    stats?: fs.Stats;
    dirent?: fs.Dirent;
  }

  interface ReaddirpOptions {
    root?: string;
    fileFilter?: string | string[] | ((entry: EntryInfo) => boolean);
    directoryFilter?: string | string[] | ((entry: EntryInfo) => boolean);
    type?: 'files' | 'directories' | 'files_directories' | 'all';
    lstat?: boolean;
    depth?: number;
    alwaysStat?: boolean;
  }

  interface ReaddirpStream extends Readable, AsyncIterable<EntryInfo> {
    read(): EntryInfo;
    [Symbol.asyncIterator](): AsyncIterableIterator<EntryInfo>;
  }

  function promise(
    root: string,
    options?: ReaddirpOptions
  ): Promise<EntryInfo[]>;
}

declare function readdir(
  root: string,
  options?: readdir.ReaddirpOptions
): readdir.ReaddirpStream;

export = readdir;

readdirp-3.3.0/index.js

'use strict';

const fs = require('fs');
const { Readable } = require('stream');
const sysPath = require('path');
const { promisify } = require('util');
const picomatch = require('picomatch');

const readdir = promisify(fs.readdir);
const stat = promisify(fs.stat);
const lstat = promisify(fs.lstat);

/**
 * @typedef {Object} EntryInfo
 * @property {String} path
 * @property {String} fullPath
 * @property {fs.Stats=} stats
 * @property {fs.Dirent=} dirent
 * @property {String} basename
 */

const BANG = '!';
const NORMAL_FLOW_ERRORS = new Set(['ENOENT', 'EPERM', 'EACCES', 'ELOOP']);
const FILE_TYPE = 'files';
const DIR_TYPE = 'directories';
const FILE_DIR_TYPE = 'files_directories';
const EVERYTHING_TYPE = 'all';
const ALL_TYPES = [FILE_TYPE, DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE];

const isNormalFlowError = error => NORMAL_FLOW_ERRORS.has(error.code);

const normalizeFilter = filter => {
  if (filter === undefined) return;
  if (typeof filter === 'function') return filter;

  if (typeof filter === 'string') {
    const glob = picomatch(filter.trim());
    return entry => glob(entry.basename);
  }

  if (Array.isArray(filter)) {
    const positive = [];
    const negative = [];
    for (const item of filter) {
      const trimmed = item.trim();
      if (trimmed.charAt(0) === BANG) {
        negative.push(picomatch(trimmed.slice(1)));
      } else {
        positive.push(picomatch(trimmed));
      }
    }

    if (negative.length > 0) {
      if (positive.length > 0) {
        return entry =>
          positive.some(f => f(entry.basename)) && !negative.some(f => f(entry.basename));
      }
      return entry => !negative.some(f => f(entry.basename));
    }
    return entry => positive.some(f => f(entry.basename));
  }
};

class ReaddirpStream extends Readable {
  static get defaultOptions() {
    return {
      root: '.',
      /* eslint-disable no-unused-vars */
      fileFilter: (path) => true,
      directoryFilter: (path) => true,
      /* eslint-enable no-unused-vars */
      type: FILE_TYPE,
      lstat: false,
      depth: 2147483648,
      alwaysStat: false
    };
  }

  constructor(options = {}) {
    super({
      objectMode: true,
      autoDestroy: true,
      highWaterMark: options.highWaterMark || 4096
    });
    const opts = { ...ReaddirpStream.defaultOptions, ...options };
    const { root, type } = opts;

    this._fileFilter = normalizeFilter(opts.fileFilter);
    this._directoryFilter = normalizeFilter(opts.directoryFilter);

    const statMethod = opts.lstat ? lstat : stat;
    // Use bigint stats if it's windows and stat() supports options (node 10+).
    if (process.platform === 'win32' && stat.length === 3) {
      this._stat = path => statMethod(path, { bigint: true });
    } else {
      this._stat = statMethod;
    }

    this._maxDepth = opts.depth;
    this._wantsDir = [DIR_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
    this._wantsFile = [FILE_TYPE, FILE_DIR_TYPE, EVERYTHING_TYPE].includes(type);
    this._wantsEverything = type === EVERYTHING_TYPE;
    this._root = sysPath.resolve(root);
    this._isDirent = ('Dirent' in fs) && !opts.alwaysStat;
    this._statsProp = this._isDirent ? 'dirent' : 'stats';
    this._rdOptions = { encoding: 'utf8', withFileTypes: this._isDirent };

    // Launch stream with one parent, the root dir.
    try {
      this.parents = [this._exploreDir(root, 1)];
    } catch (error) {
      this.destroy(error);
    }
    this.reading = false;
    this.parent = undefined;
  }

  async _read(batch) {
    if (this.reading) return;
    this.reading = true;

    try {
      while (!this.destroyed && batch > 0) {
        const { path, depth, files = [] } = this.parent || {};

        if (files.length > 0) {
          const slice = files.splice(0, batch).map(dirent => this._formatEntry(dirent, path));
          for (const entry of await Promise.all(slice)) {
            if (this._isDirAndMatchesFilter(entry)) {
              if (depth <= this._maxDepth) {
                this.parents.push(this._exploreDir(entry.fullPath, depth + 1));
              }

              if (this._wantsDir) {
                this.push(entry);
                batch--;
              }
            } else if (this._isFileAndMatchesFilter(entry)) {
              if (this._wantsFile) {
                this.push(entry);
                batch--;
              }
            }
          }
        } else {
          const parent = this.parents.pop();
          if (!parent) {
            this.push(null);
            break;
          }
          this.parent = await parent;
        }
      }
    } catch (error) {
      this.destroy(error);
    } finally {
      this.reading = false;
    }
  }

  async _exploreDir(path, depth) {
    let files;
    try {
      files = await readdir(path, this._rdOptions);
    } catch (error) {
      this._onError(error);
    }
    return {files, depth, path};
  }

  async _formatEntry(dirent, path) {
    const basename = this._isDirent ? dirent.name : dirent;
    const fullPath = sysPath.resolve(sysPath.join(path, basename));
    const entry = {path: sysPath.relative(this._root, fullPath), fullPath, basename};
    try {
      entry[this._statsProp] = this._isDirent ? dirent : await this._stat(fullPath);
    } catch (err) {
      this._onError(err);
    }
    return entry;
  }

  _onError(err) {
    if (isNormalFlowError(err) && !this.destroyed) {
      this.emit('warn', err);
    } else {
      throw err;
    }
  }

  _isDirAndMatchesFilter(entry) {
    // entry may be undefined, because a warning or an error were emitted
    // and the statsProp is undefined
    const stats = entry && entry[this._statsProp];
    return stats && stats.isDirectory() && this._directoryFilter(entry);
  }

  _isFileAndMatchesFilter(entry) {
    const stats = entry && entry[this._statsProp];
    const isFileType = stats && (
      (this._wantsEverything && !stats.isDirectory()) ||
      (stats.isFile() || stats.isSymbolicLink())
    );
    return isFileType && this._fileFilter(entry);
  }
}

/**
 * @typedef {Object} ReaddirpArguments
 * @property {Function=} fileFilter
 * @property {Function=} directoryFilter
 * @property {String=} type
 * @property {Number=} depth
 * @property {String=} root
 * @property {Boolean=} lstat
 * @property {Boolean=} bigint
 */

/**
 * Main function which ends up calling readdirRec and reads all files and directories in given root recursively.
 * @param {String} root Root directory
 * @param {ReaddirpArguments=} options Options to specify root (start directory), filters and recursion depth
 */
const readdirp = (root, options = {}) => {
  let type = options.entryType || options.type;
  if (type === 'both') type = FILE_DIR_TYPE; // backwards-compatibility
  if (type) options.type = type;
  if (!root) {
    throw new Error('readdirp: root argument is required. Usage: readdirp(root, options)');
  } else if (typeof root !== 'string') {
    throw new TypeError('readdirp: root argument must be a string. Usage: readdirp(root, options)');
  } else if (type && !ALL_TYPES.includes(type)) {
    throw new Error(`readdirp: Invalid type passed. Use one of ${ALL_TYPES.join(', ')}`);
  }

  options.root = root;
  return new ReaddirpStream(options);
};

const readdirpPromise = (root, options = {}) => {
  return new Promise((resolve, reject) => {
    const files = [];
    readdirp(root, options)
      .on('data', entry => files.push(entry))
      .on('end', () => resolve(files))
      .on('error', error => reject(error));
  });
};

readdirp.promise = readdirpPromise;
readdirp.ReaddirpStream = ReaddirpStream;
readdirp.default = readdirp;

module.exports = readdirp;
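The filters normalized above do not have to be globs; `normalizeFilter` passes functions through untouched. A minimal sketch of the function-filter and promise paths together (illustrative only, not shipped with the package):

'use strict';
const readdirp = require('readdirp');

(async () => {
  // Shallow listing of .js files, skipping node_modules, using plain callbacks
  // instead of glob strings; entry.basename is available to both filters.
  const entries = await readdirp.promise('.', {
    type: 'files',
    depth: 1,
    fileFilter: entry => entry.basename.endsWith('.js'),
    directoryFilter: entry => entry.basename !== 'node_modules'
  });
  console.log(entries.map(entry => entry.path));
})();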
readdirp-3.3.0/package.json

{
  "name": "readdirp",
  "description": "Recursive version of fs.readdir with streaming API.",
  "version": "3.3.0",
  "homepage": "https://github.com/paulmillr/readdirp",
  "repository": {
    "type": "git",
    "url": "git://github.com/paulmillr/readdirp.git"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/paulmillr/readdirp/issues"
  },
  "author": "Thorsten Lorenz (thlorenz.com)",
  "contributors": [
    "Thorsten Lorenz (thlorenz.com)",
    "Paul Miller (https://paulmillr.com)"
  ],
  "main": "index.js",
  "engines": {
    "node": ">=8.10.0"
  },
  "files": [
    "index.js",
    "index.d.ts"
  ],
  "keywords": [
    "recursive",
    "fs",
    "stream",
    "streams",
    "readdir",
    "filesystem",
    "find",
    "filter"
  ],
  "scripts": {
    "dtslint": "dtslint",
    "nyc": "nyc",
    "mocha": "mocha --exit",
    "lint": "eslint --report-unused-disable-directives --ignore-path .gitignore .",
    "test": "npm run lint && nyc npm run mocha"
  },
  "dependencies": {
    "picomatch": "^2.0.7"
  },
  "devDependencies": {
    "@types/node": "^12",
    "chai": "^4.2",
    "chai-subset": "^1.6",
    "dtslint": "^2.0.0",
    "eslint": "^6.6.0",
    "mocha": "^6.2.2",
    "nyc": "^14.1.1",
    "rimraf": "^3.0.0"
  },
  "nyc": {
    "reporter": [
      "html",
      "text"
    ]
  },
  "eslintConfig": {
    "root": true,
    "extends": "eslint:recommended",
    "parserOptions": {
      "ecmaVersion": 9,
      "sourceType": "script"
    },
    "env": {
      "node": true,
      "es6": true
    },
    "rules": {
      "array-callback-return": "error",
      "no-empty": ["error", { "allowEmptyCatch": true }],
      "no-else-return": ["error", { "allowElseIf": false }],
      "no-lonely-if": "error",
      "no-var": "error",
      "object-shorthand": "error",
      "prefer-arrow-callback": ["error", { "allowNamedFunctions": true }],
      "prefer-const": ["error", { "ignoreReadBeforeAssign": true }],
      "prefer-destructuring": ["error", { "object": true, "array": false }],
      "prefer-spread": "error",
      "prefer-template": "error",
      "radix": "error",
      "semi": "error",
      "strict": "error",
      "quotes": ["error", "single"]
    }
  }
}
readdirp-3.3.0/test.js

/* eslint-env mocha */
'use strict';

const fs = require('fs');
const sysPath = require('path');
const {Readable} = require('stream');
const {promisify} = require('util');
const chai = require('chai');
const chaiSubset = require('chai-subset');
const rimraf = require('rimraf');
const readdirp = require('.');

chai.use(chaiSubset);
chai.should();

const pRimraf = promisify(rimraf);
const mkdir = promisify(fs.mkdir);
const symlink = promisify(fs.symlink);
const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
const supportsDirent = 'Dirent' in fs;
const isWindows = process.platform === 'win32';
const root = sysPath.join(__dirname, 'test-fixtures');

let testCount = 0;
let currPath;

const read = async (options) => readdirp.promise(currPath, options);

const touch = async (files = [], dirs = []) => {
  for (const name of files) {
    await writeFile(sysPath.join(currPath, name), `${Date.now()}`);
  }
  for (const dir of dirs) {
    await mkdir(sysPath.join(currPath, dir));
  }
};

const formatEntry = (file, dir = root) => {
  return {
    basename: sysPath.basename(file),
    path: sysPath.normalize(file),
    fullPath: sysPath.join(dir, file)
  };
};

const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

const waitForEnd = stream => new Promise(resolve => stream.on('end', resolve));

beforeEach(async () => {
  testCount++;
  currPath = sysPath.join(root, testCount.toString());
  await pRimraf(currPath);
  await mkdir(currPath);
});

afterEach(async () => {
  await pRimraf(currPath);
});

before(async () => {
  await pRimraf(root);
  await mkdir(root);
});

after(async () => {
  await pRimraf(root);
});

describe('basic', () => {
  it('reads directory', async () => {
    const files = ['a.txt', 'b.txt', 'c.txt'];
    await touch(files);
    const res = await read();
    res.should.have.lengthOf(files.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(files[index], currPath))
    );
  });
});

describe('symlinks', () => {
  // not using arrow function, because this.skip
  before(function() {
    // GitHub Actions / default Windows installation disable symlink support unless admin
    if (isWindows) this.skip();
  });

  it('handles symlinks', async () => {
    const newPath = sysPath.join(currPath, 'test-symlinked.js');
    await symlink(sysPath.join(__dirname, 'test.js'), newPath);
    const res = await read();
    const first = res[0];
    first.should.containSubset(formatEntry('test-symlinked.js', currPath));
    const contents = await readFile(first.fullPath);
    contents.should.match(/handles symlinks/); // name of this test
  });

  it('should use lstat instead of stat', async () => {
    const files = ['a.txt', 'b.txt', 'c.txt'];
    const symlinkName = 'test-symlinked.js';
    const newPath = sysPath.join(currPath, symlinkName);
    await symlink(sysPath.join(__dirname, 'test.js'), newPath);
    await touch(files);
    const expect = [...files, symlinkName];
    const res = await read({lstat: true, alwaysStat: true});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) => {
      entry.should.containSubset(formatEntry(expect[index], currPath, false));
      entry.should.include.own.key('stats');
      if (entry.basename === symlinkName) {
        entry.stats.isSymbolicLink().should.equals(true);
      }
    });
  });
});

describe('type', () => {
  const files = ['a.txt', 'b.txt', 'c.txt'];
  const dirs = ['d', 'e', 'f', 'g'];

  it('files', async () => {
    await touch(files, dirs);
    const res = await read({type: 'files'});
    res.should.have.lengthOf(files.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(files[index], currPath))
    );
  });

  it('directories', async () => {
    await touch(files, dirs);
    const res = await read({type: 'directories'});
    res.should.have.lengthOf(dirs.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(dirs[index], currPath))
    );
  });

  it('both', async () => {
    await touch(files, dirs);
    const res = await read({type: 'both'});
    const both = files.concat(dirs);
    res.should.have.lengthOf(both.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(both[index], currPath))
    );
  });

  it('all', async () => {
    await touch(files, dirs);
    const res = await read({type: 'all'});
    const all = files.concat(dirs);
    res.should.have.lengthOf(all.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(all[index], currPath))
    );
  });

  it('invalid', async () => {
    try {
      await read({type: 'bogus'});
    } catch (error) {
      error.message.should.match(/Invalid type/);
    }
  });
});

describe('depth', () => {
  const depth0 = ['a.js', 'b.js', 'c.js'];
  const subdirs = ['subdir', 'deep'];
  const depth1 = ['subdir/d.js', 'deep/e.js'];
  const deepSubdirs = ['subdir/s1', 'subdir/s2', 'deep/d1', 'deep/d2'];
  const depth2 = ['subdir/s1/f.js', 'deep/d1/h.js'];

  beforeEach(async () => {
    await touch(depth0, subdirs);
    await touch(depth1, deepSubdirs);
    await touch(depth2);
  });

  it('0', async () => {
    const res = await read({depth: 0});
    res.should.have.lengthOf(depth0.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(depth0[index], currPath))
    );
  });

  it('1', async () => {
    const res = await read({depth: 1});
    const expect = [...depth0, ...depth1];
    res.should.have.lengthOf(expect.length);
    res
      .sort((a, b) => a.basename > b.basename ? 1 : -1)
      .forEach((entry, index) =>
        entry.should.containSubset(formatEntry(expect[index], currPath))
      );
  });

  it('2', async () => {
    const res = await read({depth: 2});
    const expect = [...depth0, ...depth1, ...depth2];
    res.should.have.lengthOf(expect.length);
    res
      .sort((a, b) => a.basename > b.basename ? 1 : -1)
      .forEach((entry, index) =>
        entry.should.containSubset(formatEntry(expect[index], currPath))
      );
  });

  it('default', async () => {
    const res = await read();
    const expect = [...depth0, ...depth1, ...depth2];
    res.should.have.lengthOf(expect.length);
    res
      .sort((a, b) => a.basename > b.basename ? 1 : -1)
      .forEach((entry, index) =>
        entry.should.containSubset(formatEntry(expect[index], currPath))
      );
  });
});

describe('filtering', () => {
  beforeEach(async () => {
    await touch(['a.js', 'b.txt', 'c.js', 'd.js', 'e.rb']);
  });

  it('glob', async () => {
    const expect1 = ['a.js', 'c.js', 'd.js'];
    const res = await read({fileFilter: '*.js'});
    res.should.have.lengthOf(expect1.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect1[index], currPath))
    );

    const res2 = await read({fileFilter: ['*.js']});
    res2.should.have.lengthOf(expect1.length);
    res2.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect1[index], currPath))
    );

    const expect2 = ['b.txt'];
    const res3 = await read({fileFilter: ['*.txt']});
    res3.should.have.lengthOf(expect2.length);
    res3.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect2[index], currPath))
    );
  });

  it('leading and trailing spaces', async () => {
    const expect = ['a.js', 'c.js', 'd.js', 'e.rb'];
    const res = await read({fileFilter: [' *.js', '*.rb ']});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );
  });

  it('multiple glob', async () => {
    const expect = ['a.js', 'b.txt', 'c.js', 'd.js'];
    const res = await read({fileFilter: ['*.js', '*.txt']});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );
  });

  it('negated glob', async () => {
    const expect = ['a.js', 'b.txt', 'c.js', 'e.rb'];
    const res = await read({fileFilter: ['!d.js']});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );
  });

  it('glob & negated glob', async () => {
    const expect = ['a.js', 'c.js'];
    const res = await read({fileFilter: ['*.js', '!d.js']});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );
  });

  it('two negated glob', async () => {
    const expect = ['b.txt'];
    const res = await read({fileFilter: ['!*.js', '!*.rb']});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );
  });

  it('function', async () => {
    const expect = ['a.js', 'c.js', 'd.js'];
    const res = await read({fileFilter: (entry) => sysPath.extname(entry.fullPath) === '.js'});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(expect[index], currPath))
    );

    if (supportsDirent) {
      const expect2 = ['a.js', 'b.txt', 'c.js', 'd.js', 'e.rb'];
      const res2 = await read({fileFilter: (entry) => entry.dirent.isFile()});
      res2.should.have.lengthOf(expect2.length);
      res2.forEach((entry, index) =>
        entry.should.containSubset(formatEntry(expect2[index], currPath))
      );
    }
  });

  it('function with stats', async () => {
    const expect = ['a.js', 'c.js', 'd.js'];
    const res = await read({alwaysStat: true, fileFilter: (entry) => sysPath.extname(entry.fullPath) === '.js'});
    res.should.have.lengthOf(expect.length);
    res.forEach((entry, index) => {
      entry.should.containSubset(formatEntry(expect[index], currPath));
      entry.should.include.own.key('stats');
    });

    const expect2 = ['a.js', 'b.txt', 'c.js', 'd.js', 'e.rb'];
    const res2 = await read({alwaysStat: true, fileFilter: (entry) => entry.stats.size > 0});
    res2.should.have.lengthOf(expect2.length);
    res2.forEach((entry, index) => {
      entry.should.containSubset(formatEntry(expect2[index], currPath));
      entry.should.include.own.key('stats');
    });
  });
});

describe('various', () => {
  it('emits readable stream', () => {
    const stream = readdirp(currPath);
    stream.should.be.an.instanceof(Readable);
    stream.should.be.an.instanceof(readdirp.ReaddirpStream);
  });

  it('fails without root option passed', async () => {
    try {
      readdirp();
    } catch (error) {
      error.should.be.an.instanceof(Error);
    }
  });

  it('disallows old API', () => {
    try {
      readdirp({root: '.'});
    } catch (error) {
      error.should.be.an.instanceof(Error);
    }
  });

  it('exposes promise API', async () => {
    const created = ['a.txt', 'c.txt'];
    await touch(created);
    const result = await readdirp.promise(currPath);
    result.should.have.lengthOf(created.length);
    result.forEach((entry, index) =>
      entry.should.containSubset(formatEntry(created[index], currPath))
    );
  });

  it('should emit warning for missing file', async () => {
    // readdirp() is initialized on some big root directory
    // readdirp() receives path a/b/c to its queue
    // readdirp is reading something else
    // a/b gets deleted, so stat()-ting a/b/c would now emit enoent
    // We should emit warnings for this case.
    // this.timeout(4000);
    fs.mkdirSync(sysPath.join(currPath, 'a'));
    fs.mkdirSync(sysPath.join(currPath, 'b'));
    fs.mkdirSync(sysPath.join(currPath, 'c'));
    let isWarningCalled = false;
    const stream = readdirp(currPath, {type: 'all', highWaterMark: 1});
    stream
      .on('warn', warning => {
        warning.should.be.an.instanceof(Error);
        warning.code.should.equals('ENOENT');
        isWarningCalled = true;
      });
    await delay(1000);
    await pRimraf(sysPath.join(currPath, 'a'));
    stream.resume();
    await Promise.race([
      waitForEnd(stream),
      delay(2000)
    ]);
    isWarningCalled.should.equals(true);
  }).timeout(4000);

  it('should emit warning for file with strict permission', async () => {
    // Windows doesn't throw permission error if you access permitted directory
    if (isWindows) {
      return true;
    }
    const permitedDir = sysPath.join(currPath, 'permited');
    fs.mkdirSync(permitedDir, 0o0);
    let isWarningCalled = false;
    const stream = readdirp(currPath, {type: 'all'})
      .on('data', () => {})
      .on('warn', warning => {
        warning.should.be.an.instanceof(Error);
        warning.code.should.equals('EACCES');
        isWarningCalled = true;
      });
    await Promise.race([
      waitForEnd(stream),
      delay(2000)
    ]);
    isWarningCalled.should.equals(true);
  });

  it('should not emit warning after "end" event', async () => {
    // Windows doesn't throw permission error if you access permitted directory
    if (isWindows) {
      return true;
    }
    const subdir = sysPath.join(currPath, 'subdir');
    const permitedDir = sysPath.join(subdir, 'permited');
    fs.mkdirSync(subdir);
    fs.mkdirSync(permitedDir, 0o0);
    let isWarningCalled = false;
    let isEnded = false;
    let timer;
    const stream = readdirp(currPath, {type: 'all'})
      .on('data', () => {})
      .on('warn', warning => {
        warning.should.be.an.instanceof(Error);
        warning.code.should.equals('EACCES');
        isEnded.should.equals(false);
        isWarningCalled = true;
        clearTimeout(timer);
      })
      .on('end', () => {
        isWarningCalled.should.equals(true);
        isEnded = true;
      });
    await Promise.race([
      waitForEnd(stream),
      delay(2000)
    ]);
    isWarningCalled.should.equals(true);
    isEnded.should.equals(true);
  });
});

readdirp-3.3.0/tsconfig.json

{
  "compilerOptions": {
    "module": "commonjs",
    "lib": ["es2018", "esnext.asynciterable"],
    "noImplicitAny": true,
    "noImplicitThis": true,
    "strictNullChecks": true,
    "strictFunctionTypes": true,
    "noEmit": true,
    "esModuleInterop": true,
    "baseUrl": ".",
    "paths": {
      "readdirp": ["."]
    }
  }
}
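A closing note tying the warning tests above back to index.js: only the codes in `NORMAL_FLOW_ERRORS` (ENOENT, EPERM, EACCES, ELOOP) are re-emitted as `warn`; anything else destroys the stream and arrives as `error`. A minimal consumer sketch, not part of the repository:

const readdirp = require('readdirp');

// Tally skipped paths the way the warning tests assert them: each warning is
// an Error carrying a code such as ENOENT or EACCES, and the walk continues.
const skipped = [];
readdirp('.', {type: 'all'})
  .on('data', () => {})
  .on('warn', error => skipped.push(`${error.code}: ${error.path || '(unknown path)'}`))
  .on('error', error => console.error('fatal:', error))
  .on('end', () => console.log('done, skipped:', skipped));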