package/LICENSE

The MIT License (MIT)

Copyright (c) 2013 Gareth Jones

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

package/lib/DateRollingFileStream.js

const RollingFileWriteStream = require('./RollingFileWriteStream');

// just to adapt the previous version
class DateRollingFileStream extends RollingFileWriteStream {
  constructor(filename, pattern, options) {
    // pattern is optional; if an object was passed instead, the caller skipped it
    if (pattern && typeof(pattern) === 'object') {
      options = pattern;
      pattern = null;
    }
    if (!options) {
      options = {};
    }
    if (!pattern) {
      pattern = 'yyyy-MM-dd';
    }
    options.pattern = pattern;
    if (!options.numBackups && options.numBackups !== 0) {
      if (!options.daysToKeep && options.daysToKeep !== 0) {
        options.daysToKeep = 1;
      } else {
        process.emitWarning(
          "options.daysToKeep is deprecated due to the confusion it causes when used " +
            "together with file size rolling. Please use options.numBackups instead.",
          "DeprecationWarning",
          "StreamRoller0001"
        );
      }
      options.numBackups = options.daysToKeep;
    } else {
      options.daysToKeep = options.numBackups;
    }
    super(filename, options);
    this.mode = this.options.mode;
  }

  get theStream() {
    return this.currentFileStream;
  }
}

module.exports = DateRollingFileStream;

package/lib/fileNameFormatter.js

const debug = require("debug")("streamroller:fileNameFormatter");
const path = require("path");

const ZIP_EXT = ".gz";
const DEFAULT_FILENAME_SEP = ".";

module.exports = ({
  file,
  keepFileExt,
  needsIndex,
  alwaysIncludeDate,
  compress,
  fileNameSep
}) => {
  let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP;
  const dirAndName = path.join(file.dir, file.name);

  const ext = f => f + file.ext;

  const index = (f, i, d) =>
    (needsIndex || !d) && i ? f + FILENAME_SEP + i : f;

  const date = (f, i, d) => {
    return (i > 0 || alwaysIncludeDate) && d ? f + FILENAME_SEP + d : f;
  };

  const gzip = (f, i) => (i && compress ? f + ZIP_EXT : f);

  // each part appends its fragment if relevant; the order differs
  // depending on whether the file extension should stay at the end
  const parts = keepFileExt
    ? [date, index, ext, gzip]
    : [ext, date, index, gzip];

  return ({ date, index }) => {
    debug(`_formatFileName: date=${date}, index=${index}`);
    return parts.reduce(
      (filename, part) => part(filename, index, date),
      dirAndName
    );
  };
};

package/lib/fileNameParser.js

const debug = require("debug")("streamroller:fileNameParser");
const ZIP_EXT = ".gz";
const format = require("date-format");

const DEFAULT_FILENAME_SEP = ".";

module.exports = ({ file, keepFileExt, pattern, fileNameSep }) => {
  let FILENAME_SEP = fileNameSep || DEFAULT_FILENAME_SEP;

  // All these functions take two arguments: f, the filename, and p, the result placeholder.
  // They return the filename with any matching parts removed.
  // The "zip" function, for instance, removes the ".gz" part of the filename (if present).
  const zip = (f, p) => {
    if (f.endsWith(ZIP_EXT)) {
      debug("it is gzipped");
      p.isCompressed = true;
      return f.slice(0, -1 * ZIP_EXT.length);
    }
    return f;
  };

  const __NOT_MATCHING__ = "__NOT_MATCHING__";

  const extAtEnd = f => {
    if (f.startsWith(file.name) && f.endsWith(file.ext)) {
      debug("it starts and ends with the right things");
      return f.slice(file.name.length + 1, -1 * file.ext.length);
    }
    return __NOT_MATCHING__;
  };

  const extInMiddle = f => {
    if (f.startsWith(file.base)) {
      debug("it starts with the right things");
      return f.slice(file.base.length + 1);
    }
    return __NOT_MATCHING__;
  };

  const dateAndIndex = (f, p) => {
    const items = f.split(FILENAME_SEP);
    let indexStr = items[items.length - 1];
    debug("items: ", items, ", indexStr: ", indexStr);
    let dateStr = f;
    if (indexStr !== undefined && indexStr.match(/^\d+$/)) {
      dateStr = f.slice(0, -1 * (indexStr.length + 1));
      debug(`dateStr is ${dateStr}`);
      if (pattern && !dateStr) {
        dateStr = indexStr;
        indexStr = "0";
      }
    } else {
      indexStr = "0";
    }

    try {
      // Two arguments for new Date() are intentional. This will set other date
      // components to minimal values in the current timezone instead of UTC,
      // as new Date(0) would do.
      const date = format.parse(pattern, dateStr, new Date(0, 0));
      if (format.asString(pattern, date) !== dateStr) return f;
      p.index = parseInt(indexStr, 10);
      p.date = dateStr;
      p.timestamp = date.getTime();
      return "";
    } catch (e) {
      // not a valid date, don't panic
      debug(`Problem parsing ${dateStr} as ${pattern}, error was: `, e);
      return f;
    }
  };

  const index = (f, p) => {
    if (f.match(/^\d+$/)) {
      debug("it has an index");
      p.index = parseInt(f, 10);
      return "";
    }
    return f;
  };

  let parts = [
    zip,
    keepFileExt ? extAtEnd : extInMiddle,
    pattern ? dateAndIndex : index
  ];

  return filename => {
    let result = { filename, index: 0, isCompressed: false };
    // pass the filename through each of the file part parsers
    let whatsLeftOver = parts.reduce(
      (remains, part) => part(remains, result),
      filename
    );
    // if there's anything left after parsing, then it wasn't a valid filename
    return whatsLeftOver ? null : result;
  };
};
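A quick round-trip sketch (illustrative only, not part of the package): the formatter composes a rolled file name from a date and an index, and the parser decomposes it again. The paths and option values below are assumptions for the example, and requiring the lib/ modules directly is not part of the public API.

    const path = require("path");
    const fileNameFormatter = require("streamroller/lib/fileNameFormatter");
    const fileNameParser = require("streamroller/lib/fileNameParser");

    const formatName = fileNameFormatter({
      file: path.parse("/var/log/app.log"),
      keepFileExt: false,
      needsIndex: true,         // size-based rolling is also enabled
      alwaysIncludeDate: false,
      compress: true
    });
    // the hot file (index 0) gets no suffixes when alwaysIncludeDate is false
    console.log(formatName({ index: 0, date: "2013-08-29" })); // /var/log/app.log
    // backups get date, index and compression markers appended in order
    console.log(formatName({ index: 2, date: "2013-08-29" })); // /var/log/app.log.2013-08-29.2.gz

    const parseName = fileNameParser({
      file: path.parse("/var/log/app.log"),
      keepFileExt: false,
      pattern: "yyyy-MM-dd"
    });
    console.log(parseName("app.log.2013-08-29.2.gz"));
    // { filename: 'app.log.2013-08-29.2.gz', index: 2, isCompressed: true,
    //   date: '2013-08-29', timestamp: <ms for 2013-08-29 local time> }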
package/lib/index.js

module.exports = {
  RollingFileWriteStream: require('./RollingFileWriteStream'),
  RollingFileStream: require('./RollingFileStream'),
  DateRollingFileStream: require('./DateRollingFileStream')
};

package/lib/moveAndMaybeCompressFile.js

const debug = require('debug')('streamroller:moveAndMaybeCompressFile');
const fs = require('fs-extra');
const zlib = require('zlib');

const _parseOption = function(rawOptions){
  const defaultOptions = {
    mode: parseInt("0600", 8),
    compress: false,
  };
  const options = Object.assign({}, defaultOptions, rawOptions);
  debug(`_parseOption: moveAndMaybeCompressFile called with option=${JSON.stringify(options)}`);
  return options;
};

const moveAndMaybeCompressFile = async (
  sourceFilePath,
  targetFilePath,
  options
) => {
  options = _parseOption(options);

  if (sourceFilePath === targetFilePath) {
    debug(`moveAndMaybeCompressFile: source and target are the same, not doing anything`);
    return;
  }

  if (await fs.pathExists(sourceFilePath)) {
    debug(
      `moveAndMaybeCompressFile: moving file from ${sourceFilePath} to ${targetFilePath} ${
        options.compress ? "with" : "without"
      } compress`
    );
    if (options.compress) {
      await new Promise((resolve, reject) => {
        fs.createReadStream(sourceFilePath)
          .pipe(zlib.createGzip())
          .pipe(fs.createWriteStream(targetFilePath, { mode: options.mode }))
          .on("finish", () => {
            debug(`moveAndMaybeCompressFile: finished compressing ${targetFilePath}, deleting ${sourceFilePath}`);
            fs.unlink(sourceFilePath)
              .then(resolve)
              .catch(() => {
                debug(`Deleting ${sourceFilePath} failed, truncating instead`);
                fs.truncate(sourceFilePath).then(resolve).catch(reject);
              });
          });
      });
    } else {
      debug(`moveAndMaybeCompressFile: deleting file=${targetFilePath}, renaming ${sourceFilePath} to ${targetFilePath}`);
      try {
        await fs.move(sourceFilePath, targetFilePath, { overwrite: true });
      } catch (e) {
        debug(`moveAndMaybeCompressFile: error moving ${sourceFilePath} to ${targetFilePath}`, e);
        debug(`Trying copy+truncate instead`);
        await fs.copy(sourceFilePath, targetFilePath, { overwrite: true });
        await fs.truncate(sourceFilePath);
      }
    }
  }
};

module.exports = moveAndMaybeCompressFile;

package/lib/now.js

// allows us to inject a mock date in tests
module.exports = () => new Date();

package/lib/RollingFileStream.js

const RollingFileWriteStream = require('./RollingFileWriteStream');

// just to adapt the previous version
class RollingFileStream extends RollingFileWriteStream {
  constructor(filename, size, backups, options) {
    if (!options) {
      options = {};
    }
    if (size) {
      options.maxSize = size;
    }
    if (!options.numBackups && options.numBackups !== 0) {
      if (!backups && backups !== 0) {
        backups = 1;
      }
      options.numBackups = backups;
    }
    super(filename, options);
    this.backups = options.numBackups;
    this.size = this.options.maxSize;
  }

  get theStream() {
    return this.currentFileStream;
  }
}

module.exports = RollingFileStream;

package/lib/RollingFileWriteStream.js

const debug = require("debug")("streamroller:RollingFileWriteStream");
const fs = require("fs-extra");
const path = require("path");
const newNow = require("./now");
const format = require("date-format");
const { Writable } = require("stream");
const fileNameFormatter = require("./fileNameFormatter");
const fileNameParser = require("./fileNameParser");
const moveAndMaybeCompressFile = require("./moveAndMaybeCompressFile");

/**
 * RollingFileWriteStream is mainly used when writing to a file rolling by date or size.
 * RollingFileWriteStream inherits from stream.Writable
 */
class RollingFileWriteStream extends Writable {
  /**
   * Create a RollingFileWriteStream
   * @constructor
   * @param {string} filePath - The file path to write to.
   * @param {object} options - The extra options
   * @param {number} options.numToKeep - The maximum number of files to keep.
   * @param {number} options.maxSize - The maximum size, in bytes, one file can reach.
   *  This should be more than 1024. The default is Number.MAX_SAFE_INTEGER.
   * @param {string} options.mode - The mode of the files. The default is '0600'. Refer to stream.writable for more.
   * @param {string} options.flags - The default is 'a'. Refer to stream.flags for more.
   * @param {boolean} options.compress - Whether to compress backup files.
   * @param {boolean} options.keepFileExt - Whether to keep the file extension.
   * @param {string} options.pattern - The date string pattern in the file name.
   * @param {boolean} options.alwaysIncludePattern - Whether to add the date to the name of the first file.
   */
  constructor(filePath, options) {
    debug(`constructor: creating RollingFileWriteStream. path=${filePath}`);
    super(options);
    this.options = this._parseOption(options);
    this.fileObject = path.parse(filePath);
    if (this.fileObject.dir === "") {
      this.fileObject = path.parse(path.join(process.cwd(), filePath));
    }
    this.fileFormatter = fileNameFormatter({
      file: this.fileObject,
      alwaysIncludeDate: this.options.alwaysIncludePattern,
      needsIndex: this.options.maxSize < Number.MAX_SAFE_INTEGER,
      compress: this.options.compress,
      keepFileExt: this.options.keepFileExt,
      fileNameSep: this.options.fileNameSep
    });
    this.fileNameParser = fileNameParser({
      file: this.fileObject,
      keepFileExt: this.options.keepFileExt,
      pattern: this.options.pattern,
      fileNameSep: this.options.fileNameSep
    });

    this.state = {
      currentSize: 0
    };

    if (this.options.pattern) {
      this.state.currentDate = format(this.options.pattern, newNow());
    }

    this.filename = this.fileFormatter({
      index: 0,
      date: this.state.currentDate
    });
    if (["a", "a+", "as", "as+"].includes(this.options.flags)) {
      this._setExistingSizeAndDate();
    }
    debug(
      `constructor: create new file ${this.filename}, state=${JSON.stringify(this.state)}`
    );
    this._renewWriteStream();
  }

  _setExistingSizeAndDate() {
    try {
      const stats = fs.statSync(this.filename);
      this.state.currentSize = stats.size;
      if (this.options.pattern) {
        this.state.currentDate = format(this.options.pattern, stats.mtime);
      }
    } catch (e) {
      // file does not exist, that's fine - move along
      return;
    }
  }

  _parseOption(rawOptions) {
    const defaultOptions = {
      maxSize: Number.MAX_SAFE_INTEGER,
      numToKeep: Number.MAX_SAFE_INTEGER,
      encoding: "utf8",
      mode: parseInt("0600", 8),
      flags: "a",
      compress: false,
      keepFileExt: false,
      alwaysIncludePattern: false
    };
    const options = Object.assign({}, defaultOptions, rawOptions);
    if (options.maxSize <= 0) {
      throw new Error(`options.maxSize (${options.maxSize}) should be > 0`);
    }
    // options.numBackups will supersede options.numToKeep
    if (options.numBackups || options.numBackups === 0) {
      if (options.numBackups < 0) {
        throw new Error(`options.numBackups (${options.numBackups}) should be >= 0`);
      } else if (options.numBackups >= Number.MAX_SAFE_INTEGER) {
        // to cater for numToKeep (which includes the hot file) at Number.MAX_SAFE_INTEGER
        throw new Error(`options.numBackups (${options.numBackups}) should be < Number.MAX_SAFE_INTEGER`);
      } else {
        options.numToKeep = options.numBackups + 1;
      }
    } else if (options.numToKeep <= 0) {
      throw new Error(`options.numToKeep (${options.numToKeep}) should be > 0`);
    }
    debug(`_parseOption: creating stream with option=${JSON.stringify(options)}`);
    return options;
  }

  _final(callback) {
    this.currentFileStream.end("", this.options.encoding, callback);
  }

  _write(chunk, encoding, callback) {
    this._shouldRoll().then(() => {
      debug(
        `_write: writing chunk. ` +
          `file=${this.currentFileStream.path} ` +
          `state=${JSON.stringify(this.state)} ` +
          `chunk=${chunk}`
      );
      this.currentFileStream.write(chunk, encoding, e => {
        this.state.currentSize += chunk.length;
        callback(e);
      });
    });
  }

  async _shouldRoll() {
    if (this._dateChanged() || this._tooBig()) {
      debug(
        `_shouldRoll: rolling because dateChanged? ${this._dateChanged()} or tooBig? ${this._tooBig()}`
      );
      await this._roll();
    }
  }

  _dateChanged() {
    return (
      this.state.currentDate &&
      this.state.currentDate !== format(this.options.pattern, newNow())
    );
  }

  _tooBig() {
    return this.state.currentSize >= this.options.maxSize;
  }

  _roll() {
    debug(`_roll: closing the current stream`);
    return new Promise((resolve, reject) => {
      this.currentFileStream.end("", this.options.encoding, () => {
        this._moveOldFiles()
          .then(resolve)
          .catch(reject);
      });
    });
  }

  async _moveOldFiles() {
    const files = await this._getExistingFiles();
    const todaysFiles = this.state.currentDate
      ? files.filter(f => f.date === this.state.currentDate)
      : files;
    for (let i = todaysFiles.length; i >= 0; i--) {
      debug(`_moveOldFiles: i = ${i}`);
      const sourceFilePath = this.fileFormatter({
        date: this.state.currentDate,
        index: i
      });
      const targetFilePath = this.fileFormatter({
        date: this.state.currentDate,
        index: i + 1
      });

      const moveAndCompressOptions = {
        compress: this.options.compress && i === 0,
        mode: this.options.mode
      };
      await moveAndMaybeCompressFile(
        sourceFilePath,
        targetFilePath,
        moveAndCompressOptions
      );
    }

    this.state.currentSize = 0;
    this.state.currentDate = this.state.currentDate
      ? format(this.options.pattern, newNow())
      : null;
    debug(
      `_moveOldFiles: finished rolling files. state=${JSON.stringify(this.state)}`
    );
    this._renewWriteStream();
    // wait for the file to be open before cleaning up old ones,
    // otherwise the daysToKeep calculations can be off
    await new Promise((resolve, reject) => {
      this.currentFileStream.write("", "utf8", () => {
        this._clean()
          .then(resolve)
          .catch(reject);
      });
    });
  }

  // Sorted from the oldest to the latest
  async _getExistingFiles() {
    const files = await fs.readdir(this.fileObject.dir).catch(() => []);
    debug(`_getExistingFiles: files=${files}`);
    const existingFileDetails = files
      .map(n => this.fileNameParser(n))
      .filter(n => n);

    const getKey = n =>
      (n.timestamp ? n.timestamp : newNow().getTime()) - n.index;
    existingFileDetails.sort((a, b) => getKey(a) - getKey(b));

    return existingFileDetails;
  }

  _renewWriteStream() {
    fs.ensureDirSync(this.fileObject.dir);
    const filePath = this.fileFormatter({
      date: this.state.currentDate,
      index: 0
    });
    const ops = {
      flags: this.options.flags,
      encoding: this.options.encoding,
      mode: this.options.mode
    };
    this.currentFileStream = fs.createWriteStream(filePath, ops);
    this.currentFileStream.on("error", e => {
      this.emit("error", e);
    });
  }

  async _clean() {
    const existingFileDetails = await this._getExistingFiles();
    debug(
      `_clean: numToKeep = ${this.options.numToKeep}, existingFiles = ${existingFileDetails.length}`
    );
    debug("_clean: existing files are: ", existingFileDetails);
    if (this._tooManyFiles(existingFileDetails.length)) {
      const fileNamesToRemove = existingFileDetails
        .slice(0, existingFileDetails.length - this.options.numToKeep)
        .map(f => path.format({ dir: this.fileObject.dir, base: f.filename }));
      await deleteFiles(fileNamesToRemove);
    }
  }

  _tooManyFiles(numFiles) {
    return this.options.numToKeep > 0 && numFiles > this.options.numToKeep;
  }
}

const deleteFiles = fileNames => {
  debug(`deleteFiles: files to delete: ${fileNames}`);
  return Promise.all(
    fileNames.map(f =>
      fs.unlink(f).catch(e => {
        debug(`deleteFiles: error when unlinking ${f}, ignoring. Error was ${e}`);
      })
    )
  );
};

module.exports = RollingFileWriteStream;

package/package.json

{
  "name": "streamroller",
  "version": "3.0.2",
  "description": "file streams that roll over when size limits, or dates are reached",
  "main": "lib/index.js",
  "files": [
    "lib"
  ],
  "directories": {
    "test": "test"
  },
  "scripts": {
    "codecheck": "eslint \"lib/*.js\" \"test/*.js\"",
    "prepublishOnly": "npm test",
    "pretest": "npm run codecheck",
    "clean": "rm -rf node_modules/",
    "test": "nyc --check-coverage mocha",
    "html-report": "nyc report --reporter=html"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/nomiddlename/streamroller.git"
  },
  "keywords": [
    "stream",
    "rolling"
  ],
  "author": "Gareth Jones, Huang Yichao",
  "license": "MIT",
  "readmeFilename": "README.md",
  "gitHead": "ece35d7d86c87c04ff09e8604accae81cf36a0ce",
  "devDependencies": {
    "@commitlint/cli": "^16.0.2",
    "@commitlint/config-conventional": "^16.0.0",
    "@types/node": "^17.0.9",
    "eslint": "^8.7.0",
    "husky": "^7.0.4",
    "mocha": "^9.1.4",
    "nyc": "^15.1.0",
    "proxyquire": "^2.1.1",
    "should": "^13.2.3"
  },
  "dependencies": {
    "date-format": "^4.0.3",
    "debug": "^4.1.1",
    "fs-extra": "^10.0.0"
  },
  "engines": {
    "node": ">=8.0"
  },
  "commitlint": {
    "extends": [
      "@commitlint/config-conventional"
    ]
  },
  "eslintConfig": {
    "env": {
      "browser": false,
      "node": true,
      "es6": true,
      "mocha": true
    },
    "parserOptions": {
      "ecmaVersion": 8
    },
    "extends": "eslint:recommended",
    "rules": {
      "no-console": "off"
    }
  },
  "husky": {
    "hooks": {
      "commit-msg": "commitlint -e $HUSKY_GIT_PARAMS"
    }
  },
  "nyc": {
    "include": [
      "lib/**"
    ],
    "branches": 100,
    "lines": 100,
    "functions": 98
  }
}

package/README.md

streamroller
============

node.js file streams that roll over when they reach a maximum size, or a date/time.
```sh
npm install streamroller
```

## usage

```javascript
var rollers = require('streamroller');
var stream = new rollers.RollingFileStream('myfile', 1024, 3);
stream.write("stuff");
stream.end();
```

The streams behave the same as standard node.js streams, except that when certain conditions are met they will rename the current file to a backup and start writing to a new file.

### new RollingFileStream(filename [, maxSize, numBackups, options])

* `filename` \<string\>
* `maxSize` \<integer\> - defaults to `Number.MAX_SAFE_INTEGER` - the size in bytes to trigger a rollover
* `numBackups` \<integer\> - defaults to `1` - the number of old files to keep (excluding the hot file)
* `options` \<object\>
  * `encoding` \<string\> - defaults to `'utf8'`
  * `mode` \<integer\> - defaults to `0o600`
  * `flags` \<string\> - defaults to `'a'` (see [fs.open](https://nodejs.org/dist/latest-v8.x/docs/api/fs.html#fs_fs_open_path_flags_mode_callback) for more details)
  * `compress` \<boolean\> - defaults to `false` - compress the backup files using gzip (backup files will have a `.gz` extension)
  * `keepFileExt` \<boolean\> - defaults to `false` - keep the original file extension, e.g. `abc.log -> abc.1.log`
  * `fileNameSep` \<string\> - defaults to `'.'` - the filename separator when rolling, e.g. abc.log`.`1 or abc`.`1.log (keepFileExt)

This returns a `WritableStream`. When the current file being written to (given by `filename`) reaches or exceeds `maxSize`, the current file is renamed to `filename.1` and a new file is started. Up to `numBackups` old files are maintained, so if `numBackups` is 3 then there will be 4 files:

    filename
    filename.1
    filename.2
    filename.3

When filename size >= maxSize then:

    filename -> filename.1
    filename.1 -> filename.2
    filename.2 -> filename.3
    filename.3 gets overwritten
    filename is a new file
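For example, a stream that rolls at 1MB, keeps two gzipped backups and preserves the `.log` extension (a sketch using the options above; the file names shown are what the roller would produce):

```javascript
const { RollingFileStream } = require('streamroller');

// rolls at 1MB; produces app.log, app.1.log.gz, app.2.log.gz
const stream = new RollingFileStream('app.log', 1024 * 1024, 2, {
  compress: true,
  keepFileExt: true
});
stream.write('a log line\n');
stream.end();
```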
### new DateRollingFileStream(filename [, pattern, options])

* `filename` \<string\>
* `pattern` \<string\> - defaults to `yyyy-MM-dd` - the date pattern to trigger rolling (see below)
* `options` \<object\>
  * `encoding` \<string\> - defaults to `'utf8'`
  * `mode` \<integer\> - defaults to `0o600`
  * `flags` \<string\> - defaults to `'a'` (see [fs.open](https://nodejs.org/dist/latest-v8.x/docs/api/fs.html#fs_fs_open_path_flags_mode_callback) for more details)
  * `compress` \<boolean\> - defaults to `false` - compress the backup files using gzip (backup files will have a `.gz` extension)
  * `keepFileExt` \<boolean\> - defaults to `false` - keep the original file extension, e.g. `abc.log -> abc.2013-08-30.log`
  * `fileNameSep` \<string\> - defaults to `'.'` - the filename separator when rolling, e.g. abc.log`.`2013-08-30 or abc`.`2013-08-30.log (keepFileExt)
  * `alwaysIncludePattern` \<boolean\> - defaults to `false` - extend the initial file with the pattern
  * ~~`daysToKeep`~~ `numBackups` \<integer\> - defaults to `1` - the number of old files that match the pattern to keep (excluding the hot file); `daysToKeep` is deprecated in favour of `numBackups`

This returns a `WritableStream`. When the current time, formatted as `pattern`, changes, the current file is renamed to `filename.formattedDate`, where `formattedDate` is the result of processing the date through the pattern, and a new file begins to be written. Streamroller uses [date-format](http://github.com/nomiddlename/date-format) to format dates, so the `pattern` should use the date-format syntax. e.g. with a `pattern` of `".yyyy-MM-dd"`, and assuming today is August 29, 2013, writing to the stream today will just write to `filename`. At midnight (or, more precisely, at the next file write after midnight), `filename` will be renamed to `filename.2013-08-29` and a new `filename` will be created. If `options.alwaysIncludePattern` is true, the initial file will be `filename.2013-08-29` and no renaming will occur at midnight, but a new file will be written to with the name `filename.2013-08-30`.
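A minimal date-rolling sketch (the file name, pattern and option values are illustrative):

```javascript
const { DateRollingFileStream } = require('streamroller');

// rolls at midnight; keeps a week of gzipped daily backups
const stream = new DateRollingFileStream('app.log', 'yyyy-MM-dd', {
  numBackups: 7,
  compress: true
});
stream.write('a log line\n');
stream.end();
```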