streamroller
Advanced tools
Comparing version 3.0.7 to 3.0.8
# streamroller Changelog | ||
## 3.0.8 | ||
- [chore(fix): concurrency issues when forked processes trying to roll same file](https://github.com/log4js-node/streamroller/pull/124) - thanks [@peteriman](https://github.com/peteriman) | ||
- [chore(refactor): use writeStream.destroy() instead](https://github.com/log4js-node/streamroller/pull/125) | ||
- [chore(refactor): use isCreated variable instead of e.code='EEXIST'](https://github.com/log4js-node/streamroller/pull/126) | ||
- [chore(lint): added .eslintrc and fixed linting issues](https://github.com/log4js-node/streamroller/pull/123) - thanks [@peteriman](https://github.com/peteriman) | ||
- [chore(dep): updated dependencies](https://github.com/log4js-node/streamroller/pull/127) - thanks [@peteriman](https://github.com/peteriman) | ||
- chore(dev): bump @types/node from 17.0.24 to 17.0.26 | ||
- chore(dev): bump eslint from 8.13.0 to 8.14.0 | ||
- chore(dep): bump date-format from 4.0.7 to 4.0.9 | ||
- updated package-lock.json | ||
- [chore(dep): updated dependencies](https://github.com/log4js-node/streamroller/pull/119) - thanks [@peteriman](https://github.com/peteriman) | ||
- chore(dep): bump fs-extra from 10.0.1 to 10.1.0 | ||
- updated package-lock.json | ||
- Revert "[chore(dep): temporary fix for fs-extra issue (to be reverted when fs-extra patches it)](https://github.com/log4js-node/streamroller/pull/116)" | ||
## 3.0.7 | ||
@@ -4,0 +20,0 @@ |
const debug = require('debug')('streamroller:moveAndMaybeCompressFile'); | ||
const realFs = require('fs'); | ||
const current = realFs.realpath.native; | ||
if (!realFs.realpath.native) realFs.realpath.native = realFs.realpath; | ||
let fs = require('fs-extra'); | ||
realFs.realpath.native = current; | ||
const fs = require('fs-extra'); | ||
const zlib = require('zlib'); | ||
@@ -17,7 +11,5 @@ | ||
const options = Object.assign({}, defaultOptions, rawOptions); | ||
debug( | ||
`_parseOption: moveAndMaybeCompressFile called with option=${JSON.stringify(options)}` | ||
); | ||
debug(`_parseOption: moveAndMaybeCompressFile called with option=${JSON.stringify(options)}`); | ||
return options; | ||
} | ||
}; | ||
@@ -30,50 +22,80 @@ const moveAndMaybeCompressFile = async ( | ||
options = _parseOption(options); | ||
if (sourceFilePath === targetFilePath) { | ||
debug( | ||
`moveAndMaybeCompressFile: source and target are the same, not doing anything` | ||
); | ||
debug(`moveAndMaybeCompressFile: source and target are the same, not doing anything`); | ||
return; | ||
} | ||
if (await fs.pathExists(sourceFilePath)) { | ||
debug( | ||
`moveAndMaybeCompressFile: moving file from ${sourceFilePath} to ${targetFilePath} ${ | ||
options.compress ? "with" : "without" | ||
} compress` | ||
); | ||
if (options.compress) { | ||
await new Promise((resolve, reject) => { | ||
fs.createReadStream(sourceFilePath) | ||
.pipe(zlib.createGzip()) | ||
.pipe(fs.createWriteStream(targetFilePath, {mode: options.mode})) | ||
.on("finish", () => { | ||
debug( | ||
`moveAndMaybeCompressFile: finished compressing ${targetFilePath}, deleting ${sourceFilePath}` | ||
); | ||
fs.unlink(sourceFilePath) | ||
.then(resolve) | ||
.catch(() => { | ||
debug(`Deleting ${sourceFilePath} failed, truncating instead`); | ||
fs.truncate(sourceFilePath).then(resolve).catch(reject) | ||
if (await fs.pathExists(sourceFilePath)) { | ||
debug( | ||
`moveAndMaybeCompressFile: moving file from ${sourceFilePath} to ${targetFilePath} ${ | ||
options.compress ? "with" : "without" | ||
} compress` | ||
); | ||
if (options.compress) { | ||
await new Promise((resolve, reject) => { | ||
let isCreated = false; | ||
// to avoid concurrency, the forked process which can create the file will proceed (using flags wx) | ||
const writeStream = fs.createWriteStream(targetFilePath, {mode: options.mode, flags: "wx"}) | ||
// wait until writable stream is valid before proceeding to read | ||
.on("open", () => { | ||
isCreated = true; | ||
const readStream = fs.createReadStream(sourceFilePath) | ||
// wait until readable stream is valid before piping | ||
.on("open", () => { | ||
readStream.pipe(zlib.createGzip()).pipe(writeStream); | ||
}) | ||
.on("error", (e) => { | ||
debug(`moveAndMaybeCompressFile: error reading ${sourceFilePath}`, e); | ||
// manually close writable: https://nodejs.org/api/stream.html#readablepipedestination-options | ||
writeStream.destroy(e); | ||
}); | ||
}) | ||
.on("finish", () => { | ||
debug(`moveAndMaybeCompressFile: finished compressing ${targetFilePath}, deleting ${sourceFilePath}`); | ||
// delete sourceFilePath | ||
fs.unlink(sourceFilePath) | ||
.then(resolve) | ||
.catch((e) => { | ||
debug(`moveAndMaybeCompressFile: error deleting ${sourceFilePath}, truncating instead`, e); | ||
// fallback to truncate | ||
fs.truncate(sourceFilePath) | ||
.then(resolve) | ||
.catch((e) => { | ||
debug(`moveAndMaybeCompressFile: error truncating ${sourceFilePath}`, e); | ||
reject(e); | ||
}); | ||
}); | ||
}) | ||
.on("error", (e) => { | ||
if (!isCreated) { | ||
debug(`moveAndMaybeCompressFile: error creating ${targetFilePath}`, e); | ||
// do not do anything if handled by another forked process | ||
reject(e); | ||
} else { | ||
debug(`moveAndMaybeCompressFile: error writing ${targetFilePath}, deleting`, e); | ||
// delete targetFilePath (taking as nothing happened) | ||
fs.unlink(targetFilePath) | ||
.then(() => { reject(e); }) | ||
.catch((e) => { | ||
debug(`moveAndMaybeCompressFile: error deleting ${targetFilePath}`, e); | ||
reject(e); | ||
}); | ||
}); | ||
}); | ||
} else { | ||
debug( | ||
`moveAndMaybeCompressFile: deleting file=${targetFilePath}, renaming ${sourceFilePath} to ${targetFilePath}` | ||
); | ||
try { | ||
await fs.move(sourceFilePath, targetFilePath, { overwrite: true }); | ||
} catch (e) { | ||
debug( | ||
`moveAndMaybeCompressFile: error moving ${sourceFilePath} to ${targetFilePath}`, e | ||
); | ||
debug(`Trying copy+truncate instead`); | ||
await fs.copy(sourceFilePath, targetFilePath, { overwrite: true }); | ||
await fs.truncate(sourceFilePath); | ||
} | ||
} | ||
}); | ||
}).catch(() => {}); | ||
} else { | ||
debug(`moveAndMaybeCompressFile: renaming ${sourceFilePath} to ${targetFilePath}`); | ||
try { | ||
await fs.move(sourceFilePath, targetFilePath, { overwrite: true }); | ||
} catch (e) { | ||
debug(`moveAndMaybeCompressFile: error renaming ${sourceFilePath} to ${targetFilePath}`, e); | ||
debug(`moveAndMaybeCompressFile: trying copy+truncate instead`); | ||
await fs.copy(sourceFilePath, targetFilePath, { overwrite: true }); | ||
await fs.truncate(sourceFilePath); | ||
} | ||
} | ||
} | ||
}; | ||
module.exports = moveAndMaybeCompressFile; |
const debug = require("debug")("streamroller:RollingFileWriteStream"); | ||
const realFs = require('fs'); | ||
const current = realFs.realpath.native; | ||
if (!realFs.realpath.native) realFs.realpath.native = realFs.realpath; | ||
const fs = require('fs-extra'); | ||
realFs.realpath.native = current; | ||
const fs = require("fs-extra"); | ||
const path = require("path"); | ||
@@ -17,2 +11,9 @@ const newNow = require("./now"); | ||
/**
 * Best-effort deletion of a batch of files.
 * Each unlink failure is reported through debug and then swallowed, so a
 * single missing or locked file never rejects the whole batch.
 * @param {string[]} fileNames - paths of the files to remove
 * @returns {Promise<void[]>} resolves once every unlink has settled
 */
const deleteFiles = fileNames => {
  debug(`deleteFiles: files to delete: ${fileNames}`);
  const pendingUnlinks = fileNames.map(f => {
    // ignore per-file errors; rolling should not abort on cleanup failure
    return fs.unlink(f).catch((e) => {
      debug(`deleteFiles: error when unlinking ${f}, ignoring. Error was ${e}`);
    });
  });
  return Promise.all(pendingUnlinks);
};
/** | ||
@@ -236,3 +237,4 @@ * RollingFileWriteStream is mainly used when writing to a file rolling by date or size. | ||
async _getExistingFiles() { | ||
const files = await fs.readdir(this.fileObject.dir).catch( /* istanbul ignore next: will not happen on windows */ () => []); | ||
const files = await fs.readdir(this.fileObject.dir) | ||
.catch( /* istanbul ignore next: will not happen on windows */ () => []); | ||
@@ -318,9 +320,2 @@ debug(`_getExistingFiles: files=${files}`); | ||
// Best-effort deletion helper: unlinks each listed file in parallel,
// logging and ignoring individual failures so the returned promise
// never rejects because one file could not be removed.
const deleteFiles = fileNames => { | ||
debug(`deleteFiles: files to delete: ${fileNames}`); | ||
return Promise.all(fileNames.map(f => fs.unlink(f).catch((e) => { | ||
debug(`deleteFiles: error when unlinking ${f}, ignoring. Error was ${e}`); | ||
}))); | ||
}; | ||
module.exports = RollingFileWriteStream; |
{ | ||
"name": "streamroller", | ||
"version": "3.0.7", | ||
"version": "3.0.8", | ||
"description": "file streams that roll over when size limits, or dates are reached", | ||
@@ -36,4 +36,4 @@ "main": "lib/index.js", | ||
"@commitlint/config-conventional": "^16.2.1", | ||
"@types/node": "^17.0.24", | ||
"eslint": "^8.13.0", | ||
"@types/node": "^17.0.26", | ||
"eslint": "^8.14.0", | ||
"husky": "^7.0.4", | ||
@@ -46,5 +46,5 @@ "mocha": "^9.2.2", | ||
"dependencies": { | ||
"date-format": "^4.0.7", | ||
"date-format": "^4.0.9", | ||
"debug": "^4.3.4", | ||
"fs-extra": "^10.0.1" | ||
"fs-extra": "^10.1.0" | ||
}, | ||
@@ -51,0 +51,0 @@ "engines": { |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
39907
572
2
Updated date-format@^4.0.9
Updated fs-extra@^10.1.0