Comparing version 2.0.0 to 2.1.0
@@ -6,2 +6,8 @@ All notable changes to this project will be documented in this file.
+## [2.1.0] - 2024-04-23
+### Added
+- verbose logging will print processing time of a file
+### Fixed
+- compressing files over 2GB
 ## [2.0.0] - 2024-01-25
@@ -8,0 +14,0 @@ ### Breaking
@@ -8,3 +8,3 @@ "use strict";
 const noop = () => undefined;
-const compressQueue = (options) => {
+const compressQueue = (options, verbose) => {
     return new Promise((resolve, reject) => {
@@ -15,10 +15,21 @@ let fulfilled = false;
     options.files.forEach(file => {
-        const task = q.add(() => (0, compress_js_1.compress)({
-            filePath: file,
-            mode: options.mode,
-            quality: options.quality,
-            windowSize: options.windowSize,
-            writeTo: options.printToStdOut ? "stdout" : "file",
-            br: options.br,
-        }));
+        let taskStart = 0;
+        const task = q.add(() => {
+            taskStart = Date.now();
+            if (verbose) {
+                console.warn(`Processing file ${file}`);
+            }
+            return (0, compress_js_1.compress)({
+                filePath: file,
+                mode: options.mode,
+                quality: options.quality,
+                windowSize: options.windowSize,
+                writeTo: options.printToStdOut ? "stdout" : "file",
+                br: options.br,
+            }).finally(() => {
+                if (verbose) {
+                    console.warn(`File ${file} processed in ${Date.now() - taskStart}ms`);
+                }
+            });
+        });
         task.promise.catch(noop);
@@ -25,0 +36,0 @@ task.data = { file };
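The change above follows a common timing pattern: capture a start timestamp when the task actually begins, then log the elapsed time in `.finally()` so the duration is reported whether compression succeeds or fails. A minimal standalone sketch of the same pattern (the names here are illustrative, not part of the package):

```js
// Wrap any promise-returning job so its duration is always logged,
// even when the job rejects (the rejection still propagates to the caller).
const timed = (label, job) => {
    const start = Date.now();
    return job().finally(() => {
        console.warn(`${label} processed in ${Date.now() - start}ms`);
    });
};

// Illustrative usage:
timed("demo.txt", () => new Promise(resolve => setTimeout(resolve, 50)))
    .then(() => console.warn("done"));
```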
@@ -168,3 +168,3 @@ #!/usr/bin/env node
     };
-    await (0, compressQueue_js_1.compressQueue)(options);
+    await (0, compressQueue_js_1.compressQueue)(options, Boolean(argv.v));
     console.warn("OK");
@@ -171,0 +171,0 @@ }
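Given the `Boolean(argv.v)` wiring above, a verbose run would emit the two new log lines around each file, roughly like this (assuming `-v` is the CLI flag behind yargs' `argv.v`; the timing value is illustrative):

```
$ brotli-cli compress -v file1.txt
Processing file file1.txt
File file1.txt processed in 42ms
OK
```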
@@ -9,3 +9,29 @@ "use strict";
 const util_1 = __importDefault(require("util"));
+const buffer_1 = __importDefault(require("buffer"));
+const os_1 = __importDefault(require("os"));
-const readFile = util_1.default.promisify(fs_1.default.readFile);
+const stat = util_1.default.promisify(fs_1.default.stat);
+const is64Bit = ["x64", "arm64", "ppc64", "s390x"].includes(os_1.default.arch());
+const readFileSizeLimit = is64Bit ? Math.pow(2, 31) - 1 : Math.pow(2, 30) - 1;
+const nativeReadFile = util_1.default.promisify(fs_1.default.readFile);
+const readFile = async (path) => {
+    const fileSize = await stat(path).then(stats => stats.size);
+    if (fileSize > buffer_1.default.constants.MAX_LENGTH) {
+        throw new Error(`File ${path} is too big to process, `
+            + `${fileSize} bytes read but max ${buffer_1.default.constants.MAX_LENGTH} bytes allowed`);
+    }
+    if (fileSize < readFileSizeLimit) {
+        return nativeReadFile(path);
+    }
+    return new Promise((resolve, reject) => {
+        const stream = fs_1.default.createReadStream(path);
+        const chunks = [];
+        stream.on("data", chunk => {
+            chunks.push(Buffer.from(chunk));
+        });
+        stream.on("end", () => {
+            resolve(Buffer.concat(chunks));
+        });
+        stream.on("error", reject);
+    });
+};
 exports.readFile = readFile;
@@ -12,0 +38,0 @@ const writeFile = util_1.default.promisify(fs_1.default.writeFile);
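For callers, the patched `readFile` is a drop-in replacement for the promisified original: small files still go through `fs.readFile`, files past the size cutoff are streamed chunk by chunk, and files beyond the absolute `Buffer` cap fail fast with a descriptive error. A hypothetical direct use (the module path is an assumption, not taken from the diff):

```js
// Assumed compiled module name; the diff does not show the file path.
const { readFile } = require("./fsPromisified.js");

readFile("./huge-video.mp4")
    .then(buf => console.warn(`read ${buf.length} bytes`))
    .catch(err => console.error(err.message)); // e.g. the "too big to process" error above
```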
@@ -1,4 +1,4 @@
 {
   "name": "brotli-cli",
-  "version": "2.0.0",
+  "version": "2.1.0",
   "repository": "git@github.com:dzek69/brotli-cli.git",
@@ -5,0 +5,0 @@ "homepage": "https://github.com/dzek69/brotli-cli",
@@ -12,4 +12,10 @@ # brotli-cli
 1. `brotli-cli compress file1.txt file2.svg file3.js`
+> Files will be created in the same directory, but with the `.br` extension appended. Overwriting will occur without asking.
+### Compressing by glob pattern:
+1. `brotli-cli compress --glob "public/*.html"`
+> Files will be created in the same directory, but with the `.br` extension appended. Overwriting will occur without asking.
 ### Printing to stdout:
@@ -20,4 +26,2 @@ > To do this, you can only specify one file to compress, and you have to add `-` at the end of the command.
-Files will be created in the same directory, but with `.br` extension appended. Overwriting will occur without asking.
 ## Detailed usage
@@ -34,2 +38,4 @@
 -q, --quality    Brotli compression quality [0-11] [default: 11]
+-t, --threads    Use this many concurrent jobs [number of threads or `true` for
+                 threads=CPUs amount] [default: true]
 -l, --lgwin      Brotli compression window size [0, 10-24] [default: 24]
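The newly documented `-t`/`--threads` option caps how many compression jobs run concurrently, which is the main lever for trading speed against memory. An illustrative invocation combining it with the glob mode shown earlier:

```
$ brotli-cli compress -t 2 --glob "public/*.html"
```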
@@ -50,4 +56,13 @@ -b, --bail Stop execution on first error [boolean] [default: true]
+## Limitations and warnings
+Because node's `zlib` can't work with streams (nor could the previously used `brotli` npm module),
+all the file contents to be compressed must be loaded into memory.
+Usually this does not matter, but keep a few things in mind:
+- if you're low on memory and compressing large files, you may want to decrease the number of threads
+- compressing files over 1GB on 32-bit systems and over 2GB on 64-bit systems is even less memory efficient
+- the absolute max file size limit is 4GB (it's actually determined by `buffer.constants.MAX_LENGTH`, so this may vary)
 ## License
 MIT
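The 4GB figure in the limitations list above comes from Node's hard per-`Buffer` cap, which the new `readFile` checks before doing any work. A one-liner to inspect the cap on your own build (the exact value varies by Node version and architecture, as the README notes):

```js
// Print the hard per-Buffer byte limit that bounds the maximum file size.
const buffer = require("buffer");
console.warn(`${buffer.constants.MAX_LENGTH} bytes max per Buffer`);
```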
@@ -23,3 +23,3 @@ import { EVENTS, Queue } from "queue-system";
 // eslint-disable-next-line max-lines-per-function
-const compressQueue = (options: Options) => {
+const compressQueue = (options: Options, verbose: boolean) => {
     // eslint-disable-next-line max-lines-per-function
@@ -33,10 +33,21 @@ return new Promise<void>((resolve, reject) => {
     options.files.forEach(file => {
-        const task = q.add(() => compress({
-            filePath: file,
-            mode: options.mode,
-            quality: options.quality,
-            windowSize: options.windowSize,
-            writeTo: options.printToStdOut ? "stdout" : "file",
-            br: options.br,
-        }));
+        let taskStart = 0;
+        const task = q.add(() => {
+            taskStart = Date.now();
+            if (verbose) {
+                console.warn(`Processing file ${file}`);
+            }
+            return compress({
+                filePath: file,
+                mode: options.mode,
+                quality: options.quality,
+                windowSize: options.windowSize,
+                writeTo: options.printToStdOut ? "stdout" : "file",
+                br: options.br,
+            }).finally(() => {
+                if (verbose) {
+                    console.warn(`File ${file} processed in ${Date.now() - taskStart}ms`);
+                }
+            });
+        });
         task.promise.catch(noop);
@@ -43,0 +54,0 @@ task.data = { file };
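A side note on the unchanged context line `task.promise.catch(noop);`: attaching a no-op handler marks the rejection as observed, preventing an unhandled-rejection crash while the real error handling (outside this hunk, presumably via the queue's events given the `EVENTS` import) still sees the failure. A minimal sketch of the pattern:

```js
const noop = () => undefined;

const task = Promise.reject(new Error("boom"));
task.catch(noop); // marks the rejection as observed; no unhandled-rejection crash
task.catch(err => console.error("handled:", err.message)); // real handling can still attach
```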
@@ -200,3 +200,3 @@ #!/usr/bin/env node
-    await compressQueue(options);
+    await compressQueue(options, Boolean(argv.v));
     console.warn("OK");
@@ -203,0 +203,0 @@ }
 import fs from "fs";
 import util from "util";
+import buffer from "buffer";
+import os from "os";
-const readFile = util.promisify(fs.readFile);
+const stat = util.promisify(fs.stat);
+const is64Bit = ["x64", "arm64", "ppc64", "s390x"].includes(os.arch());
+// eslint-disable-next-line @typescript-eslint/no-magic-numbers
+const readFileSizeLimit = is64Bit ? Math.pow(2, 31) - 1 : Math.pow(2, 30) - 1;
+const nativeReadFile = util.promisify(fs.readFile);
+/**
+ * Reads file using streams into buffer
+ * @param path
+ */
+const readFile = async (path: string) => {
+    const fileSize = await stat(path).then(stats => stats.size);
+    if (fileSize > buffer.constants.MAX_LENGTH) {
+        throw new Error(
+            `File ${path} is too big to process, `
+            + `${fileSize} bytes read but max ${buffer.constants.MAX_LENGTH} bytes allowed`,
+        );
+    }
+    if (fileSize < readFileSizeLimit) {
+        return nativeReadFile(path);
+    }
+    // TODO benchmark if it's worth to keep doing Buffer.concat instead of first pushing into the array and then
+    // concatenating on resolve. Currently it's probably less memory efficient but faster [? - wild guess]
+    return new Promise<Buffer>((resolve, reject) => {
+        const stream = fs.createReadStream(path);
+        const chunks: Buffer[] = [];
+        stream.on("data", chunk => {
+            chunks.push(Buffer.from(chunk));
+        });
+        stream.on("end", () => {
+            resolve(Buffer.concat(chunks));
+        });
+        stream.on("error", reject);
+    });
+};
 const writeFile = util.promisify(fs.writeFile);
@@ -6,0 +47,0 @@
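The `is64Bit` probe above just string-matches `os.arch()`. A quick way to see which read-size cutoff applies on a given machine, using the same expressions as the source:

```js
const os = require("os");

const is64Bit = ["x64", "arm64", "ppc64", "s390x"].includes(os.arch());
const limit = is64Bit ? Math.pow(2, 31) - 1 : Math.pow(2, 30) - 1;
console.warn(`${os.arch()}: files up to ${limit} bytes use fs.readFile, larger ones are streamed`);
```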