base64-async
Advanced tools
Comparing version 2.1.0 to 2.1.1
@@ -9,10 +9,11 @@ #!/usr/bin/env node | ||
const timeSpan = require('time-span'); | ||
const minimist = require('minimist'); | ||
const b64 = require('../'); | ||
const argv = require('minimist')(process.argv.slice(2)); | ||
const argv = minimist(process.argv.slice(2)); | ||
const chunkSize = argv.chunkSize || 250000; | ||
const bytesToBenchmark = ( | ||
argv.bytesToBenchmark | ||
&& argv.bytesToBenchmark.split(',').map(Number) | ||
argv.bytesToBenchmark && | ||
argv.bytesToBenchmark.split(',').map(Number) | ||
) || [10000, 100000, 1000000, 10000000, 100000000]; | ||
@@ -19,0 +20,0 @@ |
@@ -5,3 +5,2 @@ #!/usr/bin/env node | ||
const chunkSize = 250000; | ||
const bytes = 100000000; | ||
@@ -11,6 +10,10 @@ const buf = Buffer.alloc(bytes); | ||
const syncStart = Date.now(); | ||
const asyncJobs = 4; | ||
console.log(`Registering ${asyncJobs} asynchronous jobs...`); | ||
let i = 0; | ||
const syncId = setInterval(() => { | ||
if (++i >= 4) clearInterval(syncId); | ||
if (++i >= asyncJobs) { | ||
clearInterval(syncId); | ||
} | ||
@@ -21,5 +24,4 @@ const late = Date.now() - (syncStart + (interval * i)); | ||
console.log('Encoding with default Node.js function'); | ||
console.log(`Encoding ${prettyBytes(bytes)}...`); | ||
console.log(`Encoding ${prettyBytes(bytes)} with default Node.js Buffer API...`); | ||
buf.toString('base64'); | ||
console.log('Base64 encode complete'); |
@@ -11,6 +11,10 @@ #!/usr/bin/env node | ||
const asyncStart = Date.now(); | ||
const asyncJobs = 4; | ||
console.log(`Registering ${asyncJobs} asynchronous jobs...`); | ||
let i = 0; | ||
const asyncId = setInterval(() => { | ||
if (++i >= 4) clearInterval(asyncId); | ||
if (++i >= asyncJobs) { | ||
clearInterval(asyncId); | ||
} | ||
@@ -21,6 +25,5 @@ const late = Date.now() - (asyncStart + (interval * i)); | ||
console.log('Encoding with base64-async'); | ||
console.log(`Encoding ${prettyBytes(bytes)} in chunks of ${prettyBytes(chunkSize)}...`); | ||
console.log(`Encoding ${prettyBytes(bytes)} with base64-async in chunks of ${prettyBytes(chunkSize)}...`); | ||
b64(buf, { chunkSize }).then(() => { | ||
console.log('Base64 encode complete'); | ||
}) | ||
}); |
{ | ||
"name": "base64-async", | ||
"version": "2.1.0", | ||
"description": "Non-blocking chunked base64 encoding", | ||
"version": "2.1.1", | ||
"description": "Non-blocking chunked Base64 encoding", | ||
"main": "src/index.js", | ||
@@ -6,0 +6,0 @@ "scripts": { |
# base64-async | ||
> Non-blocking chunked base64 encoding | ||
> Non-blocking chunked Base64 encoding | ||
@@ -9,2 +9,10 @@ [![Build Status](https://travis-ci.org/lukechilds/base64-async.svg?branch=master)](https://travis-ci.org/lukechilds/base64-async) | ||
Process large Base64 documents without blocking the event loop. | ||
Configurable chunk size option to optimise for your use case. | ||
> **Note:** | ||
> | ||
> Base64 in Node.js is already crazy fast. Breaking the work up into chunks and adding async logic adds [overhead](#performance). If you aren't dealing with large files it will probably be more efficient to just block the event loop for the small amount of time it takes Node.js to process Base64 synchronously. | ||
## Install | ||
@@ -21,21 +29,11 @@ | ||
const fs = require('fs'); | ||
const fileBuffer = fs.readFileSync('somehugefile.jpg'); | ||
const buffer = fs.readFileSync('somehugefile.jpg'); | ||
console.log(fileBuffer); | ||
b64.encode(fileBuffer).then(b64String => console.log(b64String)); | ||
// aGkgbXVt... | ||
b64.decode(b64String).then(buffer => console.log(buffer)); | ||
// <Buffer 68 69 20 6d 75 6d ... > | ||
b64.encode(fileBuffer) | ||
.then(b64String => { | ||
console.log(b64String); | ||
// aGkgbXVt... | ||
return b64.decode(b64String); | ||
}) | ||
.then(originalFileBuffer => { | ||
console.log(originalFileBuffer); | ||
// <Buffer 68 69 20 6d 75 6d ... > | ||
}); | ||
// or, for the cool kids | ||
const b64String = await b64.encode(fileBuffer); | ||
@@ -45,3 +43,2 @@ const originalFileBuffer = await b64.decode(b64String); | ||
// which is equivalent to this | ||
const b64String = await b64(fileBuffer); | ||
@@ -52,4 +49,68 @@ const originalFileBuffer = await b64(b64String); | ||
## Example | ||
``` | ||
$ npm run example | ||
Registering 4 asynchronous jobs... | ||
Encoding 100 MB with default Node.js Buffer API... | ||
Base64 encode complete | ||
Hi, I'm an asynchronous job, and I'm late by 231ms | ||
Hi, I'm an asynchronous job, and I'm late by 238ms | ||
Hi, I'm an asynchronous job, and I'm late by 239ms | ||
Hi, I'm an asynchronous job, and I'm late by 245ms | ||
Registering 4 asynchronous jobs... | ||
Encoding 100 MB with base64-async in chunks of 250 kB... | ||
Hi, I'm an asynchronous job, and I'm on time | ||
Hi, I'm an asynchronous job, and I'm on time | ||
Hi, I'm an asynchronous job, and I'm on time | ||
Hi, I'm an asynchronous job, and I'm on time | ||
Base64 encode complete | ||
``` | ||
([example source code](/examples)) | ||
Notice how none of the async jobs can start until the Buffer API has finished encoding and stops blocking the event loop? With `base64-async` the async jobs can execute in-between each chunk of data. | ||
## Performance | ||
``` | ||
$ npm run bench | ||
Benchmark completed with a chunk size of 250 kB | ||
┌────────┬──────────────┬──────────────┬──────────────┬──────────────┐ | ||
│ Bytes │ Encode Sync │ Decode Sync │ Encode Async │ Decode Async │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 10 kB │ 0.097225ms │ 0.383031ms │ 1.276201ms │ 0.537687ms │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 100 kB │ 0.198161ms │ 0.271577ms │ 0.99799ms │ 0.356765ms │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 1 MB │ 1.924415ms │ 2.038406ms │ 2.679117ms │ 2.544993ms │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 10 MB │ 15.749204ms │ 16.280246ms │ 33.666111ms │ 29.918725ms │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 100 MB │ 165.189455ms │ 195.298199ms │ 246.359068ms │ 280.792751ms │ | ||
└────────┴──────────────┴──────────────┴──────────────┴──────────────┘ | ||
``` | ||
As you can see, the total processing time is longer with `base64-async` (as we spend some time paused waiting for the event loop). However, if you have an idea of the size of the data you'll be working with, you can play around with the chunk size to get better performance. | ||
The included benchmarking tool accepts arguments to help you test this: | ||
``` | ||
$ npm run bench -- --chunkSize=1000000 --bytesToBenchmark=50000000,100000000 | ||
Benchmark completed with a chunk size of 1 MB | ||
┌────────┬──────────────┬──────────────┬──────────────┬──────────────┐ | ||
│ Bytes │ Encode Sync │ Decode Sync │ Encode Async │ Decode Async │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 50 MB │ 79.675533ms │ 87.251079ms │ 92.400367ms │ 137.468082ms │ | ||
├────────┼──────────────┼──────────────┼──────────────┼──────────────┤ | ||
│ 100 MB │ 203.423705ms │ 173.567974ms │ 186.181857ms │ 264.123311ms │ | ||
└────────┴──────────────┴──────────────┴──────────────┴──────────────┘ | ||
``` | ||
## License | ||
MIT © Luke Childs |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
18169
250
113