fastify-compress
Comparing version 4.0.1 to 4.1.0
index.js
'use strict'

const fp = require('fastify-plugin')
const zlib = require('zlib')
const pump = require('pump')
const mimedb = require('mime-db')
const isStream = require('is-stream')
const intoStream = require('into-stream')
const peek = require('peek-stream')
const Minipass = require('minipass')
const pumpify = require('pumpify')
const isGzip = require('is-gzip')
const isDeflate = require('is-deflate')
const encodingNegotiator = require('encoding-negotiator')
const { inherits, format } = require('util')
const warning = require('process-warning')()

warning.create('FastifyWarning.fastify-compress', 'FST_MODULE_DEP_fastify-compress'.toUpperCase(), 'fastify-compress has been deprecated. Use @fastify/compress@5.0.0 instead.')
warning.emit('FST_MODULE_DEP_fastify-compress'.toUpperCase())

const InvalidRequestEncodingError = createError('FST_CP_ERR_INVALID_CONTENT_ENCODING', 'Unsupported Content-Encoding: %s', 415)
const InvalidRequestCompressedPayloadError = createError('FST_CP_ERR_INVALID_CONTENT', 'Could not decompress the request payload using the provided encoding', 400)
function compressPlugin (fastify, opts, next) {
  const globalCompressParams = processCompressParams(opts)
  const globalDecompressParams = processDecompressParams(opts)

  if (opts.encodings && opts.encodings.length < 1) {
    next(new Error('The `encodings` option array must have at least 1 item.'))
    return
  }

  if (opts.requestEncodings && opts.requestEncodings.length < 1) {
    next(new Error('The `requestEncodings` option array must have at least 1 item.'))
    return
  }

  if (globalCompressParams.encodings.length < 1) {
    next(new Error('None of the passed `encodings` were supported — compression not possible.'))
    return
  }

  if (globalDecompressParams.encodings.length < 1) {
    next(new Error('None of the passed `requestEncodings` were supported — request decompression not possible.'))
    return
  }

  if (globalDecompressParams.forceEncoding && !globalDecompressParams.encodings.includes(globalDecompressParams.forceEncoding)) {
    next(new Error(`Unsupported decompression encoding ${opts.forceRequestEncoding}.`))
    return
  }

  fastify.decorateReply('compress', null)

  // add onSend hook onto each route as needed
  fastify.addHook('onRoute', (routeOptions) => {
    // If route config.compress has been set, it takes precedence over routeOptions.compress
    if (routeOptions.config && typeof routeOptions.config.compress !== 'undefined') {
      routeOptions.compress = routeOptions.config.compress
    }

    // Manage compression options
    if (typeof routeOptions.compress !== 'undefined') {
      if (typeof routeOptions.compress === 'object') {
        const mergedCompressParams = Object.assign(
          {}, globalCompressParams, processCompressParams(routeOptions.compress)
        )
        // the current endpoint has a custom compress configuration
        buildRouteCompress(fastify, mergedCompressParams, routeOptions)
      } else if (routeOptions.compress === false) {
        // don't apply any compress settings
      } else {
        throw new Error('Unknown value for route compress configuration')
      }
    } else if (globalCompressParams.global) {
      // the plugin is set globally (meaning that all the routes will be compressed)
      // As the endpoint does not have a custom compress configuration, use the global one.
      buildRouteCompress(fastify, globalCompressParams, routeOptions)
    } else {
      // if no options are specified and the plugin is not global, then we still want to decorate
      // the reply in this case
      buildRouteCompress(fastify, globalCompressParams, routeOptions, true)
    }

    // If route config.decompress has been set, it takes precedence over routeOptions.decompress
    if (routeOptions.config && typeof routeOptions.config.decompress !== 'undefined') {
      routeOptions.decompress = routeOptions.config.decompress
    }

    // Manage decompression options
    if (typeof routeOptions.decompress !== 'undefined') {
      if (typeof routeOptions.decompress === 'object') {
        // the current endpoint has a custom decompress configuration
        const mergedDecompressParams = Object.assign(
          {}, globalDecompressParams, processDecompressParams(routeOptions.decompress)
        )
        buildRouteDecompress(fastify, mergedDecompressParams, routeOptions)
      } else if (routeOptions.decompress === false) {
        // don't apply any decompress settings
      } else {
        throw new Error('Unknown value for route decompress configuration')
      }
    } else if (globalDecompressParams.global) {
      // the plugin is set globally (meaning that all the routes will be decompressed)
      // As the endpoint does not have a custom decompress configuration, use the global one.
      buildRouteDecompress(fastify, globalDecompressParams, routeOptions)
    }
  })

  next()
}
function processCompressParams (opts) {
  /* istanbul ignore next */
  if (!opts) {
    return
  }

  const params = {
    global: (typeof opts.global === 'boolean') ? opts.global : true
  }

  params.removeContentLengthHeader = typeof opts.removeContentLengthHeader === 'boolean' ? opts.removeContentLengthHeader : true
  params.brotliOptions = opts.brotliOptions
  params.zlibOptions = opts.zlibOptions
  params.onUnsupportedEncoding = opts.onUnsupportedEncoding
  params.inflateIfDeflated = opts.inflateIfDeflated === true
  params.threshold = typeof opts.threshold === 'number' ? opts.threshold : 1024
  params.compressibleTypes = opts.customTypes instanceof RegExp ? opts.customTypes : /^text\/(?!event-stream)|\+json$|\+text$|\+xml$|octet-stream$/
  params.compressStream = {
    br: () => ((opts.zlib || zlib).createBrotliCompress || zlib.createBrotliCompress)(params.brotliOptions),
    gzip: () => ((opts.zlib || zlib).createGzip || zlib.createGzip)(params.zlibOptions),
    deflate: () => ((opts.zlib || zlib).createDeflate || zlib.createDeflate)(params.zlibOptions)
  }
  params.uncompressStream = {
    // Currently params.uncompressStream.br() is never called as we do not have any way to autodetect brotli compression in `fastify-compress`
    // Brotli documentation reference: [RFC 7932](https://www.rfc-editor.org/rfc/rfc7932)
    br: /* istanbul ignore next */ () => ((opts.zlib || zlib).createBrotliDecompress || zlib.createBrotliDecompress)(params.brotliOptions),
    gzip: () => ((opts.zlib || zlib).createGunzip || zlib.createGunzip)(params.zlibOptions),
    deflate: () => ((opts.zlib || zlib).createInflate || zlib.createInflate)(params.zlibOptions)
  }

  const supportedEncodings = ['br', 'gzip', 'deflate', 'identity']
  params.encodings = Array.isArray(opts.encodings)
    ? supportedEncodings
      .filter(encoding => opts.encodings.includes(encoding))
      .sort((a, b) => opts.encodings.indexOf(a) - supportedEncodings.indexOf(b))
    : supportedEncodings

  return params
}

function processDecompressParams (opts) {
  /* istanbul ignore next */
  if (!opts) {
    return
  }

  const customZlib = opts.zlib || zlib

  const params = {
    global: (typeof opts.global === 'boolean') ? opts.global : true,
    onUnsupportedRequestEncoding: opts.onUnsupportedRequestEncoding,
    onInvalidRequestPayload: opts.onInvalidRequestPayload,
    decompressStream: {
      br: customZlib.createBrotliDecompress || zlib.createBrotliDecompress,
      gzip: customZlib.createGunzip || zlib.createGunzip,
      deflate: customZlib.createInflate || zlib.createInflate
    },
    encodings: [],
    forceEncoding: null
  }

  const supportedEncodings = ['br', 'gzip', 'deflate', 'identity']
  params.encodings = Array.isArray(opts.requestEncodings)
    ? supportedEncodings
      .filter(encoding => opts.requestEncodings.includes(encoding))
      .sort((a, b) => opts.requestEncodings.indexOf(a) - supportedEncodings.indexOf(b))
    : supportedEncodings

  if (opts.forceRequestEncoding) {
    params.forceEncoding = opts.forceRequestEncoding

    if (params.encodings.includes(opts.forceRequestEncoding)) {
      params.encodings = [opts.forceRequestEncoding]
    }
  }

  return params
}
function buildRouteCompress (fastify, params, routeOptions, decorateOnly) {
  // In order to provide a compress method with the same parameter set as the route itself,
  // we decorate the reply at the start of the request
  if (Array.isArray(routeOptions.onRequest)) {
    routeOptions.onRequest.push(onRequest)
  } else if (typeof routeOptions.onRequest === 'function') {
    routeOptions.onRequest = [routeOptions.onRequest, onRequest]
  } else {
    routeOptions.onRequest = [onRequest]
  }

  const compressFn = compress(params)
  function onRequest (req, reply, next) {
    reply.compress = compressFn
    next()
  }

  if (decorateOnly) {
    return
  }

  if (Array.isArray(routeOptions.onSend)) {
    routeOptions.onSend.push(onSend)
  } else if (typeof routeOptions.onSend === 'function') {
    routeOptions.onSend = [routeOptions.onSend, onSend]
  } else {
    routeOptions.onSend = [onSend]
  }

  function onSend (req, reply, payload, next) {
    if (payload == null) {
      return next()
    }

    const responseEncoding = reply.getHeader('Content-Encoding')
    if (responseEncoding && responseEncoding !== 'identity') {
      // response is already compressed
      return next()
    }

    setVaryHeader(reply)

    let stream, encoding
    const noCompress =
      // don't compress on x-no-compression header
      (req.headers['x-no-compression'] !== undefined) ||
      // don't compress if not one of the indicated compressible types
      (shouldCompress(reply.getHeader('Content-Type') || 'application/json', params.compressibleTypes) === false) ||
      // don't compress on missing or identity `accept-encoding` header
      ((encoding = getEncodingHeader(params.encodings, req)) == null || encoding === 'identity')

    if (encoding == null && params.onUnsupportedEncoding != null) {
      const encodingHeader = req.headers['accept-encoding']
      try {
        const errorPayload = params.onUnsupportedEncoding(encodingHeader, reply.request, reply)
        return next(null, errorPayload)
      } catch (err) {
        return next(err)
      }
    }

    if (noCompress) {
      if (params.inflateIfDeflated && isStream(stream = maybeUnzip(payload))) {
        encoding === undefined
          ? reply.removeHeader('Content-Encoding')
          : reply.header('Content-Encoding', 'identity')
        pump(stream, payload = unzipStream(params.uncompressStream), onEnd.bind(reply))
      }
      return next(null, payload)
    }

    if (typeof payload.pipe !== 'function') {
      if (Buffer.byteLength(payload) < params.threshold) {
        return next()
      }
      payload = intoStream(payload)
    }

    params.removeContentLengthHeader
      ? reply
        .header('Content-Encoding', encoding)
        .removeHeader('content-length')
      : reply.header('Content-Encoding', encoding)

    stream = zipStream(params.compressStream, encoding)
    pump(payload, stream, onEnd.bind(reply))
    next(null, stream)
  }
}
function buildRouteDecompress (fastify, params, routeOptions) {
  // Add our decompress handler in the preParsing hook
  if (Array.isArray(routeOptions.preParsing)) {
    routeOptions.preParsing.unshift(preParsing)
  } else if (typeof routeOptions.preParsing === 'function') {
    routeOptions.preParsing = [preParsing, routeOptions.preParsing]
  } else {
    routeOptions.preParsing = [preParsing]
  }

  function preParsing (request, reply, raw, next) {
    // Get the encoding from the options or from the headers
    let encoding = params.forceEncoding

    if (!encoding) {
      encoding = request.headers['content-encoding']
    }

    // The request is not compressed, nothing to do here
    if (!encoding) {
      return next(null, raw)
    }

    // Check that encoding is supported
    if (!params.encodings.includes(encoding)) {
      let errorPayload

      if (params.onUnsupportedRequestEncoding) {
        try {
          errorPayload = params.onUnsupportedRequestEncoding(encoding, request)
        } catch (ex) {
          errorPayload = undefined
        }
      }

      if (!errorPayload) {
        errorPayload = new InvalidRequestEncodingError(encoding)
      }

      return next(errorPayload)
    }

    // No action on identity
    if (encoding === 'identity') {
      return next(null, raw)
    }

    // Prepare decompression - If there is a decompress error, prepare the error for fastify handling
    const decompresser = params.decompressStream[encoding]()
    decompresser.receivedEncodedLength = 0
    decompresser.on('error', onDecompressError.bind(this, request, params, encoding))
    decompresser.pause()

    // Track the encoded length of the incoming payload to keep receivedEncodedLength updated
    raw.on('data', trackEncodedLength.bind(decompresser))
    raw.on('end', removeEncodedLengthTracking)

    next(null, pump(raw, decompresser))
  }
}
function compress (params) {
  return function (payload) {
    if (payload == null) {
      this.send(new Error('Internal server error'))
      return
    }

    setVaryHeader(this)

    let stream, encoding
    const noCompress =
      // don't compress on x-no-compression header
      (this.request.headers['x-no-compression'] !== undefined) ||
      // don't compress if not one of the indicated compressible types
      (shouldCompress(this.getHeader('Content-Type') || 'application/json', params.compressibleTypes) === false) ||
      // don't compress on missing or identity `accept-encoding` header
      ((encoding = getEncodingHeader(params.encodings, this.request)) == null || encoding === 'identity')

    if (encoding == null && params.onUnsupportedEncoding != null) {
      const encodingHeader = this.request.headers['accept-encoding']

      let errorPayload
      try {
        errorPayload = params.onUnsupportedEncoding(encodingHeader, this.request, this)
      } catch (ex) {
        errorPayload = ex
      }
      return this.send(errorPayload)
    }

    if (noCompress) {
      if (params.inflateIfDeflated && isStream(stream = maybeUnzip(payload, this.serialize.bind(this)))) {
        encoding === undefined
          ? this.removeHeader('Content-Encoding')
          : this.header('Content-Encoding', 'identity')
        pump(stream, payload = unzipStream(params.uncompressStream), onEnd.bind(this))
      }
      return this.send(payload)
    }

    if (typeof payload.pipe !== 'function') {
      if (!Buffer.isBuffer(payload) && typeof payload !== 'string') {
        payload = this.serialize(payload)
      }
    }

    if (typeof payload.pipe !== 'function') {
      if (Buffer.byteLength(payload) < params.threshold) {
        return this.send(payload)
      }
      payload = intoStream(payload)
    }

    params.removeContentLengthHeader
      ? this
        .header('Content-Encoding', encoding)
        .removeHeader('content-length')
      : this.header('Content-Encoding', encoding)

    stream = zipStream(params.compressStream, encoding)
    pump(payload, stream, onEnd.bind(this))
    this.send(stream)
  }
}

function setVaryHeader (reply) {
  if (reply.hasHeader('Vary')) {
    const varyHeader = Array.isArray(reply.getHeader('Vary')) ? reply.getHeader('Vary') : [reply.getHeader('Vary')]
    if (!varyHeader.some((h) => h.includes('accept-encoding'))) {
      reply.header('Vary', `${varyHeader.join(', ')}, accept-encoding`)
    }
  } else {
    reply.header('Vary', 'accept-encoding')
  }
}

function onEnd (err) {
  if (err) this.log.error(err)
}

function trackEncodedLength (chunk) {
  this.receivedEncodedLength += chunk.length
}

function removeEncodedLengthTracking () {
  this.removeListener('data', trackEncodedLength)
  this.removeListener('end', removeEncodedLengthTracking)
}

function onDecompressError (request, params, encoding, error) {
  this.log.debug(`compress: invalid request payload - ${error}`)

  let errorPayload

  if (params.onInvalidRequestPayload) {
    try {
      errorPayload = params.onInvalidRequestPayload(encoding, request, error)
    } catch (ex) {
      errorPayload = undefined
    }
  }

  if (!errorPayload) {
    errorPayload = new InvalidRequestCompressedPayloadError()
  }

  error.decompressError = error
  Object.assign(error, errorPayload)
}

function getEncodingHeader (encodings, request) {
  let header = request.headers['accept-encoding']
  if (header != null) {
    header = header.toLowerCase()
      // consider the no-preference token as gzip for downstream compat
      // and x-gzip as an alias of gzip
      // ref.: [HTTP/1.1 RFC 7230 section 4.2.3](https://datatracker.ietf.org/doc/html/rfc7230#section-4.2.3)
      .replace(/\*|x-gzip/g, 'gzip')
    return encodingNegotiator.negotiate(header, encodings)
  } else {
    return undefined
  }
}

function shouldCompress (type, compressibleTypes) {
  if (compressibleTypes.test(type)) return true

  const data = mimedb[type.split(';', 1)[0].trim().toLowerCase()]
  if (data === undefined) return false

  return data.compressible === true
}

function isCompressed (data) {
  if (isGzip(data)) return 1
  if (isDeflate(data)) return 2
  return 0
}
function maybeUnzip (payload, serialize) {
  if (isStream(payload)) return payload

  let buf = payload; let result = payload

  if (ArrayBuffer.isView(payload)) {
    // Cast non-Buffer DataViews into a Buffer
    buf = result = Buffer.from(
      payload.buffer,
      payload.byteOffset,
      payload.byteLength
    )
  } else if (serialize && typeof payload !== 'string') {
    buf = result = serialize(payload)
  }

  // handle case where serialize doesn't return a string or Buffer
  if (!Buffer.isBuffer(buf)) return result
  if (isCompressed(buf) === 0) return result
  return intoStream(result)
}

function zipStream (deflate, encoding) {
  return peek({ newline: false, maxBuffer: 10 }, function (data, swap) {
    switch (isCompressed(data)) {
      case 1: return swap(null, new Minipass())
      case 2: return swap(null, new Minipass())
    }
    return swap(null, deflate[encoding]())
  })
}

function unzipStream (inflate, maxRecursion) {
  if (!(maxRecursion >= 0)) maxRecursion = 3

  return peek({ newline: false, maxBuffer: 10 }, function (data, swap) {
    /* istanbul ignore if */
    // This path is never taken, when `maxRecursion` < 0 it is automatically set back to 3
    if (maxRecursion < 0) return swap(new Error('Maximum recursion reached'))
    switch (isCompressed(data)) {
      case 1: return swap(null, pumpify(inflate.gzip(), unzipStream(inflate, maxRecursion - 1)))
      case 2: return swap(null, pumpify(inflate.deflate(), unzipStream(inflate, maxRecursion - 1)))
    }
    return swap(null, new Minipass())
  })
}

function createError (code, message, statusCode) {
  code = code.toUpperCase()

  function FastifyCompressError (a) {
    Error.captureStackTrace(this, FastifyCompressError)
    this.name = 'FastifyCompressError'
    this.code = code

    if (a) {
      this.message = format(message, a)
    } else {
      this.message = message
    }

    this.statusCode = statusCode
  }

  FastifyCompressError.prototype[Symbol.toStringTag] = 'Error'

  /* istanbul ignore next */
  FastifyCompressError.prototype.toString = function () {
    return `${this.name} [${this.code}]: ${this.message}`
  }

  inherits(FastifyCompressError, Error)

  return FastifyCompressError
}

module.exports = fp(compressPlugin, {
  fastify: '3.x',
  name: 'fastify-compress'
})

module.exports = require('fastify-compress-deprecated')
{
  "name": "fastify-compress",
  "version": "4.0.1",
  "description": "Fastify compression utils",
  "version": "4.1.0",
  "main": "index.js",
  "types": "index.d.ts",
  "dependencies": {
    "encoding-negotiator": "^2.0.1",
    "fastify-plugin": "^3.0.0",
    "into-stream": "^6.0.0",
    "is-deflate": "^1.0.0",
    "is-gzip": "^2.0.0",
    "is-stream": "^2.0.1",
    "is-zip": "^1.0.0",
    "mime-db": "^1.51.0",
    "minipass": "^3.1.5",
    "peek-stream": "^1.1.3",
    "pump": "^3.0.0",
    "pumpify": "^2.0.1",
    "string-to-stream": "^3.0.1"
  },
  "devDependencies": {
    "@types/node": "^16.11.10",
    "@typescript-eslint/parser": "^5.4.0",
    "adm-zip": "^0.5.9",
    "eslint-plugin-typescript": "^0.14.0",
    "fastify": "^3.24.0",
    "jsonstream": "^1.0.3",
    "pre-commit": "^1.2.2",
    "standard": "^16.0.4",
    "tap": "^15.1.2",
    "tsd": "^0.19.0",
    "typescript": "^4.5.2"
  },
  "scripts": {
    "coverage": "npm run unit -- --cov",
    "coverage-report": "npm run coverage -- --coverage-report=lcov",
    "lint": "standard",
    "lint:fix": "npm run lint -- --fix",
    "lint:typescript": "npm run lint:fix -- --parser @typescript-eslint/parser --plugin typescript test/types/*.ts",
    "lint:types": "npm run lint:fix -- --parser @typescript-eslint/parser --plugin typescript *.d.ts",
    "test": "npm run lint && npm run unit && npm run test:typescript",
    "test:typescript": "tsd",
    "unit": "tap -J test/*.test.js",
    "unit:report": "npm run unit -- --cov --coverage-report=html",
    "unit:verbose": "npm run unit -- -Rspec"
  },
  "keywords": [
    "fastify",
    "compression",
    "deflate",
    "gzip",
    "brotli"
  ],
  "author": "Tomas Della Vedova - @delvedor (http://delved.org)",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/fastify/fastify-compress/issues"
  },
  "homepage": "https://github.com/fastify/fastify-compress#readme",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/fastify/fastify-compress.git"
    "url": "git://github.com/fastify/fastify-compress.git"
  },
  "engines": {
    "node": ">=10.16"
  },
  "tsd": {
    "directory": "test/types"
  "homepage": "https://github.com/fastify/fastify-compress",
  "dependencies": {
    "process-warning": "^1.0.0",
    "fastify-compress-deprecated": "npm:fastify-compress@4.0.1"
  }
}
README.md
# fastify-compress

![CI](https://github.com/fastify/fastify-compress/workflows/CI/badge.svg)
[![NPM version](https://img.shields.io/npm/v/fastify-compress.svg?style=flat)](https://www.npmjs.com/package/fastify-compress)
[![Known Vulnerabilities](https://snyk.io/test/github/fastify/fastify-compress/badge.svg)](https://snyk.io/test/github/fastify/fastify-compress)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)

Adds compression utils to [the Fastify `reply` object](https://www.fastify.io/docs/master/Reply/) and a hook to decompress request payloads.
Supports `gzip`, `deflate`, and `brotli`.

> **Important note:** since `fastify-compress` version 4.x, payloads compressed with the `zip` algorithm are no longer automatically uncompressed. The main feature of `fastify-compress` is to provide a response compression mechanism for your server; the `zip` format does not appear in the [IANA-maintained Table of Content Encodings](https://www.iana.org/assignments/http-parameters/http-parameters.xml#content-coding), so that behavior was out of the scope of this plugin.

## Install
```
npm i fastify-compress
```

## Usage - Compress replies

This plugin adds two functionalities to Fastify: a compress utility and a global compression hook.

Currently, the following encoding tokens are supported, using the first acceptable token in this order:

1. `br`
2. `gzip`
3. `deflate`
4. `*` (no preference — `fastify-compress` will use `gzip`)
5. `identity` (no compression)

If an unsupported encoding is received, or if the `'accept-encoding'` header is missing, the payload will not be compressed. If you would instead like to return an error for an unsupported encoding, provide an `onUnsupportedEncoding` option.

The plugin automatically decides whether a payload should be compressed based on its `content-type`; if no content type is present, it assumes `application/json`.
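
For illustration, here is a minimal sketch of that negotiation using Fastify's `inject` method. The route and payload are made up for the example, and the `'br'` result assumes the default priority order listed above:

```javascript
const fastify = require('fastify')()

fastify.register(require('fastify-compress'))

fastify.get('/', (req, reply) => {
  // payload above the default 1024-byte threshold, with a compressible content type
  reply.type('text/plain').send('hello '.repeat(500))
})

// the client accepts both gzip and br; the plugin picks br first per its priority order
fastify.inject(
  { method: 'GET', url: '/', headers: { 'accept-encoding': 'gzip, br' } },
  (err, res) => {
    if (err) throw err
    console.log(res.headers['content-encoding']) // expected: 'br'
  }
)
```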
### Global hook

The global compression hook is enabled by default. To disable it, pass the option `{ global: false }`:
```javascript
fastify.register(
  require('fastify-compress'),
  { global: false }
)
```
Remember that, thanks to the Fastify encapsulation model, you can enable compression globally but run it only for a subset of routes by wrapping those routes inside a plugin (see the sketch below).

Important note! If you are using the `fastify-compress` plugin together with the `fastify-static` plugin, you must register `fastify-compress` (with its *global hook*) **before** registering `fastify-static`.
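
As a sketch of both notes above (the plugin function name and the `public` folder are only illustrative), compression is registered inside an encapsulated plugin and before `fastify-static`:

```javascript
const path = require('path')
const fastify = require('fastify')()

fastify.register(async function compressedAssets (instance) {
  // register fastify-compress first so the static files below are compressed
  instance.register(require('fastify-compress'))
  instance.register(require('fastify-static'), {
    root: path.join(__dirname, 'public') // hypothetical assets folder
  })
})

// routes outside the encapsulated plugin are left uncompressed
fastify.get('/health', (req, reply) => reply.send({ ok: true }))
```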
### Per Route options

You can specify different options for compression per route by passing in the `compress` options on the route's configuration.
```javascript
fastify.register(
  require('fastify-compress'),
  { global: false }
)

// only compress if the payload is above a certain size and use brotli
fastify.get('/custom-route', {
  compress: {
    inflateIfDeflated: true,
    threshold: 128,
    zlib: {
      createBrotliCompress: () => createYourCustomBrotliCompress(),
      createGzip: () => createYourCustomGzip(),
      createDeflate: () => createYourCustomDeflate()
    }
  }
}, (req, reply) => {
  // ...
})
```

Note: Setting `compress: false` on any route will disable compression on that route even if global compression is enabled.
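
For example, a single route can opt out while the global hook stays enabled for every other route (a minimal sketch; the route is made up):

```javascript
fastify.register(require('fastify-compress')) // global hook enabled by default

// this route is never compressed, whatever the accept-encoding header says
fastify.get('/metrics', { compress: false }, (req, reply) => {
  reply.type('text/plain').send('uptime_seconds 12345\n')
})
```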
### `reply.compress`

This plugin adds a `compress` method to `reply` that accepts a stream or a string, and compresses it based on the `accept-encoding` header. If a JS object is passed in, it will be stringified to JSON.

Note that the compress method is configured with the per-route parameters if the route has a custom configuration, or with the global parameters if the route has no custom parameters but the plugin was defined as global.

```javascript
const fs = require('fs')
const fastify = require('fastify')()

fastify.register(require('fastify-compress'), { global: false })

fastify.get('/', (req, reply) => {
  reply
    .type('text/plain')
    .compress(fs.createReadStream('./package.json'))
})

fastify.listen(3000, function (err) {
  if (err) throw err
  console.log(`server listening on ${fastify.server.address().port}`)
})
```

## Compress Options

### threshold

The minimum byte size for a response to be compressed. Defaults to `1024`.
```javascript
fastify.register(
  require('fastify-compress'),
  { threshold: 2048 }
)
```

### customTypes

[mime-db](https://github.com/jshttp/mime-db) is used to determine if a `content-type` should be compressed. You can compress additional content types via regular expression.
```javascript
fastify.register(
  require('fastify-compress'),
  { customTypes: /x-protobuf$/ }
)
```

### onUnsupportedEncoding

When the encoding is not supported, a custom error response can be sent in place of the uncompressed payload by setting the `onUnsupportedEncoding(encoding, request, reply)` option to a function that can modify the reply and return a `string | Buffer | Stream | Error` payload.
```javascript
fastify.register(
  require('fastify-compress'),
  {
    onUnsupportedEncoding: (encoding, request, reply) => {
      reply.code(406)
      return 'We do not support the ' + encoding + ' encoding.'
    }
  }
)
```

### Disable compression by header

You can selectively disable response compression by using the `x-no-compression` header in the request.
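
A quick way to see this with `fastify.inject` (a sketch; it assumes a compressible route like the ones above is registered). Only the presence of the header matters; its value is ignored:

```javascript
fastify.inject(
  {
    method: 'GET',
    url: '/',
    headers: {
      'accept-encoding': 'gzip',
      'x-no-compression': 'true' // any value works, only the header's presence is checked
    }
  },
  (err, res) => {
    if (err) throw err
    console.log(res.headers['content-encoding']) // undefined: the payload is sent uncompressed
  }
)
```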
### Inflate pre-compressed bodies for clients that do not support compression

Optional feature to inflate pre-compressed data if the client does not include one of the supported compression types in its `accept-encoding` header.
```javascript
fastify.register(
  require('fastify-compress'),
  { inflateIfDeflated: true }
)

fastify.get('/file', (req, reply) =>
  // will inflate the file on the way out for clients
  // that indicate they do not support compression
  reply.send(fs.createReadStream('./file.gz')))
```

### Customize encoding priority

By default, `fastify-compress` prioritizes compression as described [at the beginning of §Usage - Compress replies](#usage). You can change that by passing an array of compression tokens to the `encodings` option:
```javascript
fastify.register(
  require('fastify-compress'),
  // Only support gzip and deflate, and prefer deflate to gzip
  { encodings: ['deflate', 'gzip'] }
)
```

### brotliOptions and zlibOptions

You can tune compression by setting the `brotliOptions` and `zlibOptions` properties. These properties are passed directly to the native Node.js `zlib` methods, so they should match the corresponding [class](https://nodejs.org/api/zlib.html#zlib_class_brotlioptions) [definitions](https://nodejs.org/api/zlib.html#zlib_class_options).
```javascript
server.register(fastifyCompress, {
  brotliOptions: {
    params: {
      [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT, // useful for APIs that primarily return text
      [zlib.constants.BROTLI_PARAM_QUALITY]: 4, // default is 11, max is 11, min is 0
    },
  },
  zlibOptions: {
    level: 9, // max is 9, min is 0
  }
});
```

### Manage `Content-Length` header removal with removeContentLengthHeader

By default, `fastify-compress` removes the reply `Content-Length` header. You can change that by setting `removeContentLengthHeader` to `false`, either on a global scope or on a route-specific scope.
```javascript
// Global plugin scope
server.register(fastifyCompress, { global: true, removeContentLengthHeader: false });

// Route specific scope
fastify.get('/file', {
  compress: { removeContentLengthHeader: false }
}, (req, reply) =>
  reply.compress(fs.createReadStream('./file.gz'))
)
```

## Usage - Decompress request payloads

This plugin adds a `preParsing` hook that decompresses the request payload according to the `content-encoding` request header.

Currently, the following encoding tokens are supported:

1. `br`
2. `gzip`
3. `deflate`

If an unsupported encoding or an invalid payload is received, the plugin throws an error.

If the request header is missing, the plugin does nothing and yields to the next hook.
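
For instance, a gzip-compressed JSON body can be exercised like this (a sketch; the `/echo` route is hypothetical and is expected to parse JSON):

```javascript
const zlib = require('zlib')

const compressedBody = zlib.gzipSync(JSON.stringify({ hello: 'world' }))

fastify.inject(
  {
    method: 'POST',
    url: '/echo', // hypothetical route registered elsewhere
    headers: {
      'content-type': 'application/json',
      'content-encoding': 'gzip'
    },
    payload: compressedBody
  },
  (err, res) => {
    if (err) throw err
    // the route handler receives the decompressed JSON body
    console.log(res.statusCode)
  }
)
```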
### Global hook

The global request decompression hook is enabled by default. To disable it, pass the option `{ global: false }`:
```javascript
fastify.register(
  require('fastify-compress'),
  { global: false }
)
```
Remember that, thanks to the Fastify encapsulation model, you can enable decompression globally but run it only for a subset of routes by wrapping those routes inside a plugin.

### Per Route options

You can specify different options for decompression per route by passing in the `decompress` options on the route's configuration.
```javascript
fastify.register(
  require('fastify-compress'),
  { global: false }
)

// Always decompress using gzip
fastify.get('/custom-route', {
  decompress: {
    forceRequestEncoding: 'gzip',
    zlib: {
      createBrotliDecompress: () => createYourCustomBrotliDecompress(),
      createGunzip: () => createYourCustomGunzip(),
      createInflate: () => createYourCustomInflate()
    }
  }
}, (req, reply) => {
  // ...
})
```

### requestEncodings

By default, `fastify-compress` accepts all encodings specified [at the beginning of §Usage - Decompress request payloads](#usage). You can change that by passing an array of compression tokens to the `requestEncodings` option:
```javascript
fastify.register(
  require('fastify-compress'),
  // Only support gzip
  { requestEncodings: ['gzip'] }
)
```

### forceRequestEncoding

By default, `fastify-compress` chooses the decompression algorithm by looking at the `content-encoding` header, if present.

You can force one algorithm and ignore the header entirely by providing the `forceRequestEncoding` option.

Note that if the request payload is not actually compressed, `fastify-compress` will try to decompress it anyway, resulting in an error.
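
A minimal registration sketch:

```javascript
fastify.register(
  require('fastify-compress'),
  // every request body is treated as gzip, regardless of the content-encoding header
  { forceRequestEncoding: 'gzip' }
)
```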
### onUnsupportedRequestEncoding

When the request payload encoding is not supported, you can customize the response error by setting the `onUnsupportedRequestEncoding(encoding, request)` option to a function that returns an error payload.
```javascript
fastify.register(
  require('fastify-compress'),
  {
    onUnsupportedRequestEncoding: (encoding, request) => {
      return {
        statusCode: 415,
        code: 'UNSUPPORTED',
        error: 'Unsupported Media Type',
        message: 'We do not support the ' + encoding + ' encoding.'
      }
    }
  }
)
```

### onInvalidRequestPayload

When the request payload cannot be decompressed using the detected algorithm, you can customize the response error by setting the `onInvalidRequestPayload(encoding, request, error)` option to a function that returns an error payload.
```javascript
fastify.register(
  require('fastify-compress'),
  {
    onInvalidRequestPayload: (encoding, request, error) => {
      return {
        statusCode: 400,
        code: 'BAD_REQUEST',
        error: 'Bad Request',
        message: 'This is not a valid ' + encoding + ' encoded payload: ' + error.message
      }
    }
  }
)
```

## Note

Please note that in large-scale scenarios, you should use a proxy such as Nginx to handle response compression.

## Acknowledgements

Past sponsors:

- [LetzDoIt](http://www.letzdoitapp.com/)

## License

Licensed under [MIT](./LICENSE).

`fastify-compress@4.1.0` has been deprecated. Please use `@fastify/compress@5.0.0` instead.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Deprecated
Maintenance: The maintainer of the package marked it as deprecated. This could indicate that a single version should not be used, or that the package is no longer maintained and any new vulnerabilities will not be fixed.
Found 1 instance in 1 package

Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

Trivial Package
Supply chain risk: Packages less than 10 lines of code are easily copied into your own project and may not warrant the additional supply chain risk of an external dependency.
Found 1 instance in 1 package

No contributors or author data
Maintenance: Package does not specify a list of contributors or an author in package.json.
Found 1 instance in 1 package

No bug tracker
Maintenance: Package does not have a linked bug tracker in package.json.
Found 1 instance in 1 package

No repository
Supply chain risk: Package does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code used to generate the package.
Found 1 instance in 1 package

License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
+ Added fastify-compress-deprecated@npm:fastify-compress@4.0.1
+ Added process-warning@^1.0.0
+ Added fastify-compress@4.0.1 (transitive)
+ Added process-warning@1.0.0 (transitive)
- Removed encoding-negotiator@^2.0.1
- Removed fastify-plugin@^3.0.0
- Removed into-stream@^6.0.0
- Removed is-deflate@^1.0.0
- Removed is-gzip@^2.0.0
- Removed is-stream@^2.0.1
- Removed is-zip@^1.0.0
- Removed mime-db@^1.51.0
- Removed minipass@^3.1.5
- Removed peek-stream@^1.1.3
- Removed pump@^3.0.0
- Removed pumpify@^2.0.1
- Removed string-to-stream@^3.0.1