fastify-multipart
Comparing version 5.3.1 to 5.4.0
index.js
'use strict'

const Busboy = require('@fastify/busboy')
const os = require('os')
const fp = require('fastify-plugin')
const eos = require('end-of-stream')
const { createWriteStream } = require('fs')
const { unlink } = require('fs').promises
const path = require('path')
const hexoid = require('hexoid')
const util = require('util')
const createError = require('fastify-error')
const sendToWormhole = require('stream-wormhole')
const deepmerge = require('deepmerge')
const { PassThrough, pipeline } = require('stream')
const pump = util.promisify(pipeline)
const secureJSON = require('secure-json-parse')

const warning = require('process-warning')()
warning.create('FastifyWarning.fastify-multipart', 'FST_MODULE_DEP_fastify-multipart'.toUpperCase(), 'fastify-multipart has been deprecated. Use @fastify/multipart@6.0.0 instead.')
warning.emit('FST_MODULE_DEP_fastify-multipart'.toUpperCase())

const kMultipart = Symbol('multipart')
const kMultipartHandler = Symbol('multipartHandler')
const getDescriptor = Object.getOwnPropertyDescriptor

function setMultipart (req, payload, done) {
  // nothing to do, it will be done by the Request.multipart object
  req.raw[kMultipart] = true
  done()
}

function attachToBody (options, req, reply, next) {
  if (req.raw[kMultipart] !== true) {
    next()
    return
  }

  const consumerStream = options.onFile || defaultConsumer
  const body = {}
  const mp = req.multipart((field, file, filename, encoding, mimetype) => {
    body[field] = body[field] || []
    body[field].push({
      data: [],
      filename,
      encoding,
      mimetype,
      limit: false
    })

    const result = consumerStream(field, file, filename, encoding, mimetype, body)
    if (result && typeof result.then === 'function') {
      result.catch((err) => {
        // continue with the workflow
        err.statusCode = 500
        file.destroy(err)
      })
    }
  }, function (err) {
    if (!err) {
      req.body = body
    }
    next(err)
  }, options)

  mp.on('field', (key, value) => {
    if (key === '__proto__' || key === 'constructor') {
      mp.destroy(new Error(`${key} is not allowed as field name`))
      return
    }
    if (body[key] === undefined) {
      body[key] = value
    } else if (Array.isArray(body[key])) {
      body[key].push(value)
    } else {
      body[key] = [body[key], value]
    }
  })
}

function defaultConsumer (field, file, filename, encoding, mimetype, body) {
  const fileData = []
  const lastFile = body[field][body[field].length - 1]
  file.on('data', data => { if (!lastFile.limit) { fileData.push(data) } })
  file.on('limit', () => { lastFile.limit = true })
  file.on('end', () => {
    if (!lastFile.limit) {
      lastFile.data = Buffer.concat(fileData)
    } else {
      lastFile.data = undefined
    }
  })
}

function busboy (options) {
  try {
    return new Busboy(options)
  } catch (error) {
    const errorEmitter = new PassThrough()
    process.nextTick(function () {
      errorEmitter.emit('error', error)
    })
    return errorEmitter
  }
}

function fastifyMultipart (fastify, options, done) {
  if (options.addToBody === true) {
    if (typeof options.sharedSchemaId === 'string') {
      fastify.addSchema({
        $id: options.sharedSchemaId,
        type: 'object',
        properties: {
          encoding: { type: 'string' },
          filename: { type: 'string' },
          limit: { type: 'boolean' },
          mimetype: { type: 'string' }
        }
      })
    }

    fastify.addHook('preValidation', function (req, reply, next) {
      attachToBody(options, req, reply, next)
    })
  }

  if (options.attachFieldsToBody === true) {
    if (typeof options.sharedSchemaId === 'string') {
      fastify.addSchema({
        $id: options.sharedSchemaId,
        type: 'object',
        properties: {
          fieldname: { type: 'string' },
          encoding: { type: 'string' },
          filename: { type: 'string' },
          mimetype: { type: 'string' }
        }
      })
    }
    fastify.addHook('preValidation', async function (req, reply) {
      if (!req.isMultipart()) {
        return
      }
      for await (const part of req.parts()) {
        req.body = part.fields
        if (part.file) {
          if (options.onFile) {
            await options.onFile(part)
          } else {
            await part.toBuffer()
          }
        }
      }
    })
  }

  let throwFileSizeLimit = true
  if (typeof options.throwFileSizeLimit === 'boolean') {
    throwFileSizeLimit = options.throwFileSizeLimit
  }

  const PartsLimitError = createError('FST_PARTS_LIMIT', 'reach parts limit', 413)
  const FilesLimitError = createError('FST_FILES_LIMIT', 'reach files limit', 413)
  const FieldsLimitError = createError('FST_FIELDS_LIMIT', 'reach fields limit', 413)
  const RequestFileTooLargeError = createError('FST_REQ_FILE_TOO_LARGE', 'request file too large, please check multipart config', 413)
  const PrototypeViolationError = createError('FST_PROTO_VIOLATION', 'prototype property is not allowed as field name', 400)
  const InvalidMultipartContentTypeError = createError('FST_INVALID_MULTIPART_CONTENT_TYPE', 'the request is not multipart', 406)
  const InvalidJSONFieldError = createError('FST_INVALID_JSON_FIELD_ERROR', 'a request field is not a valid JSON as declared by its Content-Type', 406)

  fastify.decorate('multipartErrors', {
    PartsLimitError,
    FilesLimitError,
    FieldsLimitError,
    PrototypeViolationError,
    InvalidMultipartContentTypeError,
    RequestFileTooLargeError
  })

  fastify.addContentTypeParser('multipart', setMultipart)
  fastify.decorateRequest(kMultipartHandler, handleMultipart)
  fastify.decorateRequest('parts', getMultipartIterator)
  fastify.decorateRequest('isMultipart', isMultipart)
  fastify.decorateRequest('tmpUploads', null)

  // legacy
  fastify.decorateRequest('multipart', handleLegacyMultipartApi)

  // Stream mode
  fastify.decorateRequest('file', getMultipartFile)
  fastify.decorateRequest('files', getMultipartFiles)

  // Disk mode
  fastify.decorateRequest('saveRequestFiles', saveRequestFiles)
  fastify.decorateRequest('cleanRequestFiles', cleanRequestFiles)

  fastify.addHook('onResponse', async (request, reply) => {
    await request.cleanRequestFiles()
  })

  const toID = hexoid()

  function isMultipart () {
    return this.raw[kMultipart] || false
  }

  // handler definition is in multipart-readstream
  // handler(field, file, filename, encoding, mimetype)
  // opts is a per-request override for the options object
  function handleLegacyMultipartApi (handler, done, opts) {
    if (typeof handler !== 'function') {
      throw new Error('handler must be a function')
    }

    if (typeof done !== 'function') {
      throw new Error('the callback must be a function')
    }

    if (!this.isMultipart()) {
      done(new Error('the request is not multipart'))
      return
    }

    const log = this.log

    log.warn('the multipart callback-based api is deprecated in favour of the new promise api')
    log.debug('starting multipart parsing')

    const req = this.raw

    const busboyOptions = deepmerge.all([{ headers: Object.assign({}, req.headers) }, options || {}, opts || {}])
    const stream = busboy(busboyOptions)
    let completed = false
    let files = 0

    req.on('error', function (err) {
      stream.destroy()
      if (!completed) {
        completed = true
        done(err)
      }
    })

    stream.on('finish', function () {
      log.debug('finished receiving stream, total %d files', files)
      if (!completed) {
        completed = true
        setImmediate(done)
      }
    })

    stream.on('file', wrap)

    req.pipe(stream)
      .on('error', function (error) {
        req.emit('error', error)
      })

    function wrap (field, file, filename, encoding, mimetype) {
      log.debug({ field, filename, encoding, mimetype }, 'parsing part')
      files++
      eos(file, waitForFiles)
      if (field === '__proto__' || field === 'constructor') {
        file.destroy(new Error(`${field} is not allowed as field name`))
        return
      }
      handler(field, file, filename, encoding, mimetype)
    }

    function waitForFiles (err) {
      if (err) {
        completed = true
        done(err)
      }
    }

    return stream
  }

  function handleMultipart (opts = {}) {
    if (!this.isMultipart()) {
      throw new InvalidMultipartContentTypeError()
    }

    this.log.debug('starting multipart parsing')

    let values = []
    let pendingHandler = null

    // only one file / field can be processed at a time
    // "null" will close the consumer side
    const ch = (val) => {
      if (pendingHandler) {
        pendingHandler(val)
        pendingHandler = null
      } else {
        values.push(val)
      }
    }

    const handle = (handler) => {
      if (values.length > 0) {
        const value = values[0]
        values = values.slice(1)
        handler(value)
      } else {
        pendingHandler = handler
      }
    }

    const parts = () => {
      return new Promise((resolve, reject) => {
        handle((val) => {
          if (val instanceof Error) return reject(val)
          resolve(val)
        })
      })
    }

    const body = {}
    let lastError = null
    let currentFile = null
    const request = this.raw
    const busboyOptions = deepmerge.all([
      { headers: Object.assign({}, request.headers) },
      options,
      opts
    ])

    this.log.trace({ busboyOptions }, 'Providing options to busboy')
    const bb = busboy(busboyOptions)

    request.on('close', cleanup)
    request.on('error', cleanup)

    bb
      .on('field', onField)
      .on('file', onFile)
      .on('close', cleanup)
      .on('error', onEnd)
      .on('end', onEnd)
      .on('finish', onEnd)

    bb.on('partsLimit', function () {
      onError(new PartsLimitError())
    })

    bb.on('filesLimit', function () {
      onError(new FilesLimitError())
    })

    bb.on('fieldsLimit', function () {
      onError(new FieldsLimitError())
    })

    request.pipe(bb)

    function onField (name, fieldValue, fieldnameTruncated, valueTruncated, encoding, contentType) {
      let mimetype

      // don't overwrite prototypes
      if (getDescriptor(Object.prototype, name)) {
        onError(new PrototypeViolationError())
        return
      }

      // If it is a JSON field, parse it
      if (contentType.startsWith('application/json')) {
        // If the value was truncated, it can never be a valid JSON. Don't even try to parse
        if (valueTruncated) {
          onError(new InvalidJSONFieldError())
          return
        }

        try {
          fieldValue = secureJSON.parse(fieldValue)
          mimetype = 'application/json'
        } catch (e) {
          onError(new InvalidJSONFieldError())
          return
        }
      }

      const value = {
        fieldname: name,
        mimetype,
        encoding,
        value: fieldValue,
        fieldnameTruncated,
        valueTruncated,
        fields: body
      }

      if (body[name] === undefined) {
        body[name] = value
      } else if (Array.isArray(body[name])) {
        body[name].push(value)
      } else {
        body[name] = [body[name], value]
      }

      ch(value)
    }

    function onFile (name, file, filename, encoding, mimetype) {
      // don't overwrite prototypes
      if (getDescriptor(Object.prototype, name)) {
        // ensure that stream is consumed, any error is suppressed
        sendToWormhole(file)
        onError(new PrototypeViolationError())
        return
      }

      if (typeof opts.throwFileSizeLimit === 'boolean') {
        throwFileSizeLimit = opts.throwFileSizeLimit
      }

      const value = {
        fieldname: name,
        filename,
        encoding,
        mimetype,
        file,
        fields: body,
        _buf: null,
        async toBuffer () {
          if (this._buf) {
            return this._buf
          }
          const fileChunks = []
          for await (const chunk of this.file) {
            fileChunks.push(chunk)

            if (throwFileSizeLimit && this.file.truncated) {
              const err = new RequestFileTooLargeError()
              err.part = this

              onError(err)
              throw err
            }
          }
          this._buf = Buffer.concat(fileChunks)
          return this._buf
        }
      }

      if (throwFileSizeLimit) {
        file.on('limit', function () {
          const err = new RequestFileTooLargeError()
          err.part = value
          onError(err)
        })
      }

      if (body[name] === undefined) {
        body[name] = value
      } else if (Array.isArray(body[name])) {
        body[name].push(value)
      } else {
        body[name] = [body[name], value]
      }

      currentFile = file
      ch(value)
    }

    function onError (err) {
      lastError = err
      currentFile = null
    }

    function onEnd (err) {
      cleanup()
      ch(err || lastError)
    }

    function cleanup (err) {
      request.unpipe(bb)
      // in node 10 it seems that error handler is not called but request.aborted is set
      if ((err || request.aborted) && currentFile) {
        currentFile.destroy()
      }
    }

    return parts
  }

  async function saveRequestFiles (options) {
    const requestFiles = []
    const tmpdir = (options && options.tmpdir) || os.tmpdir()
    const files = await this.files(options)
    this.tmpUploads = []
    for await (const file of files) {
      const filepath = path.join(tmpdir, toID() + path.extname(file.filename))
      const target = createWriteStream(filepath)
      try {
        await pump(file.file, target)
        requestFiles.push({ ...file, filepath })
        this.tmpUploads.push(filepath)
      } catch (err) {
        this.log.error({ err }, 'save request file')
        throw err
      }
    }

    return requestFiles
  }

  async function cleanRequestFiles () {
    if (!this.tmpUploads) {
      return
    }
    for (const filepath of this.tmpUploads) {
      try {
        await unlink(filepath)
      } catch (error) {
        this.log.error(error, 'could not delete file')
      }
    }
  }

  async function getMultipartFile (options) {
    const parts = this[kMultipartHandler](options)

    let part
    while ((part = await parts()) != null) {
      if (part.file) {
        // part.file.truncated is true when a configured file size limit is reached
        if (part.file.truncated && throwFileSizeLimit) {
          throw new RequestFileTooLargeError()
        }

        return part
      }
    }
  }

  async function * getMultipartFiles (options) {
    const parts = this[kMultipartHandler](options)

    let part
    while ((part = await parts()) != null) {
      if (part.file) {
        yield part
      }
    }
  }

  async function * getMultipartIterator (options) {
    const parts = this[kMultipartHandler](options)

    let part
    while ((part = await parts()) != null) {
      yield part
    }
  }

  done()
}

module.exports = fp(fastifyMultipart, {
  fastify: '>= 0.39.0',
  name: 'fastify-multipart'
})

module.exports = require('fastify-multipart-deprecated')
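In 5.4.0 the implementation above is replaced by a re-export through an npm package alias, declared in the new `dependencies` of the `package.json` below: the alias makes `require('fastify-multipart-deprecated')` resolve to the published `fastify-multipart@5.3.1`, so 5.4.0 only adds the deprecation warning. For illustration, the equivalent standalone install command using npm's alias syntax:

```sh
# "fastify-multipart-deprecated" resolves to the published fastify-multipart@5.3.1
npm i fastify-multipart-deprecated@npm:fastify-multipart@5.3.1
```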
{
  "name": "fastify-multipart",
  "version": "5.3.1",
  "description": "Multipart plugin for Fastify",
  "version": "5.4.0",
  "main": "index.js",
  "types": "index.d.ts",
  "dependencies": {
    "@fastify/busboy": "^1.0.0",
    "deepmerge": "^4.2.2",
    "end-of-stream": "^1.4.4",
    "fastify-error": "^0.3.0",
    "fastify-plugin": "^3.0.0",
    "hexoid": "^1.0.0",
    "secure-json-parse": "^2.4.0",
    "stream-wormhole": "^1.1.0"
  },
  "devDependencies": {
    "@types/node": "^17.0.1",
    "@typescript-eslint/parser": "^4.33.0",
    "climem": "^1.0.3",
    "eslint": "^7.7.0",
    "eslint-config-standard": "^16.0.0",
    "eslint-config-standard-with-typescript": "^21.0.1",
    "eslint-plugin-import": "^2.22.0",
    "eslint-plugin-node": "^11.1.0",
    "eslint-plugin-promise": "^5.1.0",
    "eslint-plugin-typescript": "^0.14.0",
    "fastify": "^3.24.1",
    "form-data": "^4.0.0",
    "h2url": "^0.2.0",
    "noop-stream": "^0.1.0",
    "pre-commit": "^1.2.2",
    "pump": "^3.0.0",
    "readable-stream": "^3.6.0",
    "snazzy": "^9.0.0",
    "standard": "^16.0.1",
    "tap": "^15.0.1",
    "tsd": "^0.19.0",
    "typescript": "^4.5.2"
  },
  "scripts": {
    "coverage": "tap \"test/**/*.test.js\" --coverage-report=html",
    "climem": "climem 8999 localhost",
    "lint": "standard | snazzy",
    "start": "CLIMEM=8999 node -r climem ./examples/example",
    "test": "npm run lint && npm run unit && npm run typescript",
    "typescript": "tsd",
    "unit": "tap \"test/**/*.test.js\" -t 90"
  },
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/fastify/fastify-multipart.git",
    "url": "git://github.com/fastify/fastify-multipart.git"
  },
  "keywords": [
    "fastify",
    "multipart",
    "form"
  ],
  "author": "Matteo Collina <hello@matteocollina.com>",
  "license": "MIT",
  "tsd": {
    "directory": "test"
  },
  "homepage": "https://github.com/fastify/fastify-multipart",
  "dependencies": {
    "process-warning": "^1.0.0",
    "fastify-multipart-deprecated": "npm:fastify-multipart@5.3.1"
  }
}
README.md
# fastify-multipart

![CI](https://github.com/fastify/fastify-multipart/workflows/CI/badge.svg)
[![NPM version](https://img.shields.io/npm/v/fastify-multipart.svg?style=flat)](https://www.npmjs.com/package/fastify-multipart)
[![Known Vulnerabilities](https://snyk.io/test/github/fastify/fastify-multipart/badge.svg)](https://snyk.io/test/github/fastify/fastify-multipart)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)

Fastify plugin to parse the multipart content-type. Supports:

- Async / Await
- Async iterator support to handle multiple parts
- Stream & Disk mode
- Accumulate whole file in memory
- Mode to attach all fields to the request body
- Tested across Linux/Mac/Windows

Under the hood it uses [`@fastify/busboy`](https://github.com/fastify/busboy).

## Install

```sh
npm i --save fastify-multipart
```
## Usage

If you are looking for the documentation for the legacy callback API, see [here](./callback.md).
```js
const fastify = require('fastify')()
const fs = require('fs')
const util = require('util')
const path = require('path')
const { pipeline } = require('stream')
const pump = util.promisify(pipeline)

fastify.register(require('fastify-multipart'))

fastify.post('/', async function (req, reply) {
  // process a single file
  // also, consider that if you allow to upload multiple files
  // you must consume all files otherwise the promise will never fulfill
  const data = await req.file()

  data.file // stream
  data.fields // other parsed parts
  data.fieldname
  data.filename
  data.encoding
  data.mimetype

  // to accumulate the file in memory! Be careful!
  //
  // await data.toBuffer() // Buffer
  //
  // or

  await pump(data.file, fs.createWriteStream(data.filename))

  // be careful of permission issues on disk and not overwrite
  // sensitive files that could cause security risks
  // also, consider that if the file stream is not consumed, the promise will never fulfill

  reply.send()
})

fastify.listen(3000, err => {
  if (err) throw err
  console.log(`server listening on ${fastify.server.address().port}`)
})
```
**Note** about `data.fields`: `busboy` consumes the multipart stream serially, so the order of the form fields determines when `fastify-multipart` can expose them to you.
We recommend placing all value fields before any file fields: that ensures your fields are accessible before the plugin starts consuming any files (see the sketch below).
If you cannot control the order of the fields, be sure to read `data.fields` *after* consuming the stream; otherwise it will only contain the fields parsed up to that moment.
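For illustration, here is one way to enforce that ordering on the client with the [`form-data`](https://github.com/form-data/form-data) package (which this plugin uses in its own tests); the field names, file path, and port are hypothetical:

```js
const fs = require('fs')
const http = require('http')
const FormData = require('form-data')

const form = new FormData()
// append plain value fields first...
form.append('hello', 'world')
// ...and file fields last, so the values are parsed before the file stream
form.append('upload', fs.createReadStream('./example.txt'))

const req = http.request({ method: 'POST', host: 'localhost', port: 3000, headers: form.getHeaders() })
form.pipe(req)
```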
You can also pass optional arguments to `@fastify/busboy` when registering with Fastify. This is useful for setting limits on the content that can be uploaded. A full list of available options can be found in the [`@fastify/busboy` documentation](https://github.com/fastify/busboy#busboy-methods).

```js
fastify.register(require('fastify-multipart'), {
  limits: {
    fieldNameSize: 100, // Max field name size in bytes
    fieldSize: 100, // Max field value size in bytes
    fields: 10, // Max number of non-file fields
    fileSize: 1000000, // For multipart forms, the max file size in bytes
    files: 1, // Max number of file fields
    headerPairs: 2000 // Max number of header key=>value pairs
  }
});
```
**Note**: the file stream provided by `data.file` must be consumed, as the examples here do with `pump`; otherwise the promise will never be fulfilled at the end of the multipart processing.
This behavior is inherited from [`@fastify/busboy`](https://github.com/fastify/busboy).

**Note**: if you set a `fileSize` limit and you want to know whether it was reached, you can:
- listen to the `data.file.on('limit')` event
- check the `data.file.truncated` property at the end of the stream
- call `data.toBuffer()` and catch the error that is thrown
```js
const data = await req.file()
await pump(data.file, fs.createWriteStream(data.filename))
if (data.file.truncated) {
  // you may need to delete the part of the file that has been saved on disk
  // before the `limits.fileSize` has been reached
  reply.send(new fastify.multipartErrors.FilesLimitError());
}

// OR
const data = await req.file()
try {
  const buffer = await data.toBuffer()
} catch (err) {
  // fileSize limit reached!
}
```

Additionally, you can pass per-request options to the `req.file`, `req.files`, `req.saveRequestFiles` or `req.parts` function.

```js
fastify.post('/', async function (req, reply) {
  const options = { limits: { fileSize: 1000 } };
  const data = await req.file(options)

  await pump(data.file, fs.createWriteStream(data.filename))

  reply.send()
})
```
## Handle multiple file streams

```js
fastify.post('/', async function (req, reply) {
  const parts = req.files()
  for await (const part of parts) {
    await pump(part.file, fs.createWriteStream(part.filename))
  }
  reply.send()
})
```

## Handle multiple file streams and fields

```js
fastify.post('/upload/raw/any', async function (req, reply) {
  const parts = req.parts()
  for await (const part of parts) {
    if (part.file) {
      await pump(part.file, fs.createWriteStream(part.filename))
    } else {
      console.log(part)
    }
  }
  reply.send()
})
```

## Accumulate whole file in memory

```js
fastify.post('/upload/raw/any', async function (req, reply) {
  const data = await req.file()
  const buffer = await data.toBuffer()
  // upload to S3
  reply.send()
})
```

## Upload files to disk and work with temporary file paths

This will store all files in the operating system default directory for temporary files. As soon as the response ends, all files are removed.

```js
fastify.post('/upload/files', async function (req, reply) {
  // stores files to tmp dir and return files
  const files = await req.saveRequestFiles()
  files[0].filepath
  files[0].fieldname
  files[0].filename
  files[0].encoding
  files[0].mimetype
  files[0].fields // other parsed parts

  reply.send()
})
```
## Handle file size limitation

If you set a `fileSize` limit, the plugin throws a `RequestFileTooLargeError` when the limit is reached.
```js
fastify.post('/upload/files', async function (req, reply) {
  try {
    const file = await req.file({ limits: { fileSize: 17000 } })
    // const files = req.files({ limits: { fileSize: 17000 } })
    // const parts = req.parts({ limits: { fileSize: 17000 } })
    // const files = await req.saveRequestFiles({ limits: { fileSize: 17000 } })
    reply.send()
  } catch (error) {
    // error instanceof fastify.multipartErrors.RequestFileTooLargeError
  }
})
```
If you want to fall back to the behavior before `4.0.0`, you can disable the throwing by passing `throwFileSizeLimit`.
Note: it will not affect the behavior of `saveRequestFiles()`.
```js
// globally disable
fastify.register(fastifyMultipart, { throwFileSizeLimit: false })

fastify.post('/upload/file', async function (req, reply) {
  const file = await req.file({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
  // const files = req.files({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
  // const parts = req.parts({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
  // const files = await req.saveRequestFiles({ throwFileSizeLimit: false, limits: { fileSize: 17000 } })
  reply.send()
})
```
## Parse all fields and assign them to the body

This allows you to parse all fields automatically and assign them to `request.body`. By default, files are accumulated in memory (be careful!) as Buffer objects. Uncaught errors are [handled](https://github.com/fastify/fastify/blob/master/docs/Hooks.md#manage-errors-from-a-hook) by Fastify.
```js
fastify.register(require('fastify-multipart'), { attachFieldsToBody: true })

fastify.post('/upload/files', async function (req, reply) {
  const uploadValue = await req.body.upload.toBuffer() // access files
  const fooValue = req.body.foo.value // other fields
  const body = Object.fromEntries(
    Object.keys(req.body).map((key) => [key, req.body[key].value])
  ) // Request body in key-value pairs, like req.body in Express (Node 12+)
})
```

You can also define an `onFile` handler to avoid accumulating all files in memory.

```js
async function onFile(part) {
  await pump(part.file, fs.createWriteStream(part.filename))
}

fastify.register(require('fastify-multipart'), { attachFieldsToBody: true, onFile })

fastify.post('/upload/files', async function (req, reply) {
  const fooValue = req.body.foo.value // other fields
})
```
**Note**: if you assign all fields to the body and don't define an `onFile` handler, you won't be able to read the files through streams: they have already been read, and their contents accumulated in memory.
You can only use the `toBuffer` method to read the content.
If you try to read from the already-drained stream and pipe it to a new file, you will end up with an empty file, as the sketch below illustrates.
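A minimal sketch of that distinction; the field name `upload` is hypothetical:

```js
fastify.register(require('fastify-multipart'), { attachFieldsToBody: true })

fastify.post('/upload/files', async function (req, reply) {
  // works: the content was already accumulated during the preValidation hook
  const buffer = await req.body.upload.toBuffer()

  // does NOT work: the underlying stream was already drained,
  // so this would only produce an empty file on disk
  // await pump(req.body.upload.file, fs.createWriteStream('./copy.bin'))

  reply.send()
})
```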
## JSON Schema body validation

If you enable `attachFieldsToBody` and set `sharedSchemaId`, a shared JSON Schema is added, which can be used to validate parsed multipart fields.
```js
const opts = {
  attachFieldsToBody: true,
  sharedSchemaId: '#mySharedSchema'
}
fastify.register(require('fastify-multipart'), opts)

fastify.post('/upload/files', {
  schema: {
    body: {
      type: 'object',
      required: ['myField'],
      properties: {
        // field that uses the shared schema
        myField: { $ref: '#mySharedSchema'},
        // or another field that uses the shared schema
        myFiles: { type: 'array', items: fastify.getSchema('mySharedSchema') },
        // or a field that doesn't use the shared schema
        hello: {
          properties: {
            value: {
              type: 'string',
              enum: ['male']
            }
          }
        }
      }
    }
  }
}, function (req, reply) {
  console.log({ body: req.body })
  reply.send('done')
})
```
If provided, the `sharedSchemaId` parameter must be a string ID. A shared schema with that ID is added to your Fastify instance, so you can apply the validation to your service (as in the example above).
The shared schema that is added looks like this:
```js
{
  type: 'object',
  properties: {
    encoding: { type: 'string' },
    filename: { type: 'string' },
    limit: { type: 'boolean' },
    mimetype: { type: 'string' }
  }
}
```
### JSON Schema non-file field

When sending fields with the body (`attachFieldsToBody` set to `true`), a field that you would ordinarily expect to appear in the `request.body` like this:
```json
{
  "hello": "world"
}
```
is converted by this plugin into a more complex field. The converted field looks something like this:
```js
{
  hello: {
    fieldname: "hello",
    value: "world",
    fieldnameTruncated: false,
    valueTruncated: false,
    fields: body
  }
}
```
It is important to know that this conversion happens *before* the field is validated, so keep that in mind when writing JSON schemas for fields that don't use the shared schema. The validation schema for the field above should look like this:
```js
hello: {
  properties: {
    value: {
      type: 'string'
    }
  }
}
```
#### JSON non-file fields

If a non-file field is sent with a `Content-Type` header starting with `application/json`, it will be parsed as JSON (using [`secure-json-parse`](https://github.com/fastify/secure-json-parse), which guards against prototype poisoning).
The schema to validate JSON fields should look like this:
```js
hello: {
  properties: {
    value: {
      type: 'object',
      properties: {
        /* ... */
      }
    }
  }
}
```
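On the client side, such a field has to be appended with an explicit content type; for example with the `form-data` package (field name and payload are hypothetical):

```js
const FormData = require('form-data')

const form = new FormData()
// the contentType option marks this field as application/json for the parser
form.append('hello', JSON.stringify({ world: true }), { contentType: 'application/json' })
```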
If you also use the shared JSON schema as shown above, this is a full example which validates the entire field:

```js
const opts = {
  attachFieldsToBody: true,
  sharedSchemaId: '#mySharedSchema'
}
fastify.register(require('fastify-multipart'), opts)

fastify.post('/upload/files', {
  schema: {
    body: {
      type: 'object',
      required: ['field'],
      properties: {
        field: {
          allOf: [
            { $ref: '#mySharedSchema' },
            {
              properties: {
                value: {
                  type: 'object',
                  properties: {
                    child: {
                      type: 'string'
                    }
                  }
                }
              }
            }
          ]
        }
      }
    }
  }
}, function (req, reply) {
  console.log({ body: req.body })
  reply.send('done')
})
```
## Access all errors

We export all custom errors via a server decorator `fastify.multipartErrors`. This is useful if you want to react to specific errors. They are derived from [fastify-error](https://github.com/fastify/fastify-error) and include the correct `statusCode` property.

```js
fastify.post('/upload/files', async function (req, reply) {
  const { FilesLimitError } = fastify.multipartErrors
})
```
## Acknowledgements

This project is kindly sponsored by:
- [nearForm](https://nearform.com)
- [LetzDoIt](https://www.letzdoitapp.com/)

## License

Licensed under [MIT](./LICENSE).
`fastify-multipart@5.4.0` has been deprecated. Please use `@fastify/multipart@6.0.0` instead.
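For most of the usage shown in this README, migration is a package rename; a sketch, assuming no other code changes are needed for your use case (check the `@fastify/multipart` changelog for behavioral differences):

```sh
npm uninstall fastify-multipart
npm i @fastify/multipart
```

```js
// before
fastify.register(require('fastify-multipart'))
// after
fastify.register(require('@fastify/multipart'))
```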
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Deprecated
Maintenance: The maintainer of the package marked it as deprecated. This could indicate that a single version should not be used, or that the package is no longer maintained and any new vulnerabilities will not be fixed.
Found 1 instance in 1 package

Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package

Trivial Package
Supply chain risk: Packages less than 10 lines of code are easily copied into your own project and may not warrant the additional supply chain risk of an external dependency.
Found 1 instance in 1 package

No contributors or author data
Maintenance: Package does not specify a list of contributors or an author in package.json.
Found 1 instance in 1 package

No repository
Supply chain risk: Package does not have a linked source code repository. Without this field, a package will have no reference to the location of the source code used to generate the package.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Network access
Supply chain risk: This module accesses the network.
Found 1 instance in 1 package

Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package

Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package

No website
Quality: Package does not have a website.
Found 1 instance in 1 package
+ Added fastify-multipart-deprecated@npm:fastify-multipart@5.3.1
+ Added process-warning@^1.0.0
+ Added fastify-multipart@5.3.1 (transitive)
+ Added process-warning@1.0.0 (transitive)
- Removed @fastify/busboy@^1.0.0
- Removed deepmerge@^4.2.2
- Removed end-of-stream@^1.4.4
- Removed fastify-error@^0.3.0
- Removed fastify-plugin@^3.0.0
- Removed hexoid@^1.0.0
- Removed secure-json-parse@^2.4.0
- Removed stream-wormhole@^1.1.0