@fastify/busboy - npm Package Compare versions

Comparing version 1.0.0-next1 to 1.0.0-next2


deps/dicer/lib/Dicer.js

@@ -1,100 +0,86 @@

var WritableStream = require('stream').Writable,
inherits = require('util').inherits;
const WritableStream = require('stream').Writable
const inherits = require('util').inherits
var StreamSearch = require('../../streamsearch/sbmh');
const StreamSearch = require('../../streamsearch/sbmh')
var PartStream = require('./PartStream'),
HeaderParser = require('./HeaderParser');
const PartStream = require('./PartStream')
const HeaderParser = require('./HeaderParser')
var DASH = 45,
B_ONEDASH = Buffer.from('-'),
B_CRLF = Buffer.from('\r\n'),
EMPTY_FN = function() {};
const DASH = 45
const B_ONEDASH = Buffer.from('-')
const B_CRLF = Buffer.from('\r\n')
const EMPTY_FN = function () {}
function Dicer(cfg) {
if (!(this instanceof Dicer))
return new Dicer(cfg);
WritableStream.call(this, cfg);
function Dicer (cfg) {
if (!(this instanceof Dicer)) { return new Dicer(cfg) }
WritableStream.call(this, cfg)
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string'))
throw new TypeError('Boundary required');
if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') }
if (typeof cfg.boundary === 'string')
this.setBoundary(cfg.boundary);
else
this._bparser = undefined;
if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined }
this._headerFirst = cfg.headerFirst;
this._headerFirst = cfg.headerFirst
var self = this;
const self = this
this._dashes = 0;
this._parts = 0;
this._finished = false;
this._realFinish = false;
this._isPreamble = true;
this._justMatched = false;
this._firstWrite = true;
this._inHeader = true;
this._part = undefined;
this._cb = undefined;
this._ignoreData = false;
this._dashes = 0
this._parts = 0
this._finished = false
this._realFinish = false
this._isPreamble = true
this._justMatched = false
this._firstWrite = true
this._inHeader = true
this._part = undefined
this._cb = undefined
this._ignoreData = false
this._partOpts = (typeof cfg.partHwm === 'number'
? { highWaterMark: cfg.partHwm }
: {});
this._pause = false;
? { highWaterMark: cfg.partHwm }
: {})
this._pause = false
this._hparser = new HeaderParser(cfg);
this._hparser.on('header', function(header) {
self._inHeader = false;
self._part.emit('header', header);
});
this._hparser = new HeaderParser(cfg)
this._hparser.on('header', function (header) {
self._inHeader = false
self._part.emit('header', header)
})
}
inherits(Dicer, WritableStream);
inherits(Dicer, WritableStream)
Dicer.prototype.emit = function(ev) {
Dicer.prototype.emit = function (ev) {
if (ev === 'finish' && !this._realFinish) {
if (!this._finished) {
var self = this;
process.nextTick(function() {
self.emit('error', new Error('Unexpected end of multipart data'));
const self = this
process.nextTick(function () {
self.emit('error', new Error('Unexpected end of multipart data'))
if (self._part && !self._ignoreData) {
var type = (self._isPreamble ? 'Preamble' : 'Part');
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'));
self._part.push(null);
process.nextTick(function() {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
});
return;
const type = (self._isPreamble ? 'Preamble' : 'Part')
self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data'))
self._part.push(null)
process.nextTick(function () {
self._realFinish = true
self.emit('finish')
self._realFinish = false
})
return
}
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
});
self._realFinish = true
self.emit('finish')
self._realFinish = false
})
}
} else
WritableStream.prototype.emit.apply(this, arguments);
};
} else { WritableStream.prototype.emit.apply(this, arguments) }
}
Dicer.prototype._write = function(data, encoding, cb) {
Dicer.prototype._write = function (data, encoding, cb) {
// ignore unexpected data (e.g. extra trailer data after finished)
if (!this._hparser && !this._bparser)
return cb();
if (!this._hparser && !this._bparser) { return cb() }
if (this._headerFirst && this._isPreamble) {
if (!this._part) {
this._part = new PartStream(this._partOpts);
if (this._events.preamble)
this.emit('preamble', this._part);
else
this._ignore();
this._part = new PartStream(this._partOpts)
if (this._events.preamble) { this.emit('preamble', this._part) } else { this._ignore() }
}
var r = this._hparser.push(data);
if (!this._inHeader && r !== undefined && r < data.length)
data = data.slice(r);
else
return cb();
const r = this._hparser.push(data)
if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() }
}

@@ -104,41 +90,38 @@

if (this._firstWrite) {
this._bparser.push(B_CRLF);
this._firstWrite = false;
this._bparser.push(B_CRLF)
this._firstWrite = false
}
this._bparser.push(data);
this._bparser.push(data)
if (this._pause)
this._cb = cb;
else
cb();
};
if (this._pause) { this._cb = cb } else { cb() }
}
Dicer.prototype.reset = function() {
this._part = undefined;
this._bparser = undefined;
this._hparser = undefined;
};
Dicer.prototype.reset = function () {
this._part = undefined
this._bparser = undefined
this._hparser = undefined
}
Dicer.prototype.setBoundary = function(boundary) {
var self = this;
this._bparser = new StreamSearch('\r\n--' + boundary);
this._bparser.on('info', function(isMatch, data, start, end) {
self._oninfo(isMatch, data, start, end);
});
};
Dicer.prototype.setBoundary = function (boundary) {
const self = this
this._bparser = new StreamSearch('\r\n--' + boundary)
this._bparser.on('info', function (isMatch, data, start, end) {
self._oninfo(isMatch, data, start, end)
})
}
Dicer.prototype._ignore = function() {
Dicer.prototype._ignore = function () {
if (this._part && !this._ignoreData) {
this._ignoreData = true;
this._part.on('error', EMPTY_FN);
this._ignoreData = true
this._part.on('error', EMPTY_FN)
// we must perform some kind of read on the stream even though we are
// ignoring the data, otherwise node's Readable stream will not emit 'end'
// after pushing null to the stream
this._part.resume();
this._part.resume()
}
};
}
Dicer.prototype._oninfo = function(isMatch, data, start, end) {
var buf, self = this, i = 0, r, shouldWriteMore = true;
Dicer.prototype._oninfo = function (isMatch, data, start, end) {
let buf; const self = this; let i = 0; let r; let shouldWriteMore = true

@@ -148,95 +131,80 @@ if (!this._part && this._justMatched && data) {

if (data[start + i] === DASH) {
++i;
++this._dashes;
++i
++this._dashes
} else {
if (this._dashes)
buf = B_ONEDASH;
this._dashes = 0;
break;
if (this._dashes) { buf = B_ONEDASH }
this._dashes = 0
break
}
}
if (this._dashes === 2) {
if ((start + i) < end && this._events.trailer)
this.emit('trailer', data.slice(start + i, end));
this.reset();
this._finished = true;
if ((start + i) < end && this._events.trailer) { this.emit('trailer', data.slice(start + i, end)) }
this.reset()
this._finished = true
// no more parts will be added
if (self._parts === 0) {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
self._realFinish = true
self.emit('finish')
self._realFinish = false
}
}
if (this._dashes)
return;
if (this._dashes) { return }
}
if (this._justMatched)
this._justMatched = false;
if (this._justMatched) { this._justMatched = false }
if (!this._part) {
this._part = new PartStream(this._partOpts);
this._part._read = function(n) {
self._unpause();
};
if (this._isPreamble && this._events['preamble'])
this.emit('preamble', this._part);
else if (this._isPreamble !== true && this._events['part'])
this.emit('part', this._part);
else
this._ignore();
if (!this._isPreamble)
this._inHeader = true;
this._part = new PartStream(this._partOpts)
this._part._read = function (n) {
self._unpause()
}
if (this._isPreamble && this._events.preamble) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this._events.part) { this.emit('part', this._part) } else { this._ignore() }
if (!this._isPreamble) { this._inHeader = true }
}
if (data && start < end && !this._ignoreData) {
if (this._isPreamble || !this._inHeader) {
if (buf)
shouldWriteMore = this._part.push(buf);
shouldWriteMore = this._part.push(data.slice(start, end));
if (!shouldWriteMore)
this._pause = true;
if (buf) { shouldWriteMore = this._part.push(buf) }
shouldWriteMore = this._part.push(data.slice(start, end))
if (!shouldWriteMore) { this._pause = true }
} else if (!this._isPreamble && this._inHeader) {
if (buf)
this._hparser.push(buf);
r = this._hparser.push(data.slice(start, end));
if (!this._inHeader && r !== undefined && r < end)
this._oninfo(false, data, start + r, end);
if (buf) { this._hparser.push(buf) }
r = this._hparser.push(data.slice(start, end))
if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) }
}
}
if (isMatch) {
this._hparser.reset();
if (this._isPreamble)
this._isPreamble = false;
else {
++this._parts;
this._part.on('end', function() {
if (--self._parts === 0) {
if (self._finished) {
self._realFinish = true;
self.emit('finish');
self._realFinish = false;
} else {
self._unpause();
this._hparser.reset()
if (this._isPreamble) { this._isPreamble = false } else {
if (start !== end) {
++this._parts
this._part.on('end', function () {
if (--self._parts === 0) {
if (self._finished) {
self._realFinish = true
self.emit('finish')
self._realFinish = false
} else {
self._unpause()
}
}
}
});
})
}
}
this._part.push(null);
this._part = undefined;
this._ignoreData = false;
this._justMatched = true;
this._dashes = 0;
this._part.push(null)
this._part = undefined
this._ignoreData = false
this._justMatched = true
this._dashes = 0
}
};
}
Dicer.prototype._unpause = function() {
if (!this._pause)
return;
Dicer.prototype._unpause = function () {
if (!this._pause) { return }
this._pause = false;
this._pause = false
if (this._cb) {
var cb = this._cb;
this._cb = undefined;
cb();
const cb = this._cb
this._cb = undefined
cb()
}
};
}
module.exports = Dicer;
module.exports = Dicer
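The Dicer changes above are largely a StandardJS conversion (`var` → `const`/`let`, braced single-statement bodies) plus a new `start !== end` guard before a part is counted. For orientation, here is a minimal, hedged sketch of how this internal parser is driven; the boundary value is illustrative and the require path follows the file header shown above.

```js
// Illustrative sketch only (not part of the diff): exercising Dicer directly.
const Dicer = require('./deps/dicer/lib/Dicer')

const d = new Dicer({ boundary: 'AaB03x' }) // omitting the boundary throws TypeError('Boundary required')
d.on('part', (part) => {
  part.on('header', (header) => console.log('headers:', header))
  part.on('data', (data) => console.log('data: %d bytes', data.length))
  part.on('end', () => console.log('part end'))
})
d.on('finish', () => console.log('all parts parsed'))

d.write('--AaB03x\r\ncontent-type: text/plain\r\n\r\nhello\r\n--AaB03x--')
d.end()
```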

@@ -1,82 +0,73 @@

var EventEmitter = require('events').EventEmitter,
inherits = require('util').inherits;
const EventEmitter = require('events').EventEmitter
const inherits = require('util').inherits
const getLimit = require('../../../lib/utils').getLimit
var StreamSearch = require('../../streamsearch/sbmh');
const StreamSearch = require('../../streamsearch/sbmh')
var B_DCRLF = Buffer.from('\r\n\r\n'),
RE_CRLF = /\r\n/g,
RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/,
MAX_HEADER_PAIRS = 2000, // from node's http.js
MAX_HEADER_SIZE = 80 * 1024; // from node's http_parser
const B_DCRLF = Buffer.from('\r\n\r\n')
const RE_CRLF = /\r\n/g
const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex
function HeaderParser(cfg) {
EventEmitter.call(this);
function HeaderParser (cfg) {
EventEmitter.call(this)
var self = this;
this.nread = 0;
this.maxed = false;
this.npairs = 0;
this.maxHeaderPairs = (cfg && typeof cfg.maxHeaderPairs === 'number'
? cfg.maxHeaderPairs
: MAX_HEADER_PAIRS);
this.buffer = '';
this.header = {};
this.finished = false;
this.ss = new StreamSearch(B_DCRLF);
this.ss.on('info', function(isMatch, data, start, end) {
cfg = cfg || {}
const self = this
this.nread = 0
this.maxed = false
this.npairs = 0
this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000)
this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024)
this.buffer = ''
this.header = {}
this.finished = false
this.ss = new StreamSearch(B_DCRLF)
this.ss.on('info', function (isMatch, data, start, end) {
if (data && !self.maxed) {
if (self.nread + (end - start) > MAX_HEADER_SIZE) {
end = MAX_HEADER_SIZE - self.nread + start;
self.nread = MAX_HEADER_SIZE;
} else
self.nread += (end - start);
if (self.nread + end - start >= self.maxHeaderSize) {
end = self.maxHeaderSize - self.nread + start
self.nread = self.maxHeaderSize
self.maxed = true
} else { self.nread += (end - start) }
if (self.nread === MAX_HEADER_SIZE)
self.maxed = true;
self.buffer += data.toString('binary', start, end);
self.buffer += data.toString('binary', start, end)
}
if (isMatch)
self._finish();
});
if (isMatch) { self._finish() }
})
}
inherits(HeaderParser, EventEmitter);
inherits(HeaderParser, EventEmitter)
HeaderParser.prototype.push = function(data) {
var r = this.ss.push(data);
if (this.finished)
return r;
};
HeaderParser.prototype.push = function (data) {
const r = this.ss.push(data)
if (this.finished) { return r }
}
HeaderParser.prototype.reset = function() {
this.finished = false;
this.buffer = '';
this.header = {};
this.ss.reset();
};
HeaderParser.prototype.reset = function () {
this.finished = false
this.buffer = ''
this.header = {}
this.ss.reset()
}
HeaderParser.prototype._finish = function() {
if (this.buffer)
this._parseHeader();
this.ss.matches = this.ss.maxMatches;
var header = this.header;
this.header = {};
this.buffer = '';
this.finished = true;
this.nread = this.npairs = 0;
this.maxed = false;
this.emit('header', header);
};
HeaderParser.prototype._finish = function () {
if (this.buffer) { this._parseHeader() }
this.ss.matches = this.ss.maxMatches
const header = this.header
this.header = {}
this.buffer = ''
this.finished = true
this.nread = this.npairs = 0
this.maxed = false
this.emit('header', header)
}
HeaderParser.prototype._parseHeader = function() {
if (this.npairs === this.maxHeaderPairs)
return;
HeaderParser.prototype._parseHeader = function () {
if (this.npairs === this.maxHeaderPairs) { return }
const lines = this.buffer.split(RE_CRLF),
len = lines.length;
let m, h;
const lines = this.buffer.split(RE_CRLF)
const len = lines.length
let m, h
for (var i = 0; i < len; ++i) {
if (lines[i].length === 0)
continue;
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
if (lines[i].length === 0) { continue }
if (lines[i][0] === '\t' || lines[i][0] === ' ') {

@@ -87,23 +78,17 @@ // folded header content

if (h) {
this.header[h][this.header[h].length - 1] += lines[i];
continue;
this.header[h][this.header[h].length - 1] += lines[i]
continue
}
}
m = RE_HDR.exec(lines[i]);
m = RE_HDR.exec(lines[i])
if (m) {
h = m[1].toLowerCase();
h = m[1].toLowerCase()
if (m[2]) {
if (this.header[h] === undefined)
this.header[h] = [m[2]];
else
this.header[h].push(m[2]);
} else
this.header[h] = [''];
if (++this.npairs === this.maxHeaderPairs)
break;
} else
return;
if (this.header[h] === undefined) { this.header[h] = [m[2]] } else { this.header[h].push(m[2]) }
} else { this.header[h] = [''] }
if (++this.npairs === this.maxHeaderPairs) { break }
} else { return }
}
};
}
module.exports = HeaderParser;
module.exports = HeaderParser

@@ -1,11 +0,11 @@

var inherits = require('util').inherits,
ReadableStream = require('stream').Readable;
const inherits = require('util').inherits
const ReadableStream = require('stream').Readable
function PartStream(opts) {
ReadableStream.call(this, opts);
function PartStream (opts) {
ReadableStream.call(this, opts)
}
inherits(PartStream, ReadableStream);
inherits(PartStream, ReadableStream)
PartStream.prototype._read = function(n) {};
PartStream.prototype._read = function (n) {}
module.exports = PartStream;
module.exports = PartStream
/**
* Copyright Brian White. All rights reserved.
*
*
* @see https://github.com/mscdex/streamsearch
*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy

@@ -12,6 +12,6 @@ * of this software and associated documentation files (the "Software"), to

* furnished to do so, subject to the following conditions:
*
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR

@@ -24,56 +24,69 @@ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,

* IN THE SOFTWARE.
*
*
* Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation
* by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool
*/
var EventEmitter = require('events').EventEmitter,
inherits = require('util').inherits;
const EventEmitter = require('events').EventEmitter
const inherits = require('util').inherits
function SBMH(needle) {
if (typeof needle === 'string')
needle = Buffer.from(needle);
var i, j, needle_len = needle.length;
function SBMH (needle) {
if (typeof needle === 'string') {
needle = Buffer.from(needle)
}
this.maxMatches = Infinity;
this.matches = 0;
if (!Buffer.isBuffer(needle)) {
throw new TypeError('The needle has to be a String or a Buffer.')
}
this._occ = new Array(256);
this._lookbehind_size = 0;
this._needle = needle;
this._bufpos = 0;
const needleLength = needle.length
this._lookbehind = Buffer.alloc(needle_len);
if (needleLength === 0) {
throw new Error('The needle cannot be an empty String/Buffer.')
}
// Initialize occurrence table.
for (j = 0; j < 256; ++j)
this._occ[j] = needle_len;
if (needleLength > 256) {
throw new Error('The needle cannot have a length bigger than 256.')
}
this.maxMatches = Infinity
this.matches = 0
this._occ = new Array(256)
.fill(needleLength) // Initialize occurrence table.
this._lookbehind_size = 0
this._needle = needle
this._bufpos = 0
this._lookbehind = Buffer.alloc(needleLength)
// Populate occurrence table with analysis of the needle,
// ignoring last letter.
if (needle_len >= 1) {
for (i = 0; i < needle_len - 1; ++i)
this._occ[needle[i]] = needle_len - 1 - i;
for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var
this._occ[needle[i]] = needleLength - 1 - i
}
}
inherits(SBMH, EventEmitter);
inherits(SBMH, EventEmitter)
SBMH.prototype.reset = function() {
this._lookbehind_size = 0;
this.matches = 0;
this._bufpos = 0;
};
SBMH.prototype.reset = function () {
this._lookbehind_size = 0
this.matches = 0
this._bufpos = 0
}
SBMH.prototype.push = function(chunk, pos) {
var r, chlen;
if (!Buffer.isBuffer(chunk))
chunk = Buffer.from(chunk, 'binary');
chlen = chunk.length;
this._bufpos = pos || 0;
while (r !== chlen && this.matches < this.maxMatches)
r = this._sbmh_feed(chunk);
return r;
};
SBMH.prototype.push = function (chunk, pos) {
if (!Buffer.isBuffer(chunk)) {
chunk = Buffer.from(chunk, 'binary')
}
const chlen = chunk.length
this._bufpos = pos || 0
let r
while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) }
return r
}
SBMH.prototype._sbmh_feed = function(data) {
var len = data.length, needle = this._needle, needle_len = needle.length;
SBMH.prototype._sbmh_feed = function (data) {
const len = data.length
const needle = this._needle
const needleLength = needle.length
const lastNeedleChar = needle[needleLength - 1]

@@ -84,7 +97,4 @@ // Positive: points to a position in `data`

// pos == -2 points to lookbehind[lookbehind_size - 2]
var pos = -this._lookbehind_size,
last_needle_char = needle[needle_len - 1],
occ = this._occ,
lookbehind = this._lookbehind,
ch;
let pos = -this._lookbehind_size
let ch

@@ -104,19 +114,16 @@ if (pos < 0) {

// the character to look at lies outside the haystack.
while (pos < 0 && pos <= len - needle_len) {
ch = this._sbmh_lookup_char(data, pos + needle_len - 1);
while (pos < 0 && pos <= len - needleLength) {
ch = this._sbmh_lookup_char(data, pos + needleLength - 1)
if (
ch === last_needle_char &&
this._sbmh_memcmp(data, pos, needle_len - 1)
ch === lastNeedleChar &&
this._sbmh_memcmp(data, pos, needleLength - 1)
) {
this._lookbehind_size = 0;
++this.matches;
if (pos > 0)
this.emit('info', true, lookbehind, 0, pos);
else
this.emit('info', true);
this._lookbehind_size = 0
++this.matches
this.emit('info', true)
return (this._bufpos = pos + needle_len);
return (this._bufpos = pos + needleLength)
}
pos += occ[ch];
pos += this._occ[ch]
}

@@ -135,4 +142,3 @@

// pos == 0
while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos))
++pos;
while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos }
}

@@ -142,4 +148,4 @@

// Discard lookbehind buffer.
this.emit('info', false, lookbehind, 0, this._lookbehind_size);
this._lookbehind_size = 0;
this.emit('info', false, this._lookbehind, 0, this._lookbehind_size)
this._lookbehind_size = 0
} else {

@@ -149,37 +155,32 @@ // Cut off part of the lookbehind buffer that has

// into it.
var bytesToCutOff = this._lookbehind_size + pos;
const bytesToCutOff = this._lookbehind_size + pos
if (bytesToCutOff > 0) {
// The cut off data is guaranteed not to contain the needle.
this.emit('info', false, lookbehind, 0, bytesToCutOff);
this.emit('info', false, this._lookbehind, 0, bytesToCutOff)
}
lookbehind.copy(lookbehind, 0, bytesToCutOff,
this._lookbehind_size - bytesToCutOff);
this._lookbehind_size -= bytesToCutOff;
this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff,
this._lookbehind_size - bytesToCutOff)
this._lookbehind_size -= bytesToCutOff
data.copy(lookbehind, this._lookbehind_size);
this._lookbehind_size += len;
data.copy(this._lookbehind, this._lookbehind_size)
this._lookbehind_size += len
this._bufpos = len;
return len;
this._bufpos = len
return len
}
}
if (pos >= 0)
pos += this._bufpos;
pos += (pos >= 0) * this._bufpos
// Lookbehind buffer is now empty. We only need to check if the
// needle is in the haystack.
// Lookbehind buffer is now empty. We only need to check if the
// needle is in the haystack.
if (data.indexOf(needle, pos) !== -1) {
pos = data.indexOf(needle, pos);
++this.matches;
if (pos > 0)
this.emit('info', true, data, this._bufpos, pos);
else
this.emit('info', true);
pos = data.indexOf(needle, pos)
++this.matches
if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) }
return (this._bufpos = pos + needle_len);
return (this._bufpos = pos + needleLength)
} else {
pos = len - needle_len;
pos = len - needleLength
}

@@ -194,5 +195,5 @@

while (
pos < len &&
pos < len &&
(
data[pos] !== needle[0] ||
data[pos] !== needle[0] ||
(

@@ -206,31 +207,29 @@ (Buffer.compare(

) {
++pos;
++pos
}
if (pos < len) {
data.copy(lookbehind, 0, pos, pos + (len - pos));
this._lookbehind_size = len - pos;
data.copy(this._lookbehind, 0, pos, pos + (len - pos))
this._lookbehind_size = len - pos
}
// Everything until pos is guaranteed not to contain needle data.
if (pos > 0)
this.emit('info', false, data, this._bufpos, pos < len ? pos : len);
if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) }
this._bufpos = len;
return len;
};
this._bufpos = len
return len
}
SBMH.prototype._sbmh_lookup_char = function(data, pos) {
return (pos < 0)
SBMH.prototype._sbmh_lookup_char = function (data, pos) {
return (pos < 0)
? this._lookbehind[this._lookbehind_size + pos]
: data[pos];
};
: data[pos]
}
SBMH.prototype._sbmh_memcmp = function(data, pos, len) {
for (var i = 0; i < len; ++i) {
if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i])
return false;
SBMH.prototype._sbmh_memcmp = function (data, pos, len) {
for (var i = 0; i < len; ++i) { // eslint-disable-line no-var
if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false }
}
return true;
};
return true
}
module.exports = SBMH;
module.exports = SBMH

@@ -8,6 +8,6 @@ // Definitions by: Jacob Baskin <https://github.com/jacobbaskin>

import * as http from 'http';
import {Readable, Writable} from 'stream';
import { Readable, Writable } from 'stream';
declare const busboy: BusboyConstructor;
export default busboy
export const Busboy: BusboyConstructor;
export default Busboy

@@ -35,2 +35,12 @@ export interface BusboyConfig {

/**
* Detect if a Part is a file.
*
* By default a file is detected if contentType
* is application/octet-stream or fileName is not
* undefined.
*
* Modify this to handle e.g. Blobs.
*/
isPartAFile?: (fieldName: string | undefined, contentType: string | undefined, fileName: string | undefined) => boolean;
/**
* If paths in the multipart 'filename' field shall be preserved.

@@ -44,3 +54,3 @@ * @default false

limits?:
| {
| {
/**

@@ -78,7 +88,13 @@ * Max field name size (in bytes)

* For multipart forms, the max number of header key=>value pairs to parse
* @default 2000 (same as node's http)
* @default 2000
*/
headerPairs?: number | undefined;
/**
* For multipart forms, the max size of a header part
* @default 81920
*/
headerSize?: number | undefined;
}
| undefined;
| undefined;
}

@@ -88,2 +104,13 @@

export interface BusboyFileStream extends
Readable {
truncated: boolean;
/**
* The number of bytes that have been read so far.
*/
bytesRead: number;
}
export interface Busboy extends Writable {

@@ -136,3 +163,3 @@ addListener<Event extends keyof BusboyEvents>(event: Event, listener: BusboyEvents[Event]): this;

fieldname: string,
stream: Readable,
stream: BusboyFileStream,
filename: string,

@@ -139,0 +166,0 @@ transferEncoding: string,

@@ -1,87 +0,80 @@

var WritableStream = require('stream').Writable,
inherits = require('util').inherits;
const WritableStream = require('stream').Writable
const { inherits } = require('util')
var parseParams = require('./utils').parseParams;
const MultipartParser = require('./types/multipart')
const UrlencodedParser = require('./types/urlencoded')
const parseParams = require('./utils').parseParams
function Busboy(opts) {
if (!(this instanceof Busboy))
return new Busboy(opts);
if (opts.highWaterMark !== undefined)
WritableStream.call(this, { autoDestroy: false, highWaterMark: opts.highWaterMark });
else
WritableStream.call(this, { autoDestroy: false });
function Busboy (opts) {
if (!(this instanceof Busboy)) { return new Busboy(opts) }
this._done = false;
this._parser = undefined;
this._finished = false;
if (typeof opts !== 'object') {
throw new TypeError('Busboy expected an options-Object.')
}
if (typeof opts.headers !== 'object') {
throw new TypeError('Busboy expected an options-Object with headers-attribute.')
}
if (typeof opts.headers['content-type'] !== 'string') {
throw new TypeError('Missing Content-Type-header.')
}
this.opts = opts;
if (opts.headers && typeof opts.headers['content-type'] === 'string')
this.parseHeaders(opts.headers);
else
throw new Error('Missing Content-Type');
const {
headers,
...streamOptions
} = opts
this.opts = {
autoDestroy: false,
...streamOptions
}
WritableStream.call(this, this.opts)
this._done = false
this._parser = this.getParserByHeaders(headers)
this._finished = false
}
inherits(Busboy, WritableStream);
inherits(Busboy, WritableStream)
Busboy.prototype.emit = function(ev) {
Busboy.prototype.emit = function (ev) {
if (ev === 'finish') {
if (!this._done) {
this._parser && this._parser.end();
return;
this._parser && this._parser.end()
return
} else if (this._finished) {
return;
return
}
this._finished = true;
this._finished = true
}
WritableStream.prototype.emit.apply(this, arguments);
};
WritableStream.prototype.emit.apply(this, arguments)
}
Busboy.prototype.parseHeaders = function(headers) {
this._parser = undefined;
if (headers['content-type']) {
var parsed = parseParams(headers['content-type']),
matched, type;
for (var i = 0; i < TYPES.length; ++i) {
type = TYPES[i];
if (typeof type.detect === 'function')
matched = type.detect(parsed);
else
matched = type.detect.test(parsed[0]);
if (matched)
break;
}
if (matched) {
var cfg = {
limits: this.opts.limits,
headers: headers,
parsedConType: parsed,
highWaterMark: undefined,
fileHwm: undefined,
defCharset: undefined,
preservePath: false
};
if (this.opts.highWaterMark)
cfg.highWaterMark = this.opts.highWaterMark;
if (this.opts.fileHwm)
cfg.fileHwm = this.opts.fileHwm;
cfg.defCharset = this.opts.defCharset;
cfg.preservePath = this.opts.preservePath;
this._parser = type(this, cfg);
return;
}
Busboy.prototype.getParserByHeaders = function (headers) {
const parsed = parseParams(headers['content-type'])
const cfg = {
defCharset: this.opts.defCharset,
fileHwm: this.opts.fileHwm,
headers: headers,
highWaterMark: this.opts.highWaterMark,
isPartAFile: this.opts.isPartAFile,
limits: this.opts.limits,
parsedConType: parsed,
preservePath: this.opts.preservePath
}
throw new Error('Unsupported content type: ' + headers['content-type']);
};
Busboy.prototype._write = function(chunk, encoding, cb) {
if (!this._parser)
return cb(new Error('Not ready to parse. Missing Content-Type?'));
this._parser.write(chunk, cb);
};
if (MultipartParser.detect.test(parsed[0])) {
return new MultipartParser(this, cfg)
}
if (UrlencodedParser.detect.test(parsed[0])) {
return new UrlencodedParser(this, cfg)
}
throw new Error('Unsupported Content-Type.')
}
var TYPES = [
require('./types/multipart'),
require('./types/urlencoded'),
];
Busboy.prototype._write = function (chunk, encoding, cb) {
this._parser.write(chunk, cb)
}
module.exports = Busboy;
module.exports = Busboy
module.exports.default = Busboy
module.exports.Busboy = Busboy
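The rewritten `Busboy` constructor above validates its input eagerly (an options object, a `headers` object, and a string `content-type`) and resolves the parser immediately via `getParserByHeaders`, replacing the old lazy `parseHeaders`/`TYPES` lookup. A hedged usage sketch follows; the server setup is illustrative, only the constructor behaviour and event names come from the code above.

```js
// Illustrative sketch: calling the reworked constructor from an HTTP handler.
const http = require('http')
const Busboy = require('@fastify/busboy') // also exported as .default and .Busboy after this change

http.createServer((req, res) => {
  // Throws TypeError('Missing Content-Type-header.') for requests without a
  // content-type, instead of the old Error('Missing Content-Type').
  const busboy = new Busboy({ headers: req.headers })
  busboy.on('field', (name, value) => console.log('field %s: %s', name, value))
  busboy.on('file', (name, file) => file.resume()) // drain files we do not store
  busboy.on('finish', () => res.end('done'))
  req.pipe(busboy)
}).listen(3000)
```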

@@ -8,98 +8,96 @@ // TODO:

var ReadableStream = require('stream').Readable,
inherits = require('util').inherits;
const ReadableStream = require('stream').Readable
const inherits = require('util').inherits
var Dicer = require('../../deps/dicer/lib/Dicer');
const Dicer = require('../../deps/dicer/lib/Dicer')
var parseParams = require('../utils').parseParams,
decodeText = require('../utils').decodeText,
basename = require('../utils').basename,
getLimit = require('../utils').getLimit;
const parseParams = require('../utils').parseParams
const decodeText = require('../utils').decodeText
const basename = require('../utils').basename
const getLimit = require('../utils').getLimit
var RE_BOUNDARY = /^boundary$/i,
RE_FIELD = /^form-data$/i,
RE_CHARSET = /^charset$/i,
RE_FILENAME = /^filename$/i,
RE_NAME = /^name$/i;
const RE_BOUNDARY = /^boundary$/i
const RE_FIELD = /^form-data$/i
const RE_CHARSET = /^charset$/i
const RE_FILENAME = /^filename$/i
const RE_NAME = /^name$/i
Multipart.detect = /^multipart\/form-data/i;
function Multipart(boy, cfg) {
if (!(this instanceof Multipart))
return new Multipart(boy, cfg);
var i,
len,
self = this,
boundary,
limits = cfg.limits,
parsedConType = cfg.parsedConType || [],
defCharset = cfg.defCharset || 'utf8',
preservePath = cfg.preservePath,
fileopts = (typeof cfg.fileHwm === 'number'
? { highWaterMark: cfg.fileHwm }
: {});
Multipart.detect = /^multipart\/form-data/i
function Multipart (boy, cfg) {
if (!(this instanceof Multipart)) { return new Multipart(boy, cfg) }
let i
let len
const self = this
let boundary
const limits = cfg.limits
const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined))
const parsedConType = cfg.parsedConType || []
const defCharset = cfg.defCharset || 'utf8'
const preservePath = cfg.preservePath
const fileOpts = { highWaterMark: cfg.fileHwm }
for (i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i])
&& RE_BOUNDARY.test(parsedConType[i][0])) {
boundary = parsedConType[i][1];
break;
if (Array.isArray(parsedConType[i]) &&
RE_BOUNDARY.test(parsedConType[i][0])) {
boundary = parsedConType[i][1]
break
}
}
function checkFinished() {
function checkFinished () {
if (nends === 0 && finished && !boy._done) {
finished = false;
process.nextTick(function() {
boy._done = true;
boy.emit('finish');
});
finished = false
process.nextTick(function () {
boy._done = true
boy.emit('finish')
})
}
}
if (typeof boundary !== 'string')
throw new Error('Multipart: Boundary not found');
if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') }
var fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024),
fileSizeLimit = getLimit(limits, 'fileSize', Infinity),
filesLimit = getLimit(limits, 'files', Infinity),
fieldsLimit = getLimit(limits, 'fields', Infinity),
partsLimit = getLimit(limits, 'parts', Infinity);
const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
const fileSizeLimit = getLimit(limits, 'fileSize', Infinity)
const filesLimit = getLimit(limits, 'files', Infinity)
const fieldsLimit = getLimit(limits, 'fields', Infinity)
const partsLimit = getLimit(limits, 'parts', Infinity)
const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)
const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024)
var nfiles = 0,
nfields = 0,
nends = 0,
curFile,
curField,
finished = false;
let nfiles = 0
let nfields = 0
let nends = 0
let curFile
let curField
let finished = false
this._needDrain = false;
this._pause = false;
this._cb = undefined;
this._nparts = 0;
this._boy = boy;
this._needDrain = false
this._pause = false
this._cb = undefined
this._nparts = 0
this._boy = boy
var parserCfg = {
const parserCfg = {
boundary: boundary,
maxHeaderPairs: (limits && limits.headerPairs)
};
if (fileopts.highWaterMark)
parserCfg.partHwm = fileopts.highWaterMark;
if (cfg.highWaterMark)
parserCfg.highWaterMark = cfg.highWaterMark;
maxHeaderPairs: headerPairsLimit,
maxHeaderSize: headerSizeLimit,
partHwm: fileOpts.highWaterMark,
highWaterMark: cfg.highWaterMark
}
this.parser = new Dicer(parserCfg);
this.parser.on('drain', function() {
self._needDrain = false;
this.parser = new Dicer(parserCfg)
this.parser.on('drain', function () {
self._needDrain = false
if (self._cb && !self._pause) {
var cb = self._cb;
self._cb = undefined;
cb();
const cb = self._cb
self._cb = undefined
cb()
}
}).on('part', function onPart(part) {
}).on('part', function onPart (part) {
if (++self._nparts > partsLimit) {
self.parser.removeListener('part', onPart);
self.parser.on('part', skipPart);
boy.hitPartsLimit = true;
boy.emit('partsLimit');
return skipPart(part);
self.parser.removeListener('part', onPart)
self.parser.on('part', skipPart)
boy.hitPartsLimit = true
boy.emit('partsLimit')
return skipPart(part)
}

@@ -111,24 +109,24 @@

if (curField) {
var field = curField;
field.emit('end');
field.removeAllListeners('end');
const field = curField
field.emit('end')
field.removeAllListeners('end')
}
part.on('header', function(header) {
var contype,
fieldname,
parsed,
charset,
encoding,
filename,
nsize = 0;
part.on('header', function (header) {
let contype
let fieldname
let parsed
let charset
let encoding
let filename
let nsize = 0
if (header['content-type']) {
parsed = parseParams(header['content-type'][0]);
parsed = parseParams(header['content-type'][0])
if (parsed[0]) {
contype = parsed[0].toLowerCase();
contype = parsed[0].toLowerCase()
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_CHARSET.test(parsed[i][0])) {
charset = parsed[i][1].toLowerCase();
break;
charset = parsed[i][1].toLowerCase()
break
}

@@ -139,88 +137,82 @@ }

if (contype === undefined)
contype = 'text/plain';
if (charset === undefined)
charset = defCharset;
if (contype === undefined) { contype = 'text/plain' }
if (charset === undefined) { charset = defCharset }
if (header['content-disposition']) {
parsed = parseParams(header['content-disposition'][0]);
if (!RE_FIELD.test(parsed[0]))
return skipPart(part);
parsed = parseParams(header['content-disposition'][0])
if (!RE_FIELD.test(parsed[0])) { return skipPart(part) }
for (i = 0, len = parsed.length; i < len; ++i) {
if (RE_NAME.test(parsed[i][0])) {
fieldname = parsed[i][1];
fieldname = parsed[i][1]
} else if (RE_FILENAME.test(parsed[i][0])) {
filename = parsed[i][1];
if (!preservePath)
filename = basename(filename);
filename = parsed[i][1]
if (!preservePath) { filename = basename(filename) }
}
}
} else
return skipPart(part);
} else { return skipPart(part) }
if (header['content-transfer-encoding'])
encoding = header['content-transfer-encoding'][0].toLowerCase();
else
encoding = '7bit';
if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' }
var onData,
onEnd;
if (contype === 'application/octet-stream' || filename !== undefined) {
let onData,
onEnd
if (isPartAFile(fieldname, contype, filename)) {
// file/binary field
if (nfiles === filesLimit) {
if (!boy.hitFilesLimit) {
boy.hitFilesLimit = true;
boy.emit('filesLimit');
boy.hitFilesLimit = true
boy.emit('filesLimit')
}
return skipPart(part);
return skipPart(part)
}
++nfiles;
++nfiles
if (!boy._events.file) {
self.parser._ignore();
return;
self.parser._ignore()
return
}
++nends;
var file = new FileStream(fileopts);
curFile = file;
file.on('end', function() {
--nends;
self._pause = false;
checkFinished();
++nends
const file = new FileStream(fileOpts)
curFile = file
file.on('end', function () {
--nends
self._pause = false
checkFinished()
if (self._cb && !self._needDrain) {
var cb = self._cb;
self._cb = undefined;
cb();
const cb = self._cb
self._cb = undefined
cb()
}
});
file._read = function(n) {
if (!self._pause)
return;
self._pause = false;
})
file._read = function (n) {
if (!self._pause) { return }
self._pause = false
if (self._cb && !self._needDrain) {
var cb = self._cb;
self._cb = undefined;
cb();
const cb = self._cb
self._cb = undefined
cb()
}
};
boy.emit('file', fieldname, file, filename, encoding, contype);
}
boy.emit('file', fieldname, file, filename, encoding, contype)
onData = function(data) {
onData = function (data) {
if ((nsize += data.length) > fileSizeLimit) {
var extralen = (fileSizeLimit - (nsize - data.length));
if (extralen > 0)
file.push(data.slice(0, extralen));
file.emit('limit');
file.truncated = true;
part.removeAllListeners('data');
} else if (!file.push(data))
self._pause = true;
};
const extralen = fileSizeLimit - nsize + data.length
if (extralen > 0) { file.push(data.slice(0, extralen)) }
file.truncated = true
file.bytesRead = fileSizeLimit
part.removeAllListeners('data')
file.emit('limit')
return
} else if (!file.push(data)) { self._pause = true }
onEnd = function() {
curFile = undefined;
file.push(null);
};
file.bytesRead = nsize
}
onEnd = function () {
curFile = undefined
file.push(null)
}
} else {

@@ -230,32 +222,30 @@ // non-file field

if (!boy.hitFieldsLimit) {
boy.hitFieldsLimit = true;
boy.emit('fieldsLimit');
boy.hitFieldsLimit = true
boy.emit('fieldsLimit')
}
return skipPart(part);
return skipPart(part)
}
++nfields;
++nends;
var buffer = '',
truncated = false;
curField = part;
++nfields
++nends
let buffer = ''
let truncated = false
curField = part
onData = function(data) {
onData = function (data) {
if ((nsize += data.length) > fieldSizeLimit) {
var extralen = (fieldSizeLimit - (nsize - data.length));
buffer += data.toString('binary', 0, extralen);
truncated = true;
part.removeAllListeners('data');
} else
buffer += data.toString('binary');
};
const extralen = (fieldSizeLimit - (nsize - data.length))
buffer += data.toString('binary', 0, extralen)
truncated = true
part.removeAllListeners('data')
} else { buffer += data.toString('binary') }
}
onEnd = function() {
curField = undefined;
if (buffer.length)
buffer = decodeText(buffer, 'binary', charset);
boy.emit('field', fieldname, buffer, false, truncated, encoding, contype);
--nends;
checkFinished();
};
onEnd = function () {
curField = undefined
if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) }
boy.emit('field', fieldname, buffer, false, truncated, encoding, contype)
--nends
checkFinished()
}
}

@@ -268,54 +258,50 @@

*/
part._readableState.sync = false;
part._readableState.sync = false
part.on('data', onData);
part.on('end', onEnd);
}).on('error', function(err) {
if (curFile)
curFile.emit('error', err);
});
}).on('error', function(err) {
boy.emit('error', err);
}).on('finish', function() {
finished = true;
checkFinished();
});
part.on('data', onData)
part.on('end', onEnd)
}).on('error', function (err) {
if (curFile) { curFile.emit('error', err) }
})
}).on('error', function (err) {
boy.emit('error', err)
}).on('finish', function () {
finished = true
checkFinished()
})
}
Multipart.prototype.write = function(chunk, cb) {
var r;
if ((r = this.parser.write(chunk)) && !this._pause)
cb();
else {
this._needDrain = !r;
this._cb = cb;
Multipart.prototype.write = function (chunk, cb) {
let r
if ((r = this.parser.write(chunk)) && !this._pause) { cb() } else {
this._needDrain = !r
this._cb = cb
}
};
}
Multipart.prototype.end = function() {
var self = this;
Multipart.prototype.end = function () {
const self = this
if (this._nparts === 0 && !self._boy._done) {
process.nextTick(function() {
self._boy._done = true;
self._boy.emit('finish');
});
} else if (this.parser.writable)
this.parser.end();
};
process.nextTick(function () {
self._boy._done = true
self._boy.emit('finish')
})
} else if (this.parser.writable) { this.parser.end() }
}
function skipPart(part) {
part.resume();
function skipPart (part) {
part.resume()
}
function FileStream(opts) {
if (!(this instanceof FileStream))
return new FileStream(opts);
ReadableStream.call(this, opts);
function FileStream (opts) {
ReadableStream.call(this, opts)
this.truncated = false;
this.bytesRead = 0
this.truncated = false
}
inherits(FileStream, ReadableStream);
inherits(FileStream, ReadableStream)
FileStream.prototype._read = function(n) {};
FileStream.prototype._read = function (n) { }
module.exports = Multipart;
module.exports = Multipart

@@ -1,75 +0,70 @@

var Decoder = require('../utils').Decoder,
decodeText = require('../utils').decodeText,
getLimit = require('../utils').getLimit;
const Decoder = require('../utils').Decoder
const decodeText = require('../utils').decodeText
const getLimit = require('../utils').getLimit
var RE_CHARSET = /^charset$/i;
const RE_CHARSET = /^charset$/i
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i;
function UrlEncoded(boy, cfg) {
if (!(this instanceof UrlEncoded))
return new UrlEncoded(boy, cfg);
var limits = cfg.limits,
headers = cfg.headers,
parsedConType = cfg.parsedConType;
this.boy = boy;
UrlEncoded.detect = /^application\/x-www-form-urlencoded/i
function UrlEncoded (boy, cfg) {
if (!(this instanceof UrlEncoded)) { return new UrlEncoded(boy, cfg) }
const limits = cfg.limits
const parsedConType = cfg.parsedConType
this.boy = boy
this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024);
this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100);
this.fieldsLimit = getLimit(limits, 'fields', Infinity);
this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024)
this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100)
this.fieldsLimit = getLimit(limits, 'fields', Infinity)
var charset;
for (var i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i])
&& RE_CHARSET.test(parsedConType[i][0])) {
charset = parsedConType[i][1].toLowerCase();
break;
let charset
for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var
if (Array.isArray(parsedConType[i]) &&
RE_CHARSET.test(parsedConType[i][0])) {
charset = parsedConType[i][1].toLowerCase()
break
}
}
if (charset === undefined)
charset = cfg.defCharset || 'utf8';
if (charset === undefined) { charset = cfg.defCharset || 'utf8' }
this.decoder = new Decoder();
this.charset = charset;
this._fields = 0;
this._state = 'key';
this._checkingBytes = true;
this._bytesKey = 0;
this._bytesVal = 0;
this._key = '';
this._val = '';
this._keyTrunc = false;
this._valTrunc = false;
this._hitLimit = false;
this.decoder = new Decoder()
this.charset = charset
this._fields = 0
this._state = 'key'
this._checkingBytes = true
this._bytesKey = 0
this._bytesVal = 0
this._key = ''
this._val = ''
this._keyTrunc = false
this._valTrunc = false
this._hitLimit = false
}
UrlEncoded.prototype.write = function(data, cb) {
UrlEncoded.prototype.write = function (data, cb) {
if (this._fields === this.fieldsLimit) {
if (!this.boy.hitFieldsLimit) {
this.boy.hitFieldsLimit = true;
this.boy.emit('fieldsLimit');
this.boy.hitFieldsLimit = true
this.boy.emit('fieldsLimit')
}
return cb();
return cb()
}
var idxeq, idxamp, i, p = 0, len = data.length;
let idxeq; let idxamp; let i; let p = 0; const len = data.length
while (p < len) {
if (this._state === 'key') {
idxeq = idxamp = undefined;
idxeq = idxamp = undefined
for (i = p; i < len; ++i) {
if (!this._checkingBytes)
++p;
if (data[i] === 0x3D/*=*/) {
idxeq = i;
break;
} else if (data[i] === 0x26/*&*/) {
idxamp = i;
break;
if (!this._checkingBytes) { ++p }
if (data[i] === 0x3D/* = */) {
idxeq = i
break
} else if (data[i] === 0x26/* & */) {
idxamp = i
break
}
if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) {
this._hitLimit = true;
break;
} else if (this._checkingBytes)
++this._bytesKey;
this._hitLimit = true
break
} else if (this._checkingBytes) { ++this._bytesKey }
}

@@ -79,132 +74,117 @@

// key with assignment
if (idxeq > p)
this._key += this.decoder.write(data.toString('binary', p, idxeq));
this._state = 'val';
if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) }
this._state = 'val'
this._hitLimit = false;
this._checkingBytes = true;
this._val = '';
this._bytesVal = 0;
this._valTrunc = false;
this.decoder.reset();
this._hitLimit = false
this._checkingBytes = true
this._val = ''
this._bytesVal = 0
this._valTrunc = false
this.decoder.reset()
p = idxeq + 1;
p = idxeq + 1
} else if (idxamp !== undefined) {
// key with no assignment
++this._fields;
var key, keyTrunc = this._keyTrunc;
if (idxamp > p)
key = (this._key += this.decoder.write(data.toString('binary', p, idxamp)));
else
key = this._key;
++this._fields
let key; const keyTrunc = this._keyTrunc
if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key }
this._hitLimit = false;
this._checkingBytes = true;
this._key = '';
this._bytesKey = 0;
this._keyTrunc = false;
this.decoder.reset();
this._hitLimit = false
this._checkingBytes = true
this._key = ''
this._bytesKey = 0
this._keyTrunc = false
this.decoder.reset()
if (key.length) {
this.boy.emit('field', decodeText(key, 'binary', this.charset),
'',
keyTrunc,
false);
'',
keyTrunc,
false)
}
p = idxamp + 1;
if (this._fields === this.fieldsLimit)
return cb();
p = idxamp + 1
if (this._fields === this.fieldsLimit) { return cb() }
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p)
this._key += this.decoder.write(data.toString('binary', p, i));
p = i;
if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) }
p = i
if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false;
this._keyTrunc = true;
this._checkingBytes = false
this._keyTrunc = true
}
} else {
if (p < len)
this._key += this.decoder.write(data.toString('binary', p));
p = len;
if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) }
p = len
}
} else {
idxamp = undefined;
idxamp = undefined
for (i = p; i < len; ++i) {
if (!this._checkingBytes)
++p;
if (data[i] === 0x26/*&*/) {
idxamp = i;
break;
if (!this._checkingBytes) { ++p }
if (data[i] === 0x26/* & */) {
idxamp = i
break
}
if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) {
this._hitLimit = true;
break;
}
else if (this._checkingBytes)
++this._bytesVal;
this._hitLimit = true
break
} else if (this._checkingBytes) { ++this._bytesVal }
}
if (idxamp !== undefined) {
++this._fields;
if (idxamp > p)
this._val += this.decoder.write(data.toString('binary', p, idxamp));
++this._fields
if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) }
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc);
this._state = 'key';
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc)
this._state = 'key'
this._hitLimit = false;
this._checkingBytes = true;
this._key = '';
this._bytesKey = 0;
this._keyTrunc = false;
this.decoder.reset();
this._hitLimit = false
this._checkingBytes = true
this._key = ''
this._bytesKey = 0
this._keyTrunc = false
this.decoder.reset()
p = idxamp + 1;
if (this._fields === this.fieldsLimit)
return cb();
p = idxamp + 1
if (this._fields === this.fieldsLimit) { return cb() }
} else if (this._hitLimit) {
// we may not have hit the actual limit if there are encoded bytes...
if (i > p)
this._val += this.decoder.write(data.toString('binary', p, i));
p = i;
if ((this._val === '' && this.fieldSizeLimit === 0)
|| (this._bytesVal = this._val.length) === this.fieldSizeLimit) {
if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) }
p = i
if ((this._val === '' && this.fieldSizeLimit === 0) ||
(this._bytesVal = this._val.length) === this.fieldSizeLimit) {
// yep, we actually did hit the limit
this._checkingBytes = false;
this._valTrunc = true;
this._checkingBytes = false
this._valTrunc = true
}
} else {
if (p < len)
this._val += this.decoder.write(data.toString('binary', p));
p = len;
if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) }
p = len
}
}
}
cb();
};
cb()
}
UrlEncoded.prototype.end = function() {
if (this.boy._done)
return;
UrlEncoded.prototype.end = function () {
if (this.boy._done) { return }
if (this._state === 'key' && this._key.length > 0) {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
'',
this._keyTrunc,
false);
'',
this._keyTrunc,
false)
} else if (this._state === 'val') {
this.boy.emit('field', decodeText(this._key, 'binary', this.charset),
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc);
decodeText(this._val, 'binary', this.charset),
this._keyTrunc,
this._valTrunc)
}
this.boy._done = true;
this.boy.emit('finish');
};
this.boy._done = true
this.boy.emit('finish')
}
module.exports = UrlEncoded;
module.exports = UrlEncoded

@@ -1,23 +0,41 @@

var jsencoding = require('../deps/encoding/encoding');
const { TextDecoder } = require('util')
const { TextDecoder: PolyfillTextDecoder, getEncoding } = require('text-decoding')
var RE_ENCODED = /%([a-fA-F0-9]{2})/g;
function encodedReplacer(match, byte) {
return String.fromCharCode(parseInt(byte, 16));
const RE_ENCODED = /%([a-fA-F0-9]{2})/g
const RE_PLUS = /\+/g
const HEX = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
// Node has always utf-8
const textDecoders = new Map()
textDecoders.set('utf-8', new TextDecoder('utf-8'))
textDecoders.set('utf8', textDecoders.get('utf-8'))
function encodedReplacer (match, byte) {
return String.fromCharCode(parseInt(byte, 16))
}
function parseParams(str) {
var res = [],
state = 'key',
charset = '',
inquote = false,
escaping = false,
p = 0,
tmp = '';
for (var i = 0, len = str.length; i < len; ++i) {
function parseParams (str) {
const res = []
let state = 'key'
let charset = ''
let inquote = false
let escaping = false
let p = 0
let tmp = ''
for (var i = 0, len = str.length; i < len; ++i) { // eslint-disable-line no-var
if (str[i] === '\\' && inquote) {
if (escaping)
escaping = false;
else {
escaping = true;
continue;
if (escaping) { escaping = false } else {
escaping = true
continue
}

@@ -27,113 +45,102 @@ } else if (str[i] === '"') {

if (inquote) {
inquote = false;
state = 'key';
} else
inquote = true;
continue;
} else
escaping = false;
inquote = false
state = 'key'
} else { inquote = true }
continue
} else { escaping = false }
} else {
if (escaping && inquote)
tmp += '\\';
escaping = false;
if (escaping && inquote) { tmp += '\\' }
escaping = false
if ((state === 'charset' || state === 'lang') && str[i] === "'") {
if (state === 'charset') {
state = 'lang';
charset = tmp.substring(1);
} else
state = 'value';
tmp = '';
continue;
} else if (state === 'key'
&& (str[i] === '*' || str[i] === '=')
&& res.length) {
if (str[i] === '*')
state = 'charset';
else
state = 'value';
res[p] = [tmp, undefined];
tmp = '';
continue;
state = 'lang'
charset = tmp.substring(1)
} else { state = 'value' }
tmp = ''
continue
} else if (state === 'key' &&
(str[i] === '*' || str[i] === '=') &&
res.length) {
if (str[i] === '*') { state = 'charset' } else { state = 'value' }
res[p] = [tmp, undefined]
tmp = ''
continue
} else if (!inquote && str[i] === ';') {
state = 'key';
state = 'key'
if (charset) {
if (tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset);
'binary',
charset)
}
charset = '';
charset = ''
} else if (tmp.length) {
tmp = decodeText(tmp, 'binary', 'utf8');
tmp = decodeText(tmp, 'binary', 'utf8')
}
if (res[p] === undefined)
res[p] = tmp;
else
res[p][1] = tmp;
tmp = '';
++p;
continue;
} else if (!inquote && (str[i] === ' ' || str[i] === '\t'))
continue;
if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp }
tmp = ''
++p
continue
} else if (!inquote && (str[i] === ' ' || str[i] === '\t')) { continue }
}
tmp += str[i];
tmp += str[i]
}
if (charset && tmp.length) {
tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer),
'binary',
charset);
'binary',
charset)
} else if (tmp) {
tmp = decodeText(tmp, 'binary', 'utf8');
tmp = decodeText(tmp, 'binary', 'utf8')
}
if (res[p] === undefined) {
if (tmp)
res[p] = tmp;
} else
res[p][1] = tmp;
if (tmp) { res[p] = tmp }
} else { res[p][1] = tmp }
return res;
return res
}
function decodeText(text, textEncoding, destEncoding) {
var ret;
if (text && jsencoding.encodingExists(destEncoding)) {
try {
ret = jsencoding.TextDecoder(destEncoding)
.decode(Buffer.from(text, textEncoding));
} catch(e) {}
function decodeText (text, textEncoding, destEncoding) {
if (text) {
if (textDecoders.has(destEncoding)) {
try {
return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
} catch (e) { }
} else {
try {
textDecoders.set(destEncoding, new TextDecoder(destEncoding))
return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
} catch (e) {
if (getEncoding(destEncoding)) {
try {
textDecoders.set(destEncoding, new PolyfillTextDecoder(destEncoding))
return textDecoders.get(destEncoding).decode(Buffer.from(text, textEncoding))
} catch (e) { }
}
}
}
}
return (typeof ret === 'string' ? ret : text);
return text
}
var HEX = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
], RE_PLUS = /\+/g;
function Decoder() {
this.buffer = undefined;
function Decoder () {
this.buffer = undefined
}
Decoder.prototype.write = function(str) {
Decoder.prototype.write = function (str) {
// Replace '+' with ' ' before decoding
str = str.replace(RE_PLUS, ' ');
var res = '';
var i = 0, p = 0, len = str.length;
str = str.replace(RE_PLUS, ' ')
let res = ''
let i = 0; let p = 0; const len = str.length
for (; i < len; ++i) {
if (this.buffer !== undefined) {
if (!HEX[str.charCodeAt(i)]) {
res += '%' + this.buffer;
this.buffer = undefined;
--i; // retry character
res += '%' + this.buffer
this.buffer = undefined
--i // retry character
} else {
this.buffer += str[i];
++p;
this.buffer += str[i]
++p
if (this.buffer.length === 2) {
res += String.fromCharCode(parseInt(this.buffer, 16));
this.buffer = undefined;
res += String.fromCharCode(parseInt(this.buffer, 16))
this.buffer = undefined
}

@@ -143,46 +150,42 @@ }

if (i > p) {
res += str.substring(p, i);
p = i;
res += str.substring(p, i)
p = i
}
this.buffer = '';
++p;
this.buffer = ''
++p
}
}
if (p < len && this.buffer === undefined)
res += str.substring(p);
return res;
};
Decoder.prototype.reset = function() {
this.buffer = undefined;
};
if (p < len && this.buffer === undefined) { res += str.substring(p) }
return res
}
Decoder.prototype.reset = function () {
this.buffer = undefined
}
function basename(path) {
if (typeof path !== 'string')
return '';
for (var i = path.length - 1; i >= 0; --i) {
function basename (path) {
if (typeof path !== 'string') { return '' }
for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var
switch (path.charCodeAt(i)) {
case 0x2F: // '/'
case 0x5C: // '\'
path = path.slice(i + 1);
return (path === '..' || path === '.' ? '' : path);
path = path.slice(i + 1)
return (path === '..' || path === '.' ? '' : path)
}
}
return (path === '..' || path === '.' ? '' : path);
return (path === '..' || path === '.' ? '' : path)
}
function getLimit(limits, name, defaultLimit) {
if (!limits)
return defaultLimit;
function getLimit (limits, name, defaultLimit) {
if (
!limits ||
limits[name] === undefined ||
limits[name] === null
) { return defaultLimit }
var limit = limits[name];
// Intentional double equals
if (limit == undefined)
return defaultLimit;
if (
typeof limits[name] !== 'number' ||
isNaN(limits[name])
) { throw new TypeError('Limit ' + name + ' is not a valid number') }
// Ensure limit is a number and is not NaN
if (typeof limit !== 'number' || limit !== limit) {
throw new Error('Limit ' + name + ' is not a valid number');
}
return limit;
return limits[name]
}
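The stricter `getLimit` above now throws a `TypeError` for any limit that is not a plain number, instead of the old double-equals fallback. A small behaviour sketch, assuming `lib/utils` is required from the package root:

```js
// Behaviour sketch for the reworked getLimit (require path assumed).
const { getLimit } = require('./lib/utils')

getLimit(undefined, 'fieldSize', 1024)          // -> 1024: no limits object, default wins
getLimit({ fieldSize: 0 }, 'fieldSize', 1024)   // -> 0: an explicit zero is respected
getLimit({ fieldSize: '8' }, 'fieldSize', 1024) // throws TypeError('Limit fieldSize is not a valid number')
```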

@@ -195,3 +198,3 @@

parseParams,
decodeText,
decodeText
}
{
"name": "@fastify/busboy",
"version": "1.0.0-next1",
"version": "1.0.0-next2",
"private": false,

@@ -22,6 +22,9 @@ "author": "Brian White <mscdex@mscdex.net>",

"scripts": {
"bench:dicer": "node deps/dicer/bench/dicer-bench-multipart-parser.js",
"bench:busboy": "node bench/fastify-busboy-bench.js",
"bench:dicer": "node bench/dicer/dicer-bench-multipart-parser.js",
"coveralls": "nyc report --reporter=lcov",
"lint": "eslint .",
"lint": "npm run lint:standard",
"lint:everything": "npm run lint && npm run test:types",
"lint:fix": "standard --fix",
"lint:standard": "standard --verbose | snazzy",
"test:mocha": "mocha test",

@@ -35,9 +38,17 @@ "test:types": "tsd",

},
"dependencies": {
"text-decoding": "^1.0.0"
},
"devDependencies": {
"@types/node": "^16.11.10",
"busboy": "^0.3.1",
"chai": "^4.3.4",
"eslint": "^8.3.0",
"eslint": "^7.32.0",
"eslint-config-standard": "^16.0.3",
"eslint-plugin-node": "^11.1.0",
"mocha": "^9.1.3",
"nyc": "^15.1.0",
"photofinish": "^1.8.0",
"snazzy": "^9.0.0",
"standard": "^16.0.4",
"tsd": "^0.19.0",

@@ -65,2 +76,11 @@ "typescript": "^4.5.2"

},
"standard": {
"globals": [
"describe",
"it"
],
"ignore": [
"bench"
]
},
"files": [

@@ -67,0 +87,0 @@ "README.md",

# busboy
<div align="center">
[![Build Status](https://github.com/fastify/busboy/workflows/ci/badge.svg)](https://github.com/fastify/busboy/actions)
[![Coverage Status](https://coveralls.io/repos/fastify/busboy/badge.svg?branch=master)](https://coveralls.io/r/fastify/busboy?branch=master)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://standardjs.com/)
[![Known Vulnerabilities](https://snyk.io/test/github/fastify/busboy/badge.svg)](https://snyk.io/test/github/fastify/busboy)
[![Security Responsible Disclosure](https://img.shields.io/badge/Security-Responsible%20Disclosure-yellow.svg)](https://github.com/nodejs/security-wg/blob/HEAD/processes/responsible_disclosure_template.md)
</div>
<div align="center">
[![NPM version](https://img.shields.io/npm/v/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
[![NPM downloads](https://img.shields.io/npm/dm/@fastify/busboy.svg?style=flat)](https://www.npmjs.com/package/@fastify/busboy)
</div>
Description

@@ -14,3 +28,11 @@ ===========

Benchmark (Mean time for 500 Kb payload, 2000 cycles, 1000 cycle warmup):
| Library | Mean time in nanoseconds |
|-----------------------|--------------------------|
| busboy 0.31 | |
| @fastify/busboy 1.0.0 | |
[Changelog](https://github.com/fastify/busboy/blob/master/CHANGELOG.md) since busboy 0.31.
Requirements

@@ -183,2 +205,3 @@ ============

* If a configured file size limit was reached, `stream` will both have a boolean property `truncated` (best checked at the end of the stream) and emit a 'limit' event to notify you when this happens.
* The property `bytesRead` informs about the number of bytes that have been read so far.
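A hedged sketch of consuming these two properties; the field handling is illustrative, and only the `limit` event, `truncated`, and `bytesRead` come from the documentation above.

```js
// Sketch: observing the fileSize limit on an incoming file stream.
busboy.on('file', (fieldname, file, filename) => {
  file.on('limit', () => console.warn('%s hit the configured fileSize limit', filename))
  file.on('data', () => { /* consume the stream so 'end' can fire */ })
  file.on('end', () => {
    console.log('%s: %d bytes read, truncated: %s', filename, file.bytesRead, file.truncated)
  })
})
```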

@@ -203,2 +226,4 @@ * **field**(< _string_ >fieldname, < _string_ >value, < _boolean_ >fieldnameTruncated, < _boolean_ >valueTruncated, < _string_ >transferEncoding, < _string_ >mimeType) - Emitted for each new non-file field found.

* **autoDestroy** - _boolean_ - Whether this stream should automatically call .destroy() on itself after ending. (Default: false).
* **highWaterMark** - _integer_ - highWaterMark to use for this Busboy instance (Default: WritableStream default).

@@ -212,2 +237,12 @@

* **isPartAFile** - __function__ - Use this function to override the default file detection functionality. It has the following parameters (a combined sketch follows the constructor error list below):
* fieldName - __string__ The name of the field.
* contentType - __string__ The content-type of the part, e.g. `text/plain`, `image/jpeg`, `application/octet-stream`
* fileName - __string__ The name of a file supplied by the part.
(Default: `(fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)`)
* **limits** - _object_ - Various limits on incoming data. Valid properties are:

@@ -227,8 +262,16 @@

* **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000 (same as node's http).
* **headerPairs** - _integer_ - For multipart forms, the max number of header key=>value pairs to parse **Default:** 2000
* **headerSize** - _integer_ - For multipart forms, the max size of a multipart header **Default:** 81920.
* The constructor can throw errors:
* **Unsupported content type: $type** - The `Content-Type` isn't one Busboy can parse.
* **Busboy expected an options-Object.** - Busboy expected an Object as its first parameter.
* **Missing Content-Type** - The provided headers don't include `Content-Type` at all.
* **Busboy expected an options-Object with headers-attribute.** - The first parameter is missing a headers-attribute.
* **Limit $limit is not a valid number** - Busboy expected the desired limit to be of type number. Busboy throws this error rather than silently falling back to its defaults, which could mask a security issue. A common source of this error is passing environment variables directly without converting them to numbers.
* **Unsupported Content-Type.** - The `Content-Type` isn't one Busboy can parse.
* **Missing Content-Type-header.** - The provided headers don't include `Content-Type` at all.
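A combined, hedged sketch of the options and errors documented above; header values and limit numbers are illustrative.

```js
// Illustrative sketch: wiring up isPartAFile and the documented limits.
const busboy = new Busboy({
  headers: req.headers, // missing content-type -> 'Missing Content-Type-header.'
  // Treat every part that carries a filename as a file, regardless of content-type.
  isPartAFile: (fieldName, contentType, fileName) => fileName !== undefined,
  limits: {
    headerPairs: 2000,          // max header key=>value pairs per part
    headerSize: 80 * 1024,      // max multipart header size in bytes
    fileSize: 5 * 1024 * 1024   // bytes per file before 'limit'/truncated kick in
  }
})
// Passing a limit that is not a number, e.g. fileSize: process.env.MAX_FILE_SIZE
// without Number(...), throws 'Limit fileSize is not a valid number' as noted above.
```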
