Comparing subtext version 2.0.2 to 3.0.0
lib/index.js
@@ -0,13 +1,15 @@ | ||
'use strict'; | ||
// Load modules | ||
var Fs = require('fs'); | ||
var Os = require('os'); | ||
var Stream = require('stream'); | ||
var Zlib = require('zlib'); | ||
var Boom = require('boom'); | ||
var Content = require('content'); | ||
var Hoek = require('hoek'); | ||
var Pez = require('pez'); | ||
var Qs = require('qs'); | ||
var Wreck = require('wreck'); | ||
const Fs = require('fs'); | ||
const Os = require('os'); | ||
const Stream = require('stream'); | ||
const Zlib = require('zlib'); | ||
const Boom = require('boom'); | ||
const Content = require('content'); | ||
const Hoek = require('hoek'); | ||
const Pez = require('pez'); | ||
const Qs = require('qs'); | ||
const Wreck = require('wreck'); | ||
@@ -17,3 +19,3 @@ | ||
var internals = {}; | ||
const internals = {}; | ||
@@ -27,3 +29,3 @@ | ||
var parser = new internals.Parser(req, tap, options, next); | ||
const parser = new internals.Parser(req, tap, options, next); | ||
return parser.read(); | ||
@@ -35,4 +37,2 @@ }; | ||
var self = this; | ||
this.req = req; | ||
@@ -44,5 +44,5 @@ this.settings = options; | ||
this.next = function (err) { | ||
this.next = (err) => { | ||
return next(err, self.result); | ||
return next(err, this.result); | ||
}; | ||
@@ -54,8 +54,8 @@ }; | ||
var next = this.next; | ||
const next = this.next; | ||
// Content size | ||
var req = this.req; | ||
var contentLength = req.headers['content-length']; | ||
const req = this.req; | ||
const contentLength = req.headers['content-length']; | ||
if (this.settings.maxBytes !== undefined && | ||
@@ -70,3 +70,3 @@ contentLength && | ||
var contentType = Content.type(this.settings.override || req.headers['content-type'] || this.settings.defaultContentType || 'application/octet-stream'); | ||
const contentType = Content.type(this.settings.override || req.headers['content-type'] || this.settings.defaultContentType || 'application/octet-stream'); | ||
if (contentType.isBoom) { | ||
@@ -99,17 +99,15 @@ return next(contentType); | ||
var self = this; | ||
let next = this.next; | ||
var next = this.next; | ||
const output = this.settings.output; // Output: 'data', 'stream', 'file' | ||
let source = this.req; | ||
var output = this.settings.output; // Output: 'data', 'stream', 'file' | ||
var source = this.req; | ||
// Content-encoding | ||
var contentEncoding = source.headers['content-encoding']; | ||
const contentEncoding = source.headers['content-encoding']; | ||
if (contentEncoding === 'gzip' || contentEncoding === 'deflate') { | ||
var decoder = (contentEncoding === 'gzip' ? Zlib.createGunzip() : Zlib.createInflate()); | ||
const decoder = (contentEncoding === 'gzip' ? Zlib.createGunzip() : Zlib.createInflate()); | ||
next = Hoek.once(next); // Modify next() for async events | ||
this.next = next; | ||
decoder.once('error', function (err) { | ||
decoder.once('error', (err) => { | ||
@@ -144,3 +142,3 @@ return next(Boom.badRequest('Invalid compressed payload', err)); | ||
if (output === 'file') { | ||
this.writeFile(source, function (err, path, bytes) { | ||
this.writeFile(source, (err, path, bytes) => { | ||
@@ -151,3 +149,3 @@ if (err) { | ||
self.result.payload = { path: path, bytes: bytes }; | ||
this.result.payload = { path: path, bytes: bytes }; | ||
return next(); | ||
@@ -161,3 +159,3 @@ }); | ||
return Wreck.read(source, { timeout: this.settings.timeout, maxBytes: this.settings.maxBytes }, function (err, payload) { | ||
return Wreck.read(source, { timeout: this.settings.timeout, maxBytes: this.settings.maxBytes }, (err, payload) => { | ||
@@ -168,10 +166,10 @@ if (err) { | ||
internals.object(payload, self.result.contentType.mime, self.settings, function (err, result) { | ||
internals.object(payload, this.result.contentType.mime, this.settings, (err, result) => { | ||
if (err) { | ||
self.result.payload = null; | ||
this.result.payload = null; | ||
return next(err); | ||
} | ||
self.result.payload = result; | ||
this.result.payload = result; | ||
return next(); | ||
@@ -185,18 +183,16 @@ }); | ||
var self = this; | ||
let next = this.next; | ||
var next = this.next; | ||
const output = this.settings.output; // Output: 'data', 'stream', 'file' | ||
let source = this.req; | ||
var output = this.settings.output; // Output: 'data', 'stream', 'file' | ||
var source = this.req; | ||
// Content-encoding | ||
if (this.settings.parse === 'gunzip') { | ||
var contentEncoding = source.headers['content-encoding']; | ||
const contentEncoding = source.headers['content-encoding']; | ||
if (contentEncoding === 'gzip' || contentEncoding === 'deflate') { | ||
var decoder = (contentEncoding === 'gzip' ? Zlib.createGunzip() : Zlib.createInflate()); | ||
const decoder = (contentEncoding === 'gzip' ? Zlib.createGunzip() : Zlib.createInflate()); | ||
next = Hoek.once(next); // Modify next() for async events | ||
decoder.once('error', function (err) { | ||
decoder.once('error', (err) => { | ||
@@ -226,3 +222,3 @@ return next(Boom.badRequest('Invalid compressed payload', err)); | ||
if (output === 'file') { | ||
this.writeFile(source, function (err, path, bytes) { | ||
this.writeFile(source, (err, path, bytes) => { | ||
@@ -233,3 +229,3 @@ if (err) { | ||
self.result.payload = { path: path, bytes: bytes }; | ||
this.result.payload = { path: path, bytes: bytes }; | ||
return next(); | ||
@@ -243,3 +239,3 @@ }); | ||
return Wreck.read(source, { timeout: this.settings.timeout, maxBytes: this.settings.maxBytes }, function (err, payload) { | ||
return Wreck.read(source, { timeout: this.settings.timeout, maxBytes: this.settings.maxBytes }, (err, payload) => { | ||
@@ -250,3 +246,3 @@ if (err) { | ||
self.result.payload = payload; | ||
this.result.payload = payload; | ||
return next(); | ||
@@ -293,4 +289,5 @@ }); | ||
let parsed; | ||
try { | ||
var parsed = JSON.parse(payload.toString('utf8')); | ||
parsed = JSON.parse(payload.toString('utf8')); | ||
} | ||
@@ -307,11 +304,9 @@ catch (err) { | ||
var self = this; | ||
var next = this.next; | ||
let next = this.next; | ||
next = Hoek.once(next); // Modify next() for async events | ||
this.next = next; | ||
var dispenser = new Pez.Dispenser(contentType); | ||
const dispenser = new Pez.Dispenser(contentType); | ||
var onError = function (err) { | ||
const onError = (err) => { | ||
@@ -323,5 +318,5 @@ return next(Boom.badRequest('Invalid multipart payload format', err)); | ||
var arrayFields = false; | ||
var data = {}; | ||
var finalize = function () { | ||
let arrayFields = false; | ||
let data = {}; | ||
const finalize = () => { | ||
@@ -334,10 +329,10 @@ dispenser.removeListener('error', onError); | ||
if (arrayFields) { | ||
data = Qs.parse(data, self.settings.qs); | ||
data = Qs.parse(data, this.settings.qs); | ||
} | ||
self.result.payload = data; | ||
this.result.payload = data; | ||
return next(); | ||
}; | ||
var set = function (name, value) { | ||
const set = (name, value) => { | ||
@@ -357,12 +352,12 @@ arrayFields = arrayFields || (name.indexOf('[') !== -1); | ||
var pendingFiles = {}; | ||
var nextId = 0; | ||
var closed = false; | ||
const pendingFiles = {}; | ||
let nextId = 0; | ||
let closed = false; | ||
var onPart = function (part) { | ||
const onPart = (part) => { | ||
if (self.settings.output === 'file') { // Output: 'file' | ||
var id = nextId++; | ||
if (this.settings.output === 'file') { // Output: 'file' | ||
const id = nextId++; | ||
pendingFiles[id] = true; | ||
self.writeFile(part, function (err, path, bytes) { | ||
this.writeFile(part, (err, path, bytes) => { | ||
@@ -375,3 +370,3 @@ delete pendingFiles[id]; | ||
var item = { | ||
const item = { | ||
filename: part.filename, | ||
@@ -393,8 +388,8 @@ path: path, | ||
else { // Output: 'data' | ||
Wreck.read(part, {}, function (err, payload) { | ||
Wreck.read(part, {}, (err, payload) => { | ||
// err handled by dispenser.once('error') | ||
if (self.settings.output === 'stream') { // Output: 'stream' | ||
var item = Wreck.toReadableStream(payload); | ||
if (this.settings.output === 'stream') { // Output: 'stream' | ||
const item = Wreck.toReadableStream(payload); | ||
@@ -409,4 +404,4 @@ item.hapi = { | ||
var ct = part.headers['content-type'] || ''; | ||
var mime = ct.split(';')[0].trim().toLowerCase(); | ||
const ct = part.headers['content-type'] || ''; | ||
const mime = ct.split(';')[0].trim().toLowerCase(); | ||
@@ -421,3 +416,3 @@ if (!mime) { | ||
internals.object(payload, mime, self.settings, function (err, result) { | ||
internals.object(payload, mime, this.settings, (err, result) => { | ||
@@ -432,3 +427,3 @@ return set(part.name, err ? payload : result); | ||
var onField = function (name, value) { | ||
const onField = (name, value) => { | ||
@@ -440,3 +435,3 @@ set(name, value); | ||
var onClose = function () { | ||
const onClose = () => { | ||
@@ -459,11 +454,9 @@ if (Object.keys(pendingFiles).length) { | ||
var self = this; | ||
const path = Hoek.uniqueFilename(this.settings.uploads || Os.tmpDir()); | ||
const file = Fs.createWriteStream(path, { flags: 'wx' }); | ||
const counter = new internals.Counter(); | ||
var path = Hoek.uniqueFilename(this.settings.uploads || Os.tmpDir()); | ||
var file = Fs.createWriteStream(path, { flags: 'wx' }); | ||
var counter = new internals.Counter(); | ||
const finalize = Hoek.once((err) => { | ||
var finalize = Hoek.once(function (err) { | ||
self.req.removeListener('aborted', onAbort); | ||
this.req.removeListener('aborted', onAbort); | ||
file.removeListener('close', finalize); | ||
@@ -477,3 +470,3 @@ file.removeListener('error', finalize); | ||
file.destroy(); | ||
Fs.unlink(path, function (/* fsErr */) { // Ignore unlink errors | ||
Fs.unlink(path, (/* fsErr */) => { // Ignore unlink errors | ||
@@ -487,3 +480,3 @@ return callback(err); | ||
var onAbort = function () { | ||
const onAbort = () => { | ||
@@ -510,4 +503,4 @@ return finalize(Boom.badRequest('Client connection aborted')); | ||
this.bytes += chunk.length; | ||
this.bytes = this.bytes + chunk.length; | ||
return next(null, chunk); | ||
}; |
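The lib/index.js diff above is a syntax-only ES2015 migration: `var` becomes `const` or `let`, and `function` callbacks become arrow functions. The one behavioural knock-on is that the `var self = this;` aliases can be dropped, because an arrow function inherits `this` from the enclosing scope instead of receiving its own. A minimal sketch of that pattern, assuming a constructor-style parser like the one in the diff (illustrative only, not the actual subtext source):

```js
'use strict';

// Before (ES5, Node 0.10): a plain function expression receives its own
// `this` when invoked later, so the constructor keeps a `self` alias.
function ParserOld(req, next) {

    var self = this;
    this.result = {};
    this.next = function (err) {

        return next(err, self.result);
    };
}

// After (ES2015, Node >= 4): an arrow function closes over the enclosing
// `this`, so the alias disappears and the bindings become `const`.
function ParserNew(req, next) {

    this.result = {};
    this.next = (err) => {

        return next(err, this.result);
    };
}

// Both behave identically:
const parser = new ParserNew({}, (err, result) => console.log(err, result));
parser.next(null);      // logs: null {}
```

The next diff covers the package manifest, where this migration shows up as a new engines requirement and major dependency bumps.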
{ | ||
"name": "subtext", | ||
"description": "HTTP payload parsing", | ||
"version": "2.0.2", | ||
"version": "3.0.0", | ||
"repository": "git://github.com/hapijs/subtext", | ||
@@ -15,16 +15,16 @@ "main": "lib/index.js", | ||
"engines": { | ||
"node": ">=0.10.40" | ||
"node": ">=4.0.0" | ||
}, | ||
"dependencies": { | ||
"boom": "2.x.x", | ||
"content": "1.x.x", | ||
"hoek": "2.x.x", | ||
"pez": "1.x.x", | ||
"qs": "5.x.x", | ||
"wreck": "6.x.x" | ||
"boom": "3.x.x", | ||
"content": "3.x.x", | ||
"hoek": "3.x.x", | ||
"pez": "2.x.x", | ||
"qs": "6.x.x", | ||
"wreck": "7.x.x" | ||
}, | ||
"devDependencies": { | ||
"code": "1.x.x", | ||
"code": "2.x.x", | ||
"form-data": "0.1.x", | ||
"lab": "6.x.x" | ||
"lab": "7.x.x" | ||
}, | ||
@@ -31,0 +31,0 @@ "scripts": { |
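The package.json changes track the code changes above: the engines floor rises from Node >=0.10.40 to >=4.0.0, the first release line that supports the `const`, `let`, and arrow-function syntax now used throughout, and every hapi dependency (boom, content, hoek, pez, qs, wreck) takes a semver-major bump, which is why the package itself moves from 2.0.2 to 3.0.0 rather than 2.0.3. A minimal sketch, purely to illustrate why the engine requirement has to move (not taken from the package):

```js
'use strict';

// subtext 3.0.0 relies on arrow functions and block-scoped declarations
// that Node 0.10 cannot parse; loading the module there fails with a
// SyntaxError before any code runs.
const double = (n) => n * 2;   // `const` + arrow function (ES2015)
let total = 0;                  // block-scoped `let`
total = total + double(3);
console.log(total);             // 6
```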
@@ -0,14 +1,16 @@ | ||
'use strict'; | ||
// Load modules | ||
var Domain = require('domain'); | ||
var Fs = require('fs'); | ||
var Http = require('http'); | ||
var Path = require('path'); | ||
var Stream = require('stream'); | ||
var Zlib = require('zlib'); | ||
var Code = require('code'); | ||
var FormData = require('form-data'); | ||
var Lab = require('lab'); | ||
var Subtext = require('..'); | ||
var Wreck = require('wreck'); | ||
const Domain = require('domain'); | ||
const Fs = require('fs'); | ||
const Http = require('http'); | ||
const Path = require('path'); | ||
const Stream = require('stream'); | ||
const Zlib = require('zlib'); | ||
const Code = require('code'); | ||
const FormData = require('form-data'); | ||
const Lab = require('lab'); | ||
const Subtext = require('..'); | ||
const Wreck = require('wreck'); | ||
@@ -18,3 +20,3 @@ | ||
var internals = {}; | ||
const internals = {}; | ||
@@ -24,14 +26,14 @@ | ||
var lab = exports.lab = Lab.script(); | ||
var describe = lab.describe; | ||
var it = lab.it; | ||
var expect = Code.expect; | ||
const lab = exports.lab = Lab.script(); | ||
const describe = lab.describe; | ||
const it = lab.it; | ||
const expect = Code.expect; | ||
describe('parse()', function () { | ||
describe('parse()', () => { | ||
it('returns a raw body', function (done) { | ||
it('returns a raw body', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -41,3 +43,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: false, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'data' }, (err, parsed) => { | ||
@@ -52,6 +54,6 @@ expect(err).to.not.exist(); | ||
it('returns a parsed body', function (done) { | ||
it('returns a parsed body', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -61,3 +63,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -71,6 +73,6 @@ expect(err).to.not.exist(); | ||
it('returns a parsed body as stream', function (done) { | ||
it('returns a parsed body as stream', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -80,7 +82,7 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: true, output: 'stream' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'stream' }, (err, parsed) => { | ||
expect(err).to.not.exist(); | ||
expect(parsed.mime).to.equal('application/json'); | ||
Wreck.read(parsed.payload, null, function (err, result) { | ||
Wreck.read(parsed.payload, null, (err, result) => { | ||
@@ -93,6 +95,6 @@ expect(result.toString()).to.equal(payload); | ||
it('returns a raw body as stream', function (done) { | ||
it('returns a raw body as stream', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -102,7 +104,7 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: false, output: 'stream' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'stream' }, (err, parsed) => { | ||
expect(err).to.not.exist(); | ||
expect(parsed.mime).to.equal('application/json'); | ||
Wreck.read(parsed.payload, null, function (err, result) { | ||
Wreck.read(parsed.payload, null, (err, result) => { | ||
@@ -115,6 +117,6 @@ expect(result.toString()).to.equal(payload); | ||
it('returns a parsed body (json-derived media type)', function (done) { | ||
it('returns a parsed body (json-derived media type)', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -124,3 +126,3 @@ 'content-type': 'application/json-patch+json' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -134,6 +136,6 @@ expect(err).to.not.exist(); | ||
it('returns an empty parsed body', function (done) { | ||
it('returns an empty parsed body', (done) => { | ||
var payload = ''; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = ''; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -143,3 +145,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -153,6 +155,6 @@ expect(err).to.not.exist(); | ||
it('returns an empty string', function (done) { | ||
it('returns an empty string', (done) => { | ||
var payload = ''; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = ''; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -162,3 +164,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -171,6 +173,6 @@ expect(err).to.not.exist(); | ||
it('errors on invalid content type header', function (done) { | ||
it('errors on invalid content type header', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -180,3 +182,3 @@ 'content-type': 'steve' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -189,6 +191,6 @@ expect(err).to.exist(); | ||
it('errors on unsupported content type', function (done) { | ||
it('errors on unsupported content type', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -198,3 +200,3 @@ 'content-type': 'james/bond' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -208,6 +210,6 @@ expect(err).to.exist(); | ||
it('errors when content-length header greater than maxBytes', function (done) { | ||
it('errors when content-length header greater than maxBytes', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -218,3 +220,3 @@ 'content-length': '50', | ||
Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, (err, parsed) => { | ||
@@ -227,6 +229,6 @@ expect(err).to.exist(); | ||
it('limits maxBytes when content-length header missing', function (done) { | ||
it('limits maxBytes when content-length header missing', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -237,3 +239,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'data', maxBytes: 10 }, (err, parsed) => { | ||
@@ -246,6 +248,6 @@ expect(err).to.exist(); | ||
it('errors on invalid JSON payload', function (done) { | ||
it('errors on invalid JSON payload', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -255,3 +257,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -264,6 +266,6 @@ expect(err).to.exist(); | ||
it('peeks at the unparsed stream of a parsed body', function (done) { | ||
it('peeks at the unparsed stream of a parsed body', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -273,7 +275,7 @@ 'content-type': 'application/json' | ||
var raw = ''; | ||
var tap = new Stream.Transform(); | ||
let raw = ''; | ||
const tap = new Stream.Transform(); | ||
tap._transform = function (chunk, encoding, callback) { | ||
raw += chunk.toString(); | ||
raw = raw + chunk.toString(); | ||
this.push(chunk, encoding); | ||
@@ -283,3 +285,3 @@ callback(); | ||
Subtext.parse(request, tap, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, tap, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -293,6 +295,6 @@ expect(err).to.not.exist(); | ||
it('peeks at the unparsed stream of an unparsed body', function (done) { | ||
it('peeks at the unparsed stream of an unparsed body', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -302,7 +304,7 @@ 'content-type': 'application/json' | ||
var raw = ''; | ||
var tap = new Stream.Transform(); | ||
let raw = ''; | ||
const tap = new Stream.Transform(); | ||
tap._transform = function (chunk, encoding, callback) { | ||
raw += chunk.toString(); | ||
raw = raw + chunk.toString(); | ||
this.push(chunk, encoding); | ||
@@ -312,3 +314,3 @@ callback(); | ||
Subtext.parse(request, tap, { parse: false, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, tap, { parse: false, output: 'data' }, (err, parsed) => { | ||
@@ -322,5 +324,5 @@ expect(err).to.not.exist(); | ||
it('saves file', function (done) { | ||
it('saves file', (done) => { | ||
var request = Wreck.toReadableStream('payload'); | ||
const request = Wreck.toReadableStream('payload'); | ||
request.headers = { | ||
@@ -330,7 +332,7 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'file' }, (err, parsed) => { | ||
expect(err).to.not.exist(); | ||
var receivedContents = Fs.readFileSync(parsed.payload.path); | ||
const receivedContents = Fs.readFileSync(parsed.payload.path); | ||
Fs.unlinkSync(parsed.payload.path); | ||
@@ -342,11 +344,11 @@ expect(receivedContents.toString()).to.equal('payload'); | ||
it('saves a file after content decoding', function (done) { | ||
it('saves a file after content decoding', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var sourceContents = Fs.readFileSync(path); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const sourceContents = Fs.readFileSync(path); | ||
const stats = Fs.statSync(path); | ||
Zlib.gzip(sourceContents, function (err, compressed) { | ||
Zlib.gzip(sourceContents, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -356,7 +358,7 @@ 'content-encoding': 'gzip' | ||
Subtext.parse(request, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
expect(err).to.not.exist(); | ||
var receivedContents = Fs.readFileSync(parsed.payload.path); | ||
const receivedContents = Fs.readFileSync(parsed.payload.path); | ||
Fs.unlinkSync(parsed.payload.path); | ||
@@ -370,10 +372,10 @@ expect(receivedContents).to.deep.equal(sourceContents); | ||
it('saves a file ignoring content decoding when parse is false', function (done) { | ||
it('saves a file ignoring content decoding when parse is false', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var sourceContents = Fs.readFileSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const sourceContents = Fs.readFileSync(path); | ||
Zlib.gzip(sourceContents, function (err, compressed) { | ||
Zlib.gzip(sourceContents, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -384,7 +386,7 @@ 'content-encoding': 'gzip', | ||
Subtext.parse(request, null, { parse: false, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'file' }, (err, parsed) => { | ||
expect(err).to.not.exist(); | ||
var receivedContents = Fs.readFileSync(parsed.payload.path); | ||
const receivedContents = Fs.readFileSync(parsed.payload.path); | ||
Fs.unlinkSync(parsed.payload.path); | ||
@@ -397,5 +399,5 @@ expect(receivedContents).to.deep.equal(compressed); | ||
it('errors on invalid upload directory (parse false)', function (done) { | ||
it('errors on invalid upload directory (parse false)', (done) => { | ||
var request = Wreck.toReadableStream('payload'); | ||
const request = Wreck.toReadableStream('payload'); | ||
request.headers = { | ||
@@ -405,3 +407,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: false, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: false, output: 'file', uploads: '/a/b/c/no/such/folder' }, (err, parsed) => { | ||
@@ -414,5 +416,5 @@ expect(err).to.exist(); | ||
it('errors on invalid upload directory (parse true)', function (done) { | ||
it('errors on invalid upload directory (parse true)', (done) => { | ||
var request = Wreck.toReadableStream('payload'); | ||
const request = Wreck.toReadableStream('payload'); | ||
request.headers = { | ||
@@ -422,3 +424,3 @@ 'content-type': 'application/json' | ||
Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/a/b/c/no/such/folder' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/a/b/c/no/such/folder' }, (err, parsed) => { | ||
@@ -431,6 +433,6 @@ expect(err).to.exist(); | ||
it('processes application/octet-stream', function (done) { | ||
it('processes application/octet-stream', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -440,3 +442,3 @@ 'content-type': 'application/octet-stream' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -451,9 +453,9 @@ expect(err).to.not.exist(); | ||
it('defaults to application/octet-stream', function (done) { | ||
it('defaults to application/octet-stream', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = {}; | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -468,9 +470,9 @@ expect(err).to.not.exist(); | ||
it('returns null on empty payload and application/octet-stream', function (done) { | ||
it('returns null on empty payload and application/octet-stream', (done) => { | ||
var payload = ''; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = ''; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = {}; | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -484,6 +486,6 @@ expect(err).to.not.exist(); | ||
it('overrides content-type', function (done) { | ||
it('overrides content-type', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -493,3 +495,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data', override: 'application/json' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', override: 'application/json' }, (err, parsed) => { | ||
@@ -503,9 +505,9 @@ expect(err).to.not.exist(); | ||
it('custom default content-type', function (done) { | ||
it('custom default content-type', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = {}; | ||
Subtext.parse(request, null, { parse: true, output: 'data', defaultContentType: 'application/json' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', defaultContentType: 'application/json' }, (err, parsed) => { | ||
@@ -519,6 +521,6 @@ expect(err).to.not.exist(); | ||
it('returns a parsed text payload', function (done) { | ||
it('returns a parsed text payload', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -528,3 +530,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -538,6 +540,6 @@ expect(err).to.not.exist(); | ||
it('parses an allowed content-type', function (done) { | ||
it('parses an allowed content-type', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -547,3 +549,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: 'text/plain' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: 'text/plain' }, (err, parsed) => { | ||
@@ -557,6 +559,6 @@ expect(err).to.not.exist(); | ||
it('parses an allowed content-type (array)', function (done) { | ||
it('parses an allowed content-type (array)', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -566,3 +568,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: ['text/plain'] }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: ['text/plain'] }, (err, parsed) => { | ||
@@ -576,6 +578,6 @@ expect(err).to.not.exist(); | ||
it('errors on an unallowed content-type', function (done) { | ||
it('errors on an unallowed content-type', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -585,3 +587,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: 'application/json' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: 'application/json' }, (err, parsed) => { | ||
@@ -594,6 +596,6 @@ expect(err).to.exist(); | ||
it('errors on an unallowed content-type (array)', function (done) { | ||
it('errors on an unallowed content-type (array)', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -603,3 +605,3 @@ 'content-type': 'text/plain' | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: ['application/json'] }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', allow: ['application/json'] }, (err, parsed) => { | ||
@@ -612,6 +614,6 @@ expect(err).to.exist(); | ||
it('parses form encoded payload', function (done) { | ||
it('parses form encoded payload', (done) => { | ||
var payload = 'x=abc'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = 'x=abc'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -621,3 +623,3 @@ 'content-type': 'application/x-www-form-urlencoded' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -631,6 +633,6 @@ expect(err).to.not.exist(); | ||
it('parses empty form encoded payload', function (done) { | ||
it('parses empty form encoded payload', (done) => { | ||
var payload = ''; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = ''; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -640,3 +642,3 @@ 'content-type': 'application/x-www-form-urlencoded' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -650,6 +652,6 @@ expect(err).to.not.exist(); | ||
it('parses form encoded payload (array keys)', function (done) { | ||
it('parses form encoded payload (array keys)', (done) => { | ||
var payload = 'x[y]=1&x[z]=2'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = 'x[y]=1&x[z]=2'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -659,3 +661,3 @@ 'content-type': 'application/x-www-form-urlencoded' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -669,6 +671,6 @@ expect(err).to.not.exist(); | ||
it('parses form encoded payload (with qs arraylimit set to 0)', function (done) { | ||
it('parses form encoded payload (with qs arraylimit set to 0)', (done) => { | ||
var payload = 'x[0]=1&x[100]=2'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = 'x[0]=1&x[100]=2'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -678,3 +680,3 @@ 'content-type': 'application/x-www-form-urlencoded' | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 0 } }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 0 } }, (err, parsed) => { | ||
@@ -688,6 +690,6 @@ expect(err).to.not.exist(); | ||
it('parses form encoded payload (with qs arraylimit set to 30) as flat zero indexed array', function (done) { | ||
it('parses form encoded payload (with qs arraylimit set to 30) as flat zero indexed array', (done) => { | ||
var payload = 'x[0]=0&x[1]=1&x[2]=2&x[3]=3&x[4]=4&x[5]=5&x[6]=6&x[7]=7&x[8]=8&x[9]=9&x[10]=10&x[11]=11&x[12]=12&x[13]=13&x[14]=14&x[15]=15&x[16]=16&x[17]=17&x[18]=18&x[19]=19&x[20]=20&x[21]=21&x[22]=22&x[23]=23&x[24]=24&x[25]=25&x[26]=26&x[27]=27&x[28]=28&x[29]=29&'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = 'x[0]=0&x[1]=1&x[2]=2&x[3]=3&x[4]=4&x[5]=5&x[6]=6&x[7]=7&x[8]=8&x[9]=9&x[10]=10&x[11]=11&x[12]=12&x[13]=13&x[14]=14&x[15]=15&x[16]=16&x[17]=17&x[18]=18&x[19]=19&x[20]=20&x[21]=21&x[22]=22&x[23]=23&x[24]=24&x[25]=25&x[26]=26&x[27]=27&x[28]=28&x[29]=29&'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -697,3 +699,3 @@ 'content-type': 'application/x-www-form-urlencoded' | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 30 } }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 30 } }, (err, parsed) => { | ||
@@ -707,6 +709,6 @@ expect(err).to.not.exist(); | ||
it('errors on malformed zipped payload', function (done) { | ||
it('errors on malformed zipped payload', (done) => { | ||
var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -717,3 +719,3 @@ 'content-encoding': 'gzip', | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -726,6 +728,6 @@ expect(err).to.exist(); | ||
it('errors on malformed zipped payload (parse gunzip only)', function (done) { | ||
it('errors on malformed zipped payload (parse gunzip only)', (done) => { | ||
var payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '7d8d78347h8347d58w347hd58w374d58w37h5d8w37hd4'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -736,3 +738,3 @@ 'content-encoding': 'gzip', | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, (err, parsed) => { | ||
@@ -745,8 +747,8 @@ expect(err).to.exist(); | ||
it('parses a gzipped payload', function (done) { | ||
it('parses a gzipped payload', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.gzip(payload, function (err, compressed) { | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.gzip(payload, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -757,3 +759,3 @@ 'content-encoding': 'gzip', | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -767,8 +769,8 @@ expect(err).to.not.exist(); | ||
it('unzips payload without parsing', function (done) { | ||
it('unzips payload without parsing', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.gzip(payload, function (err, compressed) { | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.gzip(payload, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -779,3 +781,3 @@ 'content-encoding': 'gzip', | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, (err, parsed) => { | ||
@@ -789,8 +791,8 @@ expect(err).to.not.exist(); | ||
it('parses a deflated payload', function (done) { | ||
it('parses a deflated payload', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.deflate(payload, function (err, compressed) { | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.deflate(payload, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -801,3 +803,3 @@ 'content-encoding': 'deflate', | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -811,8 +813,8 @@ expect(err).to.not.exist(); | ||
it('deflates payload without parsing', function (done) { | ||
it('deflates payload without parsing', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.deflate(payload, function (err, compressed) { | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
Zlib.deflate(payload, (err, compressed) => { | ||
var request = Wreck.toReadableStream(compressed); | ||
const request = Wreck.toReadableStream(compressed); | ||
request.headers = { | ||
@@ -823,3 +825,3 @@ 'content-encoding': 'deflate', | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: 'gunzip', output: 'data' }, (err, parsed) => { | ||
@@ -833,5 +835,5 @@ expect(err).to.not.exist(); | ||
it('parses a multipart payload', function (done) { | ||
it('parses a multipart payload', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -864,3 +866,3 @@ 'content-disposition: form-data; name="x"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -870,3 +872,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -884,5 +886,5 @@ expect(err).to.not.exist(); | ||
it('parses a multipart payload with qs arraylimit set to zero', function (done) { | ||
it('parses a multipart payload with qs arraylimit set to zero', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -915,3 +917,3 @@ 'content-disposition: form-data; name="x[0]"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -921,3 +923,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 0 } }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data', qs: { arrayLimit: 0 } }, (err, parsed) => { | ||
@@ -935,5 +937,5 @@ expect(err).to.not.exist(); | ||
it('parses a multipart payload', function (done) { | ||
it('parses a multipart payload', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -966,3 +968,3 @@ 'content-disposition: form-data; name="x"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -972,3 +974,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -986,5 +988,5 @@ expect(err).to.not.exist(); | ||
it('parses a multipart payload (empty file)', function (done) { | ||
it('parses a multipart payload (empty file)', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -997,3 +999,3 @@ 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1003,3 +1005,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1012,5 +1014,5 @@ expect(err).to.not.exist(); | ||
it('errors on an invalid multipart header (missing boundary)', function (done) { | ||
it('errors on an invalid multipart header (missing boundary)', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1043,3 +1045,3 @@ 'content-disposition: form-data; name="x"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1049,3 +1051,3 @@ 'content-type': 'multipart/form-data' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1058,5 +1060,5 @@ expect(err).to.exist(); | ||
it('errors on an invalid multipart payload', function (done) { | ||
it('errors on an invalid multipart payload', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1067,3 +1069,3 @@ 'content-disposition: form-data; name="x"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1073,3 +1075,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1082,5 +1084,5 @@ expect(err).to.exist(); | ||
it('parses file without content-type', function (done) { | ||
it('parses file without content-type', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1092,3 +1094,3 @@ 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1098,3 +1100,3 @@ 'content-type': 'multipart/form-data; boundary="AaB03x"' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1107,5 +1109,5 @@ expect(err).to.not.exist(); | ||
it('errors on invalid uploads folder while processing multipart payload', function (done) { | ||
it('errors on invalid uploads folder while processing multipart payload', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1117,3 +1119,3 @@ 'content-disposition: form-data; name="pics"; filename="file1.txt"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1123,3 +1125,3 @@ 'content-type': 'multipart/form-data; boundary="AaB03x"' | ||
Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/no/such/folder/a/b/c' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'file', uploads: '/no/such/folder/a/b/c' }, (err, parsed) => { | ||
@@ -1132,5 +1134,5 @@ expect(err).to.exist(); | ||
it('parses multiple files as streams', function (done) { | ||
it('parses multiple files as streams', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1153,3 +1155,3 @@ 'content-disposition: form-data; name="files"; filename="file1.txt"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1159,3 +1161,3 @@ 'content-type': 'multipart/form-data; boundary="AaB03x"' | ||
Subtext.parse(request, null, { parse: true, output: 'stream' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'stream' }, (err, parsed) => { | ||
@@ -1167,7 +1169,7 @@ expect(err).to.not.exist(); | ||
Wreck.read(parsed.payload.files[1], null, function (err, payload2) { | ||
Wreck.read(parsed.payload.files[1], null, (err, payload2) => { | ||
Wreck.read(parsed.payload.files[0], null, function (err, payload1) { | ||
Wreck.read(parsed.payload.files[0], null, (err, payload1) => { | ||
Wreck.read(parsed.payload.files[2], null, function (err, payload3) { | ||
Wreck.read(parsed.payload.files[2], null, (err, payload3) => { | ||
@@ -1184,12 +1186,12 @@ expect(payload1.toString()).to.equal('one'); | ||
it('parses a multipart file as file', function (done) { | ||
it('parses a multipart file as file', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const stats = Fs.statSync(path); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('my_file', Fs.createReadStream(path)); | ||
form.headers = form.getHeaders(); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1200,4 +1202,4 @@ expect(err).to.not.exist(); | ||
var sourceContents = Fs.readFileSync(path); | ||
var receivedContents = Fs.readFileSync(parsed.payload.my_file.path); | ||
const sourceContents = Fs.readFileSync(path); | ||
const receivedContents = Fs.readFileSync(parsed.payload.my_file.path); | ||
Fs.unlinkSync(parsed.payload.my_file.path); | ||
@@ -1209,8 +1211,8 @@ expect(sourceContents).to.deep.equal(receivedContents); | ||
it('parses multiple files as files', function (done) { | ||
it('parses multiple files as files', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const stats = Fs.statSync(path); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('file1', Fs.createReadStream(path)); | ||
@@ -1220,3 +1222,3 @@ form.append('file2', Fs.createReadStream(path)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1232,10 +1234,10 @@ expect(err).to.not.exist(); | ||
it('parses multiple files of different sizes', function (done) { | ||
it('parses multiple files of different sizes', (done) => { | ||
var path = Path.join(__dirname, './file/smallimage.png'); | ||
var path2 = Path.join(__dirname, './file/image.jpg'); | ||
var stats = Fs.statSync(path); | ||
var stats2 = Fs.statSync(path2); | ||
const path = Path.join(__dirname, './file/smallimage.png'); | ||
const path2 = Path.join(__dirname, './file/image.jpg'); | ||
const stats = Fs.statSync(path); | ||
const stats2 = Fs.statSync(path2); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('file1', Fs.createReadStream(path)); | ||
@@ -1245,3 +1247,3 @@ form.append('file2', Fs.createReadStream(path2)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1257,10 +1259,10 @@ expect(err).to.not.exist(); | ||
it('parses multiple files of different sizes', function (done) { | ||
it('parses multiple files of different sizes', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var path2 = Path.join(__dirname, './file/smallimage.png'); | ||
var stats = Fs.statSync(path); | ||
var stats2 = Fs.statSync(path2); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const path2 = Path.join(__dirname, './file/smallimage.png'); | ||
const stats = Fs.statSync(path); | ||
const stats2 = Fs.statSync(path2); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('file1', Fs.createReadStream(path)); | ||
@@ -1270,3 +1272,3 @@ form.append('file2', Fs.createReadStream(path2)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1283,8 +1285,8 @@ expect(err).to.not.exist(); | ||
it('parses multiple small files', function (done) { | ||
it('parses multiple small files', (done) => { | ||
var path = Path.join(__dirname, './file/smallimage.png'); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/smallimage.png'); | ||
const stats = Fs.statSync(path); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('file1', Fs.createReadStream(path)); | ||
@@ -1294,3 +1296,3 @@ form.append('file2', Fs.createReadStream(path)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1306,8 +1308,8 @@ expect(err).to.not.exist(); | ||
it('parses multiple larger files', function (done) { | ||
it('parses multiple larger files', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const stats = Fs.statSync(path); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('file1', Fs.createReadStream(path)); | ||
@@ -1317,3 +1319,3 @@ form.append('file2', Fs.createReadStream(path)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1329,17 +1331,17 @@ expect(err).to.not.exist(); | ||
it('parses multiple files while waiting for last file to be written', { parallel: false }, function (done) { | ||
it('parses multiple files while waiting for last file to be written', { parallel: false }, (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var stats = Fs.statSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const stats = Fs.statSync(path); | ||
var orig = Fs.createWriteStream; | ||
const orig = Fs.createWriteStream; | ||
Fs.createWriteStream = function () { // Make the first file write happen faster by bypassing the disk | ||
Fs.createWriteStream = orig; | ||
var stream = new Stream.Writable(); | ||
stream._write = function (chunk, encoding, callback) { | ||
const stream = new Stream.Writable(); | ||
stream._write = (chunk, encoding, callback) => { | ||
callback(); | ||
}; | ||
stream.once('finish', function () { | ||
stream.once('finish', () => { | ||
@@ -1351,3 +1353,3 @@ stream.emit('close'); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('a', Fs.createReadStream(path)); | ||
@@ -1357,3 +1359,3 @@ form.append('b', Fs.createReadStream(path)); | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'file' }, (err, parsed) => { | ||
@@ -1370,11 +1372,11 @@ expect(err).to.not.exist(); | ||
it('parses a multipart file as data', function (done) { | ||
it('parses a multipart file as data', (done) => { | ||
var path = Path.join(__dirname, '../package.json'); | ||
const path = Path.join(__dirname, '../package.json'); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('my_file', Fs.createReadStream(path)); | ||
form.headers = form.getHeaders(); | ||
Subtext.parse(form, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1387,5 +1389,5 @@ expect(err).to.not.exist(); | ||
it('peeks at multipart in stream mode', function (done) { | ||
it('peeks at multipart in stream mode', (done) => { | ||
var payload = | ||
const payload = | ||
'--AaB03x\r\n' + | ||
@@ -1418,3 +1420,3 @@ 'content-disposition: form-data; name="x"\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1424,7 +1426,7 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
var raw = ''; | ||
var tap = new Stream.Transform(); | ||
let raw = ''; | ||
const tap = new Stream.Transform(); | ||
tap._transform = function (chunk, encoding, callback) { | ||
raw += chunk.toString(); | ||
raw = raw + chunk.toString(); | ||
this.push(chunk, encoding); | ||
@@ -1434,3 +1436,3 @@ callback(); | ||
Subtext.parse(request, tap, { parse: true, output: 'stream' }, function (err, parsed) { | ||
Subtext.parse(request, tap, { parse: true, output: 'stream' }, (err, parsed) => { | ||
@@ -1446,13 +1448,13 @@ expect(err).to.not.exist(); | ||
it('parses a file correctly on stream mode', function (done) { | ||
it('parses a file correctly on stream mode', (done) => { | ||
var path = Path.join(__dirname, './file/image.jpg'); | ||
var fileStream = Fs.createReadStream(path); | ||
var fileContents = Fs.readFileSync(path); | ||
const path = Path.join(__dirname, './file/image.jpg'); | ||
const fileStream = Fs.createReadStream(path); | ||
const fileContents = Fs.readFileSync(path); | ||
var form = new FormData(); | ||
const form = new FormData(); | ||
form.append('my_file', fileStream); | ||
form.headers = form.getHeaders(); | ||
Subtext.parse(form, null, { parse: true, output: 'stream' }, function (err, parsed) { | ||
Subtext.parse(form, null, { parse: true, output: 'stream' }, (err, parsed) => { | ||
@@ -1469,3 +1471,3 @@ expect(err).to.not.exist(); | ||
Wreck.read(parsed.payload.my_file, null, function (err, buffer) { | ||
Wreck.read(parsed.payload.my_file, null, (err, buffer) => { | ||
@@ -1480,5 +1482,5 @@ expect(err).to.not.exist(); | ||
it('parses field names with arrays', function (done) { | ||
it('parses field names with arrays', (done) => { | ||
var payload = '--AaB03x\r\n' + | ||
const payload = '--AaB03x\r\n' + | ||
'Content-Disposition: form-data; name="a[b]"\r\n' + | ||
@@ -1493,3 +1495,3 @@ '\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1499,3 +1501,3 @@ 'content-type': 'multipart/form-data; boundary=AaB03x' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1508,5 +1510,5 @@ expect(err).to.not.exist(); | ||
it('parses field names with arrays and file', function (done) { | ||
it('parses field names with arrays and file', (done) => { | ||
var payload = '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + | ||
const payload = '----WebKitFormBoundaryE19zNvXGzXaLvS5C\r\n' + | ||
'Content-Disposition: form-data; name="a[b]"\r\n' + | ||
@@ -1526,3 +1528,3 @@ '\r\n' + | ||
var request = Wreck.toReadableStream(payload); | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1532,3 +1534,3 @@ 'content-type': 'multipart/form-data; boundary="--WebKitFormBoundaryE19zNvXGzXaLvS5C"' | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1541,11 +1543,11 @@ expect(err).to.not.exist(); | ||
it('cleans file when stream is aborted', function (done) { | ||
it('cleans file when stream is aborted', (done) => { | ||
var path = Path.join(__dirname, 'file'); | ||
var count = Fs.readdirSync(path).length; | ||
const path = Path.join(__dirname, 'file'); | ||
const count = Fs.readdirSync(path).length; | ||
var server = Http.createServer(); | ||
server.on('request', function (req, res) { | ||
const server = Http.createServer(); | ||
server.on('request', (req, res) => { | ||
Subtext.parse(req, null, { parse: false, output: 'file', uploads: path }, function (err, parsed) { | ||
Subtext.parse(req, null, { parse: false, output: 'file', uploads: path }, (err, parsed) => { | ||
@@ -1557,5 +1559,5 @@ expect(Fs.readdirSync(path).length).to.equal(count); | ||
server.listen(0, function () { | ||
server.listen(0, () => { | ||
var options = { | ||
const options = { | ||
hostname: 'localhost', | ||
@@ -1568,10 +1570,10 @@ port: server.address().port, | ||
var req = Http.request(options, function (res) { }); | ||
const req = Http.request(options, (res) => { }); | ||
req.on('error', function (err) { }); | ||
req.on('error', (err) => { }); | ||
var random = new Buffer(100000); | ||
const random = new Buffer(100000); | ||
req.write(random); | ||
req.write(random); | ||
setTimeout(function () { | ||
setTimeout(() => { | ||
@@ -1583,6 +1585,6 @@ req.abort(); | ||
it('avoids catching an error thrown in sync callback', function (done) { | ||
it('avoids catching an error thrown in sync callback', (done) => { | ||
var payload = '{"x":"1","y":"2","z":"3"}'; | ||
var request = Wreck.toReadableStream(payload); | ||
const payload = '{"x":"1","y":"2","z":"3"}'; | ||
const request = Wreck.toReadableStream(payload); | ||
request.headers = { | ||
@@ -1592,4 +1594,4 @@ 'content-type': 'application/json' | ||
var domain = Domain.create(); | ||
domain.once('error', function (err) { | ||
const domain = Domain.create(); | ||
domain.once('error', (err) => { | ||
@@ -1600,5 +1602,5 @@ expect(err.message).to.equal('callback error'); | ||
domain.run(function () { | ||
domain.run(() => { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, function (err, parsed) { | ||
Subtext.parse(request, null, { parse: true, output: 'data' }, (err, parsed) => { | ||
@@ -1605,0 +1607,0 @@ expect(err).to.not.exist(); |
Diffs for two additional files in this release are not supported yet.
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added b64@3.1.1 (transitive)
+ Added boom@3.2.2, 5.3.3 (transitive)
+ Added call-bind@1.0.7 (transitive)
+ Added content@3.1.2 (transitive)
+ Added define-data-property@1.1.4 (transitive)
+ Added es-define-property@1.0.0 (transitive)
+ Added es-errors@1.3.0 (transitive)
+ Added function-bind@1.1.2 (transitive)
+ Added get-intrinsic@1.2.4 (transitive)
+ Added gopd@1.0.1 (transitive)
+ Added has-property-descriptors@1.0.2 (transitive)
+ Added has-proto@1.0.3 (transitive)
+ Added has-symbols@1.0.3 (transitive)
+ Added hasown@2.0.2 (transitive)
+ Added hoek@3.0.4, 4.3.1 (transitive)
+ Added nigel@2.1.1 (transitive)
+ Added object-inspect@1.13.2 (transitive)
+ Added pez@2.2.2 (transitive)
+ Added qs@6.13.0 (transitive)
+ Added set-function-length@1.2.2 (transitive)
+ Added side-channel@1.0.6 (transitive)
+ Added vise@2.1.1 (transitive)
+ Added wreck@7.2.1 (transitive)
- Removed b64@2.0.1 (transitive)
- Removed boom@2.10.1 (transitive)
- Removed content@1.0.2 (transitive)
- Removed hoek@2.16.3 (transitive)
- Removed nigel@1.0.1 (transitive)
- Removed pez@1.0.0 (transitive)
- Removed qs@5.2.1 (transitive)
- Removed vise@1.0.0 (transitive)
- Removed wreck@6.3.0 (transitive)
Updated boom@3.x.x
Updated content@3.x.x
Updated hoek@3.x.x
Updated pez@2.x.x
Updated qs@6.x.x
Updated wreck@7.x.x
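Two of the transitive additions are worth a second look: boom and hoek each appear at two majors at once (boom 3.2.2 and 5.3.3, hoek 3.0.4 and 4.3.1), because packages in the new tree pin different majors of the same libraries (subtext itself asks for boom 3.x and hoek 3.x, while some of its updated dependencies resolve newer majors). A quick way to see which copies a consuming project actually resolves, assuming a standard npm node_modules layout (illustrative, not part of the package):

```js
'use strict';

// Print the boom and hoek versions that resolve from the current location.
// (`npm ls boom hoek` lists any additional nested copies.)
['boom', 'hoek'].forEach((name) => {

    const pkg = require(name + '/package.json');
    console.log(name + '@' + pkg.version);
});
```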