multiparty
Comparing version 2.0.0 to 2.1.0
index.js
@@ -1,3 +0,559 @@ | ||
var IncomingForm = require('./lib/incoming_form').IncomingForm; | ||
IncomingForm.IncomingForm = IncomingForm; | ||
module.exports = IncomingForm; | ||
exports.Form = Form; | ||
var stream = require('readable-stream') | ||
, util = require('util') | ||
, fs = require('fs') | ||
, crypto = require('crypto') | ||
, path = require('path') | ||
, os = require('os') | ||
, assert = require('assert') | ||
, StringDecoder = require('string_decoder').StringDecoder | ||
var START = 0 | ||
, START_BOUNDARY = 1 | ||
, HEADER_FIELD_START = 2 | ||
, HEADER_FIELD = 3 | ||
, HEADER_VALUE_START = 4 | ||
, HEADER_VALUE = 5 | ||
, HEADER_VALUE_ALMOST_DONE = 6 | ||
, HEADERS_ALMOST_DONE = 7 | ||
, PART_DATA_START = 8 | ||
, PART_DATA = 9 | ||
, PART_END = 10 | ||
, END = 11 | ||
, LF = 10 | ||
, CR = 13 | ||
, SPACE = 32 | ||
, HYPHEN = 45 | ||
, COLON = 58 | ||
, A = 97 | ||
, Z = 122 | ||
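// CONTENT_TYPE_RE captures the multipart boundary from the Content-Type header,
// whether quoted or unquoted; FILE_EXT_RE keeps at most 16 characters of the
// original extension when naming uploaded files.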
var CONTENT_TYPE_RE = /^multipart\/(form-data|related);\s+boundary=(?:"([^"]+)"|([^;]+))$/i; | ||
var FILE_EXT_RE = /(\.[_\-a-zA-Z0-9]{0,16}).*/; | ||
util.inherits(Form, stream.Writable); | ||
function Form(options) { | ||
var self = this; | ||
stream.Writable.call(self); | ||
options = options || {}; | ||
self.error = null; | ||
self.finished = false; | ||
self.autoFields = !!options.autoFields; | ||
self.autoFiles = !!options.autoFiles;
// TODO: maxFields does nothing | ||
self.maxFields = options.maxFields || 1000; | ||
self.maxFieldsSize = options.maxFieldsSize || 2 * 1024 * 1024; | ||
self.uploadDir = options.uploadDir || os.tmpDir(); | ||
self.encoding = options.encoding || 'utf8'; | ||
self.hash = false; | ||
self.bytesReceived = 0; | ||
self.bytesExpected = null; | ||
self.openedFiles = []; | ||
self.totalFieldSize = 0; | ||
self.totalFieldCount = 0; | ||
self.flushing = 0; | ||
if (options.boundary) setUpParser(self, options.boundary); | ||
self.on('newListener', function(eventName) { | ||
if (eventName === 'file') { | ||
self.autoFiles = true; | ||
} else if (eventName === 'field') { | ||
self.autoFields = true; | ||
} | ||
}); | ||
} | ||
Form.prototype.parse = function(req, cb) { | ||
var self = this; | ||
// if the user supplies a callback, this implies autoFields and autoFiles | ||
if (cb) { | ||
self.autoFields = true; | ||
self.autoFiles = true; | ||
} | ||
req.on('error', function(err) { | ||
error(self, err); | ||
}); | ||
req.on('aborted', function() { | ||
self.emit('aborted'); | ||
error(self, new Error("Request aborted")); | ||
}); | ||
self.bytesExpected = getBytesExpected(req.headers); | ||
var contentType = req.headers['content-type']; | ||
if (!contentType) { | ||
error(self, new Error('missing content-type header')); | ||
return; | ||
} | ||
var m = contentType.match(CONTENT_TYPE_RE); | ||
if (!m) { | ||
error(self, new Error('unrecognized content-type: ' + contentType)); | ||
return; | ||
} | ||
var boundary = m[2] || m[3]; | ||
setUpParser(self, boundary); | ||
req.pipe(self); | ||
if (cb) { | ||
var fields = {} | ||
, files = {}; | ||
self.on('error', function(err) { | ||
cb(err); | ||
}); | ||
self.on('field', function(name, value) { | ||
fields[name] = value; | ||
}); | ||
self.on('file', function(name, file) { | ||
files[name] = file; | ||
}); | ||
self.on('close', function() { | ||
cb(null, fields, files); | ||
}); | ||
} | ||
}; | ||
Form.prototype._write = function(buffer, encoding, cb) { | ||
var self = this | ||
, i = 0 | ||
, len = buffer.length | ||
, prevIndex = self.index | ||
, index = self.index | ||
, state = self.state | ||
, lookbehind = self.lookbehind | ||
, boundary = self.boundary | ||
, boundaryChars = self.boundaryChars | ||
, boundaryLength = self.boundary.length | ||
, boundaryEnd = boundaryLength - 1 | ||
, bufferLength = buffer.length | ||
, c | ||
, cl | ||
for (i = 0; i < len; i++) { | ||
c = buffer[i]; | ||
switch (state) { | ||
case START: | ||
index = 0; | ||
state = START_BOUNDARY; | ||
/* falls through */ | ||
case START_BOUNDARY: | ||
if (index === boundaryLength - 2) { | ||
if (c !== CR) return cb(new Error("Expected CR Received " + c)); | ||
index++; | ||
break; | ||
} else if (index === boundaryLength - 1) { | ||
if (c !== LF) return cb(new Error("Expected LF Received " + c)); | ||
index = 0; | ||
self.onParsePartBegin(); | ||
state = HEADER_FIELD_START; | ||
break; | ||
} | ||
if (c !== boundary[index+2]) index = -2; | ||
if (c === boundary[index+2]) index++; | ||
break; | ||
case HEADER_FIELD_START: | ||
state = HEADER_FIELD; | ||
self.headerFieldMark = i; | ||
index = 0; | ||
/* falls through */ | ||
case HEADER_FIELD: | ||
if (c === CR) { | ||
self.headerFieldMark = null; | ||
state = HEADERS_ALMOST_DONE; | ||
break; | ||
} | ||
index++; | ||
if (c === HYPHEN) break; | ||
if (c === COLON) { | ||
if (index === 1) { | ||
// empty header field | ||
cb(new Error("Empty header field")); | ||
return; | ||
} | ||
self.onParseHeaderField(buffer.slice(self.headerFieldMark, i)); | ||
self.headerFieldMark = null; | ||
state = HEADER_VALUE_START; | ||
break; | ||
} | ||
cl = lower(c); | ||
if (cl < A || cl > Z) { | ||
cb(new Error("Expected alphabetic character, received " + c)); | ||
} | ||
break; | ||
case HEADER_VALUE_START: | ||
if (c === SPACE) break; | ||
self.headerValueMark = i; | ||
state = HEADER_VALUE; | ||
/* falls through */ | ||
case HEADER_VALUE: | ||
if (c === CR) { | ||
self.onParseHeaderValue(buffer.slice(self.headerValueMark, i)); | ||
self.headerValueMark = null; | ||
self.onParseHeaderEnd(); | ||
state = HEADER_VALUE_ALMOST_DONE; | ||
} | ||
break; | ||
case HEADER_VALUE_ALMOST_DONE: | ||
if (c !== LF) return cb(new Error("Expected LF Received " + c)); | ||
state = HEADER_FIELD_START; | ||
break; | ||
case HEADERS_ALMOST_DONE: | ||
if (c !== LF) return cb(new Error("Expected LF Received " + c)); | ||
var err = self.onParseHeadersEnd(); | ||
if (err) return cb(err); | ||
state = PART_DATA_START; | ||
break; | ||
case PART_DATA_START: | ||
state = PART_DATA; | ||
self.partDataMark = i; | ||
/* falls through */ | ||
case PART_DATA: | ||
prevIndex = index; | ||
if (index === 0) { | ||
// Boyer-Moore-derived algorithm to safely skip non-boundary data
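// it probes the byte at the far end of a would-be boundary match; if that byte
// appears nowhere in the boundary, no boundary can overlap it, so the scan can
// jump a full boundary length ahead instead of advancing one byte at a time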
i += boundaryEnd; | ||
while (i < bufferLength && !(buffer[i] in boundaryChars)) { | ||
i += boundaryLength; | ||
} | ||
i -= boundaryEnd; | ||
c = buffer[i]; | ||
} | ||
if (index < boundaryLength) { | ||
if (boundary[index] === c) { | ||
if (index === 0) { | ||
self.onParsePartData(buffer.slice(self.partDataMark, i)); | ||
self.partDataMark = null; | ||
} | ||
index++; | ||
} else { | ||
index = 0; | ||
} | ||
} else if (index === boundaryLength) { | ||
index++; | ||
if (c === CR) { | ||
// CR = part boundary | ||
self.partBoundaryFlag = true; | ||
} else if (c === HYPHEN) { | ||
// HYPHEN = end boundary | ||
self.lastBoundaryFlag = true; | ||
} else { | ||
index = 0; | ||
} | ||
} else if (index - 1 === boundaryLength) { | ||
if (self.partBoundaryFlag) { | ||
index = 0; | ||
if (c === LF) { | ||
self.partBoundaryFlag = false; | ||
self.onParsePartEnd(); | ||
self.onParsePartBegin(); | ||
state = HEADER_FIELD_START; | ||
break; | ||
} | ||
} else if (self.lastBoundaryFlag) { | ||
if (c === HYPHEN) { | ||
self.onParsePartEnd(); | ||
self.end(); | ||
state = END; | ||
} else { | ||
index = 0; | ||
} | ||
} else { | ||
index = 0; | ||
} | ||
} | ||
if (index > 0) { | ||
// when matching a possible boundary, keep a lookbehind reference | ||
// in case it turns out to be a false lead | ||
lookbehind[index-1] = c; | ||
} else if (prevIndex > 0) { | ||
// if our boundary turned out to be rubbish, the captured lookbehind | ||
// belongs to partData | ||
self.onParsePartData(lookbehind.slice(0, prevIndex)); | ||
prevIndex = 0; | ||
self.partDataMark = i; | ||
// reconsider the current character even though it interrupted the sequence;
// it could be the beginning of a new sequence | ||
i--; | ||
} | ||
break; | ||
case END: | ||
break; | ||
default: | ||
cb(new Error("Parser has invalid state.")); | ||
} | ||
} | ||
if (self.headerFieldMark) { | ||
self.onParseHeaderField(buffer.slice(self.headerFieldMark)); | ||
} | ||
if (self.headerValueMark) { | ||
self.onParseHeaderValue(buffer.slice(self.headerValueMark)); | ||
} | ||
if (self.partDataMark) { | ||
self.onParsePartData(buffer.slice(self.partDataMark)); | ||
} | ||
self.index = index; | ||
self.state = state; | ||
self.bytesReceived += buffer.length; | ||
self.emit('progress', self.bytesReceived, self.bytesExpected); | ||
cb(); | ||
}; | ||
Form.prototype.onParsePartBegin = function() { | ||
clearPartVars(this); | ||
} | ||
Form.prototype.onParseHeaderField = function(b) { | ||
this.headerField += this.headerFieldDecoder.write(b); | ||
} | ||
Form.prototype.onParseHeaderValue = function(b) { | ||
this.headerValue += this.headerValueDecoder.write(b); | ||
} | ||
Form.prototype.onParseHeaderEnd = function() { | ||
this.headerField = this.headerField.toLowerCase(); | ||
this.partHeaders[this.headerField] = this.headerValue; | ||
var m; | ||
if (this.headerField === 'content-disposition') { | ||
if (m = this.headerValue.match(/\bname="([^"]+)"/i)) { | ||
this.partName = m[1]; | ||
} | ||
this.partFilename = parseFilename(this.headerValue); | ||
} else if (this.headerField === 'content-transfer-encoding') { | ||
this.partTransferEncoding = this.headerValue.toLowerCase(); | ||
} | ||
this.headerFieldDecoder = new StringDecoder(this.encoding); | ||
this.headerField = ''; | ||
this.headerValueDecoder = new StringDecoder(this.encoding); | ||
this.headerValue = ''; | ||
} | ||
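// Forward raw part bytes to the per-part PassThrough stream; base64-encoded
// parts are decoded by writing the ASCII text with the 'base64' encoding.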
Form.prototype.onParsePartData = function(b) { | ||
if (this.partTransferEncoding === 'base64') { | ||
this.destStream.write(b.toString('ascii'), 'base64'); | ||
} else { | ||
this.destStream.write(b); | ||
} | ||
} | ||
Form.prototype.onParsePartEnd = function() { | ||
if (this.destStream) this.destStream.end(); | ||
clearPartVars(this); | ||
} | ||
Form.prototype.onParseHeadersEnd = function() { | ||
switch(this.partTransferEncoding){ | ||
case 'binary': | ||
case '7bit': | ||
case '8bit': | ||
this.partTransferEncoding = 'binary'; | ||
break; | ||
case 'base64': break; | ||
default: | ||
return new Error("unknown transfer-encoding: " + this.partTransferEncoding); | ||
} | ||
this.destStream = new stream.PassThrough(); | ||
this.destStream.headers = this.partHeaders; | ||
this.destStream.name = this.partName; | ||
this.destStream.filename = this.partFilename; | ||
this.totalFieldCount += 1; | ||
if (this.totalFieldCount >= this.maxFields) { | ||
error(this, new Error("maxFields " + this.maxFields + " exceeded.")); | ||
return; | ||
} | ||
this.emit('part', this.destStream); | ||
if (this.destStream.filename == null && this.autoFields) { | ||
handleField(this, this.destStream); | ||
} else if (this.destStream.filename != null && this.autoFiles) { | ||
handleFile(this, this.destStream); | ||
} | ||
} | ||
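// Expected byte count for the request: Content-Length when present, 0 when the
// request has neither Content-Length nor Transfer-Encoding (no body), and null
// (unknown) for chunked or otherwise encoded bodies.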
function getBytesExpected(headers) { | ||
var contentLength = headers['content-length']; | ||
if (contentLength) { | ||
return parseInt(contentLength, 10); | ||
} else if (headers['transfer-encoding'] == null) { | ||
return 0; | ||
} else { | ||
return null; | ||
} | ||
} | ||
function error(self, err) { | ||
assert.ok(!self.error); | ||
self.error = err; | ||
self.emit('error', err); | ||
self.openedFiles.forEach(function(file) { | ||
file.ws.destroy(); | ||
fs.unlink(file.path, function(err) { | ||
// this is already an error condition, ignore 2nd error | ||
}); | ||
}); | ||
} | ||
function beginFlush(self) { | ||
self.flushing += 1; | ||
} | ||
function endFlush(self) { | ||
self.flushing -= 1; | ||
maybeClose(self); | ||
} | ||
function maybeClose(self) { | ||
if (!self.flushing && self.finished && !self.error) { | ||
self.emit('close'); | ||
} | ||
} | ||
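// autoFiles handler: pipe the part into a uniquely named file under uploadDir,
// optionally hashing its bytes, and emit 'file' once the part has ended.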
function handleFile(self, fileStream) { | ||
beginFlush(self); | ||
var file = { | ||
originalFilename: fileStream.filename, | ||
path: uploadPath(self.uploadDir, fileStream.filename), | ||
headers: fileStream.headers, | ||
}; | ||
file.ws = fs.createWriteStream(file.path); | ||
self.openedFiles.push(file); | ||
fileStream.pipe(file.ws); | ||
var hashWorkaroundStream | ||
, hash = null; | ||
if (self.hash) { | ||
// workaround stream because https://github.com/joyent/node/issues/5216 | ||
hashWorkaroundStream = stream.Writable(); | ||
hash = crypto.createHash(self.hash); | ||
hashWorkaroundStream._write = function(buffer, encoding, callback) { | ||
//console.log("write", buffer.toString()); | ||
hash.update(buffer); | ||
callback(); | ||
}; | ||
fileStream.pipe(hashWorkaroundStream); | ||
} | ||
if (file.hash) fileStream.pipe(file.hash); | ||
file.ws.on('error', function(err) { | ||
error(self, err); | ||
}); | ||
fileStream.on('end', function() { | ||
if (hash) file.hash = hash.digest('hex'); | ||
self.emit('file', fileStream.name, file); | ||
endFlush(self); | ||
}); | ||
} | ||
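// autoFields handler: decode the part into a string using the form's encoding,
// enforce maxFieldsSize, and emit 'field' when the part ends.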
function handleField(self, fieldStream) { | ||
var value = '' | ||
, decoder = new StringDecoder(self.encoding); | ||
beginFlush(self); | ||
fieldStream.on('readable', function() { | ||
var buffer = fieldStream.read(); | ||
self.totalFieldSize += buffer.length; | ||
if (self.totalFieldSize > self.maxFieldsSize) { | ||
error(self, new Error("maxFieldsSize " + self.maxFieldsSize + " exceeded")); | ||
return; | ||
} | ||
value += decoder.write(buffer); | ||
}); | ||
fieldStream.on('end', function() { | ||
self.emit('field', fieldStream.name, value); | ||
endFlush(self); | ||
}); | ||
} | ||
function clearPartVars(self) { | ||
self.partHeaders = {}; | ||
self.partName = null; | ||
self.partFilename = null; | ||
self.partTransferEncoding = 'binary'; | ||
self.destStream = null; | ||
self.headerFieldDecoder = new StringDecoder(self.encoding); | ||
self.headerField = ""; | ||
self.headerValueDecoder = new StringDecoder(self.encoding); | ||
self.headerValue = ""; | ||
} | ||
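// Prime the parser state: the match target is '\r\n--' + boundary, lookbehind
// buffers the bytes of a partially matched boundary, and boundaryChars is the
// byte set consulted by the skip loop in _write.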
function setUpParser(self, boundary) { | ||
self.boundary = new Buffer(boundary.length + 4); | ||
self.boundary.write('\r\n--', 'ascii', 0); | ||
self.boundary.write(boundary, 'ascii', 4); | ||
self.lookbehind = new Buffer(self.boundary.length + 8); | ||
self.state = START; | ||
self.boundaryChars = {}; | ||
for (var i = 0; i < self.boundary.length; i++) { | ||
self.boundaryChars[self.boundary[i]] = true; | ||
} | ||
self.index = null; | ||
self.partBoundaryFlag = false; | ||
self.lastBoundaryFlag = false; | ||
self.on('finish', function() { | ||
if ((self.state === HEADER_FIELD_START && self.index === 0) || | ||
(self.state === PART_DATA && self.index === self.boundary.length)) | ||
{ | ||
self.onParsePartEnd(); | ||
} else if (self.state !== END) { | ||
error(self, new Error('stream ended unexpectedly')); | ||
} | ||
self.finished = true; | ||
maybeClose(self); | ||
}); | ||
} | ||
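// Build a collision-resistant upload path: <pid>-<random base36> plus the
// original extension truncated by FILE_EXT_RE.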
function uploadPath(baseDir, filename) { | ||
var ext = path.extname(filename).replace(FILE_EXT_RE, '$1'); | ||
var name = process.pid + '-' + | ||
(Math.random() * 0x100000000 + 1).toString(36) + ext; | ||
return path.join(baseDir, name); | ||
} | ||
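// Pull the filename out of a Content-Disposition value, drop any IE-style
// directory prefix, and unescape %22 and 4-digit numeric character references.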
function parseFilename(headerValue) { | ||
var m = headerValue.match(/\bfilename="(.*?)"($|; )/i); | ||
if (!m) return; | ||
var filename = m[1].substr(m[1].lastIndexOf('\\') + 1); | ||
filename = filename.replace(/%22/g, '"'); | ||
filename = filename.replace(/&#([\d]{4});/g, function(m, code) { | ||
return String.fromCharCode(code); | ||
}); | ||
return filename; | ||
} | ||
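// Cheap ASCII lower-casing: OR-ing with 0x20 maps 'A'..'Z' onto 'a'..'z'.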
function lower(c) { | ||
return c | 0x20; | ||
} | ||
{ | ||
"name": "multiparty", | ||
"version": "2.0.0", | ||
"version": "2.1.0", | ||
"description": "multipart/form-data parser which supports streaming", | ||
@@ -19,3 +19,3 @@ "keywords": [ | ||
}, | ||
"main": "index.js", | ||
"main": "./lib/incoming_form.js", | ||
"scripts": { | ||
@@ -27,3 +27,6 @@ "test": "mocha --reporter spec --recursive test/test.js" | ||
}, | ||
"license": "MIT" | ||
"license": "MIT", | ||
"dependencies": { | ||
"readable-stream": "~1.0.2" | ||
} | ||
} |
README.md
@@ -0,7 +1,4 @@ | ||
[![Build Status](https://travis-ci.org/superjoe30/node-multiparty.png?branch=master)](https://travis-ci.org/superjoe30/node-multiparty) | ||
# multiparty | ||
[![Build Status](https://secure.travis-ci.org/superjoe30/node-multiparty.png?branch=master)](http://travis-ci.org/superjoe30/node-multiparty) | ||
## Purpose | ||
Parse http requests with content-type `multipart/form-data`, also known as file uploads. | ||
@@ -11,8 +8,9 @@ | ||
* This module uses the Node.js v0.10 streams properly. It will not create a | ||
temp file for you. You could easily stream to S3 using | ||
[knox](https://github.com/LearnBoost/knox), for [example](). | ||
* Less bugs. This code is simpler and has cleaner tests, and does not try to | ||
do anything beyond multipart stream parsing. | ||
* Fast. | ||
* This module uses the Node.js v0.10 streams properly, *even in Node.js v0.8* | ||
* It will not create a temp file for you unless you want it to. | ||
* You can easily stream uploads to s3 with | ||
[knox](https://github.com/LearnBoost/knox), for [example](examples/s3.js). | ||
* Less bugs. This code is simpler, has all deprecated functionality removed, | ||
has cleaner tests, and does not try to do anything beyond multipart stream | ||
parsing. | ||
@@ -25,5 +23,5 @@ ## Installation | ||
## Example | ||
## Usage | ||
Parse an incoming file upload. | ||
Parse an incoming `multipart/form-data` request. | ||
@@ -38,3 +36,3 @@ ```js | ||
// parse a file upload | ||
var form = new multiparty.IncomingForm(); | ||
var form = new multiparty.Form(); | ||
@@ -64,186 +62,98 @@ form.parse(req, function(err, fields, files) { | ||
### multiparty.IncomingForm | ||
### multiparty.Form | ||
```js | ||
var form = new multiparty.IncomingForm() | ||
var form = new multiparty.Form(options) | ||
``` | ||
Creates a new incoming form. | ||
Creates a new form. Options: | ||
```js | ||
form.encoding = 'utf-8'; | ||
``` | ||
Sets encoding for incoming form fields. | ||
* `encoding` - sets encoding for the incoming form fields. Defaults to `utf8`. | ||
* `maxFieldsSize` - Limits the amount of memory a field (not a file) can
allocate in bytes. If this value is exceeded, an `error` event is emitted. | ||
The default size is 2MB. | ||
* `maxFields` - Limits the number of fields that will be parsed before | ||
emitting an `error` event. A file counts as a field in this case. | ||
Defaults to 1000. | ||
* `autoFields` - Enables `field` events. This is automatically set to `true` | ||
if you add a `field` listener. | ||
* `autoFiles` - Enables `file` events. This is automatically set to `true` | ||
if you add a `file` listener. | ||
* `uploadDir` - Only relevant when `autoFiles` is `true`. The directory for | ||
placing file uploads in. You can move them later using `fs.rename()`. | ||
Defaults to `os.tmpDir()`. | ||
* `hash` - Only relevant when `autoFiles` is `true`. If you want checksums | ||
calculated for incoming files, set this to either `sha1` or `md5`. | ||
Defaults to off. | ||
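For instance, here is a minimal sketch that puts these options together inside an
HTTP handler (the `/tmp/uploads` directory is only a placeholder and must already
exist):

```js
var http = require('http');
var multiparty = require('multiparty');

http.createServer(function(req, res) {
  var form = new multiparty.Form({
    encoding: 'utf8',
    maxFieldsSize: 2 * 1024 * 1024,
    maxFields: 1000,
    autoFiles: true,           // write file parts to disk and emit 'file' events
    uploadDir: '/tmp/uploads'  // placeholder; any existing writable directory works
  });

  form.parse(req, function(err, fields, files) {
    if (err) {
      res.writeHead(400, {'content-type': 'text/plain'});
      return res.end('invalid request: ' + err.message);
    }
    res.writeHead(200, {'content-type': 'text/plain'});
    res.end('received ' + Object.keys(fields).length + ' fields and ' +
            Object.keys(files).length + ' files');
  });
}).listen(8080);
```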
```js | ||
form.uploadDir = process.env.TMP || process.env.TMPDIR || process.env.TEMP || '/tmp' || process.cwd(); | ||
``` | ||
The directory for placing file uploads in. You can move them later on using | ||
`fs.rename()`. The default directory is picked at module load time depending on | ||
the first existing directory from those listed above. | ||
#### form.parse(request, [cb]) | ||
```js | ||
form.keepExtensions = false; | ||
``` | ||
If you want the files written to `form.uploadDir` to include the extensions of the original files, set this property to `true`. | ||
Parses an incoming node.js `request` containing form data. If `cb` is | ||
provided, `autoFields` and `autoFiles` are set to `true` and all fields and | ||
files are collected and passed to the callback: | ||
```js | ||
form.type | ||
``` | ||
Either 'multipart' or 'urlencoded' depending on the incoming request. | ||
```js | ||
form.maxFieldsSize = 2 * 1024 * 1024; | ||
``` | ||
Limits the amount of memory a field (not file) can allocate in bytes. | ||
If this value is exceeded, an `'error'` event is emitted. The default | ||
size is 2MB. | ||
```js | ||
form.maxFields = 0; | ||
``` | ||
Limits the number of fields that the querystring parser will decode. Defaults | ||
to 0 (unlimited). | ||
```js | ||
form.hash = false; | ||
``` | ||
If you want checksums calculated for incoming files, set this to either `'sha1'` or `'md5'`. | ||
```js | ||
form.bytesReceived | ||
``` | ||
The amount of bytes received for this form so far. | ||
```js | ||
form.bytesExpected | ||
``` | ||
The expected number of bytes in this form. | ||
```js | ||
form.parse(request, [cb]); | ||
``` | ||
Parses an incoming node.js `request` containing form data. If `cb` is provided, all fields and files are collected and passed to the callback:
```js | ||
form.parse(req, function(err, fields, files) { | ||
// ... | ||
}); | ||
form.onPart(part); | ||
``` | ||
You may overwrite this method if you are interested in directly accessing the multipart stream. Doing so will disable any `'field'` / `'file'` events processing which would occur otherwise, making you fully responsible for handling the processing. | ||
```js | ||
form.onPart = function(part) { | ||
part.addListener('data', function() { | ||
// ... | ||
}); | ||
} | ||
``` | ||
If you want to use multiparty to only handle certain parts for you, you can do so: | ||
```js | ||
form.onPart = function(part) { | ||
if (!part.filename) { | ||
// let multiparty handle all non-file parts | ||
form.handlePart(part); | ||
} | ||
} | ||
``` | ||
Check the code in this method for further inspiration. | ||
#### form.bytesReceived | ||
The amount of bytes received for this form so far. | ||
### multiparty.File | ||
```js | ||
file.size = 0 | ||
``` | ||
The size of the uploaded file in bytes. If the file is still being uploaded (see `'fileBegin'` event), this property says how many bytes of the file have been written to disk so far.
```js | ||
file.path = null | ||
``` | ||
The path this file is being written to. You can modify this in the `'fileBegin'` event in | ||
case you are unhappy with the way multiparty generates a temporary path for your files. | ||
```js | ||
file.name = null | ||
``` | ||
The name this file had according to the uploading client. | ||
```js | ||
file.type = null | ||
``` | ||
The mime type of this file, according to the uploading client. | ||
```js | ||
file.lastModifiedDate = null | ||
``` | ||
A date object (or `null`) containing the time this file was last written to. Mostly | ||
here for compatibility with the [W3C File API Draft](http://dev.w3.org/2006/webapi/FileAPI/). | ||
```js | ||
file.hash = null | ||
``` | ||
If hash calculation was set, you can read the hex digest out of this var. | ||
#### form.bytesExpected | ||
#### multiparty.File#toJSON() | ||
The expected number of bytes in this form. | ||
This method returns a JSON-representation of the file, allowing you to | ||
`JSON.stringify()` the file which is useful for logging and responding | ||
to requests. | ||
### Events | ||
#### 'error' (err) | ||
#### 'progress' | ||
```js | ||
form.on('progress', function(bytesReceived, bytesExpected) { | ||
}); | ||
``` | ||
Emitted after each incoming chunk of data that has been parsed. Can be used to roll your own progress bar. | ||
You definitely want to handle this event. If you do not, your server *will* crash
when users submit bogus multipart requests!
#### 'part' (part) | ||
Emitted when a part is encountered in the request. `part` is a | ||
`ReadableStream`. It also has the following properties: | ||
#### 'field' | ||
```js | ||
form.on('field', function(name, value) { | ||
}); | ||
``` | ||
* `headers` - the headers for this part. For example, you may be interested | ||
in `content-type`. | ||
* `name` - the field name for this part | ||
* `filename` - only if the part is an incoming file | ||
#### 'fileBegin' | ||
By default this is the only data event emitted. If you want multiparty to do | ||
more work for you, pass a callback to `form.parse()` or set `form.autoFields` and/or | ||
`form.autoFiles` to `true`. | ||
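For illustration, a sketch of consuming parts directly (assuming a `form` created
as shown above; `somewhereWritable` is a placeholder for any writable stream you
supply, and `req` is the incoming request):

```js
form.on('part', function(part) {
  // part is a readable stream; headers, name and filename are attached to it
  if (part.filename == null) {
    // non-file field: drain it so the parser can move on
    part.resume();
  } else {
    // file part: pipe it anywhere you like
    part.pipe(somewhereWritable); // placeholder writable stream
  }
});
form.on('error', function(err) {
  console.error('form error:', err);
});
form.parse(req);
```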
Emitted whenever a field / value pair has been received. | ||
```js | ||
form.on('fileBegin', function(name, file) { | ||
}); | ||
``` | ||
#### 'aborted' | ||
#### 'file' | ||
Emitted when the request is aborted. This event will be followed shortly | ||
by an `error` event. In practice you do not need to handle this event. | ||
Emitted whenever a new file is detected in the upload stream. Use this event if
you want to stream the file to somewhere else while buffering the upload on | ||
the file system. | ||
#### 'progress' (bytesReceived, bytesExpected) | ||
Emitted whenever a field / file pair has been received. `file` is an instance of `File`. | ||
```js | ||
form.on('file', function(name, file) { | ||
}); | ||
``` | ||
#### 'close' | ||
#### 'error' | ||
Emitted after all parts have been parsed and emitted. Not emitted if an `error` | ||
event is emitted. This is typically when you would send your response. | ||
Emitted when there is an error processing the incoming form. A request that experiences an error is automatically paused; you will have to manually call `request.resume()` if you want the request to continue firing `'data'` events.
```js | ||
form.on('error', function(err) { | ||
}); | ||
``` | ||
#### 'file' (name, file) | ||
#### 'aborted' | ||
**By default multiparty will not touch your hard drive.** But if you add this | ||
listener, multiparty automatically sets `form.autoFiles` to `true` and will | ||
stream uploads to disk for you. | ||
* `name` - the field name for this file | ||
* `file` - an object with these properties: | ||
- `originalFilename` - the filename that the user reports for the file | ||
- `path` - the absolute path of the uploaded file on disk | ||
- `headers` - the HTTP headers that were sent along with this file
Emitted when the request was aborted by the user. Right now this can be due to a 'timeout' or 'close' event on the socket. In the future there will be a separate 'timeout' event (needs a change in the node core). | ||
```js | ||
form.on('aborted', function() { | ||
}); | ||
``` | ||
If you set the `form.hash` option, then `file` will also contain a `hash` | ||
property which is the checksum of the file. | ||
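A short sketch of reading that `hash` property off the emitted file (`/srv/uploads`
is a placeholder destination; `req` and `res` come from the surrounding HTTP
handler):

```js
var fs = require('fs');
var multiparty = require('multiparty');

var form = new multiparty.Form();
form.hash = 'sha1'; // compute a checksum for each uploaded file

// adding a 'file' listener flips autoFiles on, so uploads land in uploadDir
form.on('file', function(name, file) {
  console.log(name, file.originalFilename, file.path, file.hash);
  // the upload now sits in uploadDir; move it somewhere permanent if needed
  fs.rename(file.path, '/srv/uploads/' + file.originalFilename, function(err) {
    if (err) console.error(err);
  });
});
form.on('close', function() {
  res.end('received');
});
form.parse(req);
```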
#### 'end'
```js | ||
form.on('end', function() { | ||
}); | ||
``` | ||
Emitted when the entire request has been received, and all contained files have finished flushing to disk. This is a great place for you to send your response. | ||
#### 'field' (name, value) | ||
* `name` - field name | ||
* `value` - string field value | ||
var assert = require('assert') | ||
, MultipartParser = require('../lib/multipart_parser') | ||
, parser = new MultipartParser() | ||
, Form = require('../').Form | ||
, boundary = '-----------------------------168072824752491622650073' | ||
@@ -17,30 +16,30 @@ , mb = 100 | ||
var form = new Form({ boundary: boundary }); | ||
parser.initWithBoundary(boundary); | ||
parser.onHeaderField = function() { | ||
hijack('onParseHeaderField', function() { | ||
callbacks.headerField++; | ||
}; | ||
}); | ||
parser.onHeaderValue = function() { | ||
hijack('onParseHeaderValue', function() { | ||
callbacks.headerValue++; | ||
}; | ||
}); | ||
parser.onPartBegin = function() { | ||
hijack('onParsePartBegin', function() { | ||
callbacks.partBegin++; | ||
}; | ||
}); | ||
parser.onPartData = function() { | ||
hijack('onParsePartData', function() { | ||
callbacks.partData++; | ||
}; | ||
}); | ||
parser.onPartEnd = function() { | ||
hijack('onParsePartEnd', function() { | ||
callbacks.partEnd++; | ||
}; | ||
}); | ||
parser.onEnd = function() { | ||
form.on('finish', function() { | ||
callbacks.end++; | ||
}; | ||
}); | ||
var start = new Date(); | ||
parser.write(buffer, function(err) { | ||
form.write(buffer, function(err) { | ||
var duration = new Date() - start; | ||
@@ -71,1 +70,9 @@ assert.ifError(err); | ||
function hijack(name, fn) { | ||
var oldFn = form[name]; | ||
form[name] = function() { | ||
fn(); | ||
return oldFn.apply(this, arguments); | ||
}; | ||
} | ||
@@ -7,3 +7,3 @@ var assert = require('assert'); | ||
var server = http.createServer(function (req, res) { | ||
var form = new multiparty.IncomingForm(); | ||
var form = new multiparty.Form(); | ||
var aborted_received = false; | ||
@@ -10,0 +10,0 @@ form.on('aborted', function () { |
@@ -8,4 +8,7 @@ var assert = require('assert') | ||
var server = http.createServer(function(req, res) { | ||
var form = new multiparty.IncomingForm(); | ||
var form = new multiparty.Form(); | ||
form.uploadDir = TMP_PATH; | ||
form.on('close', function () { | ||
throw new Error('Unexpected "close" event'); | ||
}); | ||
form.on('end', function () { | ||
@@ -12,0 +15,0 @@ throw new Error('Unexpected "end" event'); |
@@ -25,3 +25,3 @@ var http = require('http'), | ||
// Parse form and write results to response. | ||
var form = new multiparty.IncomingForm(); | ||
var form = new multiparty.Form(); | ||
form.parse(req, function(err, fields, files) { | ||
@@ -28,0 +28,0 @@ res.writeHead(200, {'content-type': 'text/plain'}); |
@@ -69,3 +69,3 @@ var spawn = require('child_process').spawn | ||
var file = parsedPart.value; | ||
assert.equal(file.name, expectedPart.filename); | ||
assert.equal(file.originalFilename, expectedPart.filename); | ||
if(expectedPart.sha1) assert.equal(file.hash, expectedPart.sha1); | ||
@@ -83,18 +83,20 @@ } | ||
var parts = []; | ||
var form = new multiparty.IncomingForm(); | ||
var form = new multiparty.Form({ | ||
autoFields: true, | ||
autoFiles: true, | ||
}); | ||
form.uploadDir = TMP_PATH; | ||
form.hash = "sha1"; | ||
form | ||
.on('error', callback) | ||
.on('fileBegin', function(name, value) { | ||
parts.push({type: 'file', name: name, value: value}); | ||
}) | ||
.on('field', function(name, value) { | ||
parts.push({type: 'field', name: name, value: value}); | ||
}) | ||
.on('end', function() { | ||
res.end('OK'); | ||
callback(null, parts); | ||
}); | ||
form.on('error', callback); | ||
form.on('file', function(name, value) { | ||
parts.push({type: 'file', name: name, value: value}); | ||
}); | ||
form.on('field', function(name, value) { | ||
parts.push({type: 'field', name: name, value: value}); | ||
}); | ||
form.on('close', function() { | ||
res.end('OK'); | ||
callback(null, parts); | ||
}); | ||
form.parse(req); | ||
@@ -101,0 +103,0 @@ |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 3 instances in 1 package
+ Added readable-stream@~1.0.2
+ Added core-util-is@1.0.3 (transitive)
+ Added inherits@2.0.4 (transitive)
+ Added isarray@0.0.1 (transitive)
+ Added readable-stream@1.0.34 (transitive)
+ Added string_decoder@0.10.31 (transitive)