graphql-upload
Advanced tools
Comparing version 14.0.0 to 15.0.0
{ | ||
"name": "graphql-upload", | ||
"version": "14.0.0", | ||
"version": "15.0.0", | ||
"description": "Middleware and an Upload scalar to add support for GraphQL multipart requests (file uploads via queries and mutations) to various Node.js GraphQL servers.", | ||
@@ -64,6 +64,6 @@ "license": "MIT", | ||
"dependencies": { | ||
"@types/busboy": "^0.3.2", | ||
"@types/busboy": "^1.5.0", | ||
"@types/node": "*", | ||
"@types/object-path": "^0.11.1", | ||
"busboy": "^0.3.1", | ||
"busboy": "^1.6.0", | ||
"fs-capacitor": "^6.2.0", | ||
@@ -87,3 +87,3 @@ "http-errors": "^2.0.0", | ||
"test-director": "^8.0.2", | ||
"typescript": "^4.7.1-rc" | ||
"typescript": "^4.7.2" | ||
}, | ||
@@ -90,0 +90,0 @@ "scripts": { |
@@ -5,3 +5,3 @@ // @ts-check | ||
const Busboy = require("busboy"); | ||
const busboy = require("busboy"); | ||
const { WriteStream } = require("fs-capacitor"); | ||
@@ -46,5 +46,2 @@ const createError = require("http-errors"); | ||
/** @type {import("stream").Readable} */ | ||
let lastFileStream; | ||
/** | ||
@@ -67,3 +64,3 @@ * @type {{ [key: string]: unknown } | Array< | ||
const parser = new Busboy({ | ||
const parser = busboy({ | ||
// @ts-ignore This is about to change with `busboy` v1 types. | ||
@@ -82,8 +79,5 @@ headers: request.headers, | ||
* @param {Error} error Error instance. | ||
* @param {boolean} [isParserError] Is the error from the parser. | ||
*/ | ||
const exit = (error) => { | ||
// None of the tested scenarios cause multiple calls of this function, but | ||
// it’s still good to guard against it happening in case it’s possible now | ||
// or in the future. | ||
// coverage ignore next line | ||
function exit(error, isParserError = false) { | ||
if (exitError) return; | ||
@@ -93,13 +87,2 @@ | ||
reject(exitError); | ||
parser.destroy(); | ||
if ( | ||
lastFileStream && | ||
!lastFileStream.readableEnded && | ||
!lastFileStream.destroyed | ||
) | ||
lastFileStream.destroy(exitError); | ||
if (map) | ||
@@ -109,2 +92,5 @@ for (const upload of map.values()) | ||
// If the error came from the parser, don’t cause it to be emitted again. | ||
isParserError ? parser.destroy() : parser.destroy(exitError); | ||
request.unpipe(parser); | ||
@@ -119,194 +105,194 @@ | ||
}); | ||
}; | ||
parser.on( | ||
"field", | ||
(fieldName, value, fieldNameTruncated, valueTruncated) => { | ||
if (valueTruncated) | ||
return exit( | ||
createError( | ||
413, | ||
`The ‘${fieldName}’ multipart field value exceeds the ${maxFieldSize} byte size limit.` | ||
) | ||
); | ||
reject(exitError); | ||
} | ||
switch (fieldName) { | ||
case "operations": | ||
try { | ||
operations = JSON.parse(value); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid JSON in the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
parser.on("field", (fieldName, value, { valueTruncated }) => { | ||
if (valueTruncated) | ||
return exit( | ||
createError( | ||
413, | ||
`The ‘${fieldName}’ multipart field value exceeds the ${maxFieldSize} byte size limit.` | ||
) | ||
); | ||
// `operations` should be an object or an array. Note that arrays | ||
// and `null` have an `object` type. | ||
if (typeof operations !== "object" || !operations) | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
switch (fieldName) { | ||
case "operations": | ||
try { | ||
operations = JSON.parse(value); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid JSON in the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
operationsPath = objectPath(operations); | ||
// `operations` should be an object or an array. Note that arrays | ||
// and `null` have an `object` type. | ||
if (typeof operations !== "object" || !operations) | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘operations’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
break; | ||
case "map": { | ||
if (!operations) | ||
return exit( | ||
createError( | ||
400, | ||
`Misordered multipart fields; ‘map’ should follow ‘operations’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
operationsPath = objectPath(operations); | ||
let parsedMap; | ||
try { | ||
parsedMap = JSON.parse(value); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid JSON in the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
break; | ||
case "map": { | ||
if (!operations) | ||
return exit( | ||
createError( | ||
400, | ||
`Misordered multipart fields; ‘map’ should follow ‘operations’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
// `map` should be an object. | ||
if ( | ||
typeof parsedMap !== "object" || | ||
!parsedMap || | ||
Array.isArray(parsedMap) | ||
) | ||
let parsedMap; | ||
try { | ||
parsedMap = JSON.parse(value); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid JSON in the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
// `map` should be an object. | ||
if ( | ||
typeof parsedMap !== "object" || | ||
!parsedMap || | ||
Array.isArray(parsedMap) | ||
) | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
const mapEntries = Object.entries(parsedMap); | ||
// Check max files is not exceeded, even though the number of files | ||
// to parse might not match the map provided by the client. | ||
if (mapEntries.length > maxFiles) | ||
return exit( | ||
createError(413, `${maxFiles} max file uploads exceeded.`) | ||
); | ||
map = new Map(); | ||
for (const [fieldName, paths] of mapEntries) { | ||
if (!Array.isArray(paths)) | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘map’ multipart field (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
const mapEntries = Object.entries(parsedMap); | ||
map.set(fieldName, new Upload()); | ||
// Check max files is not exceeded, even though the number of files | ||
// to parse might not match the map provided by the client. | ||
if (mapEntries.length > maxFiles) | ||
return exit( | ||
createError(413, `${maxFiles} max file uploads exceeded.`) | ||
); | ||
for (const [index, path] of paths.entries()) { | ||
if (typeof path !== "string") | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
map = new Map(); | ||
for (const [fieldName, paths] of mapEntries) { | ||
if (!Array.isArray(paths)) | ||
try { | ||
operationsPath.set(path, map.get(fieldName)); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
`Invalid object path for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value ‘${path}’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
map.set(fieldName, new Upload()); | ||
for (const [index, path] of paths.entries()) { | ||
if (typeof path !== "string") | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid type for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
try { | ||
operationsPath.set(path, map.get(fieldName)); | ||
} catch (error) { | ||
return exit( | ||
createError( | ||
400, | ||
`Invalid object path for the ‘map’ multipart field entry key ‘${fieldName}’ array index ‘${index}’ value ‘${path}’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
} | ||
} | ||
} | ||
resolve(operations); | ||
} | ||
resolve(operations); | ||
} | ||
} | ||
); | ||
}); | ||
parser.on("file", (fieldName, stream, filename, encoding, mimetype) => { | ||
lastFileStream = stream; | ||
parser.on( | ||
"file", | ||
(fieldName, stream, { filename, encoding, mimeType: mimetype }) => { | ||
if (!map) { | ||
ignoreStream(stream); | ||
return exit( | ||
createError( | ||
400, | ||
`Misordered multipart fields; files should follow ‘map’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
if (!map) { | ||
ignoreStream(stream); | ||
return exit( | ||
createError( | ||
400, | ||
`Misordered multipart fields; files should follow ‘map’ (${GRAPHQL_MULTIPART_REQUEST_SPEC_URL}).` | ||
) | ||
); | ||
} | ||
const upload = map.get(fieldName); | ||
const upload = map.get(fieldName); | ||
if (!upload) { | ||
// The file is extraneous. As the rest can still be processed, just | ||
// ignore it and don’t exit with an error. | ||
ignoreStream(stream); | ||
return; | ||
} | ||
if (!upload) { | ||
// The file is extraneous. As the rest can still be processed, just | ||
// ignore it and don’t exit with an error. | ||
ignoreStream(stream); | ||
return; | ||
} | ||
/** @type {Error} */ | ||
let fileError; | ||
/** @type {Error} */ | ||
let fileError; | ||
const capacitor = new WriteStream(); | ||
const capacitor = new WriteStream(); | ||
capacitor.on("error", () => { | ||
stream.unpipe(); | ||
stream.resume(); | ||
}); | ||
capacitor.on("error", () => { | ||
stream.unpipe(); | ||
stream.resume(); | ||
}); | ||
stream.on("limit", () => { | ||
fileError = createError( | ||
413, | ||
`File truncated as it exceeds the ${maxFileSize} byte size limit.` | ||
); | ||
stream.unpipe(); | ||
capacitor.destroy(fileError); | ||
}); | ||
stream.on("limit", () => { | ||
fileError = createError( | ||
413, | ||
`File truncated as it exceeds the ${maxFileSize} byte size limit.` | ||
); | ||
stream.unpipe(); | ||
capacitor.destroy(fileError); | ||
}); | ||
stream.on("error", (error) => { | ||
fileError = error; | ||
stream.unpipe(); | ||
capacitor.destroy(fileError); | ||
}); | ||
stream.on("error", (error) => { | ||
fileError = error; | ||
stream.unpipe(); | ||
capacitor.destroy(fileError); | ||
}); | ||
/** @type {FileUpload} */ | ||
const file = { | ||
filename, | ||
mimetype, | ||
encoding, | ||
createReadStream(options) { | ||
const error = fileError || (released ? exitError : null); | ||
if (error) throw error; | ||
return capacitor.createReadStream(options); | ||
}, | ||
capacitor, | ||
}; | ||
/** @type {FileUpload} */ | ||
const file = { | ||
filename, | ||
mimetype, | ||
encoding, | ||
createReadStream(options) { | ||
const error = fileError || (released ? exitError : null); | ||
if (error) throw error; | ||
return capacitor.createReadStream(options); | ||
}, | ||
capacitor, | ||
}; | ||
Object.defineProperty(file, "capacitor", { | ||
enumerable: false, | ||
configurable: false, | ||
writable: false, | ||
}); | ||
Object.defineProperty(file, "capacitor", { | ||
enumerable: false, | ||
configurable: false, | ||
writable: false, | ||
}); | ||
stream.pipe(capacitor); | ||
upload.resolve(file); | ||
} | ||
); | ||
stream.pipe(capacitor); | ||
upload.resolve(file); | ||
}); | ||
parser.once("filesLimit", () => | ||
@@ -341,3 +327,9 @@ exit(createError(413, `${maxFiles} max file uploads exceeded.`)) | ||
parser.once("error", exit); | ||
// Use the `on` method instead of `once` as in edge cases the same parser | ||
// could have multiple `error` events and all must be handled to prevent the | ||
// Node.js process exiting with an error. One edge case is if there is a | ||
// malformed part header as well as an unexpected end of the form. | ||
parser.on("error", (/** @type {Error} */ error) => { | ||
exit(error, true); | ||
}); | ||
@@ -344,0 +336,0 @@ response.once("close", () => { |
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
67296
11
1
649
+ Added @types/busboy@1.5.4 (transitive)
+ Added busboy@1.6.0 (transitive)
+ Added streamsearch@1.1.0 (transitive)
- Removed @types/busboy@0.3.5 (transitive)
- Removed busboy@0.3.1 (transitive)
- Removed dicer@0.3.0 (transitive)
- Removed streamsearch@0.1.2 (transitive)
Updated @types/busboy@^1.5.0
Updated busboy@^1.6.0