Socket
Socket
Sign in · Demo · Install

s3-asset-uploader

Package Overview
Dependencies
5
Maintainers
6
Versions
25
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 2.2.0 to 2.3.0

197

index.js

@@ -18,2 +18,3 @@ /**

const hashLib = require('./lib/hash')
const streamLib = require('./lib/stream')
const transformLib = require('./lib/transform')

@@ -48,2 +49,3 @@

* @property {S3UploadHeaders} [gzipHeaders] - extra params used by `AWS.S3` upload method for GZIP files
* @property {RegExp} [gzipHashedFileKeyRegexp] - gzip files when hashing them
* @property {RegExp|boolean} [hashedOriginalFileRegexp] - respect hashes in original filenames

@@ -108,2 +110,4 @@ * @property {boolean} [includePseudoUnhashedOriginalFilesInDigest] - add pseudo-entries to the digest

this.noUploadHashedFiles = Boolean(options.noUploadHashedFiles)
// gzip options
this.gzipHashedFileKeyRegexp = options.gzipHashedFileKeyRegexp
// Hashed original file options

@@ -122,12 +126,15 @@ if (options.hashedOriginalFileRegexp instanceof RegExp) {

* The main work-horse method that performs all of the sub-tasks to synchronize
* @returns {Promise.<S3SyncDigest>}
* @public
*/
run() {
return this.gatherFiles()
.then(() => this.addFilesToDigest())
.then(() => this.syncFiles())
.then(() => this.uploadDigestFile())
.then(() => this.digest)
.finally(() => this.reset())
async run() {
try {
await this.gatherFiles()
await this.addFilesToDigest()
await this.syncFiles()
await this.uploadDigestFile()
return this.digest
} finally {
this.reset()
}
}

@@ -151,10 +158,8 @@

* Walks the `this.path` directory and collects all of the file paths
* @returns {Bluebird<void>}
* @returns {Promise.<void>}
* @private
*/
gatherFiles() {
return directoryLib.getFileNames(this.path, this.ignorePaths)
.then(filePaths => {
this.gatheredFilePaths.push(...filePaths)
})
async gatherFiles() {
const filePaths = await directoryLib.getFileNames(this.path, this.ignorePaths)
this.gatheredFilePaths.push(...filePaths)
}

@@ -164,10 +169,10 @@

* Iterates through the gathered files and generates the hashed digest mapping
* @returns {Bluebird<S3SyncDigest>}
* @returns {Promise.<S3SyncDigest>}
* @private
*/
addFilesToDigest() {
return Bluebird.mapSeries(this.gatheredFilePaths, filePath => {
return this.addFileToDigest(filePath)
})
.then(() => this.digest)
async addFilesToDigest() {
for (let filePath of this.gatheredFilePaths) {
await this.addFileToDigest(filePath)
}
return this.digest
}

@@ -177,6 +182,6 @@

* Uploads the gathered files
* @returns {Bluebird<Array.<S3SyncFileResult>>}
* @returns {Promise.<Array.<S3SyncFileResult>>}
* @private
*/
syncFiles() {
async syncFiles() {
return Bluebird.mapSeries(this.gatheredFilePaths, filePath => {

@@ -194,33 +199,34 @@ return Bluebird.props({

* @param {AbsoluteFilePath} filePath
* @returns {Bluebird<void>}
* @returns {Promise.<void>}
* @private
*/
addFileToDigest(filePath) {
return hashLib.hashFromFile(filePath)
.then(hash => {
this.filePathToEtagMap[filePath] = hash
const originalFileName = this.relativeFileName(filePath)
const originalFileKey = this.s3KeyForRelativeFileName(originalFileName)
if (this.isHashedFileName(originalFileName)) {
if (this.includePseudoUnhashedOriginalFilesInDigest) {
const unhashedFileName = this.unhashedFileName(originalFileName)
this.digest[unhashedFileName] = originalFileKey
}
this.digest[originalFileName] = originalFileKey
} else {
const hashedFileKey = this.hashedFileKey(originalFileKey, hash)
this.digest[originalFileName] = hashedFileKey
async addFileToDigest(filePath) {
const hash = await hashLib.hashFromFile(filePath)
this.filePathToEtagMap[filePath] = hash
const originalFileName = this.relativeFileName(filePath)
const originalFileKey = this.s3KeyForRelativeFileName(originalFileName)
if (this.isHashedFileName(originalFileName)) {
if (this.includePseudoUnhashedOriginalFilesInDigest) {
const unhashedFileName = this.unhashedFileName(originalFileName)
this.digest[unhashedFileName] = originalFileKey
}
})
this.digest[originalFileName] = originalFileKey
} else {
let hashedFileKey = this.hashedFileKey(originalFileKey, hash)
if (this.shouldGzipHashedFileKey(hashedFileKey)) {
hashedFileKey += '.gz'
}
this.digest[originalFileName] = hashedFileKey
}
}
/**
* @returns {Bluebird<S3UploadResult>}
* @returns {Promise.<S3UploadResult>}
* @private
*/
uploadDigestFile() {
async uploadDigestFile() {
const key = this.digestFileKey
if (this.noUploadDigestFile) {
debug(`SKIPPING key[${key}] reason[noUploadDigestFile]`)
return Bluebird.resolve()
return
}

@@ -238,6 +244,6 @@ return this.upload({

* @param {AbsoluteFilePath} filePath
* @returns {Bluebird<S3UploadResult>}
* @returns {Promise.<S3UploadResult>}
* @private
*/
uploadOriginalFile(filePath) {
async uploadOriginalFile(filePath) {
const originalFileName = this.relativeFileName(filePath)

@@ -247,16 +253,12 @@ const originalFileKey = this.s3KeyForRelativeFileName(originalFileName)

debug(`SKIPPING key[${originalFileKey}] reason[noUploadOriginalFiles]`)
return Bluebird.resolve()
return
}
const etag = this.filePathToEtagMap[filePath]
return this.shouldUpload(originalFileKey, etag)
.then(shouldUpload => {
if (shouldUpload) {
const headers = this.fileHeaders(filePath)
const params = Object.assign({}, headers, {
'Key': originalFileKey,
'Body': fs.createReadStream(filePath)
})
return this.upload(params)
}
})
if (await this.shouldUpload(originalFileKey, etag)) {
return this.upload({
...this.fileHeaders(filePath),
'Key': originalFileKey,
'Body': fs.createReadStream(filePath)
})
}
}

@@ -266,6 +268,6 @@

* @param {AbsoluteFilePath} filePath
* @returns {Bluebird<S3UploadResult>}
* @returns {Promise.<S3UploadResult>}
* @private
*/
uploadHashedFile(filePath) {
async uploadHashedFile(filePath) {
const originalFileName = this.relativeFileName(filePath)

@@ -277,13 +279,13 @@ const originalFileKey = this.s3KeyForRelativeFileName(originalFileName)

debug(`SKIPPING filePath[${filePath}] reason[NotInDigest]`)
return Bluebird.resolve()
return
}
if (hashedFileKey === originalFileKey) {
debug(`SKIPPING key[${hashedFileKey}] reason[originalFileIsHashed]`)
return Bluebird.resolve()
return
}
if (this.noUploadHashedFiles) {
debug(`SKIPPING key[${hashedFileKey}] reason[noUploadHashedFiles]`)
return Bluebird.resolve()
return
}
return transformLib.replaceHashedFilenames({
const transformResult = await transformLib.replaceHashedFilenames({
filePath,

@@ -293,16 +295,16 @@ relativeFileName: originalFileName,

})
.then(({ transformed, stream, hash }) => {
const etag = transformed ? hash : this.filePathToEtagMap[filePath]
return this.shouldUpload(hashedFileKey, etag)
.then(shouldUpload => {
if (shouldUpload) {
const headers = this.fileHeaders(filePath)
const params = Object.assign({}, headers, {
'Key': hashedFileKey,
'Body': stream
})
return this.upload(params)
}
let fileStream = transformResult.stream
let fileHeaders = this.fileHeaders(filePath)
if (this.shouldGzipHashedFileKey(hashedFileKey)) {
fileStream = streamLib.gzipStream(fileStream)
fileHeaders = { ...fileHeaders, ...this.gzipHeaders }
}
const etag = transformResult.hash || this.filePathToEtagMap[filePath]
if (await this.shouldUpload(hashedFileKey, etag)) {
return this.upload({
...fileHeaders,
'Key': hashedFileKey,
'Body': fileStream
})
})
}
}

@@ -312,11 +314,11 @@

* @param {S3UploadParams} params
* @returns {Bluebird<S3UploadResult>}
* @returns {Promise.<S3UploadResult>}
* @see https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property
* @private
*/
upload(params) {
async upload(params) {
const key = params['Key']
if (this.noUpload) {
debug(`SKIPPING key[${key}] reason[noUpload]`)
return Bluebird.resolve()
return
}

@@ -332,22 +334,21 @@ debug(`UPLOADING key[${key}]`)

* @param {AWS.S3.ETag} etag
* @returns {Bluebird<boolean>}
* @returns {Promise.<boolean>}
* @private
*/
shouldUpload(key, etag) {
async shouldUpload(key, etag) {
if (this.noUpload) {
debug(`SKIPPING key[${key}] reason[noUpload]`)
return Bluebird.resolve(false)
return false
}
return Bluebird.fromCallback(callback => {
this.client.headObject({
'Bucket': this.bucket,
'Key': key,
'IfNoneMatch': etag
}, callback)
})
.then(() => {
try {
await Bluebird.fromCallback(callback => {
this.client.headObject({
'Bucket': this.bucket,
'Key': key,
'IfNoneMatch': etag
}, callback)
})
// File found, ETag does not match
return true
})
.catch(err => {
} catch (err) {
switch (err.name) {

@@ -362,3 +363,3 @@ case 'NotModified':

}
})
}
}

@@ -415,2 +416,14 @@

/**
* @param {string} fileKey
* @returns {boolean}
* @private
*/
shouldGzipHashedFileKey(fileKey) {
if (this.gzipHashedFileKeyRegexp) {
return this.gzipHashedFileKeyRegexp.test(fileKey)
}
return false
}
/**
* @param {AbsoluteFilePath} filePath

@@ -417,0 +430,0 @@ * @returns {S3UploadHeaders}

@@ -9,6 +9,6 @@ // Node imports

* @param {string} basePath
* @param {Array.<(RegExp|string)>} filters
* @returns {Bluebird<Array.<string>>} The full paths of all files in the directory
* @param {(RegExp|string)[]} filters
* @returns {Promise.<string[]>} The full paths of all files in the directory
*/
function getFileNames(basePath, filters = []) {
async function getFileNames(basePath, filters = []) {
return recurseDirectory(basePath)

@@ -18,21 +18,19 @@

* @param {string} dirPath
* @returns {Bluebird<Array.<string>>} The full paths of all files in the directory
* @returns {Promise.<string[]>} The full paths of all files in the directory
*/
async function recurseDirectory(dirPath) {
  // `withFileTypes` yields Dirent objects, so files and directories can be
  // distinguished without an extra stat call per entry.
  const dirents = await Bluebird.fromCallback(callback => {
    fs.readdir(dirPath, { withFileTypes: true }, callback)
  })
  const filePaths = await Bluebird.map(dirents, dirent => {
    const fullPath = path.resolve(dirPath, dirent.name)
    if (isFiltered(fullPath)) {
      // Excluded by a caller-supplied filter; contributes no paths.
      return []
    }
    if (dirent.isDirectory()) {
      // Recurse; resolves to the array of descendant file paths.
      return recurseDirectory(fullPath)
    }
    return [fullPath]
  })
  // Each entry contributed an array; flatten one level into a single list.
  return Array.prototype.concat(...filePaths)
}

@@ -39,0 +37,0 @@

// Node imports
const crypto = require('crypto')
const fs = require('fs')
// NPM imports
const Bluebird = require('bluebird')

@@ -15,5 +13,5 @@ const HASH_ALGORITHM = 'md5'

* @param {string} filePath
* @returns {Bluebird<Hash>}
* @returns {Promise.<Hash>}
*/
function hashFromFile(filePath) {
async function hashFromFile(filePath) {
return hashFromStream(fs.createReadStream(filePath))

@@ -25,6 +23,6 @@ }

* @param {NodeJS.ReadableStream} readableStream
* @returns {Bluebird<Hash>}
* @returns {Promise.<Hash>}
*/
function hashFromStream(readableStream) {
return new Bluebird((resolve, reject) => {
async function hashFromStream(readableStream) {
return new Promise((resolve, reject) => {
const hashStream = crypto.createHash(HASH_ALGORITHM)

@@ -31,0 +29,0 @@ readableStream.pipe(hashStream)

@@ -5,13 +5,13 @@ // Node imports

const zlib = require('zlib')
// NPM imports
const Bluebird = require('bluebird')
// Lib imports
const fileLib = require('./file')
const DEFAULT_GZIP_OPTIONS = { level: 9 }
/**
* @param {string} filePath
* @returns {Bluebird<string>}
* @returns {Promise.<string>}
* @private
*/
function fileToString(filePath) {
async function fileToString(filePath) {
const fileStream = fs.createReadStream(filePath)

@@ -27,6 +27,6 @@ const readerStream = fileLib.isGzipped(filePath)

* @param {BufferEncoding} [encoding]
* @returns {Bluebird<string>}
* @returns {Promise.<string>}
*/
function streamToString(readerStream, encoding = 'utf8') {
return new Bluebird((resolve, reject) => {
async function streamToString(readerStream, encoding = 'utf8') {
return new Promise((resolve, reject) => {
/** @type {Array.<Uint8Array>} */

@@ -65,6 +65,16 @@ const chunks = []

/**
* @param {NodeJS.ReadableStream} stream
* @returns {NodeJS.ReadableStream}
*/
function gzipStream(stream) {
  // Compress using the module-wide GZIP options (maximum compression level).
  return stream.pipe(zlib.createGzip(DEFAULT_GZIP_OPTIONS))
}
// Public API of the stream helper library.
module.exports = {
  fileToString,
  gzipStream,
  streamToString,
  stringToStream
}
// Node imports
const fs = require('fs')
const path = require('path')
const zlib = require('zlib')
// NPM imports
const Bluebird = require('bluebird')
// Lib imports

@@ -14,4 +11,2 @@ const fileLib = require('./file')

const DEFAULT_GZIP_OPTIONS = { level: 9 }
const CSS_URL_REGEXP = /url\(\/([^)]+)\)/g

@@ -28,11 +23,3 @@ const CSS_SOURCEMAP_REGEXP = /\/\*# sourceMappingURL=([^*]+)\*\/$/

/**
* @typedef {Object} ReplaceHashedFilenamesOptions
* @property {string} filePath
* @property {string} relativeFileName
* @property {S3SyncDigest} digest
*/
/**
* @typedef {Object} ReplaceHashedFilenamesResult
* @property {boolean} transformed
* @typedef {Object} TransformedFileResult
* @property {NodeJS.ReadableStream} stream

@@ -49,7 +36,10 @@ * @property {string} [hash]

/**
* @param {ReplaceHashedFilenamesOptions} options
* @returns {Bluebird<ReplaceHashedFilenamesResult>}
* @param {Object} options
* @param {string} options.filePath
* @param {string} options.relativeFileName
* @param {S3SyncDigest} options.digest
* @returns {Promise.<TransformedFileResult>}
* @public
*/
function replaceHashedFilenames({ filePath, relativeFileName, digest }) {
async function replaceHashedFilenames({ filePath, relativeFileName, digest }) {
const relativeDirPath = path.dirname(relativeFileName)

@@ -63,10 +53,9 @@ const contentType = fileLib.getContentType(filePath)

}
return Bluebird.resolve(originalFileResult())
return originalFileResult()
/**
* @returns {ReplaceHashedFilenamesResult}
* @returns {TransformedFileResult}
*/
function originalFileResult() {
return {
transformed: false,
stream: fs.createReadStream(filePath)

@@ -79,7 +68,6 @@ }

* @param {string} recalculatedHash
* @returns {ReplaceHashedFilenamesResult}
* @returns {TransformedFileResult}
*/
function transformedFileResult(transformedData, recalculatedHash) {
return {
transformed: true,
stream: transformedDataToStream(transformedData),

@@ -92,16 +80,12 @@ hash: recalculatedHash

* @param {TransformFileCallback} transformCallback
* @returns {Bluebird<ReplaceHashedFilenamesResult>}
* @returns {Promise.<TransformedFileResult>}
*/
async function transformFile(transformCallback) {
  const originalData = await streamLib.fileToString(filePath)
  const transformedData = transformCallback(originalData)
  if (originalData === transformedData) {
    // No substitutions were made; stream the original file untouched.
    return originalFileResult()
  }
  // Contents changed, so the hash must be recomputed to match the new data.
  const recalculatedHash = await recalculateHash(transformedData)
  return transformedFileResult(transformedData, recalculatedHash)
}

@@ -117,3 +101,3 @@

return fileLib.isGzipped(filePath)
? transformedStream.pipe(zlib.createGzip(DEFAULT_GZIP_OPTIONS))
? streamLib.gzipStream(transformedStream)
: transformedStream

@@ -124,8 +108,8 @@ }

* @param {string} transformedData
* @returns {Bluebird<string>} recalculated hash of transformed file
* @returns {Promise.<string>} recalculated hash of transformed file
*/
function recalculateHash(transformedData) {
async function recalculateHash(transformedData) {
if (!fileLib.isGzipped(filePath)) {
// Fast-path to avoid unnecessary conversion to stream
return Bluebird.resolve(hashLib.hashFromString(transformedData))
return hashLib.hashFromString(transformedData)
}

@@ -132,0 +116,0 @@ const transformedStream = transformedDataToStream(transformedData)

{
"name": "s3-asset-uploader",
"version": "2.2.0",
"version": "2.3.0",
"description": "AWS S3 Asset Uploader",

@@ -5,0 +5,0 @@ "main": "index.js",

@@ -17,2 +17,3 @@ ## AWS S3 asset uploader

`gzipHeaders` | `S3UploadHeaders` | extra params used by `AWS.S3` upload method for GZIP files
`gzipHashedFileKeyRegexp` | `RegExp` | gzip the hashed files that match this pattern
`noUpload` | `boolean` | don't upload anything, just generate a digest mapping

@@ -19,0 +20,0 @@ `noUploadDigestFile` | `boolean` | don't upload the digest mapping file

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc