asar - npm package version comparison

Comparing version 0.14.6 to 1.0.0
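As the hunks below show, 1.0.0 converts the library from callback-style to promise-based APIs (callback modules are wrapped with pify), drops snapshot creation from the pack command along with the mksnapshot dependency, replaces new Buffer() with Buffer.alloc(), and raises the engines requirement from Node >= 4.6 to >= 6.0.

For callers of the library, the visible effect is that createPackage and createPackageWithOptions now return promises instead of accepting a callback. A minimal migration sketch, assuming a hypothetical app/ source directory and app.asar output:

const asar = require('asar')

// asar 0.14.6: completion and errors are reported through a callback
asar.createPackage('app', 'app.asar', function (error) {
  if (error) {
    console.error(error.stack)
    process.exit(1)
  }
  console.log('packed')
})

// asar 1.0.0: the same call returns a promise
asar.createPackage('app', 'app.asar')
  .then(() => console.log('packed'))
  .catch(error => {
    console.error(error.stack)
    process.exit(1)
  })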


bin/asar.js

@@ -6,69 +6,64 @@ #!/usr/bin/env node

program.version('v' + require('../package.json').version)
.description('Manipulate asar archive files')
.description('Manipulate asar archive files')
program.command('pack <dir> <output>')
.alias('p')
.description('create asar archive')
.option('--ordering <file path>', 'path to a text file for ordering contents')
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
.option('--snapshot', 'create snapshot')
.option('--exclude-hidden', 'exclude hidden files')
.option('--sv <version>', '(snapshot) version of Electron')
.option('--sa <arch>', '(snapshot) arch of Electron')
.option('--sb <builddir>', '(snapshot) where to put downloaded files')
.action(function (dir, output, options) {
options = {
unpack: options.unpack,
unpackDir: options.unpackDir,
snapshot: options.snapshot,
ordering: options.ordering,
version: options.sv,
arch: options.sa,
builddir: options.sb,
dot: !options.excludeHidden
}
asar.createPackageWithOptions(dir, output, options, function (error) {
if (error) {
console.error(error.stack)
process.exit(1)
}
})
})
.alias('p')
.description('create asar archive')
.option('--ordering <file path>', 'path to a text file for ordering contents')
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
.option('--exclude-hidden', 'exclude hidden files')
.action(function (dir, output, options) {
options = {
unpack: options.unpack,
unpackDir: options.unpackDir,
ordering: options.ordering,
version: options.sv,
arch: options.sa,
builddir: options.sb,
dot: !options.excludeHidden
}
asar.createPackageWithOptions(dir, output, options, function (error) {
if (error) {
console.error(error.stack)
process.exit(1)
}
})
})
program.command('list <archive>')
.alias('l')
.description('list files of asar archive')
.option('-i, --is-pack', 'each file in the asar is pack or unpack')
.action(function (archive, options) {
options = {
isPack: options.isPack
}
var files = asar.listPackage(archive, options)
for (var i in files) {
console.log(files[i])
}
// This is in order to disappear help
process.exit(0)
})
.alias('l')
.description('list files of asar archive')
.option('-i, --is-pack', 'each file in the asar is pack or unpack')
.action(function (archive, options) {
options = {
isPack: options.isPack
}
var files = asar.listPackage(archive, options)
for (var i in files) {
console.log(files[i])
}
// This is in order to disappear help
process.exit(0)
})
program.command('extract-file <archive> <filename>')
.alias('ef')
.description('extract one file from archive')
.action(function (archive, filename) {
require('fs').writeFileSync(require('path').basename(filename),
asar.extractFile(archive, filename))
})
.alias('ef')
.description('extract one file from archive')
.action(function (archive, filename) {
require('fs').writeFileSync(require('path').basename(filename),
asar.extractFile(archive, filename))
})
program.command('extract <archive> <dest>')
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
})
.alias('e')
.description('extract archive')
.action(function (archive, dest) {
asar.extractAll(archive, dest)
})
program.command('*')
.action(function (cmd) {
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
})
.action(function (cmd) {
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', cmd)
})

@@ -75,0 +70,0 @@ program.parse(process.argv)

lib/asar.js

'use strict'
const pify = require('pify')
const fs = process.versions.electron ? require('original-fs') : require('fs')
const path = require('path')
const minimatch = require('minimatch')
const mkdirp = require('mkdirp')
const mkdirp = pify(require('mkdirp'))

@@ -10,32 +13,26 @@ const Filesystem = require('./filesystem')

const crawlFilesystem = require('./crawlfs')
const createSnapshot = require('./snapshot')
// Return whether or not a directory should be excluded from packing due to
// "--unpack-dir" option
//
// @param {string} path - diretory path to check
// @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
// @param {array} unpackDirs - Array of directory paths previously marked as unpacked
//
const isUnpackDir = function (path, pattern, unpackDirs) {
if (path.indexOf(pattern) === 0 || minimatch(path, pattern)) {
if (unpackDirs.indexOf(path) === -1) {
unpackDirs.push(path)
/**
* Whether a directory should be excluded from packing due to the `--unpack-dir" option.
*
* @param {string} dirPath - directory path to check
* @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
* @param {array} unpackDirs - Array of directory paths previously marked as unpacked
*/
function isUnpackedDir (dirPath, pattern, unpackDirs) {
if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
if (!unpackDirs.includes(dirPath)) {
unpackDirs.push(dirPath)
}
return true
} else {
for (let i = 0; i < unpackDirs.length; i++) {
if (path.indexOf(unpackDirs[i]) === 0) {
return true
}
}
return false
return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
}
}
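A small sketch (not part of the package) of the rule isUnpackedDir implements above: a directory is treated as unpacked when it starts with the --unpack-dir value as a literal prefix or matches it as a minimatch glob, and once matched it is remembered in unpackDirs so its subdirectories are unpacked too. The paths and pattern below are hypothetical:

const minimatch = require('minimatch')

const pattern = 'node_modules/*/build'                          // e.g. passed via --unpack-dir
console.log(minimatch('node_modules/sqlite3/build', pattern))   // true: glob match
console.log('node_modules/sqlite3/build'.startsWith(pattern))   // false: not a literal prefix
console.log('native/addon'.startsWith('native'))                // true: literal prefix, kept for backward compatibility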
module.exports.createPackage = function (src, dest, callback) {
return module.exports.createPackageWithOptions(src, dest, {}, callback)
module.exports.createPackage = function (src, dest) {
return module.exports.createPackageWithOptions(src, dest, {})
}
module.exports.createPackageWithOptions = function (src, dest, options, callback) {
module.exports.createPackageWithOptions = function (src, dest, options) {
const globOptions = options.globOptions ? options.globOptions : {}

@@ -49,6 +46,4 @@ globOptions.dot = options.dot === undefined ? true : options.dot

return crawlFilesystem(pattern, globOptions, function (error, filenames, metadata) {
if (error) { return callback(error) }
module.exports.createPackageFromFiles(src, dest, filenames, metadata, options, callback)
})
return crawlFilesystem(pattern, globOptions)
.then(([filenames, metadata]) => module.exports.createPackageFromFiles(src, dest, filenames, metadata, options))
}

@@ -63,5 +58,4 @@

options: The options.
callback: The callback function. Accepts (err).
*/
module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options, callback) {
module.exports.createPackageFromFiles = function (src, dest, filenames, metadata, options) {
if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }

@@ -118,3 +112,3 @@ if (typeof options === 'undefined' || options === null) { options = {} }

const handleFile = function (filename, done) {
const handleFile = function (filename) {
let file = metadata[filename]

@@ -127,3 +121,3 @@ let type

if (stat.isSymbolicLink()) { type = 'link' }
file = {stat, type}
file = { stat, type }
metadata[filename] = file

@@ -135,5 +129,7 @@ }

case 'directory':
shouldUnpack = options.unpackDir
? isUnpackDir(path.relative(src, filename), options.unpackDir, unpackDirs)
: false
if (options.unpackDir) {
shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
} else {
shouldUnpack = false
}
filesystem.insertDirectory(filename, shouldUnpack)

@@ -144,11 +140,10 @@ break

if (options.unpack) {
shouldUnpack = minimatch(filename, options.unpack, {matchBase: true})
shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
}
if (!shouldUnpack && options.unpackDir) {
const dirName = path.relative(src, path.dirname(filename))
shouldUnpack = isUnpackDir(dirName, options.unpackDir, unpackDirs)
shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
}
files.push({filename: filename, unpack: shouldUnpack})
filesystem.insertFile(filename, shouldUnpack, file, options, done)
return
files.push({ filename: filename, unpack: shouldUnpack })
return filesystem.insertFile(filename, shouldUnpack, file, options)
case 'link':

@@ -158,17 +153,8 @@ filesystem.insertLink(filename, file.stat)

}
return process.nextTick(done)
return Promise.resolve()
}
const insertsDone = function () {
return mkdirp(path.dirname(dest), function (error) {
if (error) { return callback(error) }
return disk.writeFilesystem(dest, filesystem, files, metadata, function (error) {
if (error) { return callback(error) }
if (options.snapshot) {
return createSnapshot(src, dest, filenames, metadata, options, callback)
} else {
return callback(null)
}
})
})
return mkdirp(path.dirname(dest))
.then(() => disk.writeFilesystem(dest, filesystem, files, metadata))
}

@@ -181,5 +167,4 @@

return handleFile(name, function () {
return next(names.shift())
})
return handleFile(name)
.then(() => next(names.shift()))
}

@@ -215,3 +200,3 @@

return filenames.map((filename) => {
filename = filename.substr(1) // get rid of leading slash
filename = filename.substr(1) // get rid of leading slash
const destFilename = path.join(dest, filename)

@@ -218,0 +203,0 @@ const file = filesystem.getFile(filename, followLinks)

lib/crawlfs.js

'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const glob = require('glob')
module.exports = function (dir, options, callback) {
const metadata = {}
return glob(dir, options, function (error, filenames) {
if (error) { return callback(error) }
for (const filename of filenames) {
const stat = fs.lstatSync(filename)
const pify = require('pify')
const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const glob = pify(require('glob'))
function determineFileType (filename) {
return fs.lstat(filename)
.then(stat => {
if (stat.isFile()) {
metadata[filename] = {type: 'file', stat: stat}
return [filename, { type: 'file', stat: stat }]
} else if (stat.isDirectory()) {
metadata[filename] = {type: 'directory', stat: stat}
return [filename, { type: 'directory', stat: stat }]
} else if (stat.isSymbolicLink()) {
metadata[filename] = {type: 'link', stat: stat}
return [filename, { type: 'link', stat: stat }]
}
}
return callback(null, filenames, metadata)
})
return [filename, undefined]
})
}
module.exports = function (dir, options) {
const metadata = {}
return glob(dir, options)
.then(filenames => Promise.all(filenames.map(filename => determineFileType(filename))))
.then(results => {
const filenames = []
for (const [filename, type] of results) {
filenames.push(filename)
if (type) {
metadata[filename] = type
}
}
return [filenames, metadata]
})
}
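The recurring pattern in this release, visible in the crawl module above and in the files that follow, is wrapping callback-style modules with pify so their results can be chained as promises. A minimal sketch of that pattern, not taken from the package, using a hypothetical *.txt glob:

const pify = require('pify')
const fs = pify(require('fs'))        // every fs method now returns a promise
const glob = pify(require('glob'))    // glob(pattern, options) now returns a promise

glob('*.txt', {})
  .then(filenames => Promise.all(filenames.map(filename => fs.lstat(filename))))
  .then(stats => stats.forEach(stat => console.log(stat.isFile())))
  .catch(error => console.error(error.message))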

lib/disk.js

'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const pify = require('pify')
const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const path = require('path')
const mkdirp = require('mkdirp')
const mkdirp = pify(require('mkdirp'))
const pickle = require('chromium-pickle-js')

@@ -10,58 +13,47 @@

const copyFileToSync = function (dest, src, filename) {
function copyFile (dest, src, filename) {
const srcFile = path.join(src, filename)
const targetFile = path.join(dest, filename)
const content = fs.readFileSync(srcFile)
const stats = fs.statSync(srcFile)
mkdirp.sync(path.dirname(targetFile))
return fs.writeFileSync(targetFile, content, {mode: stats.mode})
return Promise.all([fs.readFile(srcFile), fs.stat(srcFile), mkdirp(path.dirname(targetFile))])
.then(([content, stats, _]) => fs.writeFile(targetFile, content, { mode: stats.mode }))
}
const writeFileListToStream = function (dest, filesystem, out, list, metadata, callback) {
for (let i = 0; i < list.length; i++) {
const file = list[i]
if (file.unpack) {
// the file should not be packed into archive.
function streamTransformedFile (originalFilename, outStream, transformed) {
return new Promise((resolve, reject) => {
const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
stream.pipe(outStream, { end: false })
stream.on('error', reject)
stream.on('end', () => resolve())
})
}
const writeFileListToStream = function (dest, filesystem, out, list, metadata) {
let promise = Promise.resolve()
for (const file of list) {
if (file.unpack) { // the file should not be packed into archive
const filename = path.relative(filesystem.src, file.filename)
try {
copyFileToSync(`${dest}.unpacked`, filesystem.src, filename)
} catch (error) {
return callback(error)
}
promise = promise.then(() => copyFile(`${dest}.unpacked`, filesystem.src, filename))
} else {
const tr = metadata[file.filename].transformed
const stream = fs.createReadStream((tr ? tr.path : file.filename))
stream.pipe(out, {end: false})
stream.on('error', callback)
return stream.on('end', function () {
return writeFileListToStream(dest, filesystem, out, list.slice(i + 1), metadata, callback)
})
promise = promise.then(() => streamTransformedFile(file.filename, out, metadata[file.filename].transformed))
}
}
out.end()
return callback(null)
return promise.then(() => out.end())
}
module.exports.writeFilesystem = function (dest, filesystem, files, metadata, callback) {
let sizeBuf
let headerBuf
try {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
headerBuf = headerPickle.toBuffer()
module.exports.writeFilesystem = function (dest, filesystem, files, metadata) {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
sizeBuf = sizePickle.toBuffer()
} catch (error) {
return callback(error)
}
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const out = fs.createWriteStream(dest)
out.on('error', callback)
out.write(sizeBuf)
return out.write(headerBuf, function () {
return writeFileListToStream(dest, filesystem, out, files, metadata, callback)
})
return new Promise((resolve, reject) => {
out.on('error', reject)
out.write(sizeBuf)
return out.write(headerBuf, () => resolve())
}).then(() => writeFileListToStream(dest, filesystem, out, files, metadata))
}
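A minimal sketch (not from the package) of the chaining idiom writeFileListToStream uses above: building the promise chain inside a loop makes each entry wait for the previous one, so files are still streamed into the archive strictly in order. runInOrder and handleItem are hypothetical names:

function runInOrder (items, handleItem) {
  let promise = Promise.resolve()
  for (const item of items) {
    // each step starts only after the previous step's promise has resolved
    promise = promise.then(() => handleItem(item))
  }
  return promise
}

// runInOrder(['a', 'b', 'c'], item => Promise.resolve(console.log(item)))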

@@ -74,3 +66,3 @@

try {
const sizeBuf = new Buffer(8)
const sizeBuf = Buffer.alloc(8)
if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {

@@ -82,3 +74,3 @@ throw new Error('Unable to read header size')

size = sizePickle.createIterator().readUInt32()
headerBuf = new Buffer(size)
headerBuf = Buffer.alloc(size)
if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {

@@ -93,3 +85,3 @@ throw new Error('Unable to read header')

const header = headerPickle.createIterator().readString()
return {header: JSON.parse(header), headerSize: size}
return { header: JSON.parse(header), headerSize: size }
}

@@ -121,3 +113,3 @@

module.exports.readFileSync = function (filesystem, filename, info) {
let buffer = new Buffer(info.size)
let buffer = Buffer.alloc(info.size)
if (info.size <= 0) { return buffer }

@@ -124,0 +116,0 @@ if (info.unpacked) {

lib/filesystem.js

'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const pify = require('pify')
const fs = pify(process.versions.electron ? require('original-fs') : require('fs'))
const path = require('path')
const tmp = require('tmp')
const tmp = require('tmp-promise')
const UINT64 = require('cuint').UINT64
const UINT32_MAX = 4294967295
class Filesystem {
constructor (src) {
this.src = path.resolve(src)
this.header = {files: {}}
this.header = { files: {} }
this.offset = UINT64(0)

@@ -48,3 +53,3 @@ }

insertFile (p, shouldUnpack, file, options, callback) {
insertFile (p, shouldUnpack, file, options) {
const dirNode = this.searchNodeFromPath(path.dirname(p))

@@ -55,12 +60,16 @@ const node = this.searchNodeFromPath(p)

node.unpacked = true
process.nextTick(callback)
return
return Promise.resolve()
}
const handler = () => {
const handler = (resolve, reject) => {
const size = file.transformed ? file.transformed.stat.size : file.stat.size
// JavaScript can not precisely present integers >= UINT32_MAX.
if (size > 4294967295) {
throw new Error(`${p}: file size can not be larger than 4.2GB`)
if (size > UINT32_MAX) {
const error = new Error(`${p}: file size can not be larger than 4.2GB`)
if (reject) {
return reject(error)
} else {
throw error
}
}

@@ -75,23 +84,28 @@

return callback()
return resolve ? resolve() : Promise.resolve()
}
const tr = options.transform && options.transform(p)
if (tr) {
return tmp.file(function (err, path) {
if (err) { return handler() }
const out = fs.createWriteStream(path)
const stream = fs.createReadStream(p)
const transformed = options.transform && options.transform(p)
if (transformed) {
return tmp.file()
.then(tmpfile => {
return new Promise((resolve, reject) => {
const out = fs.createWriteStream(tmpfile.path)
const stream = fs.createReadStream(p)
stream.pipe(tr).pipe(out)
return out.on('close', function () {
file.transformed = {
path,
stat: fs.lstatSync(path)
}
return handler()
stream.pipe(transformed).pipe(out)
return out.on('close', () => {
return fs.lstat(tmpfile.path)
.then(stat => {
file.transformed = {
path: tmpfile.path,
stat
}
return handler(resolve, reject)
})
})
})
})
})
} else {
return process.nextTick(handler)
return handler()
}

@@ -98,0 +112,0 @@ }
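The insertFile changes above keep the transform option working inside the new promise flow: when options.transform(filename) returns a stream, the file's contents are piped through it into a temporary file before being packed. A minimal usage sketch, assuming a hypothetical app/ directory and app.asar output, with a purely illustrative uppercasing transform:

const asar = require('asar')
const { Transform } = require('stream')

asar.createPackageWithOptions('app', 'app.asar', {
  transform (filename) {
    if (!filename.endsWith('.txt')) return   // falsy: pack the file unchanged
    return new Transform({
      transform (chunk, encoding, callback) {
        callback(null, chunk.toString().toUpperCase())
      }
    })
  }
}).then(() => console.log('packed'))
  .catch(error => console.error(error.stack))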

package.json

@@ -5,3 +5,3 @@ {

"description": "Creating Electron app packages",
"version": "0.14.6",
"version": "1.0.0",
"bin": {

@@ -11,3 +11,3 @@ "asar": "./bin/asar.js"

"engines": {
"node": ">=4.6"
"node": ">=6.0"
},

@@ -34,19 +34,20 @@ "license": "MIT",

"chromium-pickle-js": "^0.2.0",
"commander": "^2.9.0",
"cuint": "^0.2.1",
"glob": "^6.0.4",
"minimatch": "^3.0.3",
"mkdirp": "^0.5.0",
"mksnapshot": "^0.3.4",
"tmp": "0.0.28"
"commander": "^2.19.0",
"cuint": "^0.2.2",
"glob": "^7.1.3",
"minimatch": "^3.0.4",
"mkdirp": "^0.5.1",
"pify": "^4.0.1",
"tmp-promise": "^1.0.5"
},
"devDependencies": {
"electron": "^1.6.2",
"electron": "^4.0.5",
"electron-mocha": "^6.0.4",
"lodash": "^4.2.1",
"mocha": "^5.2.0",
"rimraf": "^2.5.1",
"standard": "^8.6.0",
"xvfb-maybe": "^0.1.3"
"lodash": "^4.17.11",
"mocha": "^6.0.0",
"mz": "^2.7.0",
"rimraf": "^2.6.3",
"standard": "^12.0.1",
"xvfb-maybe": "^0.2.1"
}
}