Socket
Socket
Sign inDemoInstall

@electron/asar

Package Overview
Dependencies
Maintainers
0
Versions
14
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@electron/asar - npm Package Compare versions

Comparing version 3.2.10 to 3.2.11

lib/asar.d.ts

414

lib/asar.js

@@ -1,229 +0,237 @@

'use strict'
const fs = require('./wrapped-fs')
const path = require('path')
const minimatch = require('minimatch')
const Filesystem = require('./filesystem')
const disk = require('./disk')
const crawlFilesystem = require('./crawlfs')
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createPackage = createPackage;
exports.createPackageWithOptions = createPackageWithOptions;
exports.createPackageFromFiles = createPackageFromFiles;
exports.statFile = statFile;
exports.getRawHeader = getRawHeader;
exports.listPackage = listPackage;
exports.extractFile = extractFile;
exports.extractAll = extractAll;
exports.uncache = uncache;
exports.uncacheAll = uncacheAll;
const path = require("path");
const minimatch = require("minimatch");
const wrapped_fs_1 = require("./wrapped-fs");
const filesystem_1 = require("./filesystem");
const disk = require("./disk");
const crawlfs_1 = require("./crawlfs");
/**
* Whether a directory should be excluded from packing due to the `--unpack-dir` option.
*
* @param {string} dirPath - directory path to check
* @param {string} pattern - literal prefix [for backward compatibility] or glob pattern
* @param {array} unpackDirs - Array of directory paths previously marked as unpacked
* @param dirPath - directory path to check
* @param pattern - literal prefix [for backward compatibility] or glob pattern
* @param unpackDirs - Array of directory paths previously marked as unpacked
*/
function isUnpackedDir (dirPath, pattern, unpackDirs) {
if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
if (!unpackDirs.includes(dirPath)) {
unpackDirs.push(dirPath)
function isUnpackedDir(dirPath, pattern, unpackDirs) {
if (dirPath.startsWith(pattern) || minimatch(dirPath, pattern)) {
if (!unpackDirs.includes(dirPath)) {
unpackDirs.push(dirPath);
}
return true;
}
return true
} else {
return unpackDirs.some(unpackDir => dirPath.startsWith(unpackDir))
}
else {
return unpackDirs.some((unpackDir) => dirPath.startsWith(unpackDir));
}
}
module.exports.createPackage = async function (src, dest) {
return module.exports.createPackageWithOptions(src, dest, {})
/**
 * Create an ASAR archive from a directory, using default options.
 *
 * @param src - directory to package
 * @param dest - path of the output archive
 */
async function createPackage(src, dest) {
    return createPackageWithOptions(src, dest, {});
}
module.exports.createPackageWithOptions = async function (src, dest, options) {
const globOptions = options.globOptions ? options.globOptions : {}
globOptions.dot = options.dot === undefined ? true : options.dot
const pattern = src + (options.pattern ? options.pattern : '/**/*')
const [filenames, metadata] = await crawlFilesystem(pattern, globOptions)
return module.exports.createPackageFromFiles(src, dest, filenames, metadata, options)
/**
 * Create an ASAR archive from a directory, honoring the supplied options.
 *
 * @param src - directory to package
 * @param dest - path of the output archive
 * @param options - packaging options (globOptions, dot, pattern, ...)
 */
async function createPackageWithOptions(src, dest, options) {
    // Reuse the caller's glob options when provided, otherwise start empty.
    const globOptions = options.globOptions || {};
    // Dotfiles are included unless the caller explicitly opts out.
    globOptions.dot = options.dot === undefined ? true : options.dot;
    // The default pattern walks every entry under src.
    const pattern = src + (options.pattern || '/**/*');
    const [filenames, metadata] = await (0, crawlfs_1.crawl)(pattern, globOptions);
    return createPackageFromFiles(src, dest, filenames, metadata, options);
}
/**
* Create an ASAR archive from a list of filenames.
*
* @param {string} src: Base path. All files are relative to this.
* @param {string} dest: Archive filename (& path).
* @param {array} filenames: List of filenames relative to src.
* @param {object} metadata: Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
* @param {object} options: Options passed to `createPackageWithOptions`.
*/
module.exports.createPackageFromFiles = async function (src, dest, filenames, metadata, options) {
if (typeof metadata === 'undefined' || metadata === null) { metadata = {} }
if (typeof options === 'undefined' || options === null) { options = {} }
src = path.normalize(src)
dest = path.normalize(dest)
filenames = filenames.map(function (filename) { return path.normalize(filename) })
const filesystem = new Filesystem(src)
const files = []
const unpackDirs = []
let filenamesSorted = []
if (options.ordering) {
const orderingFiles = (await fs.readFile(options.ordering)).toString().split('\n').map(line => {
if (line.includes(':')) { line = line.split(':').pop() }
line = line.trim()
if (line.startsWith('/')) { line = line.slice(1) }
return line
})
const ordering = []
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep)
let str = src
for (const pathComponent of pathComponents) {
str = path.join(str, pathComponent)
ordering.push(str)
}
* @param src - Base path. All files are relative to this.
* @param dest - Archive filename (& path).
* @param filenames - List of filenames relative to src.
* @param [metadata] - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
* @param [options] - Options passed to `createPackageWithOptions`.
*/
async function createPackageFromFiles(src, dest, filenames, metadata = {}, options = {}) {
src = path.normalize(src);
dest = path.normalize(dest);
filenames = filenames.map(function (filename) {
return path.normalize(filename);
});
const filesystem = new filesystem_1.Filesystem(src);
const files = [];
const unpackDirs = [];
let filenamesSorted = [];
if (options.ordering) {
const orderingFiles = (await wrapped_fs_1.default.readFile(options.ordering))
.toString()
.split('\n')
.map((line) => {
if (line.includes(':')) {
line = line.split(':').pop();
}
line = line.trim();
if (line.startsWith('/')) {
line = line.slice(1);
}
return line;
});
const ordering = [];
for (const file of orderingFiles) {
const pathComponents = file.split(path.sep);
let str = src;
for (const pathComponent of pathComponents) {
str = path.join(str, pathComponent);
ordering.push(str);
}
}
let missing = 0;
const total = filenames.length;
for (const file of ordering) {
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
filenamesSorted.push(file);
}
}
for (const file of filenames) {
if (!filenamesSorted.includes(file)) {
filenamesSorted.push(file);
missing += 1;
}
}
console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`);
}
let missing = 0
const total = filenames.length
for (const file of ordering) {
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
filenamesSorted.push(file)
}
else {
filenamesSorted = filenames;
}
for (const file of filenames) {
if (!filenamesSorted.includes(file)) {
filenamesSorted.push(file)
missing += 1
}
}
console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`)
} else {
filenamesSorted = filenames
}
const handleFile = async function (filename) {
if (!metadata[filename]) {
metadata[filename] = await crawlFilesystem.determineFileType(filename)
}
const file = metadata[filename]
let shouldUnpack
switch (file.type) {
case 'directory':
if (options.unpackDir) {
shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs)
} else {
shouldUnpack = false
const handleFile = async function (filename) {
if (!metadata[filename]) {
const fileType = await (0, crawlfs_1.determineFileType)(filename);
if (!fileType) {
throw new Error('Unknown file type for file: ' + filename);
}
metadata[filename] = fileType;
}
filesystem.insertDirectory(filename, shouldUnpack)
break
case 'file':
shouldUnpack = false
if (options.unpack) {
shouldUnpack = minimatch(filename, options.unpack, { matchBase: true })
const file = metadata[filename];
let shouldUnpack;
switch (file.type) {
case 'directory':
if (options.unpackDir) {
shouldUnpack = isUnpackedDir(path.relative(src, filename), options.unpackDir, unpackDirs);
}
else {
shouldUnpack = false;
}
filesystem.insertDirectory(filename, shouldUnpack);
break;
case 'file':
shouldUnpack = false;
if (options.unpack) {
shouldUnpack = minimatch(filename, options.unpack, { matchBase: true });
}
if (!shouldUnpack && options.unpackDir) {
const dirName = path.relative(src, path.dirname(filename));
shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs);
}
files.push({ filename: filename, unpack: shouldUnpack });
return filesystem.insertFile(filename, shouldUnpack, file, options);
case 'link':
filesystem.insertLink(filename);
break;
}
if (!shouldUnpack && options.unpackDir) {
const dirName = path.relative(src, path.dirname(filename))
shouldUnpack = isUnpackedDir(dirName, options.unpackDir, unpackDirs)
return Promise.resolve();
};
const insertsDone = async function () {
await wrapped_fs_1.default.mkdirp(path.dirname(dest));
return disk.writeFilesystem(dest, filesystem, files, metadata);
};
const names = filenamesSorted.slice();
const next = async function (name) {
if (!name) {
return insertsDone();
}
files.push({ filename: filename, unpack: shouldUnpack })
return filesystem.insertFile(filename, shouldUnpack, file, options)
case 'link':
filesystem.insertLink(filename)
break
}
return Promise.resolve()
}
const insertsDone = async function () {
await fs.mkdirp(path.dirname(dest))
return disk.writeFilesystem(dest, filesystem, files, metadata)
}
const names = filenamesSorted.slice()
const next = async function (name) {
if (!name) { return insertsDone() }
await handleFile(name)
return next(names.shift())
}
return next(names.shift())
await handleFile(name);
return next(names.shift());
};
return next(names.shift());
}
module.exports.statFile = function (archive, filename, followLinks) {
const filesystem = disk.readFilesystemSync(archive)
return filesystem.getFile(filename, followLinks)
/**
 * Look up a file's metadata entry inside an archive without extracting it.
 *
 * @param archivePath - path to the ASAR archive
 * @param filename - path of the entry inside the archive
 * @param followLinks - whether symlinks are resolved (defaults to true)
 */
function statFile(archivePath, filename, followLinks = true) {
    return disk.readFilesystemSync(archivePath).getFile(filename, followLinks);
}
module.exports.getRawHeader = function (archive) {
return disk.readArchiveHeaderSync(archive)
/**
 * Read and parse an archive's header without touching file contents.
 *
 * @param archivePath - path to the ASAR archive
 * @returns the result of disk.readArchiveHeaderSync for that archive
 */
function getRawHeader(archivePath) {
    return disk.readArchiveHeaderSync(archivePath);
}
module.exports.listPackage = function (archive, options) {
return disk.readFilesystemSync(archive).listFiles(options)
/**
 * List every entry contained in an archive.
 *
 * @param archivePath - path to the ASAR archive
 * @param options - listing options forwarded to Filesystem.listFiles
 */
function listPackage(archivePath, options) {
    const filesystem = disk.readFilesystemSync(archivePath);
    return filesystem.listFiles(options);
}
module.exports.extractFile = function (archive, filename) {
const filesystem = disk.readFilesystemSync(archive)
return disk.readFileSync(filesystem, filename, filesystem.getFile(filename))
/**
 * Extract a single file's contents from an archive.
 *
 * @param archivePath - path to the ASAR archive
 * @param filename - path of the entry inside the archive
 * @param followLinks - whether symlinks are resolved (defaults to true)
 * @throws if the entry is a directory or an unresolved link, not a file
 * @returns the contents as returned by disk.readFileSync
 */
function extractFile(archivePath, filename, followLinks = true) {
    const filesystem = disk.readFilesystemSync(archivePath);
    const fileInfo = filesystem.getFile(filename, followLinks);
    // Directory nodes carry `files`, link nodes carry `link`; only a plain
    // file node can be read.
    if ('link' in fileInfo || 'files' in fileInfo) {
        throw new Error('Expected to find file at: ' + filename + ' but found a directory or link');
    }
    return disk.readFileSync(filesystem, filename, fileInfo);
}
module.exports.extractAll = function (archive, dest) {
const filesystem = disk.readFilesystemSync(archive)
const filenames = filesystem.listFiles()
// under windows just extract links as regular files
const followLinks = process.platform === 'win32'
// create destination directory
fs.mkdirpSync(dest)
const extractionErrors = []
for (const fullPath of filenames) {
// Remove leading slash
const filename = fullPath.substr(1)
const destFilename = path.join(dest, filename)
const file = filesystem.getFile(filename, followLinks)
if (file.files) {
// it's a directory, create it and continue with the next entry
fs.mkdirpSync(destFilename)
} else if (file.link) {
// it's a symlink, create a symlink
const linkSrcPath = path.dirname(path.join(dest, file.link))
const linkDestPath = path.dirname(destFilename)
const relativePath = path.relative(linkDestPath, linkSrcPath)
// try to delete output file, because we can't overwrite a link
try {
fs.unlinkSync(destFilename)
} catch {}
const linkTo = path.join(relativePath, path.basename(file.link))
fs.symlinkSync(linkTo, destFilename)
} else {
// it's a file, try to extract it
try {
const content = disk.readFileSync(filesystem, filename, file)
fs.writeFileSync(destFilename, content)
if (file.executable) {
fs.chmodSync(destFilename, '755')
function extractAll(archivePath, dest) {
const filesystem = disk.readFilesystemSync(archivePath);
const filenames = filesystem.listFiles();
// under windows just extract links as regular files
const followLinks = process.platform === 'win32';
// create destination directory
wrapped_fs_1.default.mkdirpSync(dest);
const extractionErrors = [];
for (const fullPath of filenames) {
// Remove leading slash
const filename = fullPath.substr(1);
const destFilename = path.join(dest, filename);
const file = filesystem.getFile(filename, followLinks);
if (path.relative(dest, destFilename).startsWith('..')) {
throw new Error(`${fullPath}: file "${destFilename}" writes out of the package`);
}
} catch (e) {
extractionErrors.push(e)
}
if ('files' in file) {
// it's a directory, create it and continue with the next entry
wrapped_fs_1.default.mkdirpSync(destFilename);
}
else if ('link' in file) {
// it's a symlink, create a symlink
const linkSrcPath = path.dirname(path.join(dest, file.link));
const linkDestPath = path.dirname(destFilename);
const relativePath = path.relative(linkDestPath, linkSrcPath);
// try to delete output file, because we can't overwrite a link
try {
wrapped_fs_1.default.unlinkSync(destFilename);
}
catch (_a) { }
const linkTo = path.join(relativePath, path.basename(file.link));
if (path.relative(dest, linkSrcPath).startsWith('..')) {
throw new Error(`${fullPath}: file "${file.link}" links out of the package to "${linkSrcPath}"`);
}
wrapped_fs_1.default.symlinkSync(linkTo, destFilename);
}
else {
// it's a file, try to extract it
try {
const content = disk.readFileSync(filesystem, filename, file);
wrapped_fs_1.default.writeFileSync(destFilename, content);
if (file.executable) {
wrapped_fs_1.default.chmodSync(destFilename, '755');
}
}
catch (e) {
extractionErrors.push(e);
}
}
}
}
if (extractionErrors.length) {
throw new Error(
'Unable to extract some files:\n\n' +
extractionErrors.map(error => error.stack).join('\n\n'))
}
if (extractionErrors.length) {
throw new Error('Unable to extract some files:\n\n' +
extractionErrors.map((error) => error.stack).join('\n\n'));
}
}
module.exports.uncache = function (archive) {
return disk.uncacheFilesystem(archive)
/**
 * Evict a single archive from the filesystem cache.
 *
 * @param archivePath - path to the ASAR archive
 * @returns the result of disk.uncacheFilesystem (whether an entry was evicted)
 */
function uncache(archivePath) {
    return disk.uncacheFilesystem(archivePath);
}
module.exports.uncacheAll = function () {
disk.uncacheAll()
/** Clear the entire archive filesystem cache. */
function uncacheAll() {
    disk.uncacheAll();
}
//# sourceMappingURL=asar.js.map

@@ -1,41 +0,48 @@

'use strict'
const { promisify } = require('util')
const fs = require('./wrapped-fs')
const glob = promisify(require('glob'))
async function determineFileType (filename) {
const stat = await fs.lstat(filename)
if (stat.isFile()) {
return { type: 'file', stat }
} else if (stat.isDirectory()) {
return { type: 'directory', stat }
} else if (stat.isSymbolicLink()) {
return { type: 'link', stat }
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.determineFileType = determineFileType;
exports.crawl = crawl;
const util_1 = require("util");
const glob_1 = require("glob");
const wrapped_fs_1 = require("./wrapped-fs");
const glob = (0, util_1.promisify)(glob_1.glob);
/**
 * Classify a path as file, directory, or symlink via lstat.
 *
 * @param filename - path to inspect (symlinks are not followed)
 * @returns a { type, stat } record for known kinds, or null otherwise
 */
async function determineFileType(filename) {
    const stat = await wrapped_fs_1.default.lstat(filename);
    if (stat.isFile())
        return { type: 'file', stat };
    if (stat.isDirectory())
        return { type: 'directory', stat };
    if (stat.isSymbolicLink())
        return { type: 'link', stat };
    return null;
}
module.exports = async function (dir, options) {
const metadata = {}
const crawled = await glob(dir, options)
const results = await Promise.all(crawled.map(async filename => [filename, await determineFileType(filename)]))
const links = []
const filenames = results.map(([filename, type]) => {
if (type) {
metadata[filename] = type
if (type.type === 'link') links.push(filename)
}
return filename
}).filter((filename) => {
// Newer glob can return files inside symlinked directories, to avoid
// those appearing in archives we need to manually exclude them here
const exactLinkIndex = links.findIndex(link => filename === link)
return links.every((link, index) => {
if (index === exactLinkIndex) return true
return !filename.startsWith(link)
async function crawl(dir, options) {
const metadata = {};
const crawled = await glob(dir, options);
const results = await Promise.all(crawled.map(async (filename) => [filename, await determineFileType(filename)]));
const links = [];
const filenames = results
.map(([filename, type]) => {
if (type) {
metadata[filename] = type;
if (type.type === 'link')
links.push(filename);
}
return filename;
})
})
return [filenames, metadata]
.filter((filename) => {
// Newer glob can return files inside symlinked directories, to avoid
// those appearing in archives we need to manually exclude them here
const exactLinkIndex = links.findIndex((link) => filename === link);
return links.every((link, index) => {
if (index === exactLinkIndex)
return true;
return !filename.startsWith(link);
});
});
return [filenames, metadata];
}
module.exports.determineFileType = determineFileType
//# sourceMappingURL=crawlfs.js.map

@@ -1,123 +0,125 @@

'use strict'
const fs = require('./wrapped-fs')
const path = require('path')
const pickle = require('./pickle')
const Filesystem = require('./filesystem')
let filesystemCache = {}
async function copyFile (dest, src, filename) {
const srcFile = path.join(src, filename)
const targetFile = path.join(dest, filename)
const [content, stats] = await Promise.all([fs.readFile(srcFile), fs.stat(srcFile), fs.mkdirp(path.dirname(targetFile))])
return fs.writeFile(targetFile, content, { mode: stats.mode })
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeFilesystem = writeFilesystem;
exports.readArchiveHeaderSync = readArchiveHeaderSync;
exports.readFilesystemSync = readFilesystemSync;
exports.uncacheFilesystem = uncacheFilesystem;
exports.uncacheAll = uncacheAll;
exports.readFileSync = readFileSync;
const path = require("path");
const wrapped_fs_1 = require("./wrapped-fs");
const pickle_1 = require("./pickle");
const filesystem_1 = require("./filesystem");
let filesystemCache = Object.create(null);
/**
 * Copy one file from src/filename to dest/filename, preserving its mode.
 *
 * @param dest - destination base directory
 * @param src - source base directory
 * @param filename - path relative to both bases
 */
async function copyFile(dest, src, filename) {
    const srcFile = path.join(src, filename);
    const targetFile = path.join(dest, filename);
    // Read the contents, stat for the mode, and create the target directory
    // in parallel; only the first two results are consumed.
    const [content, stats] = await Promise.all([
        wrapped_fs_1.default.readFile(srcFile),
        wrapped_fs_1.default.stat(srcFile),
        wrapped_fs_1.default.mkdirp(path.dirname(targetFile)),
    ]);
    return wrapped_fs_1.default.writeFile(targetFile, content, { mode: stats.mode });
}
async function streamTransformedFile (originalFilename, outStream, transformed) {
return new Promise((resolve, reject) => {
const stream = fs.createReadStream(transformed ? transformed.path : originalFilename)
stream.pipe(outStream, { end: false })
stream.on('error', reject)
stream.on('end', () => resolve())
})
/**
 * Pipe a file (or its transformed temp copy) into an open output stream.
 *
 * @param originalFilename - source file to stream when no transform exists
 * @param outStream - destination stream; left open after this file ends
 * @param transformed - optional { path } pointing at the transformed copy
 */
async function streamTransformedFile(originalFilename, outStream, transformed) {
    return new Promise((resolve, reject) => {
        const stream = wrapped_fs_1.default.createReadStream(transformed ? transformed.path : originalFilename);
        // end: false keeps outStream open so later files can be appended.
        stream.pipe(outStream, { end: false });
        stream.on('error', reject);
        stream.on('end', () => resolve());
    });
}
const writeFileListToStream = async function (dest, filesystem, out, list, metadata) {
for (const file of list) {
if (file.unpack) { // the file should not be packed into archive
const filename = path.relative(filesystem.src, file.filename)
await copyFile(`${dest}.unpacked`, filesystem.src, filename)
} else {
await streamTransformedFile(file.filename, out, metadata[file.filename].transformed)
const writeFileListToStream = async function (dest, filesystem, out, fileList, metadata) {
for (const file of fileList) {
if (file.unpack) {
// the file should not be packed into archive
const filename = path.relative(filesystem.getRootPath(), file.filename);
await copyFile(`${dest}.unpacked`, filesystem.getRootPath(), filename);
}
else {
await streamTransformedFile(file.filename, out, metadata[file.filename].transformed);
}
}
}
return out.end()
return out.end();
};
/**
 * Serialize the in-memory filesystem into an archive on disk.
 *
 * Writes the pickled header size, then the pickled JSON header, then
 * delegates to writeFileListToStream to append each packed file's bytes.
 *
 * @param dest - path of the archive to write
 * @param filesystem - populated Filesystem instance
 * @param fileList - entries with { filename, unpack } flags
 * @param metadata - per-file metadata (including transform results)
 */
async function writeFilesystem(dest, filesystem, fileList, metadata) {
    const headerPickle = pickle_1.Pickle.createEmpty();
    headerPickle.writeString(JSON.stringify(filesystem.getHeader()));
    const headerBuf = headerPickle.toBuffer();
    const sizePickle = pickle_1.Pickle.createEmpty();
    sizePickle.writeUInt32(headerBuf.length);
    const sizeBuf = sizePickle.toBuffer();
    const out = wrapped_fs_1.default.createWriteStream(dest);
    // Wait until the header bytes are flushed before streaming file contents.
    await new Promise((resolve, reject) => {
        out.on('error', reject);
        out.write(sizeBuf);
        return out.write(headerBuf, () => resolve());
    });
    return writeFileListToStream(dest, filesystem, out, fileList, metadata);
}
module.exports.writeFilesystem = async function (dest, filesystem, files, metadata) {
const headerPickle = pickle.createEmpty()
headerPickle.writeString(JSON.stringify(filesystem.header))
const headerBuf = headerPickle.toBuffer()
const sizePickle = pickle.createEmpty()
sizePickle.writeUInt32(headerBuf.length)
const sizeBuf = sizePickle.toBuffer()
const out = fs.createWriteStream(dest)
await new Promise((resolve, reject) => {
out.on('error', reject)
out.write(sizeBuf)
return out.write(headerBuf, () => resolve())
})
return writeFileListToStream(dest, filesystem, out, files, metadata)
}
module.exports.readArchiveHeaderSync = function (archive) {
const fd = fs.openSync(archive, 'r')
let size
let headerBuf
try {
const sizeBuf = Buffer.alloc(8)
if (fs.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
throw new Error('Unable to read header size')
function readArchiveHeaderSync(archivePath) {
const fd = wrapped_fs_1.default.openSync(archivePath, 'r');
let size;
let headerBuf;
try {
const sizeBuf = Buffer.alloc(8);
if (wrapped_fs_1.default.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
throw new Error('Unable to read header size');
}
const sizePickle = pickle_1.Pickle.createFromBuffer(sizeBuf);
size = sizePickle.createIterator().readUInt32();
headerBuf = Buffer.alloc(size);
if (wrapped_fs_1.default.readSync(fd, headerBuf, 0, size, null) !== size) {
throw new Error('Unable to read header');
}
}
const sizePickle = pickle.createFromBuffer(sizeBuf)
size = sizePickle.createIterator().readUInt32()
headerBuf = Buffer.alloc(size)
if (fs.readSync(fd, headerBuf, 0, size, null) !== size) {
throw new Error('Unable to read header')
finally {
wrapped_fs_1.default.closeSync(fd);
}
} finally {
fs.closeSync(fd)
}
const headerPickle = pickle.createFromBuffer(headerBuf)
const header = headerPickle.createIterator().readString()
return { headerString: header, header: JSON.parse(header), headerSize: size }
const headerPickle = pickle_1.Pickle.createFromBuffer(headerBuf);
const header = headerPickle.createIterator().readString();
return { headerString: header, header: JSON.parse(header), headerSize: size };
}
module.exports.readFilesystemSync = function (archive) {
if (!filesystemCache[archive]) {
const header = this.readArchiveHeaderSync(archive)
const filesystem = new Filesystem(archive)
filesystem.header = header.header
filesystem.headerSize = header.headerSize
filesystemCache[archive] = filesystem
}
return filesystemCache[archive]
/**
 * Read an archive's header and return a (cached) Filesystem view of it.
 *
 * @param archivePath - path to the ASAR archive; also used as the cache key
 */
function readFilesystemSync(archivePath) {
    if (!filesystemCache[archivePath]) {
        const header = readArchiveHeaderSync(archivePath);
        const filesystem = new filesystem_1.Filesystem(archivePath);
        filesystem.setHeader(header.header, header.headerSize);
        filesystemCache[archivePath] = filesystem;
    }
    return filesystemCache[archivePath];
}
module.exports.uncacheFilesystem = function (archive) {
if (filesystemCache[archive]) {
filesystemCache[archive] = undefined
return true
}
return false
/**
 * Drop one archive from the filesystem cache.
 *
 * @param archivePath - cache key (path of the archive)
 * @returns true when an entry was evicted, false when nothing was cached
 */
function uncacheFilesystem(archivePath) {
    // Nothing cached under this path — report that no eviction happened.
    if (!filesystemCache[archivePath]) {
        return false;
    }
    filesystemCache[archivePath] = undefined;
    return true;
}
module.exports.uncacheAll = function () {
filesystemCache = {}
/** Clear the whole filesystem cache. */
function uncacheAll() {
    // Match the cache's initial shape (a null-prototype object, see the
    // module-level initializer): a plain `{}` would reintroduce
    // Object.prototype keys as phantom entries for pathological archive
    // paths such as "constructor".
    filesystemCache = Object.create(null);
}
module.exports.readFileSync = function (filesystem, filename, info) {
let buffer = Buffer.alloc(info.size)
if (info.size <= 0) { return buffer }
if (info.unpacked) {
// it's an unpacked file, copy it.
buffer = fs.readFileSync(path.join(`${filesystem.src}.unpacked`, filename))
} else {
// Node throws an exception when reading 0 bytes into a 0-size buffer,
// so we short-circuit the read in this case.
const fd = fs.openSync(filesystem.src, 'r')
try {
const offset = 8 + filesystem.headerSize + parseInt(info.offset)
fs.readSync(fd, buffer, 0, info.size, offset)
} finally {
fs.closeSync(fd)
function readFileSync(filesystem, filename, info) {
let buffer = Buffer.alloc(info.size);
if (info.size <= 0) {
return buffer;
}
}
return buffer
if (info.unpacked) {
// it's an unpacked file, copy it.
buffer = wrapped_fs_1.default.readFileSync(path.join(`${filesystem.getRootPath()}.unpacked`, filename));
}
else {
// Node throws an exception when reading 0 bytes into a 0-size buffer,
// so we short-circuit the read in this case.
const fd = wrapped_fs_1.default.openSync(filesystem.getRootPath(), 'r');
try {
const offset = 8 + filesystem.getHeaderSize() + parseInt(info.offset);
wrapped_fs_1.default.readSync(fd, buffer, 0, info.size, offset);
}
finally {
wrapped_fs_1.default.closeSync(fd);
}
}
return buffer;
}
//# sourceMappingURL=disk.js.map

@@ -1,161 +0,164 @@

'use strict'
const fs = require('./wrapped-fs')
const os = require('os')
const path = require('path')
const { promisify } = require('util')
const stream = require('stream')
const getFileIntegrity = require('./integrity')
const UINT32_MAX = 2 ** 32 - 1
const pipeline = promisify(stream.pipeline)
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Filesystem = void 0;
const os = require("os");
const path = require("path");
const util_1 = require("util");
const stream = require("stream");
const integrity_1 = require("./integrity");
const wrapped_fs_1 = require("./wrapped-fs");
const UINT32_MAX = 2 ** 32 - 1;
const pipeline = (0, util_1.promisify)(stream.pipeline);
class Filesystem {
constructor (src) {
this.src = path.resolve(src)
this.header = { files: Object.create(null) }
this.offset = BigInt(0)
}
searchNodeFromDirectory (p) {
let json = this.header
const dirs = p.split(path.sep)
for (const dir of dirs) {
if (dir !== '.') {
if (!json.files[dir]) {
json.files[dir] = { files: Object.create(null) }
}
json = json.files[dir]
}
constructor(src) {
    // Absolute root the archive is built from; inserted paths are
    // interpreted relative to this.
    this.src = path.resolve(src);
    // In-memory directory tree; null prototype avoids key collisions.
    this.header = { files: Object.create(null) };
    this.headerSize = 0;
    // Running byte offset of the next packed file. BigInt because total
    // archive size can exceed Number.MAX_SAFE_INTEGER.
    this.offset = BigInt(0);
}
return json
}
searchNodeFromPath (p) {
p = path.relative(this.src, p)
if (!p) { return this.header }
const name = path.basename(p)
const node = this.searchNodeFromDirectory(path.dirname(p))
if (node.files == null) {
node.files = Object.create(null)
/** Absolute root path this filesystem was created from. */
getRootPath() {
    return this.src;
}
if (node.files[name] == null) {
node.files[name] = Object.create(null)
/** The in-memory directory tree describing the archive. */
getHeader() {
    return this.header;
}
return node.files[name]
}
insertDirectory (p, shouldUnpack) {
const node = this.searchNodeFromPath(p)
if (shouldUnpack) {
node.unpacked = shouldUnpack
/** Size in bytes of the serialized header (0 until setHeader is called). */
getHeaderSize() {
    return this.headerSize;
}
node.files = node.files || Object.create(null)
return node.files
}
async insertFile (p, shouldUnpack, file, options) {
const dirNode = this.searchNodeFromPath(path.dirname(p))
const node = this.searchNodeFromPath(p)
if (shouldUnpack || dirNode.unpacked) {
node.size = file.stat.size
node.unpacked = true
node.integrity = await getFileIntegrity(p)
return Promise.resolve()
/**
 * Replace the directory tree and record its serialized size, typically
 * with values parsed from an existing archive's header.
 */
setHeader(header, headerSize) {
    this.header = header;
    this.headerSize = headerSize;
}
let size
const transformed = options.transform && options.transform(p)
if (transformed) {
const tmpdir = await fs.mkdtemp(path.join(os.tmpdir(), 'asar-'))
const tmpfile = path.join(tmpdir, path.basename(p))
const out = fs.createWriteStream(tmpfile)
const readStream = fs.createReadStream(p)
await pipeline(readStream, transformed, out)
file.transformed = {
path: tmpfile,
stat: await fs.lstat(tmpfile)
}
size = file.transformed.stat.size
} else {
size = file.stat.size
/**
 * Walk (and create, when absent) the directory chain for a relative path,
 * returning the node of its deepest component.
 *
 * @param p - directory path relative to the root ('.' components are skipped)
 * @throws when traversal reaches a node without a `files` map (i.e. a file
 *         node where a directory was expected)
 */
searchNodeFromDirectory(p) {
    let json = this.header;
    const dirs = p.split(path.sep);
    for (const dir of dirs) {
        if (dir !== '.') {
            if ('files' in json) {
                // Create missing intermediate directories on the fly.
                if (!json.files[dir]) {
                    json.files[dir] = { files: Object.create(null) };
                }
                json = json.files[dir];
            }
            else {
                throw new Error('Unexpected directory state while traversing: ' + p);
            }
        }
    }
    return json;
}
// JavaScript cannot precisely present integers >= UINT32_MAX.
if (size > UINT32_MAX) {
throw new Error(`${p}: file size can not be larger than 4.2GB`)
/**
 * Find (or create) the tree node for an absolute path under the root.
 *
 * @param p - absolute path; it is first made relative to this.src
 * @returns the node for the path's basename, or the header itself when the
 *          path equals the root
 */
searchNodeFromPath(p) {
    p = path.relative(this.src, p);
    if (!p) {
        return this.header;
    }
    const name = path.basename(p);
    const node = this.searchNodeFromDirectory(path.dirname(p));
    if (!node.files) {
        node.files = Object.create(null);
    }
    // An empty node is created for paths not seen before.
    if (!node.files[name]) {
        node.files[name] = Object.create(null);
    }
    return node.files[name];
}
node.size = size
node.offset = this.offset.toString()
node.integrity = await getFileIntegrity(p)
if (process.platform !== 'win32' && (file.stat.mode & 0o100)) {
node.executable = true
/**
 * Register a directory in the tree.
 *
 * @param p - absolute directory path
 * @param shouldUnpack - mark the directory to be left outside the archive
 * @returns the directory's (possibly freshly created) `files` map
 */
insertDirectory(p, shouldUnpack) {
    const node = this.searchNodeFromPath(p);
    if (shouldUnpack) {
        node.unpacked = shouldUnpack;
    }
    node.files = node.files || Object.create(null);
    return node.files;
}
this.offset += BigInt(size)
}
insertLink (p) {
const symlink = fs.readlinkSync(p)
// /var => /private/var
const parentPath = fs.realpathSync(path.dirname(p))
const link = path.relative(fs.realpathSync(this.src), path.join(parentPath, symlink))
if (link.startsWith('..')) {
throw new Error(`${p}: file "${link}" links out of the package`)
/**
 * Register a file in the tree and account for its bytes in the archive.
 *
 * Unpacked files (or files inside an unpacked directory) only record size,
 * the unpacked flag, and integrity; packed files additionally get a byte
 * offset and advance the running offset. An optional options.transform may
 * replace the content via a temp file before sizing.
 *
 * @param p - absolute file path
 * @param shouldUnpack - whether the file itself is excluded from packing
 * @param file - metadata carrying at least `stat`; may gain `transformed`
 * @param [options] - may contain a `transform(p)` factory (presumably
 *                    returning a transform stream — confirm with callers)
 * @throws when the (possibly transformed) size exceeds UINT32_MAX
 */
async insertFile(p, shouldUnpack, file, options = {}) {
    const dirNode = this.searchNodeFromPath(path.dirname(p));
    const node = this.searchNodeFromPath(p);
    if (shouldUnpack || dirNode.unpacked) {
        node.size = file.stat.size;
        node.unpacked = true;
        node.integrity = await (0, integrity_1.getFileIntegrity)(p);
        return Promise.resolve();
    }
    let size;
    const transformed = options.transform && options.transform(p);
    if (transformed) {
        // Run the transform into a temp file so the final size is known
        // before the archive is written.
        const tmpdir = await wrapped_fs_1.default.mkdtemp(path.join(os.tmpdir(), 'asar-'));
        const tmpfile = path.join(tmpdir, path.basename(p));
        const out = wrapped_fs_1.default.createWriteStream(tmpfile);
        const readStream = wrapped_fs_1.default.createReadStream(p);
        await pipeline(readStream, transformed, out);
        file.transformed = {
            path: tmpfile,
            stat: await wrapped_fs_1.default.lstat(tmpfile),
        };
        size = file.transformed.stat.size;
    }
    else {
        size = file.stat.size;
    }
    // JavaScript cannot precisely present integers >= UINT32_MAX.
    if (size > UINT32_MAX) {
        throw new Error(`${p}: file size can not be larger than 4.2GB`);
    }
    node.size = size;
    // Offsets are stored as strings since they may exceed 2^53 overall.
    node.offset = this.offset.toString();
    node.integrity = await (0, integrity_1.getFileIntegrity)(p);
    // Preserve the owner-execute bit on POSIX systems.
    if (process.platform !== 'win32' && file.stat.mode & 0o100) {
        node.executable = true;
    }
    this.offset += BigInt(size);
}
const node = this.searchNodeFromPath(p)
node.link = link
return link
}
listFiles (options) {
const files = []
const fillFilesFromMetadata = function (basePath, metadata) {
if (!metadata.files) {
return
}
for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
const fullPath = path.join(basePath, childPath)
const packState = childMetadata.unpacked ? 'unpack' : 'pack '
files.push((options && options.isPack) ? `${packState} : ${fullPath}` : fullPath)
fillFilesFromMetadata(fullPath, childMetadata)
}
/**
 * Register a symlink in the tree.
 *
 * @param p - absolute path of the symlink
 * @returns the link target, relative to the realpath of the root
 * @throws when the resolved target escapes the package root
 */
insertLink(p) {
    const symlink = wrapped_fs_1.default.readlinkSync(p);
    // Resolve the parent through realpath so aliased prefixes compare
    // correctly, e.g. /var => /private/var on macOS.
    const parentPath = wrapped_fs_1.default.realpathSync(path.dirname(p));
    const link = path.relative(wrapped_fs_1.default.realpathSync(this.src), path.join(parentPath, symlink));
    if (link.startsWith('..')) {
        throw new Error(`${p}: file "${link}" links out of the package`);
    }
    const node = this.searchNodeFromPath(p);
    node.link = link;
    return link;
}
fillFilesFromMetadata('/', this.header)
return files
}
getNode (p) {
const node = this.searchNodeFromDirectory(path.dirname(p))
const name = path.basename(p)
if (name) {
return node.files[name]
} else {
return node
listFiles(options) {
const files = [];
const fillFilesFromMetadata = function (basePath, metadata) {
if (!('files' in metadata)) {
return;
}
for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
const fullPath = path.join(basePath, childPath);
const packState = 'unpacked' in childMetadata && childMetadata.unpacked ? 'unpack' : 'pack ';
files.push(options && options.isPack ? `${packState} : ${fullPath}` : fullPath);
fillFilesFromMetadata(fullPath, childMetadata);
}
};
fillFilesFromMetadata('/', this.header);
return files;
}
}
getFile (p, followLinks) {
followLinks = typeof followLinks === 'undefined' ? true : followLinks
const info = this.getNode(p)
if (!info) {
throw new Error(`"${p}" was not found in this archive`)
getNode(p) {
const node = this.searchNodeFromDirectory(path.dirname(p));
const name = path.basename(p);
if (name) {
return node.files[name];
}
else {
return node;
}
}
// if followLinks is false we don't resolve symlinks
if (info.link && followLinks) {
return this.getFile(info.link)
} else {
return info
getFile(p, followLinks = true) {
const info = this.getNode(p);
if (!info) {
throw new Error(`"${p}" was not found in this archive`);
}
// if followLinks is false we don't resolve symlinks
if ('link' in info && followLinks) {
return this.getFile(info.link, followLinks);
}
else {
return info;
}
}
}
}
module.exports = Filesystem
exports.Filesystem = Filesystem;
//# sourceMappingURL=filesystem.js.map

@@ -1,62 +0,53 @@

const crypto = require('crypto')
const fs = require('fs')
const stream = require('stream')
const { promisify } = require('util')
const ALGORITHM = 'SHA256'
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileIntegrity = getFileIntegrity;
const crypto = require("crypto");
const fs = require("fs");
const stream = require("stream");
const util_1 = require("util");
const ALGORITHM = 'SHA256';
// 4MB default block size
const BLOCK_SIZE = 4 * 1024 * 1024
const pipeline = promisify(stream.pipeline)
function hashBlock (block) {
return crypto.createHash(ALGORITHM).update(block).digest('hex')
const BLOCK_SIZE = 4 * 1024 * 1024;
const pipeline = (0, util_1.promisify)(stream.pipeline);
// Hash a single payload block with the archive's integrity algorithm.
function hashBlock(block) {
    const hasher = crypto.createHash(ALGORITHM);
    hasher.update(block);
    return hasher.digest('hex');
}
async function getFileIntegrity (path) {
const fileHash = crypto.createHash(ALGORITHM)
const blocks = []
let currentBlockSize = 0
let currentBlock = []
await pipeline(
fs.createReadStream(path),
new stream.PassThrough({
decodeStrings: false,
transform (_chunk, encoding, callback) {
fileHash.update(_chunk)
function handleChunk (chunk) {
const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength)
currentBlockSize += diffToSlice
currentBlock.push(chunk.slice(0, diffToSlice))
if (currentBlockSize === BLOCK_SIZE) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
currentBlockSize = 0
}
if (diffToSlice < chunk.byteLength) {
handleChunk(chunk.slice(diffToSlice))
}
}
handleChunk(_chunk)
callback()
},
flush (callback) {
blocks.push(hashBlock(Buffer.concat(currentBlock)))
currentBlock = []
callback()
}
})
)
return {
algorithm: ALGORITHM,
hash: fileHash.digest('hex'),
blockSize: BLOCK_SIZE,
blocks: blocks
}
/**
 * Computes the asar integrity payload for one file: a whole-file hash plus a
 * hash of every fixed-size (BLOCK_SIZE) block of the file contents.
 * @param path - path of the file to read and hash
 * @returns object with the algorithm name, the whole-file hex hash, the block
 *          size used, and the per-block hex hashes in file order
 */
async function getFileIntegrity(path) {
    const fileHash = crypto.createHash(ALGORITHM);
    const blockHashes = [];
    // Bytes accumulated toward the current (not yet full) block.
    let currentBlockSize = 0;
    let currentBlock = [];
    await pipeline(fs.createReadStream(path), new stream.PassThrough({
        decodeStrings: false,
        transform(_chunk, encoding, callback) {
            // The whole-file hash sees every chunk exactly once.
            fileHash.update(_chunk);
            // Splits a chunk across block boundaries: fill the current block,
            // emit its hash when it reaches BLOCK_SIZE, then recurse on the rest.
            function handleChunk(chunk) {
                const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength);
                currentBlockSize += diffToSlice;
                currentBlock.push(chunk.slice(0, diffToSlice));
                if (currentBlockSize === BLOCK_SIZE) {
                    blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
                    currentBlock = [];
                    currentBlockSize = 0;
                }
                if (diffToSlice < chunk.byteLength) {
                    // Chunk spilled past the block boundary; process the remainder.
                    handleChunk(chunk.slice(diffToSlice));
                }
            }
            handleChunk(_chunk);
            callback();
        },
        flush(callback) {
            // Hash whatever is left in the final, possibly partial, block.
            // NOTE(review): when the file length is an exact multiple of
            // BLOCK_SIZE (or zero) this still emits a hash of an empty buffer —
            // presumably intentional; confirm against the integrity reader.
            blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
            currentBlock = [];
            callback();
        },
    }));
    return {
        algorithm: ALGORITHM,
        hash: fileHash.digest('hex'),
        blockSize: BLOCK_SIZE,
        blocks: blockHashes,
    };
}
module.exports = getFileIntegrity
//# sourceMappingURL=integrity.js.map

@@ -0,93 +1,80 @@

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Pickle = void 0;
// sizeof(T).
const SIZE_INT32 = 4
const SIZE_UINT32 = 4
const SIZE_INT64 = 8
const SIZE_UINT64 = 8
const SIZE_FLOAT = 4
const SIZE_DOUBLE = 8
const SIZE_INT32 = 4;
const SIZE_UINT32 = 4;
const SIZE_INT64 = 8;
const SIZE_UINT64 = 8;
const SIZE_FLOAT = 4;
const SIZE_DOUBLE = 8;
// The allocation granularity of the payload.
const PAYLOAD_UNIT = 64
const PAYLOAD_UNIT = 64;
// Largest JS number.
const CAPACITY_READ_ONLY = 9007199254740992
const CAPACITY_READ_ONLY = 9007199254740992;
// Aligns 'i' by rounding it up to the next multiple of 'alignment'.
// Aligns `i` by rounding it up to the next multiple of `alignment`
// (values already aligned are returned unchanged).
const alignInt = function (i, alignment) {
  const padding = (alignment - (i % alignment)) % alignment
  return i + padding
}
return i + ((alignment - (i % alignment)) % alignment);
};
// PickleIterator reads data from a Pickle. The Pickle object must remain valid
// while the PickleIterator object is in use.
const PickleIterator = (function () {
function PickleIterator (pickle) {
this.payload = pickle.header
this.payloadOffset = pickle.headerSize
this.readIndex = 0
this.endIndex = pickle.getPayloadSize()
}
PickleIterator.prototype.readBool = function () {
return this.readInt() !== 0
}
PickleIterator.prototype.readInt = function () {
return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE)
}
PickleIterator.prototype.readUInt32 = function () {
return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE)
}
PickleIterator.prototype.readInt64 = function () {
return this.readBytes(SIZE_INT64, Buffer.prototype.readInt64LE)
}
PickleIterator.prototype.readUInt64 = function () {
return this.readBytes(SIZE_UINT64, Buffer.prototype.readUInt64LE)
}
PickleIterator.prototype.readFloat = function () {
return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE)
}
PickleIterator.prototype.readDouble = function () {
return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE)
}
PickleIterator.prototype.readString = function () {
return this.readBytes(this.readInt()).toString()
}
PickleIterator.prototype.readBytes = function (length, method) {
const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length)
if (method != null) {
return method.call(this.payload, readPayloadOffset, length)
} else {
return this.payload.slice(readPayloadOffset, readPayloadOffset + length)
class PickleIterator {
constructor(pickle) {
this.payload = pickle.getHeader();
this.payloadOffset = pickle.getHeaderSize();
this.readIndex = 0;
this.endIndex = pickle.getPayloadSize();
}
}
PickleIterator.prototype.getReadPayloadOffsetAndAdvance = function (length) {
if (length > this.endIndex - this.readIndex) {
this.readIndex = this.endIndex
throw new Error('Failed to read data with length of ' + length)
readBool() {
return this.readInt() !== 0;
}
const readPayloadOffset = this.payloadOffset + this.readIndex
this.advance(length)
return readPayloadOffset
}
PickleIterator.prototype.advance = function (size) {
const alignedSize = alignInt(size, SIZE_UINT32)
if (this.endIndex - this.readIndex < alignedSize) {
this.readIndex = this.endIndex
} else {
this.readIndex += alignedSize
readInt() {
return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE);
}
}
return PickleIterator
})()
readUInt32() {
return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE);
}
readInt64() {
return this.readBytes(SIZE_INT64, Buffer.prototype.readBigInt64LE);
}
readUInt64() {
return this.readBytes(SIZE_UINT64, Buffer.prototype.readBigUInt64LE);
}
readFloat() {
return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE);
}
readDouble() {
return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE);
}
readString() {
return this.readBytes(this.readInt()).toString();
}
readBytes(length, method) {
const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length);
if (method != null) {
return method.call(this.payload, readPayloadOffset, length);
}
else {
return this.payload.slice(readPayloadOffset, readPayloadOffset + length);
}
}
getReadPayloadOffsetAndAdvance(length) {
if (length > this.endIndex - this.readIndex) {
this.readIndex = this.endIndex;
throw new Error('Failed to read data with length of ' + length);
}
const readPayloadOffset = this.payloadOffset + this.readIndex;
this.advance(length);
return readPayloadOffset;
}
advance(size) {
const alignedSize = alignInt(size, SIZE_UINT32);
if (this.endIndex - this.readIndex < alignedSize) {
this.readIndex = this.endIndex;
}
else {
this.readIndex += alignedSize;
}
}
}
// This class provides facilities for basic binary value packing and unpacking.

@@ -109,123 +96,105 @@ //

// constructor.
const Pickle = (function () {
function Pickle (buffer) {
if (buffer) {
this.initFromBuffer(buffer)
} else {
this.initEmpty()
class Pickle {
constructor(buffer) {
if (buffer) {
this.header = buffer;
this.headerSize = buffer.length - this.getPayloadSize();
this.capacityAfterHeader = CAPACITY_READ_ONLY;
this.writeOffset = 0;
if (this.headerSize > buffer.length) {
this.headerSize = 0;
}
if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
this.headerSize = 0;
}
if (this.headerSize === 0) {
this.header = Buffer.alloc(0);
}
}
else {
this.header = Buffer.alloc(0);
this.headerSize = SIZE_UINT32;
this.capacityAfterHeader = 0;
this.writeOffset = 0;
this.resize(PAYLOAD_UNIT);
this.setPayloadSize(0);
}
}
}
Pickle.prototype.initEmpty = function () {
this.header = Buffer.alloc(0)
this.headerSize = SIZE_UINT32
this.capacityAfterHeader = 0
this.writeOffset = 0
this.resize(PAYLOAD_UNIT)
this.setPayloadSize(0)
}
Pickle.prototype.initFromBuffer = function (buffer) {
this.header = buffer
this.headerSize = buffer.length - this.getPayloadSize()
this.capacityAfterHeader = CAPACITY_READ_ONLY
this.writeOffset = 0
if (this.headerSize > buffer.length) {
this.headerSize = 0
// Factory: build a new, empty, writable Pickle.
static createEmpty() {
    return new Pickle();
}
if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
this.headerSize = 0
// Factory: wrap an existing serialized buffer in a read-only Pickle.
static createFromBuffer(buffer) {
    return new Pickle(buffer);
}
if (this.headerSize === 0) {
this.header = Buffer.alloc(0)
// Returns the backing buffer (header followed by payload).
getHeader() {
    return this.header;
}
}
Pickle.prototype.createIterator = function () {
return new PickleIterator(this)
}
Pickle.prototype.toBuffer = function () {
return this.header.slice(0, this.headerSize + this.getPayloadSize())
}
Pickle.prototype.writeBool = function (value) {
return this.writeInt(value ? 1 : 0)
}
Pickle.prototype.writeInt = function (value) {
return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE)
}
Pickle.prototype.writeUInt32 = function (value) {
return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE)
}
Pickle.prototype.writeInt64 = function (value) {
return this.writeBytes(value, SIZE_INT64, Buffer.prototype.writeInt64LE)
}
Pickle.prototype.writeUInt64 = function (value) {
return this.writeBytes(value, SIZE_UINT64, Buffer.prototype.writeUInt64LE)
}
Pickle.prototype.writeFloat = function (value) {
return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE)
}
Pickle.prototype.writeDouble = function (value) {
return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE)
}
Pickle.prototype.writeString = function (value) {
const length = Buffer.byteLength(value, 'utf8')
if (!this.writeInt(length)) {
return false
// Returns the byte length of the header that precedes the payload.
getHeaderSize() {
    return this.headerSize;
}
return this.writeBytes(value, length)
}
Pickle.prototype.setPayloadSize = function (payloadSize) {
return this.header.writeUInt32LE(payloadSize, 0)
}
Pickle.prototype.getPayloadSize = function () {
return this.header.readUInt32LE(0)
}
Pickle.prototype.writeBytes = function (data, length, method) {
const dataLength = alignInt(length, SIZE_UINT32)
const newSize = this.writeOffset + dataLength
if (newSize > this.capacityAfterHeader) {
this.resize(Math.max(this.capacityAfterHeader * 2, newSize))
// Returns a PickleIterator positioned at the start of this pickle's payload.
createIterator() {
    return new PickleIterator(this);
}
if (method != null) {
method.call(this.header, data, this.headerSize + this.writeOffset)
} else {
this.header.write(data, this.headerSize + this.writeOffset, length)
// Returns only the meaningful bytes (header + current payload size),
// trimming any unused over-allocated capacity.
toBuffer() {
    return this.header.slice(0, this.headerSize + this.getPayloadSize());
}
const endOffset = this.headerSize + this.writeOffset + length
this.header.fill(0, endOffset, endOffset + dataLength - length)
this.setPayloadSize(newSize)
this.writeOffset = newSize
return true
}
Pickle.prototype.resize = function (newCapacity) {
newCapacity = alignInt(newCapacity, PAYLOAD_UNIT)
this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)])
this.capacityAfterHeader = newCapacity
}
return Pickle
})()
module.exports = {
createEmpty: function () {
return new Pickle()
},
createFromBuffer: function (buffer) {
return new Pickle(buffer)
}
writeBool(value) {
return this.writeInt(value ? 1 : 0);
}
writeInt(value) {
return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE);
}
writeUInt32(value) {
return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE);
}
writeInt64(value) {
return this.writeBytes(BigInt(value), SIZE_INT64, Buffer.prototype.writeBigInt64LE);
}
writeUInt64(value) {
return this.writeBytes(BigInt(value), SIZE_UINT64, Buffer.prototype.writeBigUInt64LE);
}
writeFloat(value) {
return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE);
}
writeDouble(value) {
return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE);
}
writeString(value) {
const length = Buffer.byteLength(value, 'utf8');
if (!this.writeInt(length)) {
return false;
}
return this.writeBytes(value, length);
}
// Stores the payload size in the first uint32 of the header.
setPayloadSize(payloadSize) {
    return this.header.writeUInt32LE(payloadSize, 0);
}
// Reads the payload size back from the first uint32 of the header.
getPayloadSize() {
    return this.header.readUInt32LE(0);
}
// Appends `data` to the payload, zero-padded up to 4-byte alignment.
// `method` (a Buffer.prototype write* function) encodes numeric values;
// without it, `data` is written as a string via Buffer#write.
// Always returns true (capacity growth is handled by resize).
writeBytes(data, length, method) {
    // Space actually consumed, rounded up to the alignment unit.
    const dataLength = alignInt(length, SIZE_UINT32);
    const newSize = this.writeOffset + dataLength;
    if (newSize > this.capacityAfterHeader) {
        // Grow geometrically to amortize repeated appends.
        this.resize(Math.max(this.capacityAfterHeader * 2, newSize));
    }
    if (method) {
        method.call(this.header, data, this.headerSize + this.writeOffset);
    }
    else {
        this.header.write(data, this.headerSize + this.writeOffset, length);
    }
    // Zero the padding between the end of the data and the aligned end.
    const endOffset = this.headerSize + this.writeOffset + length;
    this.header.fill(0, endOffset, endOffset + dataLength - length);
    this.setPayloadSize(newSize);
    this.writeOffset = newSize;
    return true;
}
resize(newCapacity) {
newCapacity = alignInt(newCapacity, PAYLOAD_UNIT);
this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]);
this.capacityAfterHeader = newCapacity;
}
}
exports.Pickle = Pickle;
//# sourceMappingURL=pickle.js.map

@@ -1,26 +0,18 @@

'use strict'
const fs = process.versions.electron ? require('original-fs') : require('fs')
const promisifiedMethods = [
'lstat',
'mkdtemp',
'readFile',
'stat',
'writeFile'
]
const promisified = {}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs = 'electron' in process.versions ? require('original-fs') : require('fs');
const promisifiedMethods = ['lstat', 'mkdtemp', 'readFile', 'stat', 'writeFile'];
const promisified = {};
for (const method of Object.keys(fs)) {
if (promisifiedMethods.includes(method)) {
promisified[method] = fs.promises[method]
} else {
promisified[method] = fs[method]
}
if (promisifiedMethods.includes(method)) {
promisified[method] = fs.promises[method];
}
else {
promisified[method] = fs[method];
}
}
// To make it more like fs-extra
promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true })
promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true })
module.exports = promisified
promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true });
promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true });
exports.default = promisified;
//# sourceMappingURL=wrapped-fs.js.map
{
"name": "@electron/asar",
"description": "Creating Electron app packages",
"version": "3.2.10",
"version": "3.2.11",
"main": "./lib/asar.js",
"types": "./lib/index.d.ts",
"types": "./lib/asar.d.ts",
"bin": {

@@ -12,4 +12,3 @@ "asar": "./bin/asar.js"

"bin",
"lib",
"lib/index.d.ts"
"lib"
],

@@ -29,20 +28,13 @@ "engines": {

"scripts": {
"build": "tsc",
"mocha": "xvfb-maybe electron-mocha --reporter spec && mocha --reporter spec",
"test": "npm run lint && npm run mocha",
"lint": "tsd && standard",
"standard": "standard",
"tsd": "tsd"
"test": "yarn lint && yarn mocha",
"lint": "yarn prettier:check",
"prettier": "prettier \"src/**/*.ts\" \"test/**/*.ts\" \"test/**/*.js\"",
"prettier:check": "yarn prettier --check",
"prettier:write": "yarn prettier --write",
"prepare": "tsc"
},
"standard": {
"env": {
"mocha": true
},
"globals": [
"BigInt"
]
},
"tsd": {
"directory": "test"
},
"dependencies": {
"@types/glob": "^7.1.0",
"commander": "^5.0.0",

@@ -53,2 +45,4 @@ "glob": "^7.1.6",

"devDependencies": {
"@types/minimatch": "^3.0.5",
"@types/node": "^12.0.0",
"electron": "^22.0.0",

@@ -58,7 +52,7 @@ "electron-mocha": "^11.0.2",

"mocha": "^10.1.0",
"prettier": "^3.3.3",
"rimraf": "^3.0.2",
"standard": "^14.3.3",
"tsd": "^0.25.0",
"typescript": "^5.5.4",
"xvfb-maybe": "^0.2.1"
}
}
SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc