Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@parcel/core

Package Overview
Dependencies
Maintainers
1
Versions
885
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@parcel/core - npm Package Compare versions

Comparing version 2.0.0-nightly.105 to 2.0.0-nightly.110

8

lib/BundleGraph.js

@@ -482,4 +482,8 @@ "use strict";

this.traverseBundles(childBundle => {
hash.update(this.getContentHash(childBundle));
this.traverseBundles((childBundle, ctx, traversal) => {
if (childBundle.id === bundle.id || childBundle.isInline) {
hash.update(this.getContentHash(childBundle));
} else {
traversal.skipChildren();
}
}, bundle);

@@ -486,0 +490,0 @@ hash.update(JSON.stringify((0, _utils.objectSortedEntriesDeep)(bundle.env)));

@@ -198,2 +198,6 @@ "use strict";

internalBundle.name = name;
let {
hashReference
} = internalBundle;
internalBundle.displayName = name.includes(hashReference) ? name.replace(hashReference, '[hash]') : name;
return;

@@ -200,0 +204,0 @@ }

@@ -6,3 +6,3 @@ "use strict";

});
exports.PARCEL_VERSION = void 0;
exports.HASH_REF_PREFIX = exports.PARCEL_VERSION = void 0;

@@ -13,2 +13,4 @@ var _package = require("../package.json");

const PARCEL_VERSION = _package.version;
exports.PARCEL_VERSION = PARCEL_VERSION;
exports.PARCEL_VERSION = PARCEL_VERSION;
const HASH_REF_PREFIX = 'HASH_REF_';
exports.HASH_REF_PREFIX = HASH_REF_PREFIX;

@@ -22,2 +22,4 @@ "use strict";

var _crypto = _interopRequireDefault(require("crypto"));
var _Bundle = require("./public/Bundle");

@@ -39,2 +41,5 @@

const BOUNDARY_LENGTH = _constants.HASH_REF_PREFIX.length + 32 - 1;
const HASH_REF_REGEX = new RegExp(`${_constants.HASH_REF_PREFIX}\\w{32}`, 'g');
class PackagerRunner {

@@ -61,3 +66,3 @@ constructor({

_defineProperty(this, "writeBundleFromWorker", void 0);
_defineProperty(this, "getBundleInfoFromWorker", void 0);

@@ -69,3 +74,3 @@ this.config = config;

this.report = report;
this.writeBundleFromWorker = farm ? farm.createHandle('runPackage') : () => {
this.getBundleInfoFromWorker = farm ? farm.createHandle('runPackage') : () => {
throw new Error('Cannot call PackagerRunner.writeBundleFromWorker() in a worker');

@@ -81,46 +86,64 @@ };

} = await farm.createSharedReference(bundleGraph);
let promises = [];
let bundleInfoMap = {};
let writeEarlyPromises = {};
let hashRefToNameHash = new Map(); // skip inline bundles, they will be processed via the parent bundle
for (let bundle of bundleGraph.getBundles()) {
// skip inline bundles, they will be processed via the parent bundle
if (bundle.isInline) {
continue;
let bundles = bundleGraph.getBundles().filter(bundle => !bundle.isInline);
await Promise.all(bundles.map(async bundle => {
let info = await this.processBundle(bundle, bundleGraph, ref);
bundleInfoMap[bundle.id] = info;
if (!info.hashReferences.length) {
hashRefToNameHash.set(bundle.hashReference, info.hash.slice(-8));
writeEarlyPromises[bundle.id] = this.writeToDist({
bundle,
info,
hashRefToNameHash,
bundleGraph
});
}
}));
assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap);
await Promise.all(bundles.map(bundle => {
var _writeEarlyPromises$b;
promises.push(this.writeBundle(bundle, bundleGraph, ref).then(stats => {
bundle.stats = stats;
}));
}
await Promise.all(promises);
return (_writeEarlyPromises$b = writeEarlyPromises[bundle.id]) !== null && _writeEarlyPromises$b !== void 0 ? _writeEarlyPromises$b : this.writeToDist({
bundle,
info: bundleInfoMap[bundle.id],
hashRefToNameHash,
bundleGraph
});
}));
await dispose();
}
async writeBundle(bundle, bundleGraph, bundleGraphReference) {
async processBundle(bundle, bundleGraph, bundleGraphReference) {
var _ref;
let start = Date.now();
let cacheKey = await this.getCacheKey(bundle, bundleGraph);
let cacheKeys = {
content: getContentKey(cacheKey),
map: getMapKey(cacheKey),
info: getInfoKey(cacheKey)
};
let {
size
} = (await this.writeBundleFromCache({
hash,
hashReferences
} = (_ref = await this.getBundleInfoFromCache(cacheKeys.info)) !== null && _ref !== void 0 ? _ref : await this.getBundleInfoFromWorker({
bundle,
bundleGraph,
cacheKey
})) || (await this.writeBundleFromWorker({
bundle,
cacheKey,
bundleGraphReference,
cacheKeys,
options: this.options,
config: this.config
}));
});
return {
time: Date.now() - start,
size
hash,
hashReferences,
cacheKeys
};
}
async writeBundleFromCache({
bundle,
bundleGraph,
cacheKey
}) {
getBundleInfoFromCache(infoKey) {
if (this.options.disableCache) {

@@ -130,81 +153,21 @@ return;

let cacheResult = await this.readFromCache(cacheKey);
if (cacheResult == null) {
return;
}
let {
contents,
map
} = cacheResult;
let {
size
} = await this.writeToDist({
bundle,
bundleGraph,
contents,
map
});
return {
size
};
return this.options.cache.get(infoKey);
}
async packageAndWriteBundle(bundle, bundleGraph, cacheKey) {
let start = Date.now();
async getBundleInfo(bundle, bundleGraph, cacheKeys) {
let {
contents,
map
} = await this.getBundleResult(bundle, bundleGraph, cacheKey);
let {
size
} = await this.writeToDist({
bundle,
bundleGraph,
contents,
map
});
return {
time: Date.now() - start,
size
};
} = await this.getBundleResult(bundle, bundleGraph);
return this.writeToCache(cacheKeys, contents, map);
}
async getBundleResult(bundle, bundleGraph, cacheKey) {
let result;
if (!cacheKey && !this.options.disableCache) {
cacheKey = await this.getCacheKey(bundle, bundleGraph);
let cacheResult = await this.readFromCache(cacheKey);
if (cacheResult) {
// NOTE: Returning a new object for flow
return {
contents: cacheResult.contents,
map: cacheResult.map
};
}
}
async getBundleResult(bundle, bundleGraph) {
let packaged = await this.package(bundle, bundleGraph);
let res = await this.optimize(bundle, bundleGraph, packaged.contents, packaged.map);
let map = res.map ? await this.generateSourceMap(bundle, res.map) : null;
result = {
return {
contents: res.contents,
map
};
if (cacheKey != null) {
await this.writeToCache(cacheKey, result.contents, map);
if (result.contents instanceof _stream.Readable) {
return {
contents: this.options.cache.getStream(getContentKey(cacheKey)),
map: result.map
};
}
}
return result;
}

@@ -367,4 +330,4 @@

bundleGraph,
contents,
map
info,
hashRefToNameHash
}) {

@@ -376,3 +339,11 @@ let {

let filePath = (0, _nullthrows.default)(bundle.filePath);
let thisHashReference = bundle.hashReference;
if (filePath.includes(thisHashReference)) {
let thisNameHash = (0, _nullthrows.default)(hashRefToNameHash.get(thisHashReference));
filePath = filePath.replace(thisHashReference, thisNameHash);
bundle.filePath = filePath;
bundle.name = (0, _nullthrows.default)(bundle.name).replace(thisHashReference, thisNameHash);
}
let dir = _path.default.dirname(filePath);

@@ -388,32 +359,45 @@

};
let size;
let cacheKeys = info.cacheKeys;
let contentStream = this.options.cache.getStream(cacheKeys.content);
let size = await writeFileStream(outputFS, filePath, contentStream, info.hashReferences, hashRefToNameHash, writeOptions);
bundle.stats = {
size,
time: info.time
};
let mapKey = cacheKeys.map;
if (contents instanceof _stream.Readable) {
size = await writeFileStream(outputFS, filePath, contents, writeOptions);
} else {
await outputFS.writeFile(filePath, contents, writeOptions);
size = contents.length;
if (await this.options.cache.blobExists(mapKey)) {
let mapStream = this.options.cache.getStream(mapKey);
await writeFileStream(outputFS, filePath + '.map', mapStream, info.hashReferences, hashRefToNameHash);
}
}
if (map != null) {
if (map instanceof _stream.Readable) {
await writeFileStream(outputFS, filePath + '.map', map);
} else {
await outputFS.writeFile(filePath + '.map', map);
}
}
async writeToCache(cacheKeys, contents, map) {
let size = 0;
return {
size
};
}
let hash = _crypto.default.createHash('md5');
async writeToCache(cacheKey, contents, map) {
let contentKey = getContentKey(cacheKey);
await this.options.cache.setStream(contentKey, (0, _utils.blobToStream)(contents));
let boundaryStr = '';
let hashReferences = [];
await this.options.cache.setStream(cacheKeys.content, (0, _utils.blobToStream)(contents).pipe(new _utils.TapStream(buf => {
var _str$match;
let str = boundaryStr + buf.toString();
hashReferences = hashReferences.concat((_str$match = str.match(HASH_REF_REGEX)) !== null && _str$match !== void 0 ? _str$match : []);
size += buf.length;
hash.update(buf);
boundaryStr = str.slice(str.length - BOUNDARY_LENGTH);
})));
if (map != null) {
let mapKey = getMapKey(cacheKey);
await this.options.cache.setStream(mapKey, (0, _utils.blobToStream)(map));
await this.options.cache.setStream(cacheKeys.map, (0, _utils.blobToStream)(map));
}
let info = {
size,
hash: hash.digest('hex'),
hashReferences
};
await this.options.cache.set(cacheKeys.info, info);
return info;
}

@@ -425,6 +409,7 @@

function writeFileStream(fs, filePath, stream, options) {
function writeFileStream(fs, filePath, stream, hashReferences, hashRefToNameHash, options) {
return new Promise((resolve, reject) => {
let initialStream = hashReferences.length ? stream.pipe(replaceStream(hashRefToNameHash)) : stream;
let fsStream = fs.createWriteStream(filePath, options);
stream.pipe(fsStream) // $FlowFixMe
initialStream.pipe(fsStream) // $FlowFixMe
.on('finish', () => resolve(fsStream.bytesWritten)).on('error', reject);

@@ -434,2 +419,22 @@ });

// Builds a Transform stream that rewrites HASH_REF_<md5> placeholders into
// their final name hashes as content flows through. A rolling tail of
// BOUNDARY_LENGTH characters is held back between chunks so a reference that
// straddles a chunk boundary still matches; flush() emits the remaining tail.
function replaceStream(hashRefToNameHash) {
  let carry = '';
  const substitute = match => hashRefToNameHash.get(match) || match;
  return new _stream.Transform({
    transform(chunk, encoding, cb) {
      const replaced = (carry + chunk.toString()).replace(HASH_REF_REGEX, substitute);
      const cut = replaced.length - BOUNDARY_LENGTH;
      carry = replaced.slice(cut);
      cb(null, replaced.slice(0, cut));
    },
    flush(cb) {
      cb(null, carry);
    }
  });
}
function getContentKey(cacheKey) {

@@ -441,2 +446,35 @@ return (0, _utils.md5FromString)(`${cacheKey}:content`);

return (0, _utils.md5FromString)(`${cacheKey}:map`);
}
// Derives the cache key under which a bundle's BundleInfo record is stored.
function getInfoKey(cacheKey) {
  const raw = `${cacheKey}:info`;
  return (0, _utils.md5FromString)(raw);
}
// For bundles whose name hash was not resolved in the early-write pass
// (i.e. bundles that reference other bundles), combine the content hashes of
// every transitively referenced bundle and record the last 8 hex chars of the
// md5 of that combination as the bundle's name hash.
function assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap) {
  for (const bundle of bundles) {
    // Skip bundles already assigned a name hash earlier.
    if (hashRefToNameHash.get(bundle.hashReference) != null) continue;
    const hashes = getBundlesIncludedInHash(bundle.id, bundleInfoMap).map(
      bundleId => bundleInfoMap[bundleId].hash
    );
    const combined = (0, _utils.md5FromString)(hashes.join(':'));
    hashRefToNameHash.set(bundle.hashReference, combined.slice(-8));
  }
}
// Collects the ids of `bundleId` plus every bundle it transitively references
// via hash references, in depth-first discovery order, without duplicates.
// `included` serves both as the visited set and the accumulator: the recursive
// call appends into it in place, so its return value must NOT be pushed again.
// (The previous `included.push(...recurse(...))` re-appended the shared array
// onto itself, duplicating every entry — growing the array combinatorially on
// deep reference chains and feeding duplicate hashes into the name hash.)
function getBundlesIncludedInHash(bundleId, bundleInfoMap, included = []) {
  included.push(bundleId);
  for (let hashRef of bundleInfoMap[bundleId].hashReferences) {
    let referencedId = getIdFromHashRef(hashRef);
    if (!included.includes(referencedId)) {
      // Recursion mutates `included` directly; no re-push needed.
      getBundlesIncludedInHash(referencedId, bundleInfoMap, included);
    }
  }
  return included;
}
// Recovers the bundle id from a hash reference by stripping the HASH_REF_ prefix.
function getIdFromHashRef(hashRef) {
  const prefixLength = _constants.HASH_REF_PREFIX.length;
  return hashRef.substring(prefixLength);
}

@@ -92,2 +92,6 @@ "use strict";

get hashReference() {
return _classPrivateFieldGet(this, _bundle).hashReference;
}
get type() {

@@ -167,6 +171,2 @@ return _classPrivateFieldGet(this, _bundle).type;

getHash() {
return _classPrivateFieldGet(this, _bundleGraph).getHash(_classPrivateFieldGet(this, _bundle));
}
}

@@ -222,2 +222,6 @@

get displayName() {
return (0, _nullthrows.default)(_classPrivateFieldGet(this, _bundle2).displayName);
}
}

@@ -224,0 +228,0 @@

@@ -30,2 +30,4 @@ "use strict";

var _constants = require("../constants");
function _getRequireWildcardCache() { if (typeof WeakMap !== "function") return null; var cache = new WeakMap(); _getRequireWildcardCache = function () { return cache; }; return cache; }

@@ -123,6 +125,6 @@

createBundle(opts) {
var _opts$id, _opts$type, _opts$isSplittable;
var _opts$uniqueKey, _opts$type, _opts$isSplittable;
let entryAsset = opts.entryAsset ? (0, _Asset.assetToInternalAsset)(opts.entryAsset) : null;
let bundleId = 'bundle:' + ((_opts$id = opts.id) !== null && _opts$id !== void 0 ? _opts$id : (0, _nullthrows.default)(entryAsset === null || entryAsset === void 0 ? void 0 : entryAsset.value.id));
let bundleId = (0, _utils.md5FromString)('bundle:' + ((_opts$uniqueKey = opts.uniqueKey) !== null && _opts$uniqueKey !== void 0 ? _opts$uniqueKey : (0, _nullthrows.default)(entryAsset === null || entryAsset === void 0 ? void 0 : entryAsset.value.id)));
let bundleNode = {

@@ -133,2 +135,3 @@ type: 'bundle',

id: bundleId,
hashReference: _constants.HASH_REF_PREFIX + bundleId,
type: (_opts$type = opts.type) !== null && _opts$type !== void 0 ? _opts$type : (0, _nullthrows.default)(entryAsset).value.type,

@@ -144,2 +147,3 @@ env: opts.env ? (0, _Environment.environmentToInternalEnvironment)(opts.env) : (0, _nullthrows.default)(entryAsset).value.env,

name: null,
displayName: null,
stats: {

@@ -146,0 +150,0 @@ size: 0,

@@ -299,3 +299,3 @@ "use strict";

// no re-exported symbols are used by ancestor dependencies and the re-exporting asset isn't
// using a wildcard.
// using a wildcard and isn't an entry (in library mode).
// This helps with performance building large libraries like `lodash-es`, which re-exports

@@ -308,18 +308,17 @@ // a huge number of functions since we can avoid even transforming the files that aren't used.

if (dependency.isWeak && sideEffects === false && !dependency.symbols.has('*') && !dependency.env.isLibrary // TODO (T-232): improve the logic below and remove this.
) {
let depNode = this.assetGraph.getNode(dependency.id);
(0, _assert.default)(depNode);
let assets = this.assetGraph.getNodesConnectedTo(depNode);
let symbols = invertMap(dependency.symbols);
(0, _assert.default)(assets.length === 1);
let firstAsset = assets[0];
(0, _assert.default)(firstAsset.type === 'asset');
let resolvedAsset = firstAsset.value;
let deps = this.assetGraph.getIncomingDependencies(resolvedAsset);
defer = deps.every(d => !d.symbols.has('*') && ![...d.symbols.keys()].some(symbol => {
let assetSymbol = resolvedAsset.symbols.get(symbol);
return assetSymbol != null && symbols.has(assetSymbol);
}));
}
if (dependency.isWeak && sideEffects === false && !dependency.symbols.has('*')) {
let depNode = this.assetGraph.getNode(dependency.id);
(0, _assert.default)(depNode);
let assets = this.assetGraph.getNodesConnectedTo(depNode);
let symbols = invertMap(dependency.symbols);
(0, _assert.default)(assets.length === 1);
let firstAsset = assets[0];
(0, _assert.default)(firstAsset.type === 'asset');
let resolvedAsset = firstAsset.value;
let deps = this.assetGraph.getIncomingDependencies(resolvedAsset);
defer = deps.every(d => !(d.env.isLibrary && d.isEntry) && !d.symbols.has('*') && ![...d.symbols.keys()].some(symbol => {
let assetSymbol = resolvedAsset.symbols.get(symbol);
return assetSymbol != null && symbols.has(assetSymbol);
}));
}

@@ -326,0 +325,0 @@ return defer;

@@ -61,3 +61,3 @@ "use strict";

config,
cacheKey,
cacheKeys,
options

@@ -71,3 +71,3 @@ }) {

report: _ReporterRunner.reportWorker.bind(null, workerApi)
}).packageAndWriteBundle(bundle, bundleGraph, cacheKey);
}).getBundleInfo(bundle, bundleGraph, cacheKeys);
}
{
"name": "@parcel/core",
"version": "2.0.0-nightly.105+b2851080",
"version": "2.0.0-nightly.110+9e4870b8",
"license": "MIT",

@@ -19,13 +19,13 @@ "publishConfig": {

"dependencies": {
"@parcel/cache": "2.0.0-nightly.107+b2851080",
"@parcel/diagnostic": "2.0.0-nightly.107+b2851080",
"@parcel/events": "2.0.0-nightly.107+b2851080",
"@parcel/fs": "2.0.0-nightly.107+b2851080",
"@parcel/logger": "2.0.0-nightly.107+b2851080",
"@parcel/package-manager": "2.0.0-nightly.107+b2851080",
"@parcel/plugin": "2.0.0-nightly.107+b2851080",
"@parcel/source-map": "2.0.0-nightly.107+b2851080",
"@parcel/types": "2.0.0-nightly.107+b2851080",
"@parcel/utils": "2.0.0-nightly.107+b2851080",
"@parcel/workers": "2.0.0-nightly.107+b2851080",
"@parcel/cache": "2.0.0-nightly.112+9e4870b8",
"@parcel/diagnostic": "2.0.0-nightly.112+9e4870b8",
"@parcel/events": "2.0.0-nightly.112+9e4870b8",
"@parcel/fs": "2.0.0-nightly.112+9e4870b8",
"@parcel/logger": "2.0.0-nightly.112+9e4870b8",
"@parcel/package-manager": "2.0.0-nightly.112+9e4870b8",
"@parcel/plugin": "2.0.0-nightly.112+9e4870b8",
"@parcel/source-map": "2.0.0-nightly.112+9e4870b8",
"@parcel/types": "2.0.0-nightly.112+9e4870b8",
"@parcel/utils": "2.0.0-nightly.112+9e4870b8",
"@parcel/workers": "2.0.0-nightly.112+9e4870b8",
"abortcontroller-polyfill": "^1.1.9",

@@ -46,3 +46,3 @@ "browserslist": "^4.6.6",

},
"gitHead": "b2851080dd7c4c1df22b2c512ce50d7424c36927"
"gitHead": "9e4870b8c861134e45750671a84bee966eacbd8c"
}

@@ -579,4 +579,8 @@ // @flow strict-local

let hash = crypto.createHash('md5');
this.traverseBundles(childBundle => {
hash.update(this.getContentHash(childBundle));
this.traverseBundles((childBundle, ctx, traversal) => {
if (childBundle.id === bundle.id || childBundle.isInline) {
hash.update(this.getContentHash(childBundle));
} else {
traversal.skipChildren();
}
}, bundle);

@@ -583,0 +587,0 @@

@@ -192,2 +192,7 @@ // @flow strict-local

internalBundle.name = name;
let {hashReference} = internalBundle;
internalBundle.displayName = name.includes(hashReference)
? name.replace(hashReference, '[hash]')
: name;
return;

@@ -194,0 +199,0 @@ }

@@ -7,1 +7,2 @@ // @flow strict-local

export const PARCEL_VERSION = version;
export const HASH_REF_PREFIX = 'HASH_REF_';

@@ -1,2 +0,2 @@

// @flow
// @flow strict-local

@@ -9,3 +9,2 @@ import type {

BundleGraph as BundleGraphType,
Stats,
} from '@parcel/types';

@@ -19,9 +18,15 @@ import type SourceMap from '@parcel/source-map';

import {md5FromObject, md5FromString, blobToStream} from '@parcel/utils';
import {
md5FromObject,
md5FromString,
blobToStream,
TapStream,
} from '@parcel/utils';
import {PluginLogger} from '@parcel/logger';
import ThrowableDiagnostic, {errorToDiagnostic} from '@parcel/diagnostic';
import {Readable} from 'stream';
import {Readable, Transform} from 'stream';
import nullthrows from 'nullthrows';
import path from 'path';
import url from 'url';
import crypto from 'crypto';

@@ -33,3 +38,3 @@ import {NamedBundle, bundleToInternalBundle} from './public/Bundle';

import PluginOptions from './public/PluginOptions';
import {PARCEL_VERSION} from './constants';
import {PARCEL_VERSION, HASH_REF_PREFIX} from './constants';

@@ -43,2 +48,17 @@ type Opts = {|

type BundleInfo = {|
+hash: string,
+hashReferences: Array<string>,
+time: number,
|};
type CacheKeyMap = {|
content: string,
map: string,
info: string,
|};
const BOUNDARY_LENGTH = HASH_REF_PREFIX.length + 32 - 1;
const HASH_REF_REGEX = new RegExp(`${HASH_REF_PREFIX}\\w{32}`, 'g');
export default class PackagerRunner {

@@ -52,9 +72,9 @@ config: ParcelConfig;

report: ReportFn;
writeBundleFromWorker: ({|
getBundleInfoFromWorker: ({|
bundle: InternalBundle,
bundleGraphReference: number,
config: ParcelConfig,
cacheKey: string,
cacheKeys: CacheKeyMap,
options: ParcelOptions,
|}) => Promise<Stats>;
|}) => Promise<BundleInfo>;

@@ -68,3 +88,3 @@ constructor({config, farm, options, report}: Opts) {

this.report = report;
this.writeBundleFromWorker = farm
this.getBundleInfoFromWorker = farm
? farm.createHandle('runPackage')

@@ -82,34 +102,57 @@ : () => {

let promises = [];
for (let bundle of bundleGraph.getBundles()) {
// skip inline bundles, they will be processed via the parent bundle
if (bundle.isInline) {
continue;
}
promises.push(
this.writeBundle(bundle, bundleGraph, ref).then(stats => {
bundle.stats = stats;
}),
);
}
await Promise.all(promises);
let bundleInfoMap = {};
let writeEarlyPromises = {};
let hashRefToNameHash = new Map();
// skip inline bundles, they will be processed via the parent bundle
let bundles = bundleGraph.getBundles().filter(bundle => !bundle.isInline);
await Promise.all(
bundles.map(async bundle => {
let info = await this.processBundle(bundle, bundleGraph, ref);
bundleInfoMap[bundle.id] = info;
if (!info.hashReferences.length) {
hashRefToNameHash.set(bundle.hashReference, info.hash.slice(-8));
writeEarlyPromises[bundle.id] = this.writeToDist({
bundle,
info,
hashRefToNameHash,
bundleGraph,
});
}
}),
);
assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap);
await Promise.all(
bundles.map(
bundle =>
writeEarlyPromises[bundle.id] ??
this.writeToDist({
bundle,
info: bundleInfoMap[bundle.id],
hashRefToNameHash,
bundleGraph,
}),
),
);
await dispose();
}
async writeBundle(
async processBundle(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
bundleGraphReference: number,
) {
): Promise<{|...BundleInfo, cacheKeys: CacheKeyMap|}> {
let start = Date.now();
let cacheKey = await this.getCacheKey(bundle, bundleGraph);
let {size} =
(await this.writeBundleFromCache({bundle, bundleGraph, cacheKey})) ||
(await this.writeBundleFromWorker({
let cacheKeys = {
content: getContentKey(cacheKey),
map: getMapKey(cacheKey),
info: getInfoKey(cacheKey),
};
let {hash, hashReferences} =
(await this.getBundleInfoFromCache(cacheKeys.info)) ??
(await this.getBundleInfoFromWorker({
bundle,
cacheKey,
bundleGraphReference,
cacheKeys,
options: this.options,

@@ -119,17 +162,6 @@ config: this.config,

return {
time: Date.now() - start,
size,
};
return {time: Date.now() - start, hash, hashReferences, cacheKeys};
}
async writeBundleFromCache({
bundle,
bundleGraph,
cacheKey,
}: {|
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
cacheKey: string,
|}) {
getBundleInfoFromCache(infoKey: string) {
if (this.options.disableCache) {

@@ -139,41 +171,13 @@ return;

let cacheResult = await this.readFromCache(cacheKey);
if (cacheResult == null) {
return;
}
let {contents, map} = cacheResult;
let {size} = await this.writeToDist({
bundle,
bundleGraph,
contents,
map,
});
return {size};
return this.options.cache.get(infoKey);
}
async packageAndWriteBundle(
async getBundleInfo(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
cacheKey: string,
cacheKeys: CacheKeyMap,
) {
let start = Date.now();
let {contents, map} = await this.getBundleResult(bundle, bundleGraph);
let {contents, map} = await this.getBundleResult(
bundle,
bundleGraph,
cacheKey,
);
let {size} = await this.writeToDist({
bundle,
bundleGraph,
contents,
map,
});
return {
time: Date.now() - start,
size,
};
return this.writeToCache(cacheKeys, contents, map);
}

@@ -184,18 +188,3 @@

bundleGraph: InternalBundleGraph,
cacheKey: ?string,
): Promise<{|contents: Blob, map: ?(Readable | string)|}> {
let result;
if (!cacheKey && !this.options.disableCache) {
cacheKey = await this.getCacheKey(bundle, bundleGraph);
let cacheResult = await this.readFromCache(cacheKey);
if (cacheResult) {
// NOTE: Returning a new object for flow
return {
contents: cacheResult.contents,
map: cacheResult.map,
};
}
}
let packaged = await this.package(bundle, bundleGraph);

@@ -210,19 +199,6 @@ let res = await this.optimize(

let map = res.map ? await this.generateSourceMap(bundle, res.map) : null;
result = {
return {
contents: res.contents,
map,
};
if (cacheKey != null) {
await this.writeToCache(cacheKey, result.contents, map);
if (result.contents instanceof Readable) {
return {
contents: this.options.cache.getStream(getContentKey(cacheKey)),
map: result.map,
};
}
}
return result;
}

@@ -421,12 +397,23 @@

bundleGraph,
contents,
map,
info,
hashRefToNameHash,
}: {|
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
contents: Blob,
map: ?(Readable | string),
info: {|...BundleInfo, cacheKeys: CacheKeyMap|},
hashRefToNameHash: Map<string, string>,
|}) {
let {inputFS, outputFS} = this.options;
let filePath = nullthrows(bundle.filePath);
let thisHashReference = bundle.hashReference;
if (filePath.includes(thisHashReference)) {
let thisNameHash = nullthrows(hashRefToNameHash.get(thisHashReference));
filePath = filePath.replace(thisHashReference, thisNameHash);
bundle.filePath = filePath;
bundle.name = nullthrows(bundle.name).replace(
thisHashReference,
thisNameHash,
);
}
let dir = path.dirname(filePath);

@@ -445,31 +432,56 @@ await outputFS.mkdirp(dir); // ? Got rid of dist exists, is this an expensive operation

};
let cacheKeys = info.cacheKeys;
let contentStream = this.options.cache.getStream(cacheKeys.content);
let size = await writeFileStream(
outputFS,
filePath,
contentStream,
info.hashReferences,
hashRefToNameHash,
writeOptions,
);
bundle.stats = {
size,
time: info.time,
};
let size;
if (contents instanceof Readable) {
size = await writeFileStream(outputFS, filePath, contents, writeOptions);
} else {
await outputFS.writeFile(filePath, contents, writeOptions);
size = contents.length;
let mapKey = cacheKeys.map;
if (await this.options.cache.blobExists(mapKey)) {
let mapStream = this.options.cache.getStream(mapKey);
await writeFileStream(
outputFS,
filePath + '.map',
mapStream,
info.hashReferences,
hashRefToNameHash,
);
}
if (map != null) {
if (map instanceof Readable) {
await writeFileStream(outputFS, filePath + '.map', map);
} else {
await outputFS.writeFile(filePath + '.map', map);
}
}
return {size};
}
async writeToCache(cacheKey: string, contents: Blob, map: ?Blob) {
let contentKey = getContentKey(cacheKey);
async writeToCache(cacheKeys: CacheKeyMap, contents: Blob, map: ?Blob) {
let size = 0;
let hash = crypto.createHash('md5');
let boundaryStr = '';
let hashReferences = [];
await this.options.cache.setStream(
cacheKeys.content,
blobToStream(contents).pipe(
new TapStream(buf => {
let str = boundaryStr + buf.toString();
hashReferences = hashReferences.concat(
str.match(HASH_REF_REGEX) ?? [],
);
size += buf.length;
hash.update(buf);
boundaryStr = str.slice(str.length - BOUNDARY_LENGTH);
}),
),
);
await this.options.cache.setStream(contentKey, blobToStream(contents));
if (map != null) {
let mapKey = getMapKey(cacheKey);
await this.options.cache.setStream(mapKey, blobToStream(map));
await this.options.cache.setStream(cacheKeys.map, blobToStream(map));
}
let info = {size, hash: hash.digest('hex'), hashReferences};
await this.options.cache.set(cacheKeys.info, info);
return info;
}

@@ -482,7 +494,12 @@ }

stream: Readable,
hashReferences: Array<string>,
hashRefToNameHash: Map<string, string>,
options: ?FileOptions,
): Promise<number> {
return new Promise((resolve, reject) => {
let initialStream = hashReferences.length
? stream.pipe(replaceStream(hashRefToNameHash))
: stream;
let fsStream = fs.createWriteStream(filePath, options);
stream
initialStream
.pipe(fsStream)

@@ -495,2 +512,24 @@ // $FlowFixMe

// Builds a Transform stream that rewrites HASH_REF_<md5> placeholders into
// their final name hashes. A rolling tail of BOUNDARY_LENGTH characters is
// held back between chunks so a reference split across two chunks still
// matches; flush() emits whatever tail remains.
function replaceStream(hashRefToNameHash) {
  let carry = '';
  const substitute = match => hashRefToNameHash.get(match) || match;
  return new Transform({
    transform(chunk, encoding, cb) {
      const replaced = (carry + chunk.toString()).replace(
        HASH_REF_REGEX,
        substitute,
      );
      const cut = replaced.length - BOUNDARY_LENGTH;
      carry = replaced.slice(cut);
      cb(null, replaced.slice(0, cut));
    },
    flush(cb) {
      cb(null, carry);
    },
  });
}
function getContentKey(cacheKey: string) {

@@ -503,1 +542,40 @@ return md5FromString(`${cacheKey}:content`);

}
function getInfoKey(cacheKey: string) {
return md5FromString(`${cacheKey}:info`);
}
// For bundles whose name hash was not resolved in the early-write pass
// (i.e. bundles that reference other bundles), combine the content hashes of
// every transitively referenced bundle and record the last 8 hex chars of the
// md5 of that combination as the bundle's name hash.
function assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap) {
  for (const bundle of bundles) {
    // Skip bundles already assigned a name hash earlier.
    if (hashRefToNameHash.get(bundle.hashReference) != null) continue;
    const hashes = getBundlesIncludedInHash(bundle.id, bundleInfoMap).map(
      bundleId => bundleInfoMap[bundleId].hash,
    );
    hashRefToNameHash.set(
      bundle.hashReference,
      md5FromString(hashes.join(':')).slice(-8),
    );
  }
}
// Collects the ids of `bundleId` plus every bundle it transitively references
// via hash references, in depth-first discovery order, without duplicates.
// `included` serves both as the visited set and the accumulator: the recursive
// call appends into it in place, so its return value must NOT be pushed again.
// (The previous `included.push(...recurse(...))` re-appended the shared array
// onto itself, duplicating every entry — growing the array combinatorially on
// deep reference chains and feeding duplicate hashes into the name hash.)
function getBundlesIncludedInHash(bundleId, bundleInfoMap, included = []) {
  included.push(bundleId);
  for (let hashRef of bundleInfoMap[bundleId].hashReferences) {
    let referencedId = getIdFromHashRef(hashRef);
    if (!included.includes(referencedId)) {
      // Recursion mutates `included` directly; no re-push needed.
      getBundlesIncludedInHash(referencedId, bundleInfoMap, included);
    }
  }
  return included;
}
function getIdFromHashRef(hashRef: string) {
return hashRef.slice(HASH_REF_PREFIX.length);
}

@@ -80,2 +80,6 @@ // @flow strict-local

get hashReference(): string {
return this.#bundle.hashReference;
}
get type(): string {

@@ -161,6 +165,2 @@ return this.#bundle.type;

}
getHash() {
return this.#bundleGraph.getHash(this.#bundle);
}
}

@@ -196,2 +196,6 @@

}
get displayName(): string {
return nullthrows(this.#bundle.displayName);
}
}

@@ -18,4 +18,3 @@ // @flow strict-local

import nullthrows from 'nullthrows';
import {DefaultWeakMap} from '@parcel/utils';
import {DefaultWeakMap, md5FromString} from '@parcel/utils';
import InternalBundleGraph from '../BundleGraph';

@@ -29,2 +28,3 @@ import {Bundle, bundleToInternalBundle} from './Bundle';

import {targetToInternalTarget} from './Target';
import {HASH_REF_PREFIX} from '../constants';

@@ -123,3 +123,5 @@ const internalMutableBundleGraphToMutableBundleGraph: DefaultWeakMap<

let bundleId = 'bundle:' + (opts.id ?? nullthrows(entryAsset?.value.id));
let bundleId = md5FromString(
'bundle:' + (opts.uniqueKey ?? nullthrows(entryAsset?.value.id)),
);
let bundleNode = {

@@ -130,2 +132,3 @@ type: 'bundle',

id: bundleId,
hashReference: HASH_REF_PREFIX + bundleId,
type: opts.type ?? nullthrows(entryAsset).value.type,

@@ -143,2 +146,3 @@ env: opts.env

name: null,
displayName: null,
stats: {size: 0, time: 0},

@@ -145,0 +149,0 @@ },

@@ -356,3 +356,3 @@ // @flow strict-local

// no re-exported symbols are used by ancestor dependencies and the re-exporting asset isn't
// using a wildcard.
// using a wildcard and isn't an entry (in library mode).
// This helps with performance building large libraries like `lodash-es`, which re-exports

@@ -365,4 +365,3 @@ // a huge number of functions since we can avoid even transforming the files that aren't used.

sideEffects === false &&
!dependency.symbols.has('*') &&
!dependency.env.isLibrary // TODO (T-232): improve the logic below and remove this.
!dependency.symbols.has('*')
) {

@@ -381,2 +380,3 @@ let depNode = this.assetGraph.getNode(dependency.id);

d =>
!(d.env.isLibrary && d.isEntry) &&
!d.symbols.has('*') &&

@@ -383,0 +383,0 @@ ![...d.symbols.keys()].some(symbol => {

@@ -298,2 +298,3 @@ // @flow strict-local

id: string,
hashReference: string,
type: string,

@@ -308,2 +309,3 @@ env: Environment,

name: ?string,
displayName: ?string,
pipeline: ?string,

@@ -310,0 +312,0 @@ stats: Stats,

@@ -59,3 +59,3 @@ // @flow strict-local

config,
cacheKey,
cacheKeys,
options,

@@ -66,3 +66,7 @@ }: {|

config: ParcelConfig,
cacheKey: string,
cacheKeys: {|
content: string,
map: string,
info: string,
|},
options: ParcelOptions,

@@ -77,3 +81,3 @@ |},

report: reportWorker.bind(null, workerApi),
}).packageAndWriteBundle(bundle, bundleGraph, cacheKey);
}).getBundleInfo(bundle, bundleGraph, cacheKeys);
}

@@ -17,2 +17,3 @@ // @flow strict-local

id: '123',
hashReference: '@@HASH_REFERENCE_123',
entryAssetIds: [],

@@ -23,2 +24,3 @@ type: 'js',

name: null,
displayName: null,
pipeline: null,

@@ -25,0 +27,0 @@ isEntry: null,

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc