

@memlab/core - npm package version comparison

Comparing version 1.1.18 to 1.1.19

dist/lib/charts/MemoryBarChart.d.ts

dist/index.d.ts

@@ -10,7 +10,7 @@ /**

*/
/** @internal */
export declare function registerPackage(): Promise<void>;
export * from './lib/Types';
export * from './lib/NodeHeap';
/** @internal */
export declare function registerPackage(): Promise<void>;
/** @internal */
export { default as config } from './lib/Config';

@@ -40,2 +40,4 @@ /** @internal */

/** @internal */
export { default as memoryBarChart } from './lib/charts/MemoryBarChart';
/** @internal */
export { default as modes } from './modes/RunningModes';

@@ -42,0 +44,0 @@ /** @internal */

@@ -38,5 +38,7 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.TraceFinder = exports.MultiIterationSeqClustering = exports.SequentialClustering = exports.EvaluationMetric = exports.NormalizedTrace = exports.leakClusterLogger = exports.ProcessManager = exports.modes = exports.constant = exports.analysis = exports.browserInfo = exports.serializer = exports.fileManager = exports.utils = exports.BaseOption = exports.info = exports.config = exports.registerPackage = void 0;
exports.TraceFinder = exports.MultiIterationSeqClustering = exports.SequentialClustering = exports.EvaluationMetric = exports.NormalizedTrace = exports.leakClusterLogger = exports.ProcessManager = exports.modes = exports.memoryBarChart = exports.constant = exports.analysis = exports.browserInfo = exports.serializer = exports.fileManager = exports.utils = exports.BaseOption = exports.info = exports.config = exports.registerPackage = void 0;
const path_1 = __importDefault(require("path"));
const PackageInfoLoader_1 = require("./lib/PackageInfoLoader");
__exportStar(require("./lib/Types"), exports);
__exportStar(require("./lib/NodeHeap"), exports);
/** @internal */

@@ -49,4 +51,2 @@ function registerPackage() {

exports.registerPackage = registerPackage;
__exportStar(require("./lib/Types"), exports);
__exportStar(require("./lib/NodeHeap"), exports);
/** @internal */

@@ -86,2 +86,5 @@ var Config_1 = require("./lib/Config");

/** @internal */
var MemoryBarChart_1 = require("./lib/charts/MemoryBarChart");
Object.defineProperty(exports, "memoryBarChart", { enumerable: true, get: function () { return __importDefault(MemoryBarChart_1).default; } });
/** @internal */
var RunningModes_1 = require("./modes/RunningModes");

@@ -88,0 +91,0 @@ Object.defineProperty(exports, "modes", { enumerable: true, get: function () { return __importDefault(RunningModes_1).default; } });
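
The index now re-exports two more internal modules, memoryBarChart and modes. A minimal sketch of reaching the new chart entry point, assuming a caller inside the memlab toolchain; both exports are tagged @internal, and plotMemoryBarChart() replaces the this.visualizeMemoryUsage() call in checkLeak shown further down in this diff:

// Sketch only: `memoryBarChart` is an @internal export, not documented public API.
import {memoryBarChart} from '@memlab/core';

// plot the memory bar chart for the current work dir
// (no-argument form, as used by MemoryAnalyst.checkLeak below)
memoryBarChart.plotMemoryBarChart();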

@@ -14,2 +14,3 @@ /**

export declare class FileManager {
private memlabConfigCache;
getDefaultWorkDir(): string;

@@ -67,2 +68,3 @@ generateTmpHeapDir(): string;

clearDataDirs(options?: FileOption): void;
removeSnapshotFiles(options?: FileOption): void;
emptyDirIfExists(dir: string): void;

@@ -69,0 +71,0 @@ emptyTraceLogDataDir(options?: FileOption): void;

@@ -16,3 +16,2 @@ "use strict";

exports.FileManager = void 0;
const minimist_1 = __importDefault(require("minimist"));
const fs_extra_1 = __importDefault(require("fs-extra"));

@@ -41,2 +40,5 @@ const os_1 = __importDefault(require("os"));

class FileManager {
constructor() {
this.memlabConfigCache = null;
}
getDefaultWorkDir() {

@@ -53,2 +55,3 @@ return path_1.default.join(this.getTmpDir(), 'memlab');

getWorkDir(options = FileManager.defaultFileOption) {
var _a;
// workDir options supercedes all the other options

@@ -58,3 +61,3 @@ if (options.workDir) {

}
// transient options supercedes other the CLI options
// transient options supercedes the other CLI options
if (options.transient) {

@@ -67,4 +70,7 @@ const idx = ++FileManager.transientInstanceIdx;

// workDir from the CLI options
const argv = (0, minimist_1.default)(process.argv.slice(2));
const workDir = argv['work-dir'] || this.getDefaultWorkDir();
const workDir = FileManager.defaultFileOption.workDir ||
(
// in case there is a transcient working directory generated
(_a = this.memlabConfigCache) === null || _a === void 0 ? void 0 : _a.workDir) ||
this.getDefaultWorkDir();
return path_1.default.resolve(workDir);

@@ -257,2 +263,19 @@ }

}
removeSnapshotFiles(options = FileManager.defaultFileOption) {
const curDataDir = this.getCurDataDir(options);
if (!fs_extra_1.default.existsSync(curDataDir)) {
return;
}
const dataSuffix = ['.heapsnapshot'];
const files = fs_extra_1.default.readdirSync(curDataDir);
for (const file of files) {
inner: for (const suffix of dataSuffix) {
if (file.endsWith(suffix)) {
const filepath = path_1.default.join(curDataDir, file);
fs_extra_1.default.unlinkSync(filepath);
break inner;
}
}
}
}
emptyDirIfExists(dir) {

@@ -318,2 +341,5 @@ if (this.isDirectory(dir)) {

initDirs(config, options = FileManager.defaultFileOption) {
// cache the last processed memlab config instance
// the instance should be a singleton
this.memlabConfigCache = config;
config.monoRepoDir = Constant_1.default.monoRepoDir;

@@ -323,2 +349,4 @@ // make sure getWorkDir is called first before

const workDir = this.getWorkDir(options);
// remember the current working directory
// especially if this is a transcient working directory
config.workDir = joinAndProcessDir(options, workDir);

@@ -325,0 +353,0 @@ options = Object.assign(Object.assign({}, options), { workDir });
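
FileManager gains a removeSnapshotFiles helper that deletes only the .heapsnapshot files from the current data directory, plus a memlabConfigCache so getWorkDir can fall back to the work dir of the last initialized config. A minimal usage sketch, assuming fileManager is the FileManager singleton re-exported from the package index and that FileOption accepts a workDir field (as getWorkDir above reads it):

// Sketch only: clean up heap snapshots without touching the rest of the run data.
import {fileManager} from '@memlab/core';

// delete *.heapsnapshot files under an explicit working directory
fileManager.removeSnapshotFiles({workDir: '/tmp/memlab-run'});

// with no options it falls back to FileManager.defaultFileOption
fileManager.removeSnapshotFiles();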

@@ -10,8 +10,16 @@ /**

*/
import type { E2EStepInfo, HeapNodeIdSet, IHeapSnapshot, IMemoryAnalystOptions, IMemoryAnalystSnapshotDiff, IOveralHeapInfo, LeakTracePathItem, Optional, IOveralLeakInfo, TraceCluster, ISerializedInfo } from './Types';
import type { E2EStepInfo, HeapNodeIdSet, IHeapSnapshot, IMemoryAnalystSnapshotDiff, IOveralHeapInfo, LeakTracePathItem, Optional, IOveralLeakInfo, TraceCluster, ISerializedInfo, DiffLeakOptions } from './Types';
import TraceFinder from '../paths/TraceFinder';
declare type DiffSnapshotsOptions = {
loadAllSnapshots?: boolean;
workDir?: string;
};
declare type WorkDirOptions = {
workDir?: string;
};
declare class MemoryAnalyst {
checkLeak(): Promise<ISerializedInfo[]>;
diffLeakByWorkDir(options: DiffLeakOptions): Promise<ISerializedInfo[]>;
diffMemoryLeakTraces(options: DiffLeakOptions): Promise<ISerializedInfo[]>;
detectMemoryLeaks(): Promise<ISerializedInfo[]>;
visualizeMemoryUsage(options?: IMemoryAnalystOptions): void;
focus(options?: {

@@ -21,3 +29,3 @@ file?: string;

shouldLoadCompleteSnapshot(tabsOrder: E2EStepInfo[], tab: E2EStepInfo): boolean;
diffSnapshots(loadAll?: boolean): Promise<IMemoryAnalystSnapshotDiff>;
diffSnapshots(options?: DiffSnapshotsOptions): Promise<IMemoryAnalystSnapshotDiff>;
preparePathFinder(snapshot: IHeapSnapshot): TraceFinder;

@@ -34,8 +42,7 @@ private dumpPageInteractionSummary;

private logLeakTraceSummary;
searchLeakedTraces(leakedNodeIds: HeapNodeIdSet, snapshot: IHeapSnapshot): Promise<{
paths: LeakTracePathItem[];
}>;
filterLeakPaths(leakedNodeIds: HeapNodeIdSet, snapshot: IHeapSnapshot, options?: WorkDirOptions): LeakTracePathItem[];
findLeakTraces(leakedNodeIds: HeapNodeIdSet, snapshot: IHeapSnapshot, options?: WorkDirOptions): Promise<LeakTracePathItem[]>;
/**
* Given a set of heap object ids, cluster them based on the similarity
* of their retainer traces and return a
* of their retainer traces
* @param leakedNodeIds

@@ -49,3 +56,3 @@ * @param snapshot

}): Promise<void>;
dumpPathByNodeId(leakedIdSet: HeapNodeIdSet, snapshot: IHeapSnapshot, nodeIdsInSnapshots: Array<HeapNodeIdSet>, id: number, pathLoaderFile: string, summaryFile: string): void;
dumpPathByNodeId(leakedIdSet: HeapNodeIdSet, snapshot: IHeapSnapshot, nodeIdsInSnapshots: Array<HeapNodeIdSet>, id: number, pathLoaderFile: string, summaryFile: string, options?: WorkDirOptions): void;
}

@@ -52,0 +59,0 @@ declare const _default: MemoryAnalyst;
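
The headline addition to MemoryAnalyst is the control/treatment workflow: diffLeakByWorkDir validates and charts two existing memlab working directories, then diffMemoryLeakTraces clusters their leak traces against each other. A hedged usage sketch, assuming `analysis` is the MemoryAnalyst singleton exported by @memlab/core and that DiffLeakOptions carries controlWorkDir and treatmentWorkDir (the two fields the implementation below reads):

// Sketch only: compare a control run against a treatment run by work dir.
import {analysis} from '@memlab/core';

async function compareRuns(): Promise<void> {
  // each directory must already contain the snapshots from a finished memlab run
  const leaks = await analysis.diffLeakByWorkDir({
    controlWorkDir: '/tmp/memlab-control',
    treatmentWorkDir: '/tmp/memlab-treatment',
  });
  console.log(`${leaks.length} serialized leak traces returned`);
}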

@@ -25,3 +25,8 @@ /**

const fs_1 = __importDefault(require("fs"));
const babar_1 = __importDefault(require("babar"));
const Config_1 = __importDefault(require("./Config"));
const Console_1 = __importDefault(require("./Console"));
const Serializer_1 = __importDefault(require("./Serializer"));
const Utils_1 = __importDefault(require("./Utils"));
const FileManager_1 = __importDefault(require("./FileManager"));
const MemoryBarChart_1 = __importDefault(require("./charts/MemoryBarChart"));
const LeakClusterLogger_1 = __importDefault(require("../logger/LeakClusterLogger"));

@@ -31,6 +36,2 @@ const LeakTraceDetailsLogger_1 = __importDefault(require("../logger/LeakTraceDetailsLogger"));

const TraceBucket_1 = __importDefault(require("../trace-cluster/TraceBucket"));
const Config_1 = __importDefault(require("./Config"));
const Console_1 = __importDefault(require("./Console"));
const Serializer_1 = __importDefault(require("./Serializer"));
const Utils_1 = __importDefault(require("./Utils"));
const LeakObjectFilter_1 = require("./leak-filters/LeakObjectFilter");

@@ -41,3 +42,3 @@ const MLTraceSimilarityStrategy_1 = __importDefault(require("../trace-cluster/strategies/MLTraceSimilarityStrategy"));

return __awaiter(this, void 0, void 0, function* () {
this.visualizeMemoryUsage();
MemoryBarChart_1.default.plotMemoryBarChart();
Utils_1.default.checkSnapshots();

@@ -47,54 +48,59 @@ return yield this.detectMemoryLeaks();

}
diffLeakByWorkDir(options) {
return __awaiter(this, void 0, void 0, function* () {
const controlSnapshotDir = FileManager_1.default.getCurDataDir({
workDir: options.controlWorkDir,
});
const treatmentSnapshotDir = FileManager_1.default.getCurDataDir({
workDir: options.treatmentWorkDir,
});
// check control working dir
Utils_1.default.checkSnapshots({ snapshotDir: controlSnapshotDir });
// check treatment working dir
Utils_1.default.checkSnapshots({ snapshotDir: treatmentSnapshotDir });
// display control and treatment memory
MemoryBarChart_1.default.plotMemoryBarChart(options);
return this.diffMemoryLeakTraces(options);
});
}
// find all unique pattern of leaks
diffMemoryLeakTraces(options) {
return __awaiter(this, void 0, void 0, function* () {
Config_1.default.dumpNodeInfo = false;
// diff snapshots and get control raw paths
let snapshotDiff = yield this.diffSnapshots({
loadAllSnapshots: true,
workDir: options.controlWorkDir,
});
const controlLeakPaths = this.filterLeakPaths(snapshotDiff.leakedHeapNodeIdSet, snapshotDiff.snapshot, { workDir: options.controlWorkDir });
const controlSnapshot = snapshotDiff.snapshot;
// diff snapshots and get treatment raw paths
snapshotDiff = yield this.diffSnapshots({
loadAllSnapshots: true,
workDir: options.treatmentWorkDir,
});
const treatmentLeakPaths = this.filterLeakPaths(snapshotDiff.leakedHeapNodeIdSet, snapshotDiff.snapshot, { workDir: options.controlWorkDir });
const treatmentSnapshot = snapshotDiff.snapshot;
Console_1.default.topLevel(`${controlLeakPaths.length} traces from control group`);
Console_1.default.topLevel(`${treatmentLeakPaths.length} traces from treatment group`);
const result = TraceBucket_1.default.clusterControlTreatmentPaths(controlLeakPaths, controlSnapshot, treatmentLeakPaths, treatmentSnapshot, Utils_1.default.aggregateDominatorMetrics, {
strategy: Config_1.default.isMLClustering
? new MLTraceSimilarityStrategy_1.default()
: undefined,
});
Console_1.default.midLevel(`MemLab found ${result.treatmentOnlyClusters.length} new leak(s) in the treatment group`);
yield this.serializeClusterUpdate(result.treatmentOnlyClusters);
// TODO (lgong): log leak traces
return [];
});
}
// find all unique pattern of leaks
detectMemoryLeaks() {
return __awaiter(this, void 0, void 0, function* () {
const snapshotDiff = yield this.diffSnapshots(true);
const snapshotDiff = yield this.diffSnapshots({ loadAllSnapshots: true });
Config_1.default.dumpNodeInfo = false;
const { paths } = yield this.searchLeakedTraces(snapshotDiff.leakedHeapNodeIdSet, snapshotDiff.snapshot);
const paths = yield this.findLeakTraces(snapshotDiff.leakedHeapNodeIdSet, snapshotDiff.snapshot);
return LeakTraceDetailsLogger_1.default.logTraces(snapshotDiff.leakedHeapNodeIdSet, snapshotDiff.snapshot, snapshotDiff.listOfLeakedHeapNodeIdSet, paths, Config_1.default.traceJsonOutDir);
});
}
visualizeMemoryUsage(options = {}) {
if (Config_1.default.useExternalSnapshot || options.snapshotDir) {
return;
}
const tabsOrder = Utils_1.default.loadTabsOrder();
// if memory usage data is incomplete, skip the visualization
for (const tab of tabsOrder) {
if (!(tab.JSHeapUsedSize > 0)) {
if (Config_1.default.verbose) {
Console_1.default.error('Memory usage data incomplete');
}
return;
}
}
const plotData = tabsOrder.map((tab, idx) => [
idx + 1,
((tab.JSHeapUsedSize / 100000) | 0) / 10,
]);
// the graph component cannot handle an array with a single element
while (plotData.length < 2) {
plotData.push([plotData.length + 1, 0]);
}
// plot visual settings
const minY = 1;
const maxY = plotData.reduce((m, v) => Math.max(m, v[1]), 0) * 1.15;
const yFractions = 1;
const yLabelWidth = 1 +
Math.max(minY.toFixed(yFractions).length, maxY.toFixed(yFractions).length);
const maxWidth = process.stdout.columns - 10;
const idealWidth = Math.max(2 * plotData.length + 2 * yLabelWidth, 10);
const plotWidth = Math.min(idealWidth, maxWidth);
Console_1.default.topLevel('Memory usage across all steps:');
Console_1.default.topLevel((0, babar_1.default)(plotData, {
color: 'green',
width: plotWidth,
height: 10,
xFractions: 0,
yFractions,
minY,
maxY,
}));
Console_1.default.topLevel('');
}
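// Illustrative note (not part of the diff): the plotted y-value above converts
// heap bytes to megabytes with one decimal digit, e.g.:
const JSHeapUsedSizeExample = 123_456_789;                          // bytes for one step
const plottedExample = ((JSHeapUsedSizeExample / 100000) | 0) / 10; // 123.4 (~123.4 MB)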
focus(options = {}) {

@@ -120,3 +126,3 @@ return __awaiter(this, void 0, void 0, function* () {

Utils_1.default.checkSnapshots();
const snapshotDiff = yield this.diffSnapshots(true);
const snapshotDiff = yield this.diffSnapshots({ loadAllSnapshots: true });
nodeIdsInSnapshots = snapshotDiff.listOfLeakedHeapNodeIdSet;

@@ -138,6 +144,6 @@ snapshotLeakedHeapNodeIdSet = snapshotDiff.leakedHeapNodeIdSet;

}
diffSnapshots(loadAll = false) {
diffSnapshots(options = {}) {
return __awaiter(this, void 0, void 0, function* () {
const nodeIdsInSnapshots = [];
const tabsOrder = Utils_1.default.loadTabsOrder();
const tabsOrder = Utils_1.default.loadTabsOrder(FileManager_1.default.getSnapshotSequenceMetaFile(options));
// a set keeping track of node ids generated before the target snapshot

@@ -148,3 +154,3 @@ const baselineIds = new Set();

let leakedHeapNodeIdSet = null;
const options = { verbose: true };
const parseSnapshotOptions = { verbose: true, workDir: options.workDir };
let snapshot = null;
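// Illustrative note (not part of the diff): diffSnapshots now takes a
// DiffSnapshotsOptions object instead of a bare boolean, and the optional
// workDir lets it read snapshots from another memlab working directory.
// Sketch of a caller, assuming `analysis` is the MemoryAnalyst singleton
// exported by @memlab/core:
import {analysis} from '@memlab/core';
async function loadDiff() {
  // 1.1.18: analysis.diffSnapshots(true)
  return analysis.diffSnapshots({loadAllSnapshots: true, workDir: '/tmp/memlab-run'});
}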

@@ -167,9 +173,9 @@ for (let i = 0; i < tabsOrder.length; i++) {

// in quick mode, there is no need to load all snapshots
if (!loadAll && !tab.type) {
if (!options.loadAllSnapshots && !tab.type) {
continue;
}
const file = Utils_1.default.getSnapshotFilePath(tab);
const file = Utils_1.default.getSnapshotFilePath(tab, options);
if (this.shouldLoadCompleteSnapshot(tabsOrder, tab)) {
// final snapshot needs to build node index
const opt = Object.assign({ buildNodeIdIndex: true }, options);
const opt = Object.assign(Object.assign({ buildNodeIdIndex: true }, parseSnapshotOptions), { workDir: options.workDir });
snapshot = yield Utils_1.default.getSnapshotFromFile(file, opt);

@@ -182,3 +188,3 @@ // record Ids in the snapshot

else {
idsInSnapshot = yield Utils_1.default.getSnapshotNodeIdsFromFile(file, options);
idsInSnapshot = yield Utils_1.default.getSnapshotNodeIdsFromFile(file, parseSnapshotOptions);
nodeIdsInSnapshots.pop();

@@ -246,6 +252,6 @@ nodeIdsInSnapshots.push(idsInSnapshot);

// summarize the page interaction and dump to the leak text summary file
dumpPageInteractionSummary() {
const tabsOrder = Utils_1.default.loadTabsOrder();
dumpPageInteractionSummary(options = {}) {
const tabsOrder = Utils_1.default.loadTabsOrder(FileManager_1.default.getSnapshotSequenceMetaFile(options));
const tabsOrderStr = Serializer_1.default.summarizeTabsOrder(tabsOrder);
fs_1.default.writeFileSync(Config_1.default.exploreResultFile, tabsOrderStr, 'UTF-8');
fs_1.default.writeFileSync(FileManager_1.default.getLeakSummaryFile(options), tabsOrderStr, 'UTF-8');
}

@@ -331,5 +337,5 @@ // summarize the leak and print the info in console

}
printHeapAndLeakInfo(leakedNodeIds, snapshot) {
printHeapAndLeakInfo(leakedNodeIds, snapshot, options = {}) {
// write page interaction summary to the leaks text file
this.dumpPageInteractionSummary();
this.dumpPageInteractionSummary(options);
// dump leak summry to console

@@ -343,3 +349,3 @@ this.dumpLeakSummaryToConsole(leakedNodeIds, snapshot);

}
logLeakTraceSummary(trace, nodeIdInPaths, snapshot) {
logLeakTraceSummary(trace, nodeIdInPaths, snapshot, options = {}) {
if (!Config_1.default.isFullRun) {

@@ -350,32 +356,36 @@ return;

const pathStr = Serializer_1.default.summarizePath(trace, nodeIdInPaths, snapshot);
fs_1.default.appendFileSync(Config_1.default.exploreResultFile, `\n\n${pathStr}\n\n`, 'UTF-8');
fs_1.default.appendFileSync(FileManager_1.default.getLeakSummaryFile(options), `\n\n${pathStr}\n\n`, 'UTF-8');
}
filterLeakPaths(leakedNodeIds, snapshot, options = {}) {
const finder = this.preparePathFinder(snapshot);
this.printHeapAndLeakInfo(leakedNodeIds, snapshot, options);
// get all leaked objects
this.filterLeakedObjects(leakedNodeIds, snapshot);
const nodeIdInPaths = new Set();
const paths = [];
let numOfLeakedObjects = 0;
let i = 0;
// analysis for each node
Utils_1.default.applyToNodes(leakedNodeIds, snapshot, node => {
if (!Config_1.default.isContinuousTest && ++i % 11 === 0) {
Console_1.default.overwrite(`progress: ${i} / ${leakedNodeIds.size} @${node.id}`);
}
// BFS search for path from the leaked node to GC roots
const p = finder.getPathToGCRoots(snapshot, node);
if (!p || !Utils_1.default.isInterestingPath(p)) {
return;
}
++numOfLeakedObjects;
paths.push(p);
this.logLeakTraceSummary(p, nodeIdInPaths, snapshot, options);
}, { reverse: true });
if (Config_1.default.verbose) {
Console_1.default.midLevel(`${numOfLeakedObjects} leaked objects`);
}
return paths;
}
// find unique paths of leaked nodes
searchLeakedTraces(leakedNodeIds, snapshot) {
findLeakTraces(leakedNodeIds, snapshot, options = {}) {
return __awaiter(this, void 0, void 0, function* () {
const finder = this.preparePathFinder(snapshot);
this.printHeapAndLeakInfo(leakedNodeIds, snapshot);
// get all leaked objects
this.filterLeakedObjects(leakedNodeIds, snapshot);
const nodeIdInPaths = new Set();
const paths = [];
let numOfLeakedObjects = 0;
let i = 0;
// analysis for each node
Utils_1.default.applyToNodes(leakedNodeIds, snapshot, node => {
if (!Config_1.default.isContinuousTest && ++i % 11 === 0) {
Console_1.default.overwrite(`progress: ${i} / ${leakedNodeIds.size} @${node.id}`);
}
// BFS search for path from the leaked node to GC roots
const p = finder.getPathToGCRoots(snapshot, node);
if (!p || !Utils_1.default.isInterestingPath(p)) {
return;
}
++numOfLeakedObjects;
paths.push(p);
this.logLeakTraceSummary(p, nodeIdInPaths, snapshot);
}, { reverse: true });
if (Config_1.default.verbose) {
Console_1.default.midLevel(`${numOfLeakedObjects} leaked objects`);
}
const paths = this.filterLeakPaths(leakedNodeIds, snapshot, options);
// cluster traces from the current run

@@ -394,5 +404,3 @@ const clusters = TraceBucket_1.default.clusterPaths(paths, snapshot, Utils_1.default.aggregateDominatorMetrics, {

}
return {
paths: clusters.map(c => c.path),
};
return clusters.map(c => c.path);
});

@@ -402,3 +410,3 @@ }

* Given a set of heap object ids, cluster them based on the similarity
* of their retainer traces and return a
* of their retainer traces
* @param leakedNodeIds

@@ -447,3 +455,3 @@ * @param snapshot

}
dumpPathByNodeId(leakedIdSet, snapshot, nodeIdsInSnapshots, id, pathLoaderFile, summaryFile) {
dumpPathByNodeId(leakedIdSet, snapshot, nodeIdsInSnapshots, id, pathLoaderFile, summaryFile, options = {}) {
Console_1.default.overwrite('start analysis...');

@@ -462,3 +470,3 @@ const finder = this.preparePathFinder(snapshot);

LeakTraceDetailsLogger_1.default.logTrace(leakedIdSet, snapshot, nodeIdsInSnapshots, path, pathLoaderFile);
const tabsOrder = Utils_1.default.loadTabsOrder();
const tabsOrder = Utils_1.default.loadTabsOrder(FileManager_1.default.getSnapshotSequenceMetaFile(options));
const interactionSummary = Serializer_1.default.summarizeTabsOrder(tabsOrder);

@@ -465,0 +473,0 @@ let pathSummary = Serializer_1.default.summarizePath(path, nodeIdInPaths, snapshot, { color: true });

@@ -98,3 +98,5 @@ /**

declare function getSingleSnapshotFileForAnalysis(): string;
declare function getSnapshotFilePath(tab: E2EStepInfo): string;
declare function getSnapshotFilePath(tab: E2EStepInfo, options?: {
workDir?: string;
}): string;
declare function equalOrMatch(v1: any, v2: any): boolean;

@@ -101,0 +103,0 @@ declare function getSnapshotFilePathWithTabType(type: string | RegExp): Nullable<string>;

@@ -1349,9 +1349,13 @@ "use strict";

}
function getSnapshotFilePath(tab) {
function getSnapshotFilePath(tab, options = {}) {
const fileName = `s${tab.idx}.heapsnapshot`;
if (options.workDir) {
return path_1.default.join(FileManager_1.default.getCurDataDir(options), fileName);
}
if (!Config_1.default.useExternalSnapshot) {
return path_1.default.join(Config_1.default.curDataDir, `s${tab.idx}.heapsnapshot`);
return path_1.default.join(Config_1.default.curDataDir, fileName);
}
// if we are loading snapshot from external snapshot dir
if (Config_1.default.externalSnapshotDir) {
return path_1.default.join(Config_1.default.externalSnapshotDir, `s${tab.idx}.heapsnapshot`);
return path_1.default.join(Config_1.default.externalSnapshotDir, fileName);
}

@@ -1358,0 +1362,0 @@ return Config_1.default.externalSnapshotFilePaths[tab.idx - 1];
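
getSnapshotFilePath is the utility-level counterpart of the workDir plumbing above: with the new option a snapshot path can be resolved against an arbitrary memlab working directory instead of the global Config state. A minimal sketch, assuming `utils` is the helper bundle exported by @memlab/core:

// Sketch only: resolve a snapshot file inside an explicit working directory.
import {utils} from '@memlab/core';

const tabs = utils.loadTabsOrder();   // E2EStepInfo[] describing the run's steps
const file = utils.getSnapshotFilePath(tabs[0], {workDir: '/tmp/memlab-control'});
// the file name is `s${tab.idx}.heapsnapshot`, located via FileManager.getCurDataDir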

@@ -10,3 +10,3 @@ /**

*/
import type { IHeapNode, IHeapSnapshot, LeakTrace, LeakTracePathItem, Optional, TraceCluster, TraceClusterDiff, IClusterStrategy } from '../lib/Types';
import type { IHeapNode, IHeapSnapshot, LeakTrace, LeakTracePathItem, Optional, TraceCluster, TraceClusterDiff, IClusterStrategy, ControlTreatmentClusterResult } from '../lib/Types';
import type { NormalizedTraceElement } from './TraceElement';

@@ -18,2 +18,5 @@ declare type AggregateNodeCb = (ids: Set<number>, snapshot: IHeapSnapshot, checkCb: (node: IHeapNode) => boolean, calculateCb: (node: IHeapNode) => number) => number;

constructor(p?: LeakTracePathItem | null, snapshot?: IHeapSnapshot | null);
static getPathLastNode(p: LeakTracePathItem, options?: {
untilFirstDetachedDOMElem?: boolean;
}): Optional<IHeapNode>;
static pathToTrace(p: LeakTracePathItem, options?: {

@@ -35,2 +38,8 @@ untilFirstDetachedDOMElem?: boolean;

}): TraceCluster[];
private static buildTraceToPathMap;
private static pushLeakPathToCluster;
private static initEmptyCluster;
static clusterControlTreatmentPaths(controlPaths: LeakTracePathItem[], controlSnapshot: IHeapSnapshot, treatmentPaths: LeakTracePathItem[], treatmentSnapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb, option?: {
strategy?: IClusterStrategy;
}): ControlTreatmentClusterResult;
static generateUnClassifiedClusters(paths: LeakTracePathItem[], snapshot: IHeapSnapshot, aggregateDominatorMetrics: AggregateNodeCb): TraceCluster[];

@@ -37,0 +46,0 @@ static loadCluster(): NormalizedTrace[];

@@ -41,2 +41,23 @@ "use strict";

}
static getPathLastNode(p, options = {}) {
const skipRest = !!options.untilFirstDetachedDOMElem;
const shouldSkip = (node) => {
// only consider the trace from GC root to the first detached element
// NOTE: do not use utils.isDetachedDOMNode, which relies on
// the fact that p.node is a HeapNode
return (skipRest &&
node.name.startsWith('Detached ') &&
node.name !== 'Detached InternalNode');
};
let curItem = p;
while (curItem.next) {
if (curItem.node) {
if (shouldSkip(curItem.node)) {
break;
}
}
curItem = curItem.next;
}
return curItem === null || curItem === void 0 ? void 0 : curItem.node;
}
// convert path to leak trace

@@ -118,2 +139,9 @@ static pathToTrace(p, options = {}) {

}
else {
// force sample objects with non-trvial self size
const lastNode = NormalizedTrace.getPathLastNode(p);
if (lastNode && lastNode.self_size >= 100000) {
ret.push(p);
}
}
}

@@ -238,2 +266,105 @@ return ret;

}
static buildTraceToPathMap(paths) {
const traceToPathMap = new Map();
for (const p of paths) {
const trace = NormalizedTrace.pathToTrace(p, {
untilFirstDetachedDOMElem: true,
});
traceToPathMap.set(trace, p);
}
return traceToPathMap;
}
static pushLeakPathToCluster(traceToPathMap, trace, cluster) {
// if this is a control path, update control cluster
const curPath = traceToPathMap.get(trace);
if (cluster.count === 0) {
cluster.path = curPath;
// add representative object id if there is one
const lastNode = trace[trace.length - 1];
if ('id' in lastNode) {
cluster.id = lastNode.id;
}
}
cluster.count = cluster.count + 1;
NormalizedTrace.addLeakedNodeToCluster(cluster, curPath);
}
static initEmptyCluster(snapshot) {
return {
path: {},
count: 0,
snapshot,
retainedSize: 0,
leakedNodeIds: new Set(),
};
}
static clusterControlTreatmentPaths(controlPaths, controlSnapshot, treatmentPaths, treatmentSnapshot, aggregateDominatorMetrics, option = {}) {
const result = {
controlOnlyClusters: [],
treatmentOnlyClusters: [],
hybridClusters: [],
};
Console_1.default.overwrite('Clustering leak traces');
if (controlPaths.length === 0 && treatmentPaths.length === 0) {
Console_1.default.midLevel('No leaks found');
return result;
}
// sample paths if there are too many
controlPaths = this.samplePaths(controlPaths);
treatmentPaths = this.samplePaths(treatmentPaths);
// build control trace to control path map
const controlTraceToPathMap = NormalizedTrace.buildTraceToPathMap(controlPaths);
const controlTraces = Array.from(controlTraceToPathMap.keys());
// build treatment trace to treatment path map
const treatmentTraceToPathMap = NormalizedTrace.buildTraceToPathMap(treatmentPaths);
const treatmentTraces = Array.from(treatmentTraceToPathMap.keys());
// cluster traces from both the control group and the treatment group
const { allClusters } = NormalizedTrace.diffTraces([...controlTraces, ...treatmentTraces], [], option);
// construct TraceCluster from clustering result
allClusters.forEach((traces) => {
var _a, _b;
const controlCluster = NormalizedTrace.initEmptyCluster(controlSnapshot);
const treatmentCluster = NormalizedTrace.initEmptyCluster(treatmentSnapshot);
for (const trace of traces) {
const normalizedTrace = trace;
if (controlTraceToPathMap.has(normalizedTrace)) {
NormalizedTrace.pushLeakPathToCluster(controlTraceToPathMap, normalizedTrace, controlCluster);
}
else {
NormalizedTrace.pushLeakPathToCluster(treatmentTraceToPathMap, normalizedTrace, treatmentCluster);
}
}
const controlClusterSize = (_a = controlCluster.count) !== null && _a !== void 0 ? _a : 0;
const treatmentClusterSize = (_b = treatmentCluster.count) !== null && _b !== void 0 ? _b : 0;
// calculate aggregated cluster size for control cluster
if (controlClusterSize > 0) {
this.calculateClusterRetainedSize(controlCluster, controlSnapshot, aggregateDominatorMetrics);
}
// calculate aggregated cluster size for treatment cluster
if (treatmentClusterSize > 0) {
this.calculateClusterRetainedSize(treatmentCluster, treatmentSnapshot, aggregateDominatorMetrics);
}
if (controlClusterSize === 0) {
result.treatmentOnlyClusters.push(treatmentCluster);
}
else if (treatmentClusterSize === 0) {
result.controlOnlyClusters.push(controlCluster);
}
else {
result.hybridClusters.push({
control: controlCluster,
treatment: treatmentCluster,
});
}
});
result.treatmentOnlyClusters.sort((c1, c2) => { var _a, _b; return ((_a = c2.retainedSize) !== null && _a !== void 0 ? _a : 0) - ((_b = c1.retainedSize) !== null && _b !== void 0 ? _b : 0); });
result.controlOnlyClusters.sort((c1, c2) => { var _a, _b; return ((_a = c2.retainedSize) !== null && _a !== void 0 ? _a : 0) - ((_b = c1.retainedSize) !== null && _b !== void 0 ? _b : 0); });
result.hybridClusters.sort((g1, g2) => {
var _a, _b, _c, _d;
return ((_a = g2.control.retainedSize) !== null && _a !== void 0 ? _a : 0) +
((_b = g2.treatment.retainedSize) !== null && _b !== void 0 ? _b : 0) -
((_c = g1.control.retainedSize) !== null && _c !== void 0 ? _c : 0) -
((_d = g1.treatment.retainedSize) !== null && _d !== void 0 ? _d : 0);
});
return result;
}
static generateUnClassifiedClusters(paths, snapshot, aggregateDominatorMetrics) {

@@ -240,0 +371,0 @@ return this.clusterPaths(paths, snapshot, aggregateDominatorMetrics, {
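
clusterControlTreatmentPaths clusters the control and treatment traces together and then partitions every cluster by which group its members came from: treatment-only clusters are the candidate new leaks that diffLeakByWorkDir reports, control-only clusters exist solely in the control run, and hybrid clusters appear in both. A hedged sketch of consuming the ControlTreatmentClusterResult shape used above:

// Sketch only: summarize a ControlTreatmentClusterResult (field names per the code above).
interface ClusterLike {
  retainedSize?: number;
}
interface ControlTreatmentResultLike {
  controlOnlyClusters: ClusterLike[];
  treatmentOnlyClusters: ClusterLike[];
  hybridClusters: Array<{control: ClusterLike; treatment: ClusterLike}>;
}

function summarize(result: ControlTreatmentResultLike): void {
  // treatment-only clusters are the "new leak(s) in the treatment group" logged above
  console.log(`treatment-only clusters: ${result.treatmentOnlyClusters.length}`);
  console.log(`control-only clusters:   ${result.controlOnlyClusters.length}`);
  console.log(`clusters in both groups: ${result.hybridClusters.length}`);
}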

{
"name": "@memlab/core",
"version": "1.1.18",
"version": "1.1.19",
"license": "MIT",

@@ -5,0 +5,0 @@ "description": "memlab core libraries",

Sorry, the diff of this file is too big to display
