@datadog/pprof
Comparing version 3.0.0-pre-b1bc08a to 3.0.0
@@ -20,3 +20,3 @@ /** | ||
export declare function getAllocationProfile(): AllocationProfileNode; | ||
export declare type NearHeapLimitCallback = (profile: AllocationProfileNode) => void; | ||
export type NearHeapLimitCallback = (profile: AllocationProfileNode) => void; | ||
export declare function monitorOutOfMemory(heapLimitExtensionSize: number, maxHeapLimitExtensionCount: number, dumpHeapProfileOnSdterr: boolean, exportCommand: Array<String> | undefined, callback: NearHeapLimitCallback | undefined, callbackMode: number): void; |
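The bindings declaration above spells out the full `monitorOutOfMemory` parameter list. Below is a minimal sketch of wiring up the out-of-memory monitor through the package-level `heap` export. This is an assumption, not confirmed by the diff: the index.d.ts hunk later in this comparison is truncated before the `heap` members are listed, and the wrapped callback is assumed to receive an already-serialized `Profile` per the `NearHeapLimitCallback` type shown further down. All numeric values are illustrative.

```typescript
// Sketch only. Assumes the package-level `heap` export re-exposes
// monitorOutOfMemory and CallbackMode from ./heap-profiler; this is not
// visible in the truncated index.d.ts hunk of this diff.
import { heap } from '@datadog/pprof';
import type { Profile } from 'pprof-format';

heap.monitorOutOfMemory(
  10 * 1024 * 1024, // heapLimitExtensionSize: extend the heap limit by 10 MiB at a time (illustrative)
  3,                // maxHeapLimitExtensionCount: give up after three extensions (illustrative)
  false,            // dumpHeapProfileOnSdterr: do not dump the profile to stderr
  undefined,        // exportCommand: no external command to spawn with the dump
  (profile: Profile) => {
    // NearHeapLimitCallback: receives the serialized heap profile when the
    // near-heap-limit condition fires.
    console.log(`near-heap-limit profile captured (${profile.sample.length} samples)`);
  },
  heap.CallbackMode.Async // only the Async constant is visible in this diff
);
```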
@@ -17,5 +17,28 @@ "use strict"; | ||
*/ | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | ||
Object.defineProperty(o, "default", { enumerable: true, value: v }); | ||
}) : function(o, v) { | ||
o["default"] = v; | ||
}); | ||
var __importStar = (this && this.__importStar) || function (mod) { | ||
if (mod && mod.__esModule) return mod; | ||
var result = {}; | ||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | ||
__setModuleDefault(result, mod); | ||
return result; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.monitorOutOfMemory = exports.getAllocationProfile = exports.stopSamplingHeapProfiler = exports.startSamplingHeapProfiler = void 0; | ||
const path = require("path"); | ||
const path = __importStar(require("path")); | ||
const findBinding = require('node-gyp-build'); | ||
@@ -22,0 +45,0 @@ const profiler = findBinding(path.join(__dirname, '..', '..')); |
@@ -39,3 +39,3 @@ /** | ||
export declare function stop(): void; | ||
export declare type NearHeapLimitCallback = (profile: Profile) => void; | ||
export type NearHeapLimitCallback = (profile: Profile) => void; | ||
export declare const CallbackMode: { | ||
@@ -42,0 +42,0 @@ Async: number; |
@@ -1,2 +0,1 @@ | ||
import cpuProfiler from './cpu-profiler'; | ||
import * as heapProfiler from './heap-profiler'; | ||
@@ -7,6 +6,9 @@ import * as timeProfiler from './time-profiler'; | ||
export { SourceMapper } from './sourcemapper/sourcemapper'; | ||
export declare const CpuProfiler: typeof cpuProfiler; | ||
export { setLogger } from './logger'; | ||
export declare const time: { | ||
profile: typeof timeProfiler.profile; | ||
start: typeof timeProfiler.start; | ||
stop: typeof timeProfiler.stop; | ||
setLabels: typeof timeProfiler.setLabels; | ||
isStarted: typeof timeProfiler.isStarted; | ||
}; | ||
@@ -13,0 +15,0 @@ export declare const heap: { |
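The index now exports `setLogger` in place of the removed `CpuProfiler`. A hedged sketch of hooking in a custom logger follows; the declared `Logger` type is not shown in this diff, so the object shape below is an assumption. The only level exercised anywhere in the diffed code is `debug`, called with either a plain string or a lazy `() => string`, so a real logger may need the other common levels as well.

```typescript
// Sketch only: assumes a structural Logger interface with at least a debug
// method. The source mapper passes debug either a string or a lazy thunk
// (see the "Non fatal error" call further down), so both are handled.
import { setLogger } from '@datadog/pprof';

setLogger({
  debug(message: string | (() => string)) {
    const text = typeof message === 'function' ? message() : message;
    process.stderr.write(`[pprof] ${text}\n`);
  },
});
```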
"use strict"; | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | ||
Object.defineProperty(o, "default", { enumerable: true, value: v }); | ||
}) : function(o, v) { | ||
o["default"] = v; | ||
}); | ||
var __importStar = (this && this.__importStar) || function (mod) { | ||
if (mod && mod.__esModule) return mod; | ||
var result = {}; | ||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | ||
__setModuleDefault(result, mod); | ||
return result; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.heap = exports.time = exports.CpuProfiler = exports.SourceMapper = exports.encodeSync = exports.encode = void 0; | ||
exports.heap = exports.time = exports.setLogger = exports.SourceMapper = exports.encodeSync = exports.encode = void 0; | ||
/** | ||
@@ -20,6 +43,5 @@ * Copyright 2019 Google Inc. All Rights Reserved. | ||
const fs_1 = require("fs"); | ||
const cpu_profiler_1 = require("./cpu-profiler"); | ||
const heapProfiler = require("./heap-profiler"); | ||
const heapProfiler = __importStar(require("./heap-profiler")); | ||
const profile_encoder_1 = require("./profile-encoder"); | ||
const timeProfiler = require("./time-profiler"); | ||
const timeProfiler = __importStar(require("./time-profiler")); | ||
var profile_encoder_2 = require("./profile-encoder"); | ||
@@ -30,6 +52,10 @@ Object.defineProperty(exports, "encode", { enumerable: true, get: function () { return profile_encoder_2.encode; } }); | ||
Object.defineProperty(exports, "SourceMapper", { enumerable: true, get: function () { return sourcemapper_1.SourceMapper; } }); | ||
exports.CpuProfiler = cpu_profiler_1.default; | ||
var logger_1 = require("./logger"); | ||
Object.defineProperty(exports, "setLogger", { enumerable: true, get: function () { return logger_1.setLogger; } }); | ||
exports.time = { | ||
profile: timeProfiler.profile, | ||
start: timeProfiler.start, | ||
stop: timeProfiler.stop, | ||
setLabels: timeProfiler.setLabels, | ||
isStarted: timeProfiler.isStarted, | ||
}; | ||
@@ -47,7 +73,7 @@ exports.heap = { | ||
if (module.parent && module.parent.id === 'internal/preload') { | ||
const stop = exports.time.start(); | ||
exports.time.start({}); | ||
process.on('exit', () => { | ||
// The process is going to terminate imminently. All work here needs to | ||
// be synchronous. | ||
const profile = stop(); | ||
const profile = exports.time.stop(); | ||
const buffer = (0, profile_encoder_1.encodeSync)(profile); | ||
@@ -54,0 +80,0 @@ (0, fs_1.writeFileSync)(`pprof-profile-${process.pid}.pb.gz`, buffer); |
@@ -19,5 +19,5 @@ "use strict"; | ||
exports.encodeSync = exports.encode = void 0; | ||
const pify = require("pify"); | ||
const util_1 = require("util"); | ||
const zlib_1 = require("zlib"); | ||
const gzipPromise = pify(zlib_1.gzip); | ||
const gzipPromise = (0, util_1.promisify)(zlib_1.gzip); | ||
async function encode(profile) { | ||
@@ -24,0 +24,0 @@ return gzipPromise(profile.encode()); |
@@ -195,6 +195,18 @@ "use strict"; | ||
const appendTimeEntryToSamples = (entry, samples) => { | ||
if (entry.node.hitCount > 0) { | ||
let unlabelledHits = entry.node.hitCount; | ||
for (const labelSet of entry.node.labelSets || []) { | ||
if (Object.keys(labelSet).length > 0) { | ||
const sample = new pprof_format_1.Sample({ | ||
locationId: entry.stack, | ||
value: [1, intervalNanos], | ||
label: buildLabels(labelSet, stringTable), | ||
}); | ||
samples.push(sample); | ||
unlabelledHits--; | ||
} | ||
} | ||
if (unlabelledHits > 0) { | ||
const sample = new pprof_format_1.Sample({ | ||
locationId: entry.stack, | ||
value: [entry.node.hitCount, entry.node.hitCount * intervalNanos], | ||
value: [unlabelledHits, unlabelledHits * intervalNanos], | ||
}); | ||
@@ -201,0 +213,0 @@ samples.push(sample); |
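The serializer change above splits a node's `hitCount` between its label sets and a residual unlabelled sample: every non-empty label set becomes its own `Sample` worth exactly one hit, and whatever hits remain are aggregated as before. A standalone, simplified sketch of that accounting (plain objects in place of pprof-format `Sample`; `buildLabels` and the string table are omitted):

```typescript
// Simplified illustration of the hit-splitting logic, not the package's code.
type LabelSet = Record<string, string | number>;

interface IllustrativeSample {
  value: [number, number]; // [hits, nanoseconds]
  labels?: LabelSet;
}

function splitHits(
  hitCount: number,
  labelSets: LabelSet[],
  intervalNanos: number
): IllustrativeSample[] {
  const samples: IllustrativeSample[] = [];
  let unlabelledHits = hitCount;
  for (const labelSet of labelSets) {
    if (Object.keys(labelSet).length > 0) {
      // Each non-empty label set accounts for exactly one hit.
      samples.push({ value: [1, intervalNanos], labels: labelSet });
      unlabelledHits--;
    }
  }
  if (unlabelledHits > 0) {
    // Remaining hits are aggregated into a single unlabelled sample,
    // exactly as the whole hitCount was before this change.
    samples.push({ value: [unlabelledHits, unlabelledHits * intervalNanos] });
  }
  return samples;
}

// 5 hits with 2 labelled sets => two [1, interval] samples plus one [3, 3 * interval] sample.
console.log(splitHits(5, [{ span_id: '1' }, { span_id: '2' }], 10_000_000));
```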
@@ -35,3 +35,4 @@ /** | ||
infoMap: Map<string, MapInfoCompiled>; | ||
static create(searchDirs: string[]): Promise<SourceMapper>; | ||
debug: boolean; | ||
static create(searchDirs: string[], debug?: boolean): Promise<SourceMapper>; | ||
/** | ||
@@ -45,3 +46,3 @@ * @param {Array.<string>} sourceMapPaths An array of paths to .map source map | ||
*/ | ||
constructor(); | ||
constructor(debug?: boolean); | ||
/** | ||
@@ -48,0 +49,0 @@ * Used to get the information about the transpiled file from a given input |
@@ -17,2 +17,28 @@ "use strict"; | ||
*/ | ||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
var desc = Object.getOwnPropertyDescriptor(m, k); | ||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||
desc = { enumerable: true, get: function() { return m[k]; } }; | ||
} | ||
Object.defineProperty(o, k2, desc); | ||
}) : (function(o, m, k, k2) { | ||
if (k2 === undefined) k2 = k; | ||
o[k2] = m[k]; | ||
})); | ||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | ||
Object.defineProperty(o, "default", { enumerable: true, value: v }); | ||
}) : function(o, v) { | ||
o["default"] = v; | ||
}); | ||
var __importStar = (this && this.__importStar) || function (mod) { | ||
if (mod && mod.__esModule) return mod; | ||
var result = {}; | ||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | ||
__setModuleDefault(result, mod); | ||
return result; | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
@@ -24,20 +50,14 @@ exports.SourceMapper = void 0; | ||
// code to generated code. | ||
const fs = require("fs"); | ||
const path = require("path"); | ||
// Apparently the source-map module feature-detects the browser by checking | ||
// if the fetch function exists. Because it now exists in Node.js v18, the | ||
// source-map module thinks it's running in a browser and doesn't work. | ||
const desc = Object.getOwnPropertyDescriptor(globalThis, 'fetch'); | ||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment | ||
// @ts-ignore | ||
delete globalThis.fetch; | ||
const sourceMap = require("source-map"); | ||
if (desc) { | ||
Object.defineProperty(globalThis, 'fetch', desc); | ||
} | ||
const pify = require('pify'); | ||
const pLimit = require('p-limit'); | ||
const readFile = pify(fs.readFile); | ||
const fs = __importStar(require("fs")); | ||
const path = __importStar(require("path")); | ||
const sourceMap = __importStar(require("source-map")); | ||
const logger_1 = require("../logger"); | ||
const p_limit_1 = __importDefault(require("p-limit")); | ||
const readFile = fs.promises.readFile; | ||
const CONCURRENCY = 10; | ||
const MAP_EXT = '.map'; | ||
function error(msg) { | ||
logger_1.logger.debug(`Error: ${msg}`); | ||
return new Error(msg); | ||
} | ||
/** | ||
@@ -50,7 +70,7 @@ * @param {!Map} infoMap The map that maps input source files to | ||
*/ | ||
async function processSourceMap(infoMap, mapPath) { | ||
async function processSourceMap(infoMap, mapPath, debug) { | ||
// this handles the case when the path is undefined, null, or | ||
// the empty string | ||
if (!mapPath || !mapPath.endsWith(MAP_EXT)) { | ||
throw new Error(`The path "${mapPath}" does not specify a source map file`); | ||
throw error(`The path "${mapPath}" does not specify a source map file`); | ||
} | ||
@@ -63,3 +83,3 @@ mapPath = path.normalize(mapPath); | ||
catch (e) { | ||
throw new Error('Could not read source map file ' + mapPath + ': ' + e); | ||
throw error('Could not read source map file ' + mapPath + ': ' + e); | ||
} | ||
@@ -77,3 +97,3 @@ let consumer; | ||
catch (e) { | ||
throw new Error('An error occurred while reading the ' + | ||
throw error('An error occurred while reading the ' + | ||
'sourceMap file ' + | ||
@@ -84,28 +104,52 @@ mapPath + | ||
} | ||
/* | ||
* If the source map file defines a "file" attribute, use it as | ||
/* If the source map file defines a "file" attribute, use it as | ||
* the output file where the path is relative to the directory | ||
* containing the map file. Otherwise, use the name of the output | ||
* file (with the .map extension removed) as the output file. | ||
* With nextjs/webpack, when there are subdirectories in `pages` directory, | ||
* the generated source maps do not reference correctly the generated files | ||
* in their `file` property. | ||
* For example if the generated file / source maps have paths: | ||
* <root>/pages/sub/foo.js(.map) | ||
* foo.js.map will have ../pages/sub/foo.js as `file` property instead of | ||
* ../../pages/sub/foo.js | ||
* To workaround this, check first if file referenced in `file` property | ||
* exists and if it does not, check if generated file exists alongside the | ||
* source map file. | ||
*/ | ||
const dir = path.dirname(mapPath); | ||
const generatedBase = consumer.file | ||
? consumer.file | ||
: path.basename(mapPath, MAP_EXT); | ||
const generatedPath = path.resolve(dir, generatedBase); | ||
infoMap.set(generatedPath, { mapFileDir: dir, mapConsumer: consumer }); | ||
const generatedPathCandidates = []; | ||
if (consumer.file) { | ||
generatedPathCandidates.push(path.resolve(dir, consumer.file)); | ||
} | ||
const samePath = path.resolve(dir, path.basename(mapPath, MAP_EXT)); | ||
if (generatedPathCandidates.length === 0 || | ||
generatedPathCandidates[0] !== samePath) { | ||
generatedPathCandidates.push(samePath); | ||
} | ||
for (const generatedPath of generatedPathCandidates) { | ||
try { | ||
await fs.promises.access(generatedPath, fs.constants.F_OK); | ||
infoMap.set(generatedPath, { mapFileDir: dir, mapConsumer: consumer }); | ||
if (debug) { | ||
logger_1.logger.debug(`Loaded source map for ${generatedPath} => ${mapPath}`); | ||
} | ||
return; | ||
} | ||
catch (err) { | ||
if (debug) { | ||
logger_1.logger.debug(`Generated path ${generatedPath} does not exist`); | ||
} | ||
} | ||
} | ||
if (debug) { | ||
logger_1.logger.debug(`Unable to find generated file for ${mapPath}`); | ||
} | ||
} | ||
class SourceMapper { | ||
/** | ||
* @param {Array.<string>} sourceMapPaths An array of paths to .map source map | ||
* files that should be processed. The paths should be relative to the | ||
* current process's current working directory | ||
* @param {Logger} logger A logger that reports errors that occurred while | ||
* processing the given source map files | ||
* @constructor | ||
*/ | ||
constructor() { | ||
this.infoMap = new Map(); | ||
} | ||
static async create(searchDirs) { | ||
static async create(searchDirs, debug = false) { | ||
if (debug) { | ||
logger_1.logger.debug(`Looking for source map files in dirs: [${searchDirs.join(', ')}]`); | ||
} | ||
const mapFiles = []; | ||
@@ -120,8 +164,23 @@ for (const dir of searchDirs) { | ||
catch (e) { | ||
throw new Error(`failed to get source maps from ${dir}: ${e}`); | ||
throw error(`failed to get source maps from ${dir}: ${e}`); | ||
} | ||
} | ||
return createFromMapFiles(mapFiles); | ||
if (debug) { | ||
logger_1.logger.debug(`Found source map files: [${mapFiles.join(', ')}]`); | ||
} | ||
return createFromMapFiles(mapFiles, debug); | ||
} | ||
/** | ||
* @param {Array.<string>} sourceMapPaths An array of paths to .map source map | ||
* files that should be processed. The paths should be relative to the | ||
* current process's current working directory | ||
* @param {Logger} logger A logger that reports errors that occurred while | ||
* processing the given source map files | ||
* @constructor | ||
*/ | ||
constructor(debug = false) { | ||
this.infoMap = new Map(); | ||
this.debug = debug; | ||
} | ||
/** | ||
* Used to get the information about the transpiled file from a given input | ||
@@ -182,2 +241,5 @@ * source file provided there isn't any ambiguity with associating the input | ||
if (entry === null) { | ||
if (this.debug) { | ||
logger_1.logger.debug(`Source map lookup failed: no map found for ${location.file} (normalized: ${inputPath})`); | ||
} | ||
return location; | ||
@@ -187,28 +249,30 @@ } | ||
line: location.line, | ||
column: location.column, | ||
bias: sourceMap.SourceMapConsumer.LEAST_UPPER_BOUND, | ||
column: location.column > 0 ? location.column - 1 : 0, // SourceMapConsumer expects column to be 0-based | ||
}; | ||
// TODO: Determine how to remove the explicit cast here. | ||
const consumer = entry.mapConsumer; | ||
let pos = consumer.originalPositionFor(generatedPos); | ||
const pos = consumer.originalPositionFor(generatedPos); | ||
if (pos.source === null) { | ||
generatedPos.bias = sourceMap.SourceMapConsumer.GREATEST_LOWER_BOUND; | ||
pos = consumer.originalPositionFor(generatedPos); | ||
} | ||
if (pos.source === null) { | ||
if (this.debug) { | ||
logger_1.logger.debug(`Source map lookup failed for ${location.name}(${location.file}:${location.line}:${location.column})`); | ||
} | ||
return location; | ||
} | ||
return { | ||
const loc = { | ||
file: path.resolve(entry.mapFileDir, pos.source), | ||
line: pos.line || undefined, | ||
name: pos.name || location.name, | ||
column: pos.column || undefined, | ||
column: pos.column === null ? undefined : pos.column + 1, // convert column back to 1-based | ||
}; | ||
if (this.debug) { | ||
logger_1.logger.debug(`Source map lookup succeeded for ${location.name}(${location.file}:${location.line}:${location.column}) => ${loc.name}(${loc.file}:${loc.line}:${loc.column})`); | ||
} | ||
return loc; | ||
} | ||
} | ||
exports.SourceMapper = SourceMapper; | ||
async function createFromMapFiles(mapFiles) { | ||
const limit = pLimit(CONCURRENCY); | ||
const mapper = new SourceMapper(); | ||
const promises = mapFiles.map(mapPath => limit(() => processSourceMap(mapper.infoMap, mapPath))); | ||
async function createFromMapFiles(mapFiles, debug) { | ||
const limit = (0, p_limit_1.default)(CONCURRENCY); | ||
const mapper = new SourceMapper(debug); | ||
const promises = mapFiles.map(mapPath => limit(() => processSourceMap(mapper.infoMap, mapPath, debug))); | ||
try { | ||
@@ -218,3 +282,3 @@ await Promise.all(promises); | ||
catch (err) { | ||
throw new Error('An error occurred while processing the source map files' + err); | ||
throw error('An error occurred while processing the source map files' + err); | ||
} | ||
@@ -253,2 +317,5 @@ return mapper; | ||
} | ||
else { | ||
logger_1.logger.debug(() => `Non fatal error: ${error}`); | ||
} | ||
} | ||
@@ -255,0 +322,0 @@ } |
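`SourceMapper.create` now takes an optional `debug` flag that routes scan progress and lookup failures through the package logger. A short sketch of creating a mapper and handing it to the wall profiler via the `sourceMapper` option declared in the time-profiler hunk below; the `./dist` search directory is illustrative.

```typescript
// Sketch: directory name is illustrative; SourceMapper and time are both
// exported from the package index shown earlier in this diff.
import { SourceMapper, time } from '@datadog/pprof';

async function profileWithSourceMaps() {
  // debug=true is the new second argument: it logs scan progress and
  // lookup failures at debug level instead of staying silent.
  const sourceMapper = await SourceMapper.create(['./dist'], true);

  // sourceMapper is an optional TimeProfilerOptions field, so sampled
  // locations are mapped back to the original sources.
  return time.profile({
    durationMillis: 10_000,
    sourceMapper,
  });
}
```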
@@ -17,11 +17,11 @@ /** | ||
import { SourceMapper } from './sourcemapper/sourcemapper'; | ||
declare type Microseconds = number; | ||
declare type Milliseconds = number; | ||
import { LabelSet } from './v8-types'; | ||
type Microseconds = number; | ||
type Milliseconds = number; | ||
export interface TimeProfilerOptions { | ||
/** time in milliseconds for which to collect profile. */ | ||
durationMillis: Milliseconds; | ||
durationMillis?: Milliseconds; | ||
/** average time in microseconds between samples */ | ||
intervalMicros?: Microseconds; | ||
sourceMapper?: SourceMapper; | ||
name?: string; | ||
/** | ||
@@ -34,5 +34,9 @@ * This configuration option is experimental. | ||
lineNumbers?: boolean; | ||
customLabels?: boolean; | ||
} | ||
export declare function profile(options: TimeProfilerOptions): Promise<import("pprof-format").Profile>; | ||
export declare function start(intervalMicros?: Microseconds, name?: string, sourceMapper?: SourceMapper, lineNumbers?: boolean): (restart?: boolean) => import("pprof-format").Profile; | ||
export declare function profile({ intervalMicros, durationMillis, sourceMapper, lineNumbers, customLabels, }: TimeProfilerOptions): Promise<import("pprof-format").Profile>; | ||
export declare function start({ intervalMicros, durationMillis, sourceMapper, lineNumbers, customLabels, }: TimeProfilerOptions): void; | ||
export declare function stop(restart?: boolean): import("pprof-format").Profile; | ||
export declare function setLabels(labels?: LabelSet): void; | ||
export declare function isStarted(): boolean; | ||
export {}; |
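The wall profiler is now a module-level singleton: `start()` takes the same options object as `profile()`, `setLabels()` attaches labels to subsequent samples, and `stop(restart)` serializes what was collected and can keep the profiler running. A sketch of that flow follows, under two assumptions not spelled out in this diff: a `LabelSet` is a flat map of string keys, and `customLabels: true` is required for `setLabels()` to take effect.

```typescript
// Sketch of the new module-level wall profiler flow.
import { time } from '@datadog/pprof';

if (!time.isStarted()) {
  // start() now takes a TimeProfilerOptions object and throws if the
  // profiler is already running, hence the isStarted() guard.
  time.start({ intervalMicros: 1_000, customLabels: true });
}

// Label samples taken while handling a request...
time.setLabels({ endpoint: 'GET /users' });
// ...and clear the labels again afterwards.
time.setLabels();

// stop(true) serializes what has been collected and restarts the profiler,
// so collection continues between periodic exports.
const profile = time.stop(true);
console.log(`wall profile with ${profile.sample.length} samples`);
```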
@@ -17,57 +17,68 @@ "use strict"; | ||
*/ | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.start = exports.profile = void 0; | ||
const delay_1 = require("delay"); | ||
exports.isStarted = exports.setLabels = exports.stop = exports.start = exports.profile = void 0; | ||
const delay_1 = __importDefault(require("delay")); | ||
const profile_serializer_1 = require("./profile-serializer"); | ||
const time_profiler_bindings_1 = require("./time-profiler-bindings"); | ||
const DEFAULT_INTERVAL_MICROS = 1000; | ||
const majorVersion = process.version.slice(1).split('.').map(Number)[0]; | ||
async function profile(options) { | ||
const stop = start(options.intervalMicros || DEFAULT_INTERVAL_MICROS, options.name, options.sourceMapper, options.lineNumbers); | ||
await (0, delay_1.default)(options.durationMillis); | ||
const DEFAULT_DURATION_MILLIS = 60000; | ||
let gProfiler; | ||
let gSourceMapper; | ||
let gIntervalMicros; | ||
/** Make sure to stop profiler before node shuts down, otherwise profiling | ||
* signal might cause a crash if it occurs during shutdown */ | ||
process.once('exit', () => { | ||
if (isStarted()) | ||
stop(); | ||
}); | ||
async function profile({ intervalMicros = DEFAULT_INTERVAL_MICROS, durationMillis = DEFAULT_DURATION_MILLIS, sourceMapper, lineNumbers = false, customLabels = false, }) { | ||
start({ | ||
intervalMicros, | ||
durationMillis, | ||
sourceMapper, | ||
lineNumbers, | ||
customLabels, | ||
}); | ||
await (0, delay_1.default)(durationMillis); | ||
return stop(); | ||
} | ||
exports.profile = profile; | ||
function ensureRunName(name) { | ||
return name || `pprof-${Date.now()}-${Math.random()}`; | ||
// Temporarily retained for backwards compatibility with older tracer | ||
function start({ intervalMicros = DEFAULT_INTERVAL_MICROS, durationMillis = DEFAULT_DURATION_MILLIS, sourceMapper, lineNumbers = false, customLabels = false, }) { | ||
if (gProfiler) { | ||
throw new Error('Wall profiler is already started'); | ||
} | ||
gProfiler = new time_profiler_bindings_1.TimeProfiler(intervalMicros, durationMillis * 1000, lineNumbers, customLabels); | ||
gProfiler.start(); | ||
gSourceMapper = sourceMapper; | ||
gIntervalMicros = intervalMicros; | ||
} | ||
// NOTE: refreshing doesn't work if giving a profile name. | ||
function start(intervalMicros = DEFAULT_INTERVAL_MICROS, name, sourceMapper, lineNumbers = true) { | ||
const profiler = new time_profiler_bindings_1.TimeProfiler(intervalMicros); | ||
let runName = start(); | ||
return majorVersion < 16 ? stopOld : stop; | ||
function start() { | ||
const runName = ensureRunName(name); | ||
profiler.start(runName, lineNumbers); | ||
return runName; | ||
exports.start = start; | ||
function stop(restart = false) { | ||
if (!gProfiler) { | ||
throw new Error('Wall profiler is not started'); | ||
} | ||
// Node.js versions prior to v16 leak memory if not disposed and recreated | ||
// between each profile. As disposing deletes current profile data too, | ||
// we must stop then dispose then start. | ||
function stopOld(restart = false) { | ||
const result = profiler.stop(runName, lineNumbers); | ||
profiler.dispose(); | ||
if (restart) { | ||
runName = start(); | ||
} | ||
return (0, profile_serializer_1.serializeTimeProfile)(result, intervalMicros, sourceMapper, true); | ||
const profile = gProfiler.stop(restart); | ||
const serialized_profile = (0, profile_serializer_1.serializeTimeProfile)(profile, gIntervalMicros, gSourceMapper, true); | ||
if (!restart) { | ||
gProfiler = undefined; | ||
gSourceMapper = undefined; | ||
} | ||
// For Node.js v16+, we want to start the next profile before we stop the | ||
// current one as otherwise the active profile count could reach zero which | ||
// means V8 might tear down the symbolizer thread and need to start it again. | ||
function stop(restart = false) { | ||
let nextRunName; | ||
if (restart) { | ||
nextRunName = start(); | ||
} | ||
const result = profiler.stop(runName, lineNumbers); | ||
if (nextRunName) { | ||
runName = nextRunName; | ||
} | ||
if (!restart) | ||
profiler.dispose(); | ||
return (0, profile_serializer_1.serializeTimeProfile)(result, intervalMicros, sourceMapper, true); | ||
return serialized_profile; | ||
} | ||
exports.stop = stop; | ||
function setLabels(labels) { | ||
if (!gProfiler) { | ||
throw new Error('Wall profiler is not started'); | ||
} | ||
gProfiler.labels = labels; | ||
} | ||
exports.start = start; | ||
exports.setLabels = setLabels; | ||
function isStarted() { | ||
return !!gProfiler; | ||
} | ||
exports.isStarted = isStarted; | ||
//# sourceMappingURL=time-profiler.js.map |
@@ -33,2 +33,3 @@ /** | ||
hitCount: number; | ||
labelSets?: LabelSet[]; | ||
} | ||
@@ -35,0 +36,0 @@ export interface AllocationProfileNode extends ProfileNode { |
{ | ||
"name": "@datadog/pprof", | ||
"version": "3.0.0-pre-b1bc08a", | ||
"version": "3.0.0", | ||
"description": "pprof support for Node.js", | ||
@@ -11,4 +11,5 @@ "repository": "datadog/pprof-nodejs", | ||
"rebuild": "node-gyp rebuild --jobs=max", | ||
"test:js": "nyc mocha out/test/test-*.js", | ||
"test:js": "nyc mocha -r source-map-support/register out/test/test-*.js", | ||
"test:cpp": "node scripts/cctest.js", | ||
"test:wall": "nyc mocha -r source-map-support/register out/test/test-time-profiler.js", | ||
"test": "npm run test:js && npm run test:cpp", | ||
@@ -20,6 +21,4 @@ "codecov": "nyc report --reporter=json && codecov -f coverage/*.json", | ||
"format": "clang-format --style file -i --glob='bindings/**/*.{h,hh,cpp,cc}'", | ||
"prebuild": "node scripts/prebuild.js", | ||
"prebuilds": "node scripts/prebuilds.js", | ||
"prepare": "npm run compile && npm run rebuild", | ||
"pretest": "npm run compile && npm run rebuild" | ||
"pretest": "npm run compile" | ||
}, | ||
@@ -32,45 +31,29 @@ "author": { | ||
"delay": "^5.0.0", | ||
"node-gyp-build": "^3.9.0", | ||
"node-gyp-build": "<4.0", | ||
"p-limit": "^3.1.0", | ||
"pify": "^5.0.0", | ||
"pprof-format": "^2.0.6", | ||
"source-map": "^0.7.3", | ||
"split": "^1.0.1" | ||
"pprof-format": "^2.0.7", | ||
"source-map": "^0.7.4" | ||
}, | ||
"devDependencies": { | ||
"@types/mocha": "^9.1.1", | ||
"@types/node": "^18.0.6", | ||
"@types/p-limit": "^2.0.0", | ||
"@types/pify": "^5.0.0", | ||
"@types/pretty-ms": "^5.0.1", | ||
"@types/request": "^2.47.1", | ||
"@types/sinon": "^10.0.0", | ||
"@types/mocha": "^10.0.1", | ||
"@types/node": ">=12", | ||
"@types/sinon": "^10.0.15", | ||
"@types/tmp": "^0.2.3", | ||
"@typescript-eslint/eslint-plugin": "^5.38.0", | ||
"axios": "^0.27.2", | ||
"checksum": "^1.0.0", | ||
"@typescript-eslint/eslint-plugin": "^5.60.1", | ||
"clang-format": "^1.8.0", | ||
"codecov": "^3.0.0", | ||
"codecov": "^3.8.2", | ||
"deep-copy": "^1.4.2", | ||
"eslint-config-standard": "^17.0.0", | ||
"eslint-config-standard": "^17.1.0", | ||
"eslint-plugin-import": "^2.26.0", | ||
"eslint-plugin-n": "^15.2.0", | ||
"eslint-plugin-promise": "^6.0.0", | ||
"glob": "^8.0.3", | ||
"gts": "^3.0.0", | ||
"eslint-plugin-n": "^16.0.1", | ||
"eslint-plugin-promise": "^6.1.1", | ||
"gts": "^4.0.1", | ||
"js-green-licenses": "^4.0.0", | ||
"linkinator": "^4.0.2", | ||
"mkdirp": "^1.0.4", | ||
"mocha": "^9.2.2", | ||
"mocha": "^10.2.0", | ||
"nan": "^2.17.0", | ||
"nyc": "^15.0.0", | ||
"protobufjs-cli": "^1.0.2", | ||
"rimraf": "^3.0.2", | ||
"semver": "^7.3.5", | ||
"sinon": "^14.0.0", | ||
"source-map-support": "^0.5.12", | ||
"tar": "^6.1.0", | ||
"nyc": "^15.1.0", | ||
"sinon": "^15.2.0", | ||
"source-map-support": "^0.5.21", | ||
"tmp": "0.2.1", | ||
"ts-mockito": "^2.2.5", | ||
"typescript": "^4.7.4" | ||
"typescript": "<5.1" | ||
}, | ||
@@ -77,0 +60,0 @@ "files": [ |
# pprof support for Node.js | ||
[![NPM Version][npm-image]][npm-url] | ||
[![Build Status][circle-image]][circle-url] | ||
[![Build Status][build-image]][build-url] | ||
[![Known Vulnerabilities][snyk-image]][snyk-url] | ||
@@ -111,4 +111,4 @@ | ||
[circle-image]: https://circleci.com/gh/google/pprof-nodejs.svg?style=svg | ||
[circle-url]: https://circleci.com/gh/google/pprof-nodejs | ||
[build-image]: https://github.com/Datadog/pprof-nodejs/actions/workflows/build.yml/badge.svg?branch=main | ||
[build-url]: https://github.com/Datadog/pprof-nodejs/actions/workflows/build.yml | ||
[coveralls-image]: https://coveralls.io/repos/google/pprof-nodejs/badge.svg?branch=main&service=github | ||
@@ -115,0 +115,0 @@ [npm-image]: https://badge.fury.io/js/pprof.svg |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
No v1
Quality: Package is not semver >=1. This means it is not stable and does not support ^ ranges.
Found 1 instance in 1 package
- Removed pify@^5.0.0
- Removed split@^1.0.1
- Removed pify@5.0.0 (transitive)
- Removed split@1.0.1 (transitive)
- Removed through@2.3.8 (transitive)
- Updated node-gyp-build@<4.0
- Updated pprof-format@^2.0.7
- Updated source-map@^0.7.4