@datadog/build-plugin
Comparing version 0.3.6 to 0.4.0
@@ -7,3 +7,3 @@ "use strict"; | ||
const webpack_1 = require("../webpack"); | ||
const testHelpers_ignore_1 = require("./testHelpers.ignore"); | ||
const testHelpers_1 = require("./helpers/testHelpers"); | ||
describe('webpack', () => { | ||
@@ -17,3 +17,3 @@ test('It should initialise correctly', () => { | ||
const plugin = new webpack_1.BuildPlugin({ | ||
hooks: ['./src/__tests__/customHook.ignore.ts'], | ||
hooks: ['./src/__tests__/mocks/customHook.ts'], | ||
}); | ||
@@ -29,3 +29,3 @@ expect(plugin.hooks.length).toBe(4); | ||
const executePlugin = (plugin) => { | ||
plugin.apply(testHelpers_ignore_1.mockCompiler); | ||
plugin.apply(testHelpers_1.mockCompiler); | ||
}; | ||
@@ -32,0 +32,0 @@ executePlugin(plugin1); |
@@ -20,3 +20,3 @@ "use strict";
const path_1 = __importDefault(require("path"));
const testHelpers_ignore_1 = require("../../__tests__/testHelpers.ignore");
const testHelpers_1 = require("../../__tests__/helpers/testHelpers");
describe('Output Files', () => {
@@ -28,3 +28,3 @@ const getExistsProms = (output, context) => __awaiter(void 0, void 0, void 0, function* () {
{ log: console.log, options: { output, context } }, {
report: testHelpers_ignore_1.mockReport,
report: testHelpers_1.mockReport,
metrics: {},
@@ -31,0 +31,0 @@ stats: { toJson: () => ({}) },
@@ -5,12 +5,145 @@ "use strict";
// Copyright 2019-Present Datadog, Inc.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const testHelpers_ignore_1 = require("../../../__tests__/testHelpers.ignore");
const testHelpers_1 = require("../../../__tests__/helpers/testHelpers");
const path_1 = __importDefault(require("path"));
const aggregator_1 = require("../aggregator");
const exec = require('util').promisify(require('child_process').exec);
const PROJECTS_ROOT = path_1.default.join(__dirname, '../../../__tests__/mocks/projects');
describe('Aggregator', () => {
test('It should aggregate metrics without throwing.', () => {
const { getMetrics } = require('../aggregator');
const opts = { context: '', filters: [], tags: [] };
expect(() => {
getMetrics(testHelpers_ignore_1.mockReport, testHelpers_ignore_1.mockStats, opts);
}).not.toThrow();
});
beforeAll(() => __awaiter(void 0, void 0, void 0, function* () {
yield exec(`yarn build`);
}), 20000);
for (const version of [4, 5]) {
describe(`Webpack ${version}`, () => {
let statsJson;
let dependenciesJson;
const WEBPACK_ROOT = path_1.default.join(PROJECTS_ROOT, `./webpack${version}`);
const OUTPUT = path_1.default.join(WEBPACK_ROOT, './webpack-profile-debug/');
beforeAll(() => __awaiter(void 0, void 0, void 0, function* () {
const output = yield exec(`yarn workspace webpack${version} build`);
// eslint-disable-next-line no-console
console.log(`Build ${version} :`, output.stderr);
statsJson = require(path_1.default.join(OUTPUT, './stats.json'));
dependenciesJson = require(path_1.default.join(OUTPUT, './dependencies.json'));
}), 20000);
test('It should aggregate metrics without throwing.', () => {
const { getMetrics } = require('../aggregator');
const opts = { context: '', filters: [], tags: [] };
expect(() => {
getMetrics(testHelpers_1.mockReport, testHelpers_1.mockStats, opts);
}).not.toThrow();
});
describe('Modules', () => {
let metrics;
beforeAll(() => {
const indexed = aggregator_1.getIndexed(statsJson, WEBPACK_ROOT);
metrics = aggregator_1.getModules(statsJson, dependenciesJson, indexed, WEBPACK_ROOT);
});
test('It should give module metrics.', () => {
expect(metrics.length).not.toBe(0);
});
test(`It should filter out webpack's modules.`, () => {
expect(metrics.find((m) => {
return m.tags.find((t) => /^moduleName:webpack\/runtime/.test(t));
})).toBeUndefined();
});
test(`It should add tags about the entry and the chunk.`, () => {
for (const metric of metrics) {
expect(metric.tags).toContain('entryName:yolo');
expect(metric.tags).toContain('entryName:cheesecake');
expect(metric.tags).toContain('chunkName:yolo');
expect(metric.tags).toContain('chunkName:cheesecake');
}
});
test('It should have 3 metrics per module.', () => {
const modules = [
'./src/file0000.js',
'./src/file0001.js',
'./workspaces/app/file0000.js',
'./workspaces/app/file0001.js',
];
for (const module of modules) {
const modulesMetrics = metrics.filter((m) => m.tags.includes(`moduleName:${module}`));
expect(modulesMetrics.length).toBe(3);
}
});
});
describe('Entries', () => {
let metrics;
beforeAll(() => {
const indexed = aggregator_1.getIndexed(statsJson, WEBPACK_ROOT);
metrics = aggregator_1.getEntries(statsJson, indexed);
});
test('It should give entries metrics.', () => {
expect(metrics.length).not.toBe(0);
});
test('It should give 4 metrics per entry.', () => {
const entries = ['yolo', 'cheesecake'];
for (const entry of entries) {
const entriesMetrics = metrics.filter((m) => m.tags.includes(`entryName:${entry}`));
expect(entriesMetrics.length).toBe(4);
}
});
});
describe('Chunks', () => {
let metrics;
beforeAll(() => {
const indexed = aggregator_1.getIndexed(statsJson, WEBPACK_ROOT);
metrics = aggregator_1.getChunks(statsJson, indexed);
});
test('It should give chunks metrics.', () => {
expect(metrics.length).not.toBe(0);
});
test('It should give 2 metrics per chunk.', () => {
const chunks = ['yolo', 'cheesecake'];
for (const chunk of chunks) {
const chunksMetrics = metrics.filter((m) => m.tags.includes(`chunkName:${chunk}`));
expect(chunksMetrics.length).toBe(2);
}
});
test(`It should add tags about the entry.`, () => {
for (const metric of metrics) {
expect(metric.tags.join(',')).toMatch(/entryName:(yolo|cheesecake)/);
}
});
});
describe('Assets', () => {
let metrics;
beforeAll(() => {
const indexed = aggregator_1.getIndexed(statsJson, WEBPACK_ROOT);
metrics = aggregator_1.getAssets(statsJson, indexed);
});
test('It should give assets metrics.', () => {
expect(metrics.length).not.toBe(0);
});
test('It should give 1 metric per asset.', () => {
const assets = ['yolo.js', 'cheesecake.js'];
for (const asset of assets) {
const assetsMetrics = metrics.filter((m) => m.tags.includes(`assetName:${asset}`));
expect(assetsMetrics.length).toBe(1);
}
});
test(`It should add tags about the entry and the chunk.`, () => {
for (const metric of metrics) {
expect(metric.tags).toContain('entryName:yolo');
expect(metric.tags).toContain('entryName:cheesecake');
expect(metric.tags).toContain('chunkName:yolo');
expect(metric.tags).toContain('chunkName:cheesecake');
}
});
});
});
}
});
@@ -15,3 +15,3 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const testHelpers_ignore_1 = require("../../../__tests__/testHelpers.ignore");
const testHelpers_1 = require("../../../__tests__/helpers/testHelpers");
describe('Datadog Hook', () => {
@@ -24,4 +24,4 @@ const buildPluginMock = {
const obj = yield hooks.preoutput.call(buildPluginMock, {
report: testHelpers_ignore_1.mockReport,
stats: testHelpers_ignore_1.mockStats,
report: testHelpers_1.mockReport,
stats: testHelpers_1.mockStats,
});
@@ -28,0 +28,0 @@ expect(typeof obj).toBe('object');
@@ -1,3 +0,18 @@
import { Report, Stats } from '../../types';
import { MetricToSend, GetMetricsOptions } from './types';
import { Chunk, Report, StatsJson, Stats, Module, LocalModules, IndexedObject } from '../../types';
import { Metric, MetricToSend, GetMetricsOptions } from './types';
export declare const getFromId: (coll: any[], id: string) => any;
export declare const foundInModules: (input: {
modules?: Module[] | undefined;
}, identifier?: string | undefined) => boolean;
export declare const getChunksFromModule: (stats: StatsJson, chunksPerId: {
[key: string]: Chunk;
}, module: Module) => Chunk[];
export declare const getEntriesFromChunk: (stats: StatsJson, chunk: Chunk, indexed: IndexedObject, parentEntries?: Set<string>, parentChunks?: Set<string>) => Set<string>;
export declare const getEntryTags: (entries: Set<string>) => string[];
export declare const getChunkTags: (chunks: Chunk[]) => string[];
export declare const getModules: (stats: StatsJson, dependencies: LocalModules, indexed: IndexedObject, context: string) => Metric[];
export declare const getChunks: (stats: StatsJson, indexed: IndexedObject) => Metric[];
export declare const getAssets: (stats: StatsJson, indexed: IndexedObject) => Metric[];
export declare const getEntries: (stats: StatsJson, indexed: IndexedObject) => Metric[];
export declare const getIndexed: (stats: StatsJson, context: string) => IndexedObject;
export declare const getMetrics: (report: Report, stats: Stats, opts: GetMetricsOptions) => MetricToSend[];
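These new exports mirror how the tests in this diff drive the aggregator: index the stats once with `getIndexed`, then derive metrics per dimension. A minimal sketch, assuming `statsJson` comes from webpack's `stats.toJson({ children: false })` and `dependencies` from the plugin's dependency report; the require path follows the compiled `dist/` layout shown in this diff but should be treated as an assumption:

```javascript
// Sketch only: statsJson and dependencies are assumed to already exist
// (from a prior webpack build and the plugin's dependencies report).
const {
    getIndexed,
    getModules,
    getChunks,
    getAssets,
    getEntries,
} = require('@datadog/build-plugin/dist/hooks/datadog/aggregator');

const context = process.cwd(); // project root, used to normalize module names
const indexed = getIndexed(statsJson, context);

const metrics = [
    ...getModules(statsJson, dependencies, indexed, context),
    ...getChunks(statsJson, indexed),
    ...getAssets(statsJson, indexed),
    ...getEntries(statsJson, indexed),
];
```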
@@ -9,3 +9,3 @@ "use strict";
const flattened = (arr) => [].concat(...arr);
const getType = (name) => name.split('.').pop();
const getType = (name) => (name.includes('.') ? name.split('.').pop() : 'unknown');
const getGenerals = (timings, stats) => [
@@ -147,36 +147,128 @@ {
};
const getModules = (modules, dependencies, context) => {
const modulesPerName = {};
for (const module of modules) {
modulesPerName[helpers_1.formatModuleName(module.name, context)] = module;
exports.getFromId = (coll, id) => coll.find((c) => c.id === id);
exports.foundInModules = (input, identifier) => {
if (!identifier || !input.modules || !input.modules.length) {
return false;
}
const clonedModules = [...modules];
return flattened(clonedModules.map((module) => {
// Modules are sometimes registered with their loader.
if (module.name.includes('!')) {
return [];
return !!input.modules.find((m) => {
if (m.identifier && m.identifier === identifier) {
return true;
// eslint-disable-next-line no-underscore-dangle
}
const moduleName = helpers_1.getDisplayName(module.name, context);
const tree = Array.from(findDependencies(module.name, dependencies)).map((dependencyName) => modulesPerName[dependencyName]);
const treeSize = tree.reduce((previous, current) => {
return previous + current.size;
}, 0);
else if (m._identifier && m._identifier === identifier) {
return true;
}
if (m.modules && m.modules.length) {
return exports.foundInModules(m, identifier);
}
});
};
exports.getChunksFromModule = (stats, chunksPerId, module) => {
if (module.chunks.length) {
return module.chunks.map((c) => chunksPerId[c]);
}
// Find the chunks from the chunk list directly.
// Webpack may not have registered module's chunks in some cases.
// eslint-disable-next-line no-underscore-dangle
return stats.chunks.filter((c) => exports.foundInModules(c, module.identifier || module._identifier));
};
exports.getEntriesFromChunk = (stats, chunk, indexed, parentEntries = new Set(), parentChunks = new Set()) => {
const entry = indexed.entriesPerChunkId[chunk.id];
if (entry) {
parentEntries.add(entry.name);
}
// Escape cyclic dependencies.
if (parentChunks.has(chunk.id)) {
return parentEntries;
}
parentChunks.add(chunk.id);
chunk.parents.forEach((p) => {
const parentChunk = indexed.chunksPerId[p];
if (parentChunk) {
exports.getEntriesFromChunk(stats, parentChunk, indexed, parentEntries, parentChunks);
}
});
return parentEntries;
};
exports.getEntryTags = (entries) => Array.from(entries).map((e) => `entryName:${e}`);
exports.getChunkTags = (chunks) => flattened(chunks
.map((c) => {
if (c.names && c.names.length) {
return c.names.map((n) => `chunkName:${n}`);
}
})
.filter((c) => c));
const getMetricsFromModule = (stats, dependencies, indexed, context, module) => {
const chunks = exports.getChunksFromModule(stats, indexed.chunksPerId, module);
const entries = new Set();
for (const chunk of chunks) {
exports.getEntriesFromChunk(stats, chunk, indexed, entries);
}
const chunkTags = exports.getChunkTags(chunks);
const entryTags = exports.getEntryTags(entries);
const moduleName = helpers_1.getDisplayName(module.name, context);
// The reason we have to do two loops over modules.
const tree = Array.from(findDependencies(module.name, dependencies)).map((dependencyName) => indexed.modulesPerName[dependencyName]);
const treeSize = tree.reduce((previous, current) => {
return previous + (current ? current.size : 0);
}, 0);
return [
{
metric: 'modules.size',
type: 'size',
value: module.size,
tags: [
`moduleName:${moduleName}`,
`moduleType:${getType(moduleName)}`,
...entryTags,
...chunkTags,
],
},
{
metric: 'modules.tree.size',
type: 'size',
value: treeSize,
tags: [
`moduleName:${moduleName}`,
`moduleType:${getType(moduleName)}`,
...entryTags,
...chunkTags,
],
},
{
metric: 'modules.tree.count',
type: 'count',
value: tree.length,
tags: [
`moduleName:${moduleName}`,
`moduleType:${getType(moduleName)}`,
...entryTags,
...chunkTags,
],
},
];
};
exports.getModules = (stats, dependencies, indexed, context) => {
return flattened(Object.values(indexed.modulesPerName).map((module) => {
return getMetricsFromModule(stats, dependencies, indexed, context, module);
}));
};
// Find in entries.chunks
exports.getChunks = (stats, indexed) => {
const chunks = stats.chunks;
return flattened(chunks.map((chunk) => {
const entryTags = exports.getEntryTags(exports.getEntriesFromChunk(stats, chunk, indexed));
const chunkName = chunk.names.length ? chunk.names.join(' ') : chunk.id;
return [
{
metric: 'modules.size',
metric: 'chunks.size',
type: 'size',
value: module.size,
tags: [`moduleName:${moduleName}`, `moduleType:${getType(moduleName)}`],
value: chunk.size,
tags: [`chunkName:${chunkName}`, ...entryTags],
},
{
metric: 'modules.tree.size',
type: 'size',
value: treeSize,
tags: [`moduleName:${moduleName}`, `moduleType:${getType(moduleName)}`],
},
{
metric: 'modules.tree.count',
metric: 'chunks.modules.count',
type: 'count',
value: tree.length,
tags: [`moduleName:${moduleName}`, `moduleType:${getType(moduleName)}`],
value: chunk.modules.length,
tags: [`chunkName:${chunkName}`, ...entryTags],
},
@@ -186,21 +278,12 @@ ];
};
const getChunks = (chunks) => flattened(chunks.map((chunk) => {
const chunkName = chunk.names.length ? chunk.names.join(' ') : chunk.id;
return [
{
metric: 'chunks.size',
type: 'size',
value: chunk.size,
tags: [`chunkName:${chunkName}`],
},
{
metric: 'chunks.modules.count',
type: 'count',
value: chunk.modules.length,
tags: [`chunkName:${chunkName}`],
},
];
}));
const getAssets = (assets) => {
exports.getAssets = (stats, indexed) => {
const assets = stats.assets;
return assets.map((asset) => {
const chunks = asset.chunks.map((c) => indexed.chunksPerId[c]);
const entries = new Set();
for (const chunk of chunks) {
exports.getEntriesFromChunk(stats, chunk, indexed, entries);
}
const chunkTags = exports.getChunkTags(chunks);
const entryTags = exports.getEntryTags(entries);
const assetName = asset.name;
@@ -211,9 +294,22 @@ return {
value: asset.size,
tags: [`assetName:${assetName}`, `assetType:${getType(assetName)}`],
tags: [
`assetName:${assetName}`,
`assetType:${getType(assetName)}`,
...chunkTags,
...entryTags,
],
};
});
};
const getEntries = (stats) => flattened(Object.keys(stats.entrypoints).map((entryName) => {
exports.getEntries = (stats, indexed) => flattened(Object.keys(stats.entrypoints).map((entryName) => {
const entry = stats.entrypoints[entryName];
const chunks = entry.chunks.map((chunkId) => stats.chunks.find((chunk) => chunk.id === chunkId));
const chunks = entry.chunks.map((chunkId) => indexed.chunksPerId[chunkId]);
let size = 0;
let moduleCount = 0;
let assetsCount = 0;
for (const chunk of chunks) {
size += chunk.size;
moduleCount += chunk.modules.length;
assetsCount += chunk.files.length;
}
return [
@@ -223,3 +319,3 @@ {
type: 'size',
value: chunks.reduce((previous, current) => previous + current.size, 0),
value: size,
tags: [`entryName:${entryName}`],
@@ -236,3 +332,3 @@ },
type: 'count',
value: chunks.reduce((previous, current) => previous + current.modules.length, 0),
value: moduleCount,
tags: [`entryName:${entryName}`],
@@ -243,3 +339,3 @@ },
type: 'count',
value: chunks.reduce((previous, current) => previous + current.files.length, 0),
value: assetsCount,
tags: [`entryName:${entryName}`],
@@ -249,2 +345,50 @@ },
}));
exports.getIndexed = (stats, context) => {
// Gather all modules.
const modulesPerName = {};
const chunksPerId = {};
const entriesPerChunkId = {};
const addModule = (module) => {
// console.log('Add Module', module.name);
// No internals.
if (/^webpack\/runtime/.test(module.name)) {
return;
}
// No duplicates.
if (modulesPerName[helpers_1.formatModuleName(module.name, context)]) {
return;
}
// Modules are sometimes registered with their loader.
if (module.name.includes('!')) {
return;
}
modulesPerName[helpers_1.formatModuleName(module.name, context)] = module;
};
for (const [name, entry] of Object.entries(stats.entrypoints)) {
// In webpack4 we don't have the name of the entry here.
entry.name = name;
for (const chunkId of entry.chunks) {
entriesPerChunkId[chunkId] = entry;
}
}
for (const chunk of stats.chunks) {
chunksPerId[chunk.id] = chunk;
}
for (const module of stats.modules) {
// Sometimes modules are grouped together.
if (module.modules && module.modules.length) {
for (const moduleIn of module.modules) {
addModule(moduleIn);
}
}
else {
addModule(module);
}
}
return {
modulesPerName,
chunksPerId,
entriesPerChunkId,
};
};
exports.getMetrics = (report, stats, opts) => {
@@ -254,2 +398,3 @@ const statsJson = stats.toJson({ children: false });
const metrics = [];
const indexed = exports.getIndexed(statsJson, opts.context);
metrics.push(...getGenerals(timings, statsJson));
@@ -259,6 +404,6 @@ metrics.push(...getDependencies(Object.values(dependencies)));
metrics.push(...getLoaders(timings.loaders));
metrics.push(...getModules(statsJson.modules, dependencies, opts.context));
metrics.push(...getChunks(statsJson.chunks));
metrics.push(...getAssets(statsJson.assets));
metrics.push(...getEntries(statsJson));
metrics.push(...exports.getModules(statsJson, dependencies, indexed, opts.context));
metrics.push(...exports.getChunks(statsJson, indexed));
metrics.push(...exports.getAssets(statsJson, indexed));
metrics.push(...exports.getEntries(statsJson, indexed));
// Format metrics to be DD ready and apply filters
@@ -265,0 +410,0 @@ const metricsToSend = metrics
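At the top level, `getMetrics` remains the single entry point; as the hunk above shows, it now builds the index internally before delegating to the per-dimension helpers. A rough usage sketch, with the `{ context, filters, tags }` options shape taken from the tests in this diff and placeholder values:

```javascript
// Sketch only: `report` and `stats` are assumed to be provided by the plugin
// (for example at its preoutput hook); the option values below are placeholders.
const { getMetrics } = require('@datadog/build-plugin/dist/hooks/datadog/aggregator');

const metricsToSend = getMetrics(report, stats, {
    context: process.cwd(), // shape taken from the tests in this diff
    filters: [],
    tags: [],
});
```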
@@ -27,19 +27,18 @@ "use strict";
const spaces = ' ';
yield Promise.all([
fs_extra_1.outputJson(path_1.default.join(outputPath, 'timings.json'), {
tapables: report.timings.tapables,
loaders: report.timings.loaders,
modules: report.timings.modules,
}, { spaces }),
fs_extra_1.outputJson(path_1.default.join(outputPath, 'dependencies.json'), report.dependencies, {
spaces,
}),
fs_extra_1.outputJson(path_1.default.join(outputPath, 'stats.json'), stats.toJson({ children: false }), {
spaces,
}),
metrics &&
fs_extra_1.outputJson(path_1.default.join(outputPath, 'metrics.json'), metrics, {
spaces,
}),
]);
yield fs_extra_1.outputJson(path_1.default.join(outputPath, 'timings.json'), {
tapables: report.timings.tapables,
loaders: report.timings.loaders,
modules: report.timings.modules,
}, { spaces });
this.log(`Wrote timings.json`);
yield fs_extra_1.outputJson(path_1.default.join(outputPath, 'dependencies.json'), report.dependencies, {
spaces,
});
this.log(`Wrote dependencies.json`);
yield fs_extra_1.outputJson(path_1.default.join(outputPath, 'stats.json'), stats.toJson({ children: false }), { spaces });
this.log(`Wrote stats.json`);
if (metrics) {
yield fs_extra_1.outputJson(path_1.default.join(outputPath, 'metrics.json'), metrics, { spaces });
this.log(`Wrote metrics.json`);
}
this.log(`Wrote files in ${Date.now() - startWriting}ms.`);
@@ -46,0 +45,0 @@ }
@@ -8,2 +8,13 @@ export declare type HOOKS = 'output';
}
export interface IndexedObject {
modulesPerName: {
[key: string]: Module;
};
chunksPerId: {
[key: string]: Chunk;
};
entriesPerChunkId: {
[key: string]: Entry;
};
}
export interface ModuleGraph {
@@ -69,2 +80,3 @@ getModule(dependency: Dependency): Module;
names: string[];
parents: string[];
}
@@ -74,8 +86,14 @@ export interface Asset {
size: number;
chunks: string[];
}
export interface Entry {
name: string;
chunks: string[];
}
export interface Entries {
[key: string]: Entry;
}
export interface StatsJson {
entrypoints: {
[key: string]: {
chunks: string[];
};
[key: string]: Entry;
};
@@ -179,2 +197,4 @@ chunks: Chunk[];
_identifier?: string;
identifier?: string;
modules?: Module[];
moduleGraph?: ModuleGraph;
@@ -185,2 +205,3 @@ size: number;
}[];
chunks: string[];
dependencies: Dependency[];
@@ -187,0 +208,0 @@ }
{
"name": "@datadog/build-plugin",
"version": "0.3.6",
"version": "0.4.0",
"license": "MIT",
@@ -8,2 +8,5 @@ "author": "Datadog",
"main": "./dist",
"workspaces": [
"src/__tests__/mocks/projects/*"
],
"files": [
@@ -23,3 +26,3 @@ "dist/**/*"
"prepack": "yarn build",
"test": "jest",
"test": "jest --verbose",
"typecheck": "tsc --noEmit",
@@ -26,0 +29,0 @@ "watch": "tsc -w"
@@ -112,3 +112,3 @@ # Build plugin <!-- omit in toc -->
The most basic configuration looks like this, consult
[the full integration documentation](./hooks/datadog) for more details.
[the full integration documentation](./src/hooks/datadog) for more details.
@@ -115,0 +115,0 @@ ```javascript
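The README snippet itself is truncated in this diff; as a hedged illustration only, wiring the plugin into a webpack configuration could look roughly like the following (the `dist/webpack` require path matches the compiled module exercised by the tests above; any other option names are not confirmed here):

```javascript
// Hypothetical minimal webpack.config.js; only the BuildPlugin constructor
// is confirmed by this diff, the rest is illustrative.
const { BuildPlugin } = require('@datadog/build-plugin/dist/webpack');

module.exports = {
    entry: './src/index.js',
    plugins: [new BuildPlugin()],
};
```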
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Shell access
Supply chain risk: This module accesses the system shell. Accessing the system shell increases the risk of executing arbitrary code.
Found 1 instance in 1 package
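For context, the flagged shell access corresponds to the test setup earlier in this diff, which shells out to yarn to build the mock projects before aggregating their stats; roughly:

```javascript
// Pattern behind the alert (from the aggregator tests in this diff):
// child_process.exec is promisified and used to run yarn builds in tests.
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

beforeAll(async () => {
    await exec('yarn build'); // only runs inside the package's own test suite
}, 20000);
```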
Dynamic require
Supply chain risk: Dynamic require can indicate the package is performing dangerous or unsafe dynamic code execution.
Found 1 instance in 1 package
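Similarly, the dynamic require flagged here is the test code loading the freshly built `stats.json` and `dependencies.json` through a path computed at runtime, as shown in the diff above; the output directory below is illustrative:

```javascript
// Pattern behind the alert: require() with a runtime-computed path,
// used by the tests to read the mock projects' build output.
const path = require('path');
const OUTPUT = path.join(__dirname, './webpack-profile-debug/'); // illustrative

const statsJson = require(path.join(OUTPUT, './stats.json'));
const dependenciesJson = require(path.join(OUTPUT, './dependencies.json'));
```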