Latest Threat Research: Shai-Hulud-Style npm Worm Hijacks CI Workflows and Poisons AI Toolchains. Details
Socket
Book a Demo · Install · Sign in
Socket

sonda

Package Overview
Dependencies
Maintainers
1
Versions
26
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

sonda - npm Package Compare versions

Comparing version
0.7.1
to
0.8.0
+52
dist/entrypoints/angular.js
import { basename, relative, resolve } from "path";
import { readFileSync, readdirSync } from "fs";
import { Config, processEsbuildMetafile } from "sonda";
//#region src/entrypoints/angular.ts
/**
 * Generates Sonda reports for Angular builds.
 *
 * Reads the Angular workspace file (`config`, default "angular.json"), resolves each
 * project's build output directory, loads its `stats.json` (an esbuild metafile),
 * rewrites the metafile's output paths to real file-system paths, and hands it to
 * the shared esbuild metafile processor. A `sonda_<project>` report is produced
 * per project.
 *
 * @param config      Path to the Angular workspace configuration file.
 * @param projects    Project names to report on; defaults to every project in the workspace.
 * @param userOptions Remaining Sonda user options.
 */
async function SondaAngular({ config = "angular.json", projects = [], ...userOptions }) {
  const options = new Config(userOptions, {
    integration: "angular",
    filename: "sonda_[project]"
  });
  const angularConfig = loadJson(config);
  const projectsToGenerate = projects.length ? projects : Object.keys(angularConfig.projects);
  for (const project of projectsToGenerate) {
    // Fail with a clear message instead of an opaque TypeError when a requested
    // project does not exist in the workspace configuration.
    const projectConfig = angularConfig.projects[project];
    if (!projectConfig) throw new Error(`Sonda: project "${project}" was not found in "${config}"`);
    const { outputPath } = projectConfig.architect.build.options;
    // `outputPath` may be a plain string or an object with base/browser/server keys.
    const paths = typeof outputPath === "object" ? outputPath : { base: outputPath };
    paths.base = resolve(process.cwd(), paths.base);
    paths.browser = resolve(paths.base, paths.browser || "browser");
    paths.server = resolve(paths.base, paths.server || "server");
    const metafile = updateMetafile(loadJson(resolve(paths.base, "stats.json")), paths.base);
    const sondaOptions = options.clone();
    sondaOptions.filename = sondaOptions.filename.replace("[project]", project);
    sondaOptions.sourcesPathNormalizer = (path) => resolve(process.cwd(), path);
    await processEsbuildMetafile(metafile, sondaOptions);
  }
}
/**
 * Reads and parses a JSON file; relative paths are resolved against the
 * current working directory.
 */
function loadJson(path) {
  const absolutePath = resolve(process.cwd(), path);
  const raw = readFileSync(absolutePath, "utf8");
  return JSON.parse(raw);
}
/**
 * Output paths in the metafile only contain the file name, without the relative
 * path from the current working directory (e.g. "main-xxx.js" instead of
 * "dist/project/browser/en/main-xxx.js"). This walks the output directory and
 * re-keys the metafile outputs by their real relative paths.
 */
function updateMetafile(metafile, basePath) {
  const workingDir = process.cwd();
  const originalOutputs = Object.assign({}, metafile.outputs);
  metafile.outputs = {};
  const entries = readdirSync(basePath, { encoding: "utf8", recursive: true });
  for (const entry of entries) {
    const absolutePath = resolve(basePath, entry);
    const output = originalOutputs[basename(absolutePath)];
    if (!output) continue;
    metafile.outputs[relative(workingDir, absolutePath)] = output;
  }
  return metafile;
}
//#endregion
export { SondaAngular as default };
import { Config, SondaVitePlugin } from "sonda";
//#region src/entrypoints/astro.ts
/**
 * Astro integration: attaches the Sonda Vite plugin to each Astro build
 * (client and, when enabled, server), producing a "sonda_<target>" report.
 */
function SondaAstroPlugin(userOptions = {}) {
  const options = new Config(userOptions, {
    integration: "astro",
    filename: "sonda_[env]"
  });
  if (!options.enabled) {
    return { name: "sonda-astro", hooks: {} };
  }
  const onBuildSetup = ({ vite, target }) => {
    // The server bundle is only analyzed when explicitly requested.
    if (target === "server" && !options.server) return;
    const perTargetOptions = options.clone();
    perTargetOptions.filename = perTargetOptions.filename.replace("[env]", target);
    vite.plugins ??= [];
    vite.plugins.push(SondaVitePlugin(perTargetOptions));
  };
  return {
    name: "sonda-astro",
    hooks: { "astro:build:setup": onBuildSetup }
  };
}
//#endregion
export { SondaAstroPlugin as default };
import { SondaEsbuildPlugin } from "sonda";
//#region src/entrypoints/esbuild.ts
// The esbuild entrypoint re-exports the core esbuild integration unchanged.
var esbuild_default = SondaEsbuildPlugin;
//#endregion
export { esbuild_default as default };
import { Config, SondaWebpackPlugin } from "sonda";
//#region src/entrypoints/next.ts
/**
 * Next.js integration: returns a Next config wrapper that injects the Sonda
 * webpack plugin into client and (optionally) server builds.
 */
function SondaNextPlugin(userOptions = {}) {
  return function Sonda(nextConfig = {}) {
    const options = new Config(userOptions, {
      integration: "next",
      filename: "sonda_[env]"
    });
    if (!options.enabled) return nextConfig;
    const webpack = (config, { nextRuntime, isServer }) => {
      const env = nextRuntime || "client";
      // The edge runtime is skipped; server builds are opt-in via `server`.
      if (env === "edge" || isServer && !options.server) return config;
      const perEnvOptions = options.clone();
      perEnvOptions.filename = perEnvOptions.filename.replace("[env]", env);
      config.plugins.push(new SondaWebpackPlugin(perEnvOptions));
      return config;
    };
    return Object.assign({}, nextConfig, { webpack });
  };
}
//#endregion
export { SondaNextPlugin as default };
import { Config, SondaVitePlugin } from "sonda";
//#region src/entrypoints/nuxt.ts
/**
 * Nuxt integration: hooks into Vite config extension and attaches the Sonda
 * Vite plugin to client and (optionally) nitro/server builds.
 */
function SondaNuxtPlugin(userOptions = {}) {
  return function SondaNuxtPlugin$1(_, nuxt) {
    const options = new Config(userOptions, {
      integration: "nuxt",
      filename: "sonda_[env]"
    });
    if (!options.enabled) return;
    nuxt.hook("vite:extendConfig", (config, { isClient, isServer }) => {
      const env = isClient ? "client" : "nitro";
      // Server builds are only analyzed when explicitly requested.
      if (isServer && !options.server) return;
      const perEnvOptions = options.clone();
      perEnvOptions.filename = perEnvOptions.filename.replace("[env]", env);
      (config.plugins ??= []).push(SondaVitePlugin(perEnvOptions));
    });
  };
}
//#endregion
export { SondaNuxtPlugin as default };
import { UserOptions } from "sonda";
import { Plugin } from "rolldown";
//#region src/entrypoints/rolldown.d.ts
// Type-only declaration for the Rolldown entrypoint; the implementation is in rolldown.ts.
declare function RolldownPlugin(userOptions?: UserOptions): Plugin;
//#endregion
export { RolldownPlugin as default };
import { Config, SondaRollupPlugin } from "sonda";
//#region src/entrypoints/rolldown.ts
/**
 * Rolldown integration: reuses the Rollup plugin implementation under a
 * Rolldown-specific plugin name.
 */
function RolldownPlugin(userOptions = {}) {
  const options = new Config(userOptions, { integration: "rolldown" });
  if (!options.enabled) return { name: "sonda-rolldown" };
  const rollupPlugin = SondaRollupPlugin(options);
  return Object.assign({}, rollupPlugin, { name: "sonda-rolldown" });
}
//#endregion
export { RolldownPlugin as default };
import { SondaRollupPlugin } from "sonda";
//#region src/entrypoints/rollup.ts
// The Rollup entrypoint re-exports the core Rollup integration unchanged.
var rollup_default = SondaRollupPlugin;
//#endregion
export { rollup_default as default };
import { SondaWebpackPlugin, UserOptions } from "sonda";
//#region src/entrypoints/rspack.d.ts
// Type-only declaration: the rspack plugin shares the webpack plugin's public interface.
declare class SondaRspackPlugin extends SondaWebpackPlugin {
  constructor(userOptions?: UserOptions);
}
//#endregion
export { SondaRspackPlugin as default };
import { Config, SondaWebpackPlugin } from "sonda";
//#region src/entrypoints/rspack.ts
// Rspack integration: identical to the webpack plugin except for the reported
// integration name.
var SondaRspackPlugin = class extends SondaWebpackPlugin {
  constructor(userOptions = {}) {
    super(userOptions);
    // Replace the Config built by the webpack base class so report metadata
    // identifies the integration as "rspack" rather than "webpack".
    this.options = new Config(userOptions, { integration: "rspack" });
  }
};
//#endregion
export { SondaRspackPlugin as default };
import { Config, SondaVitePlugin } from "sonda";
//#region src/entrypoints/sveltekit.ts
// SvelteKit integration: wraps the Vite plugin and distinguishes the client
// and SSR builds SvelteKit performs.
function SondaSvelteKitPlugin(userOptions = {}) {
  const options = new Config(userOptions, {
    integration: "sveltekit",
    filename: "sonda_[env]"
  });
  if (!options.enabled) return { name: "sonda-sveltekit" };
  return {
    ...SondaVitePlugin(options),
    name: "sonda-sveltekit",
    configResolved(config) {
      // SvelteKit runs two Vite builds; `config.build.ssr` tells them apart.
      const env = config.build.ssr ? "server" : "client";
      const generateForServer = userOptions.server ?? false;
      // NOTE(review): mutating `userOptions.enabled` here does not visibly affect
      // `options` — the Config constructor copied the values via Object.assign —
      // so it is unclear this actually disables the server build. Confirm intent.
      if (env === "server" && !generateForServer) userOptions.enabled = false;
      options.filename = options.filename.replace("[env]", env);
    }
  };
}
//#endregion
export { SondaSvelteKitPlugin as default };
import { SondaVitePlugin } from "sonda";
export { SondaVitePlugin as default };
import { SondaVitePlugin } from "sonda";
//#region src/entrypoints/vite.ts
// The Vite entrypoint re-exports the core Vite integration unchanged.
var vite_default = SondaVitePlugin;
//#endregion
export { vite_default as default };
import { SondaWebpackPlugin } from "sonda";
//#region src/entrypoints/webpack.ts
// The webpack entrypoint re-exports the core webpack integration unchanged.
var webpack_default = SondaWebpackPlugin;
//#endregion
export { webpack_default as default };
import { access, mkdir, readFile, readdir, writeFile } from "fs/promises";
import { basename, dirname, extname, format, isAbsolute, join, posix, relative, resolve, win32 } from "path";
import { isBuiltin } from "module";
import open from "open";
import { brotliCompressSync, gzipSync } from "zlib";
import { existsSync, readFileSync, statSync } from "fs";
import remapping from "@ampproject/remapping";
//#region src/config.ts
/**
 * Immutable-ish option container shared by all integrations. Merges hard-coded
 * defaults, integration-supplied defaults, and user options (in that order of
 * precedence, lowest first). When constructed from an existing Config, the
 * internal options object is shared by reference — integrations that wrap one
 * another (e.g. Vite wrapping Rollup) rely on this.
 */
var Config = class Config {
  #options;
  constructor(options, defaults) {
    if (options instanceof Config) {
      // Share state with the wrapped Config instead of copying it.
      this.#options = options.#options;
      return;
    }
    this.#options = Object.assign({
      enabled: true,
      format: "html",
      outputDir: ".sonda",
      open: true,
      deep: false,
      sources: false,
      gzip: false,
      brotli: false,
      server: false,
      filename: "sonda",
      sourcesPathNormalizer: null
    }, defaults, options);
  }
  /**
   * Returns an independent deep copy of this configuration.
   * `structuredClone` cannot clone functions, so the `sourcesPathNormalizer`
   * callback is detached first and re-attached by reference afterwards —
   * cloning a Config with a normalizer set would otherwise throw.
   */
  clone() {
    const { sourcesPathNormalizer, ...data } = this.#options;
    const copied = structuredClone(data);
    copied.sourcesPathNormalizer = sourcesPathNormalizer ?? null;
    return new Config({}, copied);
  }
  get enabled() {
    return this.#options.enabled;
  }
  get format() {
    return this.#options.format;
  }
  get outputDir() {
    return this.#options.outputDir;
  }
  get open() {
    return this.#options.open;
  }
  get deep() {
    return this.#options.deep;
  }
  get sources() {
    return this.#options.sources;
  }
  get gzip() {
    return this.#options.gzip;
  }
  get brotli() {
    return this.#options.brotli;
  }
  get server() {
    return this.#options.server;
  }
  get integration() {
    return this.#options.integration;
  }
  get filename() {
    return this.#options.filename;
  }
  get sourcesPathNormalizer() {
    return this.#options.sourcesPathNormalizer;
  }
  set filename(filename) {
    this.#options.filename = filename;
  }
  set sourcesPathNormalizer(normalizer) {
    this.#options.sourcesPathNormalizer = normalizer;
  }
};
//#endregion
//#region src/utils.ts
// Maps file extensions to the resource `type` shown in the report.
// Extensions not listed here fall back to "other" (see getTypeByName).
const extensions = {
  ".js": "script",
  ".jsx": "script",
  ".mjs": "script",
  ".cjs": "script",
  ".ts": "script",
  ".tsx": "script",
  ".cts": "script",
  ".mts": "script",
  ".json": "script",
  ".node": "script",
  ".wasm": "script",
  ".css": "style",
  ".scss": "style",
  ".sass": "style",
  ".less": "style",
  ".styl": "style",
  ".pcss": "style",
  ".postcss": "style",
  ".woff": "font",
  ".woff2": "font",
  ".ttf": "font",
  ".otf": "font",
  ".eot": "font",
  ".jpg": "image",
  ".jpeg": "image",
  ".png": "image",
  ".gif": "image",
  ".svg": "image",
  ".webp": "image",
  ".jxl": "image",
  ".avif": "image",
  ".ico": "image",
  ".bmp": "image",
  ".vue": "component",
  ".svelte": "component",
  ".astro": "component",
  ".marko": "component",
  ".riot": "component"
};
// File suffixes excluded from reports entirely (source maps and TS declarations).
const ignoredExtensions = [".map", ".d.ts"];
/**
 * Normalizes a path: strips a leading null character (used by bundlers to mark
 * virtual modules) and converts it to a POSIX-style path relative to the
 * current working directory.
 */
function normalizePath(pathToNormalize) {
  const withoutNullByte = pathToNormalize.replace(/^\0/, "");
  const relativePath = relative(process.cwd(), withoutNullByte);
  return relativePath.replaceAll(win32.sep, posix.sep);
}
/**
 * Returns the resource type for a file name, based on its extension.
 * Unknown extensions map to "other".
 */
function getTypeByName(name) {
  const type = extensions[extname(name)];
  return type === undefined ? "other" : type;
}
/**
* Sort an array of objects by a specific key.
*/
function sortByKey(data, key) {
return data.toSorted((a, b) => a[key].localeCompare(b[key]));
}
/**
 * Returns paths (relative to the current working directory) of all files in
 * the given directory, excluding source maps and declaration files. Returns an
 * empty array when the directory does not exist or cannot be read.
 */
async function getAllFiles(dir, recursive = true) {
  try {
    await access(dir);
    const entries = await readdir(dir, { withFileTypes: true, recursive });
    return entries
      .filter((entry) => entry.isFile() && !hasIgnoredExtension(entry.name))
      .map((entry) => join(relative(process.cwd(), entry.parentPath), entry.name));
  } catch {
    return [];
  }
}
/**
 * Checks whether a file name ends with an ignored extension. `endsWith` (not
 * `extname`) is used so multi-part suffixes like `.d.ts` are matched correctly.
 */
function hasIgnoredExtension(name) {
  for (const ext of ignoredExtensions) {
    if (name.endsWith(ext)) return true;
  }
  return false;
}
//#endregion
//#region package.json
// Inlined from package.json at build time; embedded in report metadata.
var version = "0.8.0";
//#endregion
//#region src/report/formatters/Formatter.ts
/**
 * Base class for report formatters. Subclasses define `extension` and
 * `parse(data)`; this class handles naming and writing the report file.
 */
var Formatter = class {
  config;
  constructor(config) {
    this.config = config;
  }
  /**
   * Writes the report to the file system and returns the path to the report.
   */
  async write(data) {
    const path = await this.getReportPath();
    const content = await this.parse(data);
    await mkdir(dirname(path), { recursive: true });
    await writeFile(path, content);
    return path;
  }
  /**
   * Generates a unique report name based on the existing files in the output
   * directory, by appending the next free version number to the configured
   * base `filename`.
   */
  async getReportPath() {
    const { filename, outputDir } = this.config;
    // Escape regex metacharacters so configured filenames containing characters
    // like "[" or "." (e.g. "sonda_[env]") are matched literally.
    const escapedFilename = filename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
    const regex = new RegExp(`^${escapedFilename}_(\\d+)\\${this.extension}$`);
    const versions = (await getAllFiles(outputDir)).map((path) => basename(path).match(regex)).filter((match) => match !== null).map((match) => parseInt(match[1], 10));
    const maxVersion = Math.max(...versions, -1);
    const version$1 = String(maxVersion + 1);
    return format({
      dir: this.config.outputDir,
      name: `${filename}_${version$1}`,
      ext: this.extension
    });
  }
};
//#endregion
//#region src/report/formatters/HtmlFormatter.ts
// Renders the report as a self-contained HTML page: the report data is
// gzipped, base64-encoded, and injected into the bundled template.
var HtmlFormatter = class extends Formatter {
  extension = ".html";
  async parse(data) {
    const template = await readFile(resolve(import.meta.dirname, "./index.html"), "utf-8");
    return template.replace("__REPORT_DATA__", gzipSync(JSON.stringify(data)).toString("base64"));
  }
};
//#endregion
//#region src/report/formatters/JsonFormatter.ts
// Serializes the report as pretty-printed JSON (two-space indentation).
var JsonFormatter = class extends Formatter {
  extension = ".json";
  parse(data) {
    return JSON.stringify(data, null, 2);
  }
};
//#endregion
//#region ../load-source-map/dist/index.js
/**
 * Strips any JSON XSSI-avoidance prefix (")]}'...") from the string, as
 * documented in the source maps specification, then parses it as JSON.
 *
 * https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164
 */
function parseSourceMapInput(str) {
  const withoutPrefix = str.replace(/^\)]}'[^\n]*\n/, "");
  return JSON.parse(withoutPrefix);
}
/**
 sourceMappingURL=data:application/json;charset=utf-8;base64,data
 sourceMappingURL=data:application/json;base64,data
 sourceMappingURL=data:application/json;uri,data
 sourceMappingURL=map-file-comment.css.map
 sourceMappingURL=map-file-comment.css.map?query=value
 */
// Matches `sourceMappingURL=` comment values; global flag so `matchAll` can
// pick the last occurrence in a file.
const sourceMappingRegExp = /[@#]\s*sourceMappingURL=(\S+)\b/g;
/**
 * Checks whether the given path exists and is a regular file.
 * Any stat failure (missing path, permission error) counts as "not a file".
 */
function isFile(path) {
  try {
    const stats = statSync(path);
    return stats.isFile();
  } catch {
    return false;
  }
}
/**
 * Default source path normalizer: absolute paths pass through unchanged,
 * relative paths are resolved against the source map's source root.
 */
function defaultPathNormalizer(path, sourceRoot) {
  if (isAbsolute(path)) return path;
  return resolve(sourceRoot, path);
}
/**
 * Loads a generated file and, when available, its source map. Source paths in
 * the map are normalized (with the provided normalizer or the default one) and
 * missing `sourcesContent` entries are filled in from disk. Returns `null`
 * when the code file does not exist, and `{ code }` without a map when no
 * source map can be located.
 */
function loadCodeAndMap(codePath, sourcesPathNormalizer) {
  if (!isFile(codePath)) return null;
  const code = readFileSync(codePath, "utf-8");
  const found = loadMap(codePath, code);
  if (!found) return { code };
  const { map, mapPath } = found;
  const sourceRoot = resolve(dirname(mapPath), map.sourceRoot ?? ".");
  const normalize = sourcesPathNormalizer || defaultPathNormalizer;
  map.sources = map.sources.map((source) => source && normalize(source, sourceRoot));
  map.sourcesContent = loadMissingSourcesContent(map);
  // The root has been folded into the sources, so it must not be applied twice.
  delete map.sourceRoot;
  return { code, map };
}
// Locates the source map for a generated file. Returns `{ map, mapPath }` or
// null when no map can be found.
function loadMap(codePath, code) {
  /**
   * Because in most cases the source map has the same name as the code file,
   * we can try to append `.map` to the code path and check if the file exists.
   */
  try {
    const possibleMapPath = codePath + ".map";
    const map = readFileSync(possibleMapPath, "utf-8");
    return {
      map: parseSourceMapInput(map),
      mapPath: possibleMapPath
    };
  } catch {}
  /**
   * If the source map is not found by file name, we can try to extract it from the code.
   * The path to the source map is usually in a comment at the end of the file, but it can
   * also be inlined in the code itself.
   */
  // `includes` is a cheap pre-check; `at(-1)` takes the last comment in the file.
  const extractedComment = code.includes("sourceMappingURL") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);
  if (!extractedComment || !extractedComment.length) return null;
  const sourceMappingURL = extractedComment[1];
  // Inline map: decode the data URL directly; the "map path" is the code file itself.
  if (sourceMappingURL.startsWith("data:")) {
    const map = parseDataUrl(sourceMappingURL);
    return {
      map: parseSourceMapInput(map),
      mapPath: codePath
    };
  }
  // External map: the URL trick strips any query string / fragment from the filename.
  const sourceMapFilename = new URL(sourceMappingURL, "file://").pathname;
  const mapPath = join(dirname(codePath), sourceMapFilename);
  if (!existsSync(mapPath)) return null;
  return {
    map: parseSourceMapInput(readFileSync(mapPath, "utf-8")),
    mapPath
  };
}
/**
 * Decodes an inline source map data URL. Supports base64 and uri encodings;
 * anything else is rejected.
 */
function parseDataUrl(url) {
  const [prefix, payload] = url.split(",");
  const encoding = prefix.split(";").at(-1);
  if (encoding === "base64") return Buffer.from(payload, "base64").toString();
  if (encoding === "uri") return decodeURIComponent(payload);
  throw new Error("Unsupported source map encoding: " + encoding);
}
/**
 * Returns a `sourcesContent` array aligned with `map.sources`: existing
 * entries are kept, missing ones are read from the file system when the
 * source path exists, and `null` otherwise.
 */
function loadMissingSourcesContent(map) {
  return map.sources.map((source, index) => {
    const existing = map.sourcesContent?.[index];
    if (existing) return existing;
    if (source && existsSync(source)) return readFileSync(source, "utf-8");
    return null;
  });
}
//#endregion
//#region src/report/processors/sourcemap.ts
// Label for output bytes that no source-map segment attributes to any source.
const UNASSIGNED = "[unassigned]";
/**
 * Attributes the bytes of a generated file to the original sources listed in
 * its (decoded) source map, returning a Map of source -> size estimates.
 * Bytes covered by no mapping are credited to the "[unassigned]" bucket.
 */
function getBytesPerSource(code, map, assetSizes, config) {
  const contributions = getContributions(map.sources);
  // Split while keeping the line terminators so column arithmetic covers every byte.
  const codeLines = code.split(/(?<=\r?\n)/);
  for (let lineIndex = 0; lineIndex < codeLines.length; lineIndex++) {
    const lineCode = codeLines[lineIndex];
    // `map.mappings` is decoded: one array of segments per generated line.
    const mappings = map.mappings[lineIndex] || [];
    let currentColumn = 0;
    // `i` runs one past the last segment so the tail of the line is accounted for.
    for (let i = 0; i <= mappings.length; i++) {
      const mapping = mappings[i];
      const startColumn = mapping?.[0] ?? lineCode.length;
      const endColumn = mappings[i + 1]?.[0] ?? lineCode.length;
      // Gap before this segment: nothing maps to it, so it is unassigned.
      if (startColumn > currentColumn) contributions.set(UNASSIGNED, contributions.get(UNASSIGNED) + lineCode.slice(currentColumn, startColumn));
      if (mapping) {
        const sourceIndex = mapping?.[1];
        const codeSlice = lineCode.slice(startColumn, endColumn);
        const source = sourceIndex !== void 0 && map.sources[sourceIndex] || UNASSIGNED;
        contributions.set(source, contributions.get(source) + codeSlice);
        currentColumn = endColumn;
      } else currentColumn = startColumn;
    }
  }
  // Compress each source's contribution individually, then scale the results
  // against the whole-asset sizes (see adjustSizes).
  const sourceSizes = /* @__PURE__ */ new Map();
  const contributionsSum = {
    uncompressed: 0,
    gzip: 0,
    brotli: 0
  };
  for (const [source, codeSegment] of contributions) {
    const sizes = getSizes(codeSegment, config);
    contributionsSum.uncompressed += sizes.uncompressed;
    contributionsSum.gzip += sizes.gzip;
    contributionsSum.brotli += sizes.brotli;
    sourceSizes.set(source, sizes);
  }
  return adjustSizes(sourceSizes, assetSizes, contributionsSum, config);
}
/**
 * Computes the byte sizes of the given code. Gzip and Brotli sizes are only
 * computed when enabled in the configuration; otherwise they are reported as 0.
 */
function getSizes(code, config) {
  const uncompressed = Buffer.byteLength(code);
  const gzip = config.gzip ? gzipSync(code).length : 0;
  const brotli = config.brotli ? brotliCompressSync(code).length : 0;
  return { uncompressed, gzip, brotli };
}
/**
 * Builds the accumulator map for byte attribution: one empty-string entry per
 * non-null source, plus the "[unassigned]" bucket.
 */
function getContributions(sources) {
  const contributions = new Map();
  for (const source of sources) {
    if (source !== null) contributions.set(source, "");
  }
  contributions.set(UNASSIGNED, "");
  return contributions;
}
/**
 * Compression efficiency improves with file size, so compressing each source's
 * contribution individually (`sums`) overestimates its share of the compressed
 * bundle (`asset`). This scales every per-source compressed size by the ratio
 * between the actual compressed bundle size and the sum of individually
 * compressed contributions, yielding an estimate of each source's real share.
 */
function adjustSizes(sources, asset, sums, config) {
  const gzipRatio = config.gzip ? asset.gzip / sums.gzip : 0;
  const brotliRatio = config.brotli ? asset.brotli / sums.brotli : 0;
  for (const [source, sizes] of sources) {
    sources.set(source, {
      uncompressed: sizes.uncompressed,
      gzip: config.gzip ? Math.round(sizes.gzip * gzipRatio) : 0,
      brotli: config.brotli ? Math.round(sizes.brotli * brotliRatio) : 0
    });
  }
  return sources;
}
//#endregion
//#region src/report/processors/outputs.ts
// Only scripts and styles are analyzed through their source maps; every other
// asset type is recorded with its raw size only.
const RESOURCE_TYPES_TO_ANALYZE = ["script", "style"];
// Maps a source file to the file whose source map referenced it (filled during
// deep source-map traversal in parseSourceMap).
const parentMap = {};
/**
 * Adds an output asset to the report: scripts and styles are analyzed through
 * their source maps, everything else is recorded as a plain asset.
 */
function updateOutput(report, path, entrypoints) {
  const type = getTypeByName(path);
  if (RESOURCE_TYPES_TO_ANALYZE.includes(type)) {
    addAnalyzableType(report, path, entrypoints, type);
  } else {
    addNonAnalyzableType(report, path, type);
  }
}
/**
 * Adds simple assets (fonts, images, etc.) to the report with their sizes
 * only, without analyzing content or dependencies.
 */
function addNonAnalyzableType(report, path, type) {
  const content = readFileSync(path);
  const sizes = getSizes(content, report.config);
  const resource = {
    kind: "asset",
    name: normalizePath(path),
    type,
    ...sizes
  };
  report.addResource(resource);
}
/**
 * Adds code assets like scripts and styles to the report and analyzes their content
 * to find their sources and dependencies.
 */
function addAnalyzableType(report, path, entrypoints, type) {
  const assetName = normalizePath(path);
  const codeMap = getSource(path, report.config);
  // Without a source map there is nothing to attribute; record the raw asset.
  if (!codeMap) return addNonAnalyzableType(report, path, type);
  const { code, map } = codeMap;
  const sizes = getSizes(code, report.config);
  // Per-source byte attribution derived from the decoded source map.
  const sourcesSizes = getBytesPerSource(code, map, sizes, report.config);
  report.addResource({
    kind: "asset",
    name: assetName,
    type,
    ...sizes
  });
  // Only embed full source maps in the report when the `sources` option is on.
  if (report.config.sources) report.addSourceMap(assetName, normalizeSourceMap(map));
  entrypoints?.forEach((entry) => report.addConnection({
    kind: "entrypoint",
    source: assetName,
    target: normalizePath(entry),
    original: null
  }));
  for (const [source, sizes$1] of sourcesSizes) {
    const name = normalizePath(source);
    const type$1 = getTypeByName(source);
    // Parent is the file whose source map referenced this source (deep mode).
    const parent = parentMap[source] ? normalizePath(parentMap[source]) : null;
    const existingSource = report.resources.find((resource) => resource.name === name && resource.kind === "filesystem");
    // Sources known only from the source map (not reported by the bundler) get
    // a "sourcemap" resource with the size of their embedded content.
    if (!existingSource) {
      const index = map.sources.indexOf(source);
      const { uncompressed } = getSizes(map.sourcesContent?.[index] || "", {
        gzip: false,
        brotli: false
      });
      report.addResource({
        kind: "sourcemap",
        name,
        type: type$1,
        format: "other",
        uncompressed,
        parent: parent || null
      });
    }
    // The per-asset share of this source is recorded as a "chunk".
    report.addResource({
      kind: "chunk",
      name,
      type: type$1,
      format: existingSource?.format || "other",
      ...sizes$1,
      parent: assetName
    });
    if (parent) report.addConnection({
      kind: "sourcemap",
      source: parent,
      target: name,
      original: null
    });
  }
}
/**
 * Normalizes a source map to the shape embedded in the report, with source
 * paths converted to relative POSIX form.
 */
function normalizeSourceMap(map) {
  const sources = map.sources.map((source) => source && normalizePath(source));
  return {
    mappings: map.mappings,
    sources,
    sourcesContent: map.sourcesContent
  };
}
/**
 * Loads the code and decoded source map for the given path. Returns null when
 * the file or its source map cannot be loaded. With the `deep` option, source
 * maps of sources are followed recursively down to the original files.
 */
function getSource(path, config) {
  const loaded = loadCodeAndMap(path, config.sourcesPathNormalizer);
  if (!loaded?.map) return null;
  return {
    code: loaded.code,
    map: parseSourceMap(loaded.map, config.deep)
  };
}
/**
 * Parse the source map. If `options.deep` is set to `true`, it will
 * recursively load the source maps of the sources until it finds
 * the original source. Otherwise, it will only decode the source map.
 */
function parseSourceMap(map, deep) {
  // Guards against re-visiting a file and looping when maps reference each other.
  const alreadyRemapped = /* @__PURE__ */ new Set();
  return remapping(map, (file, ctx) => {
    // Returning undefined tells remapping to stop descending for this file.
    if (!deep || alreadyRemapped.has(file)) return;
    alreadyRemapped.add(file);
    const codeMap = loadCodeAndMap(resolve(process.cwd(), file));
    if (!codeMap) return;
    ctx.content ??= codeMap.code;
    // Remember which file referenced each nested source (used as `parent` later).
    codeMap.map?.sources.filter((source) => source !== null && file !== source).forEach((source) => parentMap[source] = file);
    return codeMap.map;
  }, { decodedMappings: true });
}
//#endregion
//#region src/report/processors/dependencies.ts
// Captures (1) the path prefix up to and including the last `node_modules/`
// and (2) the package name, including an optional @scope/ segment.
const packageNameRegExp = /(.*)(?:.*node_modules\/)(@[^\/]+\/[^\/]+|[^\/]+)/;
/**
 * Finds all external dependencies among the report's resources (anything under
 * a `node_modules/` path) and returns them with their install paths, so the
 * same package installed in several places is listed once with every path.
 */
function updateDependencies(report) {
  const dependencies = {};
  for (const file of report.resources) {
    const match = packageNameRegExp.exec(file.name);
    if (!match) continue;
    const [path, , name] = match;
    const paths = dependencies[name] ??= [];
    if (!paths.includes(path)) paths.push(path);
  }
  return Object.entries(dependencies).map(([name, paths]) => ({ name, paths }));
}
//#endregion
//#region src/report/report.ts
// Report serializers keyed by the `format` configuration option.
const formatters = {
  "html": HtmlFormatter,
  "json": JsonFormatter
};
/**
 * Accumulates resources, connections, and assets reported by an integration,
 * then produces the final report file via `generate()`.
 */
var Report = class {
  config;
  // Inputs, chunks, output assets, and sourcemap-only entries.
  resources = [];
  // Import/require/entrypoint edges between resources.
  connections = [];
  // Output asset path -> entrypoint paths, registered via addAsset().
  assets = {};
  metadata;
  dependencies = [];
  issues = [];
  sourcemaps = [];
  constructor(config) {
    this.config = config;
    this.metadata = {
      version,
      integration: config.integration,
      sources: config.sources,
      gzip: config.gzip,
      brotli: config.brotli
    };
  }
  // Records a resource; data URIs, ignored extensions, and duplicates are skipped.
  addResource(resource) {
    if (resource.name.startsWith("data:") || hasIgnoredExtension(resource.name)) return;
    const existing = this.resources.find((r) => r.kind === resource.kind && r.name === resource.name && r.parent === resource.parent);
    if (existing) return;
    this.resources.push(resource);
  }
  // Records an edge; duplicates are merged rather than appended.
  addConnection(connection) {
    if (connection.target.startsWith("data:") || hasIgnoredExtension(connection.source) || hasIgnoredExtension(connection.target) || isBuiltin(connection.target)) return;
    const existing = this.connections.find((c) => {
      return c.kind === connection.kind && c.source === connection.source && c.target === connection.target;
    });
    if (!existing) {
      this.connections.push(connection);
      return;
    }
    /**
     * If a connection already exists, update the `original` property if either connection has it.
     * If both connections have the `original` property, prioritize the shorter one because it is
     * more likely to be the original source than the absolute path.
     */
    existing.original = [connection.original, existing.original].filter((original) => original !== null).sort((a, b) => a.length - b.length)[0] || null;
  }
  // Registers an output asset and, optionally, the entrypoints that produced it.
  addAsset(name, entrypoints) {
    if (hasIgnoredExtension(name)) return;
    this.assets[name] = entrypoints;
  }
  /**
   * Analyzes all registered assets, resolves external dependencies, writes the
   * report using the configured formatter, and optionally opens it in a browser.
   */
  async generate() {
    for (const [path$1, entrypoints] of Object.entries(this.assets)) updateOutput(this, path$1, entrypoints);
    this.dependencies = updateDependencies(this);
    const formatter = new formatters[this.config.format](this.config);
    const path = await formatter.write(this.#getFormattedData());
    if (this.config.open) await open(path);
  }
  // Stores the stringified source map for an asset, at most once per asset name.
  addSourceMap(asset, sourcemap) {
    if (this.sourcemaps.some((sm) => sm.name === asset)) return;
    this.sourcemaps.push({
      name: asset,
      map: JSON.stringify(sourcemap)
    });
  }
  // Final report payload; resources and dependencies are sorted for stable output.
  #getFormattedData() {
    return {
      metadata: this.metadata,
      resources: sortByKey(this.resources, "name"),
      connections: this.connections,
      dependencies: sortByKey(this.dependencies, "name"),
      issues: this.issues,
      sourcemaps: this.sourcemaps
    };
  }
};
//#endregion
//#region src/integrations/esbuild.ts
/**
 * esbuild integration: forces metafile generation and processes the resulting
 * metafile into a report when the build ends.
 */
function SondaEsbuildPlugin(userOptions = {}) {
  const options = new Config(userOptions, { integration: "esbuild" });
  const setup = (build) => {
    if (!options.enabled) return;
    // The report is built entirely from esbuild's metafile.
    build.initialOptions.metafile = true;
    build.onEnd((result) => processEsbuildMetafile(result.metafile, options));
  };
  return { name: "sonda-esbuild", setup };
}
/**
 * Converts an esbuild metafile into a Sonda report: each input becomes a
 * filesystem resource with its import edges, each output becomes an asset
 * (with an entrypoint connection when esbuild reports one).
 */
async function processEsbuildMetafile(metafile, options) {
  const report = new Report(options);
  for (const [path, input] of Object.entries(metafile.inputs)) {
    const name = normalizePath(path);
    report.addResource({
      kind: "filesystem",
      name,
      type: getTypeByName(path),
      format: input.format || "other",
      uncompressed: input.bytes
    });
    for (const imp of input.imports) {
      report.addConnection({
        kind: connectionKindMapper$1(imp.kind),
        source: name,
        target: normalizePath(imp.path),
        original: imp.original || null
      });
    }
  }
  for (const [path, output] of Object.entries(metafile.outputs)) {
    const entryPoint = output.entryPoint;
    report.addAsset(path, entryPoint ? [entryPoint] : void 0);
    if (entryPoint) {
      report.addConnection({
        kind: "entrypoint",
        source: normalizePath(entryPoint),
        target: normalizePath(path),
        original: null
      });
    }
  }
  await report.generate();
}
/**
 * Maps esbuild's ImportKind to Sonda's ConnectionKind. Unknown kinds fall back
 * to a plain import.
 */
function connectionKindMapper$1(kind) {
  if (kind === "entry-point") return "entrypoint";
  if (kind === "require-call" || kind === "require-resolve") return "require";
  if (kind === "dynamic-import") return "dynamic-import";
  // "import-statement", "import-rule", and anything unrecognized.
  return "import";
}
//#endregion
//#region src/integrations/rollup.ts
// Rollup integration: collects modules and import edges during the build and
// generates the report in writeBundle.
function SondaRollupPlugin(userOptions = {}) {
  const options = new Config(userOptions, { integration: "rollup" });
  if (!options.enabled) return { name: "sonda-rollup" };
  const report = new Report(options);
  return {
    name: "sonda-rollup",
    // Records the import edge for every resolved specifier; `skipSelf` avoids
    // re-entering this plugin's own resolver.
    async resolveId(source, importer, options$1) {
      if (!importer) return;
      const resolved = await this.resolve(source, importer, {
        ...options$1,
        skipSelf: true
      });
      if (resolved) report.addConnection({
        kind: "import",
        source: normalizePath(importer),
        target: normalizePath(resolved.id),
        original: source
      });
    },
    // Records each parsed module as a filesystem resource with its size.
    moduleParsed(module) {
      const name = normalizePath(module.id);
      report.addResource({
        kind: "filesystem",
        name,
        type: getTypeByName(name),
        format: getModuleFormat(name, module),
        uncompressed: module.code ? Buffer.byteLength(module.code) : 0
      });
    },
    // After the bundle is written: register every emitted file (entry chunks
    // keep a link to their facade module) and generate the report.
    async writeBundle({ dir, file }, bundle) {
      const outputDir = resolve(process.cwd(), dir ?? dirname(file));
      for (const [path, asset] of Object.entries(bundle)) report.addAsset(resolve(outputDir, path), asset.type === "chunk" && asset.facadeModuleId ? [asset.facadeModuleId] : void 0);
      await report.generate();
    }
  };
}
/**
 * Determines the module format of a Rollup module: non-scripts are "other";
 * scripts are "cjs" when flagged CommonJS (by the commonjs plugin metadata or
 * a .cjs/.cts extension) and "esm" otherwise.
 */
function getModuleFormat(name, module) {
  if (getTypeByName(name) !== "script") return "other";
  const ext = extname(module.id);
  const isCommonJs = module.meta.commonjs?.isCommonJS === true || ext === ".cjs" || ext === ".cts";
  return isCommonJs ? "cjs" : "esm";
}
//#endregion
//#region src/integrations/vite.ts
/**
 * Vite integration: the Rollup plugin with Vite-specific metadata
 * (build-only, run before other plugins).
 */
function SondaVitePlugin(userOptions = {}) {
  const options = new Config(userOptions, { integration: "vite" });
  if (!options.enabled) return { name: "sonda-vite" };
  const rollupPlugin = SondaRollupPlugin(options);
  return Object.assign({}, rollupPlugin, {
    name: "sonda-vite",
    enforce: "pre",
    apply: "build"
  });
}
//#endregion
//#region src/integrations/webpack.ts
// Webpack integration: walks the module graph after assets are emitted and
// builds the report from modules, connections, and emitted assets.
var SondaWebpackPlugin = class {
  options;
  constructor(userOptions = {}) {
    this.options = new Config(userOptions, { integration: "webpack" });
  }
  apply(compiler) {
    if (!this.options.enabled) return;
    const report = new Report(this.options);
    // Namespace used in devtool module filename templates; falls back to a
    // permissive pattern when neither option is configured.
    const namespace = compiler.options.output.devtoolNamespace || compiler.options.output.library?.name || "[^/]+/";
    /**
     * Regex that matches the default Webpack source map filename format
     * (https://webpack.js.org/configuration/output/#outputdevtoolmodulefilenametemplate).
     *
     * Examples:
     * - webpack://[namespace]/[path]?[loaders]
     * - webpack://[namespace]?[loaders]
     * - [namespace]/[path]?[loaders]
     * - [path]?[loaders]
     * - All of the above without `?[loaders]`
     *
     * While it doesn't cover all possible cases, it should be enough for now.
     *
     * Regex explanation:
     * - (?:webpack://)? - Non-capturing group that matches the optional "webpack://" prefix
     * - (?:${ namespace })? - Non-capturing group that matches the optional namespace
     * - ([^?]*) - Matches the path, which is everything up to the first "?" (if present)
     */
    const sourceMapFilenameRegex = new RegExp(`(?:webpack://)?(?:${namespace})?([^?]*)`);
    compiler.hooks.afterEmit.tapPromise("SondaWebpackPlugin", async (compilation) => {
      for (const mod of compilation.modules) {
        const name = mod.nameForCondition();
        if (!name) continue;
        // For concatenated modules, prefer the inner module whose name matches —
        // presumably to read its individual size; confirm against webpack docs.
        const module = mod.modules?.find((module$1) => module$1.nameForCondition() === name) || mod;
        const normalizedName = normalizePath(name);
        report.addResource({
          kind: "filesystem",
          name: normalizedName,
          type: getTypeByName(normalizedName),
          format: getFormat(normalizedName, module),
          uncompressed: module.size()
        });
        // Record an edge for every outgoing connection to a different module.
        Array.from(compilation.moduleGraph.getOutgoingConnections(module)).filter((connection) => {
          const target = connection.module?.nameForCondition();
          return !!target && target !== name;
        }).map((connection) => ({
          kind: connectionKindMapper(connection),
          target: normalizePath(connection.module?.nameForCondition()),
          original: connection.dependency?.request
        })).forEach(({ kind, target, original }) => report.addConnection({
          kind,
          source: normalizedName,
          target,
          original
        }));
      }
      for (const name of Object.keys(compilation.assets)) {
        let entry = void 0;
        // The last chunk containing this asset determines its entry modules.
        for (const chunk of compilation.chunks) {
          if (!chunk.files.has(name)) continue;
          entry = Array.from(compilation.chunkGraph.getChunkEntryModulesIterable(chunk)).map((module) => module.nameForCondition());
        }
        report.addAsset(join(compilation.outputOptions.path, name), entry);
      }
      // Translate `webpack://` source map URLs back to file-system paths so
      // sources can be located on disk.
      this.options.sourcesPathNormalizer = (path) => {
        if (!path.startsWith("webpack://")) return resolve(process.cwd(), path);
        const [, filePath] = path.match(sourceMapFilenameRegex);
        return filePath ? resolve(process.cwd(), filePath) : UNASSIGNED;
      };
      await report.generate();
    });
  }
};
/**
 * Resolves the module format label for a resource.
 *
 * Non-script resources are always reported as "other". Scripts are classified
 * as "esm" when webpack recorded the module type "javascript/esm", and "cjs"
 * otherwise.
 */
function getFormat(name, module) {
  const isScript = getTypeByName(name) === "script";
  if (!isScript) {
    return "other";
  }
  return module.type === "javascript/esm" ? "esm" : "cjs";
}
/**
 * Translates a webpack module-graph connection into Sonda's ConnectionKind.
 *
 * The decision is based on the dependency's `category` and `type` fields:
 * `commonjs` maps to "require", an `esm` dependency created by `import()`
 * maps to "dynamic-import", and everything else (static ESM imports,
 * CSS imports, missing dependency info, unknown categories) maps to "import".
 */
function connectionKindMapper(connection) {
  const dependency = connection.dependency;
  if (!dependency) return "import";
  switch (dependency.category) {
    case "commonjs":
      return "require";
    case "esm":
      return dependency.type === "import()" ? "dynamic-import" : "import";
    case "css-import":
    default:
      return "import";
  }
}
//#endregion
export { Config, Report, SondaEsbuildPlugin, SondaRollupPlugin, SondaVitePlugin, SondaWebpackPlugin, getTypeByName, normalizePath, processEsbuildMetafile };
+2
-2

@@ -11,5 +11,5 @@ #!/usr/bin/env node

format: { type: 'string' },
filename: { type: 'string' },
outputDir: { type: 'string' },
'no-open': { type: 'boolean' },
detailed: { type: 'boolean' },
deep: { type: 'boolean' },
sources: { type: 'boolean' },

@@ -16,0 +16,0 @@ gzip: { type: 'boolean' },

# Changelog
## 0.8.0
This is the biggest release of Sonda to date, featuring a complete rewrite of both the backend and frontend. The goals of this rewrite were to:
- Make it easier to inspect large projects with multiple outputs
- Display much more information about assets, inputs, external dependencies, and import relationships
- Significantly reduce the size of the HTML report files
- Prepare the codebase for future improvements and features
### Highlights
- **New HTML report design** – The report has been redesigned to improve navigation and clarity, especially for large projects with multiple outputs. See the [demo page](https://sonda.dev/demo) for an example.
- **New JSON report format** – The JSON format has been overhauled to better differentiate resource types and expose relationships between them. For details, refer to the [JSON report](https://sonda.dev/features/json-report.html) documentation.
- **ESM-only** – Sonda is now ESM-only and requires Node.js 20.19+ or 22.12+. This change helps reduce the distribution size.
- **New output directory** – Reports are now saved to the `.sonda` directory and are suffixed with a unique number to avoid overwriting existing reports.
### Migration
If you're upgrading from version 0.7, you'll need to update your import paths and configuration.
#### Import Paths
Each integration now has its own import path. It's recommended to use the path specific to your framework or bundler, as these may include optimizations that improve performance and accuracy.
For example, if you're using Vite or Rolldown, use their dedicated import paths instead of the generic Rollup integration path.
Available import paths:
- `sonda/angular`
- `sonda/astro`
- `sonda/esbuild`
- `sonda/next`
- `sonda/nuxt`
- `sonda/rolldown`
- `sonda/rollup`
- `sonda/rspack`
- `sonda/sveltekit`
- `sonda/vite`
- `sonda/webpack`
#### Configuration
- The `filename` option has been removed and replaced with `outputDir`, which defaults to `.sonda`. All reports are saved to this directory, and filenames are suffixed with a unique number to prevent overwriting.
- The `detailed` option has been renamed to `deep`.
#### JSON Report
The JSON report format has been completely redesigned. For complete details, refer to the updated [JSON report](https://sonda.dev/features/json-report.html) documentation.
---
### Major Changes
- 0c0113f: BREAKING CHANGE: Drop support for Node 18 and require at least Node 20.19 or 22.12
- 0c0113f: BREAKING CHANGE: Distribute only the ESM builds
- 0c0113f: BREAKING CHANGE: Rename the `detailed` configuration option to `deep`.
- 0c0113f: BREAKING CHANGE: Reports are now saved to the `.sonda` folder by default. The `filename` configuration option has been replaced with the `outputDir` option. Each new report filename ends with an incremented number to avoid overwriting previous reports. For example:
- `.sonda/sonda_1.html`
- `.sonda/sonda_2.html`
- `.sonda/sonda_3.html`
- 0c0113f: BREAKING CHANGE: Change format of the JSON report
- 0c0113f: BREAKING CHANGE: Redesign the HTML report
### Minor Changes
- 0c0113f: Add new `sonda/rolldown`, `sonda/rspack` and `sonda/vite` entrypoints
- 0c0113f: GZIP data in HTML report to reduce its size
### Patch Changes
- 0c0113f: Run Vite integration only in production build
- 0c0113f: Fix detection of input files in Next.js integration
- 0c0113f: Update all dependencies
- 0c0113f: Don't change paths in sourcemaps generated by webpack and add handling webpack specific path formats
## 0.7.1

@@ -37,2 +114,4 @@

---
### Minor Changes

@@ -39,0 +118,0 @@

@@ -1,7 +0,14 @@

import type { UserOptions } from '../types';
import { UserOptions } from "sonda";
//#region src/entrypoints/angular.d.ts
interface AngularUserOptions extends UserOptions {
config: string;
projects: string[];
config: string;
projects: string[];
}
export default function SondaAngular(options?: Partial<AngularUserOptions>): void;
export {};
declare function SondaAngular({
config,
projects,
...userOptions
}: AngularUserOptions): Promise<void>;
//#endregion
export { SondaAngular as default };

@@ -1,3 +0,7 @@

import type { AstroIntegration } from 'astro';
import type { FrameworkUserOptions } from '../types';
export default function SondaAstroPlugin(options?: Partial<FrameworkUserOptions>): AstroIntegration;
import { UserOptions } from "sonda";
import { AstroIntegration } from "astro";
//#region src/entrypoints/astro.d.ts
declare function SondaAstroPlugin(userOptions?: UserOptions): AstroIntegration;
//#endregion
export { SondaAstroPlugin as default };

@@ -1,4 +0,2 @@

import type { Metafile, Plugin } from 'esbuild';
import type { UserOptions } from '../types.js';
export default function SondaEsbuildPlugin(options?: Partial<UserOptions>): Plugin;
export declare function processEsbuildMetaFile(metafile: Metafile, options: Partial<UserOptions>): void;
import { SondaEsbuildPlugin } from "sonda";
export { SondaEsbuildPlugin as default };

@@ -1,3 +0,7 @@

import type { NextConfig } from 'next';
import type { FrameworkUserOptions } from '../types.js';
export default function SondaNextPlugin(options?: Partial<FrameworkUserOptions>): (nextConfig?: NextConfig) => NextConfig;
import { UserOptions } from "sonda";
import { NextConfig } from "next";
//#region src/entrypoints/next.d.ts
declare function SondaNextPlugin(userOptions?: UserOptions): (nextConfig?: NextConfig) => NextConfig;
//#endregion
export { SondaNextPlugin as default };

@@ -1,3 +0,7 @@

import type { NuxtModule } from '@nuxt/schema';
import type { FrameworkUserOptions } from '../types';
export default function SondaNuxtPlugin(options?: Partial<FrameworkUserOptions>): NuxtModule;
import { UserOptions } from "sonda";
import { NuxtModule } from "@nuxt/schema";
//#region src/entrypoints/nuxt.d.ts
declare function SondaNuxtPlugin(userOptions?: UserOptions): NuxtModule;
//#endregion
export { SondaNuxtPlugin as default };

@@ -1,3 +0,2 @@

import { type UserOptions } from '../index.js';
import type { Plugin } from 'rollup';
export default function SondaRollupPlugin(options?: Partial<UserOptions>): Plugin;
import { SondaRollupPlugin } from "sonda";
export { SondaRollupPlugin as default };

@@ -1,3 +0,7 @@

import type { PluginOption } from 'vite';
import type { FrameworkUserOptions } from '../types';
export default function SondaSvelteKitPlugin(options?: Partial<FrameworkUserOptions>): PluginOption;
import { UserOptions } from "sonda";
import { PluginOption } from "vite";
//#region src/entrypoints/sveltekit.d.ts
declare function SondaSvelteKitPlugin(userOptions?: UserOptions): PluginOption;
//#endregion
export { SondaSvelteKitPlugin as default };

@@ -1,7 +0,2 @@

import { type UserOptions } from '../index.js';
import type { Compiler } from 'webpack';
export default class SondaWebpackPlugin {
options: Partial<UserOptions>;
constructor(options?: Partial<UserOptions>);
apply(compiler: Compiler): void;
}
import { SondaWebpackPlugin } from "sonda";
export { SondaWebpackPlugin as default };

@@ -1,4 +0,433 @@

export { generateReportFromAssets } from './report/generate.js';
export { addSourcesToInputs } from './sourcemap/map.js';
export { esmRegex, cjsRegex, jsRegexp, normalizePath } from './utils.js';
export type * from './types.js';
import { DecodedSourceMap } from "@ampproject/remapping";
import { PluginOption } from "vite";
import { SourcesPathNormalizer } from "load-source-map";
import { Metafile, Plugin } from "esbuild";
import { Plugin as Plugin$1 } from "rollup";
import { Compiler } from "webpack";
//#region src/config.d.ts
declare class Config implements Required<IntegrationOptions> {
#private;
constructor(options: Partial<IntegrationOptions> | Config, defaults: IntegrationOptions);
clone(): Config;
get enabled(): boolean;
get format(): Format;
get outputDir(): string;
get open(): boolean;
get deep(): boolean;
get sources(): boolean;
get gzip(): boolean;
get brotli(): boolean;
get server(): boolean;
get integration(): Integration;
get filename(): string;
get sourcesPathNormalizer(): SourcesPathNormalizer;
set filename(filename: string);
set sourcesPathNormalizer(normalizer: SourcesPathNormalizer);
}
interface UserOptions {
/**
* Specifies whether the plugin is enabled.
*
* @default true
*/
enabled?: boolean;
/**
* Specifies the output format of the report.
*
* @default 'html'
*/
format?: Format;
/**
* Specifies the name of the directory where the report will be saved.
*
* @default '.sonda'
*/
outputDir?: string;
/**
* Specifies whether to automatically open the report in the default program for
* the given file extension (`.html` or `.json`, depending on the `format` option)
* after the build process.
*
* @default false
*/
open?: boolean;
/**
* Specifies whether to read the source maps of imported modules.
*
* By default, external dependencies bundled into a single file appear as a single
* asset in the report. When this option is enabled, the report includes the source
* files of imported modules, if source maps are available.
*
* Enabling this option may increase the time needed to generate the report and reduce
* the accuracy of estimated GZIP and Brotli sizes for individual files.
*
* @default false
*/
deep?: boolean;
/**
* Specifies whether to include source maps of the assets in the report to visualize
* which parts of the code contribute to the final asset size.
*
* ⚠️ This option significantly increases the size of the report and embeds the
* **source code** of the assets. If you are working with proprietary code, ensure
* you share the report responsibly. ⚠️
*
* @default false
*/
sources?: boolean;
/**
* Specifies whether to calculate the sizes of assets after compression with GZIP.
*
* The report includes estimated compressed sizes for each file within an asset.
* However, these estimates are approximate and should be used as a general reference.
*
* Enabling this option may increase the time required to generate the report.
*
* @default false
*/
gzip?: boolean;
/**
* Specifies whether to calculate the sizes of assets after compression with Brotli.
*
* The report includes estimated compressed sizes for each file within an asset.
* However, these estimates are approximate and should be used as a general reference.
*
* Enabling this option may increase the time required to generate the report.
*
* @default false
*/
brotli?: boolean;
/**
* Specifies whether to generate a report for the server build.
*
* This option is only available for meta-framework integrations.
*
* @default false
*/
server?: boolean;
}
interface IntegrationOptions extends UserOptions {
/**
* Specifies the integration used to generate the report.
*/
integration: Integration;
/**
* Specifies the name of the file where the report will be saved.
*
* @default 'sonda'
*/
filename?: string;
/**
* Normalizes the paths in source maps to a consistent format.
*
* @default null
*/
sourcesPathNormalizer?: SourcesPathNormalizer;
}
type Format = "html" | "json";
type Integration = "angular" | "astro" | "esbuild" | "next" | "nuxt" | "rolldown" | "rollup" | "rspack" | "sveltekit" | "vite" | "webpack" | "unknown";
//#endregion
//#region src/integrations/esbuild.d.ts
declare function SondaEsbuildPlugin(userOptions?: UserOptions): Plugin;
declare function processEsbuildMetafile(metafile: Metafile, options: Config): Promise<void>;
//#endregion
//#region src/integrations/rollup.d.ts
declare function SondaRollupPlugin(userOptions?: UserOptions): Plugin$1;
//#endregion
//#region src/integrations/vite.d.ts
declare function SondaVitePlugin(userOptions?: UserOptions): PluginOption;
//#endregion
//#region src/integrations/webpack.d.ts
declare class SondaWebpackPlugin {
options: Config;
constructor(userOptions?: UserOptions);
apply(compiler: Compiler): void;
}
//#endregion
//#region src/report/types.d.ts
interface JsonReport {
/**
* Metadata about the report, including the version of Sonda used to generate it,
* the integration used, and the options passed to Sonda.
*/
metadata: Metadata;
/**
* List of all input and output resources.
*/
resources: Array<Resource>;
/**
* List of all connections between resources.
*/
connections: Array<Connection>;
/**
* List of all detected external dependencies and their paths. If
* a dependency has more than one path, it's likely duplicated and
* bundled in multiple copies.
*/
dependencies: Array<Dependency>;
/**
* List of issues detected in the outputs.
*/
issues: Array<Issue>;
/**
* Partial source maps of the "asset" resources.
*
* This value is only available when the `deep` option is enabled.
*/
sourcemaps: Array<SourceMap>;
}
interface Metadata {
/**
* Version of Sonda used to generate the report.
*/
version: string;
/**
* Integration used to generate the report.
*/
integration: Integration;
/**
* The normalized value of the `sources` option passed to Sonda.
*/
sources: boolean;
/**
* The normalized value of the `gzip` option passed to Sonda.
*/
gzip: boolean;
/**
* The normalized value of the `brotli` option passed to Sonda.
*/
brotli: boolean;
}
/**
* Base interface for all resources.
*
* Resources represent the following inputs:
*
* ┌─────────────────────────────┐
* │ │
* │ OPTIONAL ORIGINAL SOURCE │
* │ │
* └──────────────▲──────────────┘
* │
* │
* VIA SOURCEMAP
* │
* │
* ┌─────────────────────────────┐ ┌──────────────┼──────────────┐
* │ │ │ │
* │ INTERNAL SOURCE │ │ EXTERNAL SOURCE │
* │ │ │ │
* └──────────────┬──────────────┘ └──────────────┬──────────────┘
* │ │
* │ │
* └─────────────────┬─────────────────┘
* │
* │
* ┌──────────────▼──────────────┐
* │ │
* │ BUNDLER │
* │ │
* └──────────────┬──────────────┘
* │
* │
* │
* │
* │
* ┌──────────────▼──────────────┐
* │ │
* │ ASSET │
* │ │
* └──────────────┬──────────────┘
* │
* │
* VIA SOURCEMAP
* │
* │
* ┌──────────────▼──────────────┐
* │ │
* │ CHUNK │
* │ │
* └─────────────────────────────┘
*/
interface ResourceBase {
  /**
   * Information about where the resource comes from.
   */
  kind: ResourceKind;
  /**
   * Relative path to the resource.
   *
   * If the `kind` is `sourcemap`, the file may not exist in the filesystem.
   */
  name: string;
  /**
   * Type of the resource, determined by the file extension.
   */
  type: FileType;
  /**
   * Format of the module, if the resource type is `script`.
   */
  format?: ModuleFormat;
  /**
   * Size of the resource without any compression.
   */
  uncompressed: number;
  /**
   * Size of the resource after GZIP compression.
   *
   * This value is only available when the `gzip` option is enabled.
   */
  gzip?: number;
  /**
   * Size of the resource after Brotli compression.
   *
   * This value is only available when the `brotli` option is enabled.
   */
  brotli?: number;
  /**
   * Parent of the resource.
   *
   * If the `kind` is `chunk`, this resource is part of an output
   * asset, and the value of `parent` is the name of that output asset.
   *
   * If the `kind` is `sourcemap`, this resource is part of the source
   * map of another resource, and the value of `parent` is the name of that resource.
   */
  parent?: string | Array<string> | null;
}
/**
* Input resource loaded from the filesystem by the bundler.
*
* See INTERNAL SOURCE and EXTERNAL SOURCE in the diagram above.
*/
interface FilesystemResource extends ResourceBase {
kind: "filesystem";
name: string;
type: FileType;
format: ModuleFormat;
uncompressed: number;
gzip?: never;
brotli?: never;
parent?: never;
}
/**
* Input resource read from a sourcemap of the filesystem resource.
*
* See OPTIONAL ORIGINAL SOURCE in the diagram above.
*/
interface SourcemapResource extends ResourceBase {
kind: "sourcemap";
name: string;
type: FileType;
format: ModuleFormat;
uncompressed: number;
gzip?: never;
brotli?: never;
parent: string | null;
}
/**
* Output resource generated by the bundler.
*
* See ASSET in the diagram above.
*/
interface AssetResource extends ResourceBase {
kind: "asset";
name: string;
type: FileType;
format?: never;
uncompressed: number;
gzip: number;
brotli: number;
parent?: never;
}
/**
* Part of the input resource that was used in one of the assets
* (after tree-shaking, minification, etc.).
*
* See CHUNK in the diagram above.
*/
interface ChunkResource extends ResourceBase {
kind: "chunk";
name: string;
type: FileType;
format: ModuleFormat;
uncompressed: number;
gzip: number;
brotli: number;
parent: string;
}
interface Connection {
  /** Kind of the relationship, e.g. static import, require, or dynamic import. */
  kind: ConnectionKind;
  /** Name of the resource the connection originates from. */
  source: string;
  /** Name of the resource the connection points to. */
  target: string;
  /** Original request of the dependency as written in the source, if available. */
  original: string | null;
}
interface Dependency {
  /** Name of the external dependency. */
  name: string;
  /** Paths where the dependency was found; more than one path suggests duplicated copies. */
  paths: Array<string>;
}
interface Issue {
  /** Identifier of the detected issue type. */
  type: string;
  /** Issue-specific payload; shape depends on `type`. */
  data: unknown;
}
/**
* All types of resources.
*/
type Resource = FilesystemResource | SourcemapResource | AssetResource | ChunkResource;
type Sizes = Required<Pick<ResourceBase, "uncompressed" | "gzip" | "brotli">>;
type ResourceKind = "filesystem" | "sourcemap" | "asset" | "chunk";
type ConnectionKind = "entrypoint" | "import" | "require" | "dynamic-import" | "sourcemap";
type FileType = "component" | "font" | "image" | "script" | "style" | "other";
type ModuleFormat = "esm" | "cjs" | "amd" | "umd" | "iife" | "system" | "other";
/**
* Type for the source map strings from the report after decoding.
*/
interface SourceMap {
/**
* Name of the asset file that the source map belongs to.
*/
name: string;
/**
* Stringified source map.
*
* Use the `DecodedMap` type for the decoded version of this map (after `JSON.parse()`).
*/
map: string;
}
type DecodedReportSourceMap = Pick<DecodedSourceMap, "mappings" | "sources" | "sourcesContent">;
//#endregion
//#region src/report/report.d.ts
declare class Report {
#private;
readonly config: Config;
readonly resources: Array<Resource>;
readonly connections: Array<Connection>;
readonly assets: Record<string, Array<string> | undefined>;
protected metadata: Metadata;
protected dependencies: Array<Dependency>;
protected issues: Array<Issue>;
protected sourcemaps: Array<SourceMap>;
constructor(config: Config);
addResource(resource: Resource): void;
addConnection(connection: Connection): void;
addAsset(name: string, entrypoints?: Array<string>): void;
generate(): Promise<void>;
addSourceMap(asset: string, sourcemap: DecodedReportSourceMap): void;
}
//#endregion
//#region src/utils.d.ts
/**
* Normalizes a given path by removing leading null characters and converting it to a relative POSIX path.
*/
declare function normalizePath(pathToNormalize: string): string;
/**
* Returns the type of a given file based on its name.
*/
declare function getTypeByName(name: string): FileType;
/**
* Returns only the object keys which have a string value.
*/
//#endregion
export { AssetResource, ChunkResource, Config, Connection, ConnectionKind, DecodedReportSourceMap, Dependency, FileType, FilesystemResource, Integration, IntegrationOptions, Issue, JsonReport, Metadata, ModuleFormat, Report, Resource, ResourceBase, ResourceKind, Sizes, SondaEsbuildPlugin, SondaRollupPlugin, SondaVitePlugin, SondaWebpackPlugin, SourceMap, SourcemapResource, UserOptions, getTypeByName, normalizePath, processEsbuildMetafile };
{
"name": "sonda",
"version": "0.7.1",
"version": "0.8.0",
"description": "Universal visualizer and analyzer for JavaScript and CSS bundles. Works with most popular bundlers and frameworks.",

@@ -23,2 +23,5 @@ "keywords": [

],
"engines": {
"node": ">=20.19 || >=22.12"
},
"license": "MIT",

@@ -33,62 +36,14 @@ "type": "module",

"exports": {
".": {
"types": "./dist/index.d.ts",
"import": "./dist/index.mjs",
"require": "./dist/index.cjs"
},
"./angular": {
"types": "./dist/entrypoints/angular.d.ts",
"import": "./dist/entrypoints/angular.mjs",
"require": "./dist/entrypoints/angular.cjs"
},
"./astro": {
"types": "./dist/entrypoints/astro.d.ts",
"import": "./dist/entrypoints/astro.mjs",
"require": "./dist/entrypoints/astro.cjs"
},
"./esbuild": {
"types": "./dist/entrypoints/esbuild.d.ts",
"import": "./dist/entrypoints/esbuild.mjs",
"require": "./dist/entrypoints/esbuild.cjs"
},
"./next": {
"types": "./dist/entrypoints/next.d.ts",
"import": "./dist/entrypoints/next.mjs",
"require": "./dist/entrypoints/next.cjs"
},
"./nuxt": {
"types": "./dist/entrypoints/nuxt.d.ts",
"import": "./dist/entrypoints/nuxt.mjs",
"require": "./dist/entrypoints/nuxt.cjs"
},
"./rollup": {
"types": "./dist/entrypoints/rollup.d.ts",
"import": "./dist/entrypoints/rollup.mjs",
"require": "./dist/entrypoints/rollup.cjs"
},
"./rolldown": {
"types": "./dist/entrypoints/rollup.d.ts",
"import": "./dist/entrypoints/rollup.mjs",
"require": "./dist/entrypoints/rollup.cjs"
},
"./sveltekit": {
"types": "./dist/entrypoints/sveltekit.d.ts",
"import": "./dist/entrypoints/sveltekit.mjs",
"require": "./dist/entrypoints/sveltekit.cjs"
},
"./rspack": {
"types": "./dist/entrypoints/webpack.d.ts",
"import": "./dist/entrypoints/webpack.mjs",
"require": "./dist/entrypoints/webpack.cjs"
},
"./vite": {
"types": "./dist/entrypoints/rollup.d.ts",
"import": "./dist/entrypoints/rollup.mjs",
"require": "./dist/entrypoints/rollup.cjs"
},
"./webpack": {
"types": "./dist/entrypoints/webpack.d.ts",
"import": "./dist/entrypoints/webpack.mjs",
"require": "./dist/entrypoints/webpack.cjs"
},
".": "./dist/index.js",
"./angular": "./dist/entrypoints/angular.js",
"./astro": "./dist/entrypoints/astro.js",
"./esbuild": "./dist/entrypoints/esbuild.js",
"./next": "./dist/entrypoints/next.js",
"./nuxt": "./dist/entrypoints/nuxt.js",
"./rolldown": "./dist/entrypoints/rolldown.js",
"./rollup": "./dist/entrypoints/rollup.js",
"./rspack": "./dist/entrypoints/rspack.js",
"./sveltekit": "./dist/entrypoints/sveltekit.js",
"./vite": "./dist/entrypoints/vite.js",
"./webpack": "./dist/entrypoints/webpack.js",
"./package.json": "./package.json"

@@ -106,5 +61,4 @@ },

"@ampproject/remapping": "^2.3.0",
"@jridgewell/sourcemap-codec": "^1.5.0",
"open": "^10.1.0"
"open": "^10.1.2"
}
}

@@ -28,5 +28,1 @@ # Sonda

You can try Sonda at [https://sonda.dev/demo](https://sonda.dev/demo).
## Screenshot
![HTML report generated by Sonda with open modal containing file details and tree map diagram in the background](https://raw.githubusercontent.com/filipsobol/sonda/refs/heads/main/docs/public/details.jpg)
"use strict";
const require_src = require('../src.cjs');
const require_esbuild = require('../esbuild.cjs');
const path = require_src.__toESM(require("path"));
const fs = require_src.__toESM(require("fs"));
//#region src/entrypoints/angular.ts
// Generates a Sonda report for every project in an Angular workspace.
// Reads the Angular workspace config (default "angular.json"), locates each
// project's esbuild "stats.json", rewrites its output paths to real on-disk
// locations, and hands the metafile to the esbuild report generator.
function SondaAngular(options = {}) {
	const cwd = process.cwd();
	const { config = "angular.json", projects = [],...opts } = options;
	opts.format ??= "html";
	opts.filename ??= `sonda-report-[project].${opts.format}`;
	// Angular workspaces can contain multiple projects, so the filename must
	// carry the "[project]" token to yield one report file per project.
	if (!opts.filename.includes("[project]")) throw new Error("SondaAngular: The \"filename\" option must include the \"[project]\" token.");
	const angularConfig = loadJson(config);
	// When no explicit project list is given, process every project in the workspace.
	const projectsToGenerate = projects.length ? projects : Object.keys(angularConfig.projects);
	for (const project of projectsToGenerate) {
		const { outputPath } = angularConfig.projects[project].architect.build.options;
		// "outputPath" may be a plain string or an object with base/browser/server keys.
		const paths = typeof outputPath === "object" ? outputPath : { base: outputPath };
		paths.base = (0, path.resolve)(cwd, paths.base);
		paths.browser = (0, path.resolve)(paths.base, paths.browser || "browser");
		paths.server = (0, path.resolve)(paths.base, paths.server || "server");
		const metafile = updateMetafile(loadJson((0, path.resolve)(paths.base, "stats.json")), paths);
		// The parsed options are shared between projects — clone before mutating.
		const sondaOptions = Object.assign({}, opts);
		sondaOptions.filename = sondaOptions.filename.replace("[project]", project);
		require_esbuild.processEsbuildMetaFile(metafile, sondaOptions);
	}
}
// Reads a JSON file (resolved against the current working directory) and parses it.
function loadJson(path$1) {
	return JSON.parse((0, fs.readFileSync)((0, path.resolve)(process.cwd(), path$1), "utf8"));
}
/**
 * Output paths in metafile only include file name, without the relative path from the current
 * working directory. For example, in the metafile the output path is "main-xxx.js", but in the
 * file system it's "dist/project/browser/en/main-xxx.js". This function updates the output paths
 * to include the relative path from the current working directory.
 */
function updateMetafile(metafile, paths) {
	const cwd = process.cwd();
	// Clone the original outputs so the map can be rebuilt with corrected keys.
	const outputs = Object.assign({}, metafile.outputs);
	metafile.outputs = {};
	// Walk the output directory recursively and re-key each known output
	// (matched by bare file name) under its cwd-relative path.
	for (const path$1 of (0, fs.readdirSync)(paths.base, {
		encoding: "utf8",
		recursive: true
	})) {
		const absolutePath = (0, path.resolve)(paths.base, path$1);
		const filename = (0, path.basename)(absolutePath);
		const originalOutput = outputs[filename];
		// Entries with no matching original output (e.g. directories) are dropped.
		if (originalOutput) metafile.outputs[(0, path.relative)(cwd, absolutePath)] = originalOutput;
	}
	return metafile;
}
//#endregion
module.exports = SondaAngular;
//# sourceMappingURL=angular.cjs.map
{"version":3,"file":"angular.cjs","names":["options: Partial<AngularUserOptions>","paths: Paths","path: string","path","metafile: Metafile"],"sources":["../../src/entrypoints/angular.ts"],"sourcesContent":["import { readFileSync, readdirSync } from 'fs';\nimport { basename, relative, resolve } from 'path';\nimport type { UserOptions } from '../types';\nimport type { Metafile } from 'esbuild';\nimport { processEsbuildMetaFile } from './esbuild';\n\ninterface AngularUserOptions extends UserOptions {\n config: string;\n projects: string[];\n}\n\ninterface Paths {\n base: string;\n browser: string;\n server: string;\n}\n\nexport default function SondaAngular( options: Partial<AngularUserOptions> = {} ): void {\n const cwd = process.cwd();\n const {\n config = 'angular.json',\n projects = [],\n ...opts\n } = options;\n\n opts.format ??= 'html';\n opts.filename ??= `sonda-report-[project].${ opts.format }`;\n\n // Angular workspaces can have multiple projects, so we need to generate a report for each\n if ( !opts.filename.includes( '[project]' ) ) {\n throw new Error( 'SondaAngular: The \"filename\" option must include the \"[project]\" token.' );\n }\n\n const angularConfig = loadJson( config );\n const projectsToGenerate = projects.length ? projects : Object.keys( angularConfig.projects );\n\n for ( const project of projectsToGenerate ) {\n const { outputPath } = angularConfig.projects[ project ].architect.build.options;\n const paths: Paths = typeof outputPath === 'object'\n ? 
outputPath\n : { base: outputPath };\n\n paths.base = resolve( cwd, paths.base );\n paths.browser = resolve( paths.base, paths.browser || 'browser' );\n paths.server = resolve( paths.base, paths.server || 'server' );\n\n const metafile = updateMetafile(\n loadJson<Metafile>( resolve( paths.base, 'stats.json' ) ),\n paths\n );\n\n // Because this configuration is shared between multiple projects, we need to clone it\n const sondaOptions = Object.assign( {}, opts );\n\n // Replace the \"[project]\" token with the current project name\n sondaOptions.filename = sondaOptions.filename!.replace( '[project]', project );\n\n processEsbuildMetaFile( metafile, sondaOptions );\n }\n}\n\nfunction loadJson<T extends any = any>( path: string ): T {\n return JSON.parse(\n readFileSync( resolve( process.cwd(), path ), 'utf8' )\n );\n}\n\n/**\n * Output paths in metafile only include file name, without the relative path from the current\n * working directory. For example, in the metafile the output path is \"main-xxx.js\", but in the\n * file system it's \"dist/project/browser/en/main-xxx.js\". 
This function updates the output paths\n * to include the relative path from the current working directory.\n */\nfunction updateMetafile(\n metafile: Metafile,\n paths: Paths\n): Metafile {\n const cwd = process.cwd();\n\n // Clone the original outputs object\n const outputs = Object.assign( {}, metafile.outputs );\n\n // Reset the outputs\n metafile.outputs = {};\n\n for ( const path of readdirSync( paths.base, { encoding: 'utf8', recursive: true } ) ) {\n const absolutePath = resolve( paths.base, path );\n const filename = basename( absolutePath );\n const originalOutput = outputs[ filename ];\n\n // If the output file name exists in the original outputs, add the updated relative path\n if ( originalOutput ) {\n metafile.outputs[ relative( cwd, absolutePath ) ] = originalOutput;\n }\n }\n\n return metafile;\n}\n"],"mappings":";;;;;;;AAiBe,SAAS,aAAcA,UAAuC,CAAE,GAAS;CACtF,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,EACJ,SAAS,gBACT,WAAW,CAAE,EACb,GAAG,MACJ,GAAG;AAEJ,MAAK,WAAW;AAChB,MAAK,cAAc,yBAA0B,KAAK,OAAQ;AAG1D,MAAM,KAAK,SAAS,SAAU,YAAa,CACzC,OAAM,IAAI,MAAO;CAGnB,MAAM,gBAAgB,SAAU,OAAQ;CACxC,MAAM,qBAAqB,SAAS,SAAS,WAAW,OAAO,KAAM,cAAc,SAAU;AAE7F,MAAM,MAAM,WAAW,oBAAqB;EAC1C,MAAM,EAAE,YAAY,GAAG,cAAc,SAAU,SAAU,UAAU,MAAM;EACzE,MAAMC,eAAsB,eAAe,WACvC,aACA,EAAE,MAAM,WAAY;AAExB,QAAM,OAAO,kBAAS,KAAK,MAAM,KAAM;AACvC,QAAM,UAAU,kBAAS,MAAM,MAAM,MAAM,WAAW,UAAW;AACjE,QAAM,SAAS,kBAAS,MAAM,MAAM,MAAM,UAAU,SAAU;EAE9D,MAAM,WAAW,eACf,SAAoB,kBAAS,MAAM,MAAM,aAAc,CAAE,EACzD,MACD;EAGD,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,KAAM;AAG9C,eAAa,WAAW,aAAa,SAAU,QAAS,aAAa,QAAS;AAE9E,yCAAwB,UAAU,aAAc;CACjD;AACF;AAED,SAAS,SAA+BC,QAAkB;AACxD,QAAO,KAAK,MACV,qBAAc,kBAAS,QAAQ,KAAK,EAAEC,OAAM,EAAE,OAAQ,CACvD;AACF;;;;;;;AAQD,SAAS,eACPC,UACAH,OACU;CACV,MAAM,MAAM,QAAQ,KAAK;CAGzB,MAAM,UAAU,OAAO,OAAQ,CAAE,GAAE,SAAS,QAAS;AAGrD,UAAS,UAAU,CAAE;AAErB,MAAM,MAAME,UAAQ,oBAAa,MAAM,MAAM;EAAE,UAAU;EAAQ,WAAW;CAAM,EAAE,EAAG;EACrF,MAAM,eAAe,kBAAS,MAAM,MAAMA,OAAM;EAChD,MAAM,WAAW,mBAAU,aAAc;EACzC,MAAM,iBAAiB,QAAS;AAGhC,MAAK,eACH,UAAS,QAAS,mBAAU,K
AAK,aAAc,IAAK;CAEvD;AAED,QAAO;AACR"}
import "../src.mjs";
import { processEsbuildMetaFile } from "../esbuild.mjs";
import { basename, relative, resolve } from "path";
import { readFileSync, readdirSync } from "fs";
//#region src/entrypoints/angular.ts
/**
 * Generates a Sonda report for every project in an Angular workspace.
 *
 * Reads the Angular workspace config (default "angular.json"), locates each
 * project's esbuild "stats.json", rewrites its output paths to real on-disk
 * locations, and passes the metafile to the esbuild report generator.
 * Throws when the configured filename lacks the "[project]" token.
 */
function SondaAngular(options = {}) {
  const cwd = process.cwd();
  const { config = "angular.json", projects = [], ...opts } = options;
  opts.format ??= "html";
  opts.filename ??= `sonda-report-[project].${opts.format}`;
  // One report is produced per project, so the filename must be parameterized.
  if (!opts.filename.includes("[project]")) {
    throw new Error('SondaAngular: The "filename" option must include the "[project]" token.');
  }
  const angularConfig = loadJson(config);
  // Fall back to every workspace project when no explicit list was given.
  const selectedProjects = projects.length ? projects : Object.keys(angularConfig.projects);
  for (const projectName of selectedProjects) {
    const { outputPath } = angularConfig.projects[projectName].architect.build.options;
    // "outputPath" may be a plain string or an object with base/browser/server keys.
    const outputPaths = typeof outputPath === "object" ? outputPath : { base: outputPath };
    outputPaths.base = resolve(cwd, outputPaths.base);
    outputPaths.browser = resolve(outputPaths.base, outputPaths.browser || "browser");
    outputPaths.server = resolve(outputPaths.base, outputPaths.server || "server");
    const metafile = updateMetafile(loadJson(resolve(outputPaths.base, "stats.json")), outputPaths);
    // Options are shared across projects — work on a copy before mutating.
    const projectOptions = Object.assign({}, opts);
    projectOptions.filename = projectOptions.filename.replace("[project]", projectName);
    processEsbuildMetaFile(metafile, projectOptions);
  }
}
/**
 * Reads a JSON file and returns the parsed value. Relative paths are
 * resolved against the current working directory.
 */
function loadJson(path) {
  const absolutePath = resolve(process.cwd(), path);
  const content = readFileSync(absolutePath, "utf8");
  return JSON.parse(content);
}
/**
 * Output paths in the metafile only include the file name, without the relative
 * path from the current working directory. For example, the metafile lists
 * "main-xxx.js" while the file actually lives at
 * "dist/project/browser/en/main-xxx.js". This function re-keys the metafile's
 * `outputs` entries by their cwd-relative on-disk paths.
 */
function updateMetafile(metafile, paths) {
  const cwd = process.cwd();
  // Snapshot the original outputs, then rebuild the map from scratch.
  const originalOutputs = { ...metafile.outputs };
  metafile.outputs = {};
  const entries = readdirSync(paths.base, { encoding: "utf8", recursive: true });
  for (const entry of entries) {
    const absolutePath = resolve(paths.base, entry);
    const matchedOutput = originalOutputs[basename(absolutePath)];
    // Entries without a matching original output (e.g. directories) are dropped.
    if (matchedOutput) {
      metafile.outputs[relative(cwd, absolutePath)] = matchedOutput;
    }
  }
  return metafile;
}
//#endregion
export { SondaAngular as default };
//# sourceMappingURL=angular.mjs.map
{"version":3,"file":"angular.mjs","names":["options: Partial<AngularUserOptions>","paths: Paths","path: string","metafile: Metafile"],"sources":["../../src/entrypoints/angular.ts"],"sourcesContent":["import { readFileSync, readdirSync } from 'fs';\nimport { basename, relative, resolve } from 'path';\nimport type { UserOptions } from '../types';\nimport type { Metafile } from 'esbuild';\nimport { processEsbuildMetaFile } from './esbuild';\n\ninterface AngularUserOptions extends UserOptions {\n config: string;\n projects: string[];\n}\n\ninterface Paths {\n base: string;\n browser: string;\n server: string;\n}\n\nexport default function SondaAngular( options: Partial<AngularUserOptions> = {} ): void {\n const cwd = process.cwd();\n const {\n config = 'angular.json',\n projects = [],\n ...opts\n } = options;\n\n opts.format ??= 'html';\n opts.filename ??= `sonda-report-[project].${ opts.format }`;\n\n // Angular workspaces can have multiple projects, so we need to generate a report for each\n if ( !opts.filename.includes( '[project]' ) ) {\n throw new Error( 'SondaAngular: The \"filename\" option must include the \"[project]\" token.' );\n }\n\n const angularConfig = loadJson( config );\n const projectsToGenerate = projects.length ? projects : Object.keys( angularConfig.projects );\n\n for ( const project of projectsToGenerate ) {\n const { outputPath } = angularConfig.projects[ project ].architect.build.options;\n const paths: Paths = typeof outputPath === 'object'\n ? 
outputPath\n : { base: outputPath };\n\n paths.base = resolve( cwd, paths.base );\n paths.browser = resolve( paths.base, paths.browser || 'browser' );\n paths.server = resolve( paths.base, paths.server || 'server' );\n\n const metafile = updateMetafile(\n loadJson<Metafile>( resolve( paths.base, 'stats.json' ) ),\n paths\n );\n\n // Because this configuration is shared between multiple projects, we need to clone it\n const sondaOptions = Object.assign( {}, opts );\n\n // Replace the \"[project]\" token with the current project name\n sondaOptions.filename = sondaOptions.filename!.replace( '[project]', project );\n\n processEsbuildMetaFile( metafile, sondaOptions );\n }\n}\n\nfunction loadJson<T extends any = any>( path: string ): T {\n return JSON.parse(\n readFileSync( resolve( process.cwd(), path ), 'utf8' )\n );\n}\n\n/**\n * Output paths in metafile only include file name, without the relative path from the current\n * working directory. For example, in the metafile the output path is \"main-xxx.js\", but in the\n * file system it's \"dist/project/browser/en/main-xxx.js\". 
This function updates the output paths\n * to include the relative path from the current working directory.\n */\nfunction updateMetafile(\n metafile: Metafile,\n paths: Paths\n): Metafile {\n const cwd = process.cwd();\n\n // Clone the original outputs object\n const outputs = Object.assign( {}, metafile.outputs );\n\n // Reset the outputs\n metafile.outputs = {};\n\n for ( const path of readdirSync( paths.base, { encoding: 'utf8', recursive: true } ) ) {\n const absolutePath = resolve( paths.base, path );\n const filename = basename( absolutePath );\n const originalOutput = outputs[ filename ];\n\n // If the output file name exists in the original outputs, add the updated relative path\n if ( originalOutput ) {\n metafile.outputs[ relative( cwd, absolutePath ) ] = originalOutput;\n }\n }\n\n return metafile;\n}\n"],"mappings":";;;;;;AAiBe,SAAS,aAAcA,UAAuC,CAAE,GAAS;CACtF,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,EACJ,SAAS,gBACT,WAAW,CAAE,EACb,GAAG,MACJ,GAAG;AAEJ,MAAK,WAAW;AAChB,MAAK,cAAc,yBAA0B,KAAK,OAAQ;AAG1D,MAAM,KAAK,SAAS,SAAU,YAAa,CACzC,OAAM,IAAI,MAAO;CAGnB,MAAM,gBAAgB,SAAU,OAAQ;CACxC,MAAM,qBAAqB,SAAS,SAAS,WAAW,OAAO,KAAM,cAAc,SAAU;AAE7F,MAAM,MAAM,WAAW,oBAAqB;EAC1C,MAAM,EAAE,YAAY,GAAG,cAAc,SAAU,SAAU,UAAU,MAAM;EACzE,MAAMC,eAAsB,eAAe,WACvC,aACA,EAAE,MAAM,WAAY;AAExB,QAAM,OAAO,QAAS,KAAK,MAAM,KAAM;AACvC,QAAM,UAAU,QAAS,MAAM,MAAM,MAAM,WAAW,UAAW;AACjE,QAAM,SAAS,QAAS,MAAM,MAAM,MAAM,UAAU,SAAU;EAE9D,MAAM,WAAW,eACf,SAAoB,QAAS,MAAM,MAAM,aAAc,CAAE,EACzD,MACD;EAGD,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,KAAM;AAG9C,eAAa,WAAW,aAAa,SAAU,QAAS,aAAa,QAAS;AAE9E,yBAAwB,UAAU,aAAc;CACjD;AACF;AAED,SAAS,SAA+BC,MAAkB;AACxD,QAAO,KAAK,MACV,aAAc,QAAS,QAAQ,KAAK,EAAE,KAAM,EAAE,OAAQ,CACvD;AACF;;;;;;;AAQD,SAAS,eACPC,UACAF,OACU;CACV,MAAM,MAAM,QAAQ,KAAK;CAGzB,MAAM,UAAU,OAAO,OAAQ,CAAE,GAAE,SAAS,QAAS;AAGrD,UAAS,UAAU,CAAE;AAErB,MAAM,MAAM,QAAQ,YAAa,MAAM,MAAM;EAAE,UAAU;EAAQ,WAAW;CAAM,EAAE,EAAG;EACrF,MAAM,eAAe,QAAS,MAAM,MAAM,KAAM;EAChD,MAAM,WAAW,SAAU,aAAc;EACzC,MAAM,iBAAiB,QAAS;AAGhC,MAAK,eACH,UAAS,QAAS,SAAU,KAAK,aAAc,IAAK;
CAEvD;AAED,QAAO;AACR"}
"use strict";
require('../src.cjs');
const require_rollup = require('../rollup.cjs');
//#region src/entrypoints/astro.ts
function SondaAstroPlugin(options = {}) {
return {
name: "sonda/astro",
hooks: { "astro:build:setup"({ vite, target }) {
if (options.enabled === false) return;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaAstroPlugin: The \"filename\" option must include the \"[env]\" token.");
const generateForServer = options.server ?? false;
if (target === "server" && !generateForServer) return;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", target);
vite.plugins ??= [];
vite.plugins.push(require_rollup.SondaRollupPlugin(sondaOptions));
} }
};
}
//#endregion
module.exports = SondaAstroPlugin;
//# sourceMappingURL=astro.cjs.map
{"version":3,"file":"astro.cjs","names":["options: Partial<FrameworkUserOptions>"],"sources":["../../src/entrypoints/astro.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { AstroIntegration } from 'astro';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaAstroPlugin( options: Partial<FrameworkUserOptions> = {} ): AstroIntegration {\n return {\n name: 'sonda/astro',\n hooks: {\n 'astro:build:setup'( { vite, target } ) {\n if ( options.enabled === false ) {\n return;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Nuxt runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaAstroPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n const generateForServer = options.server ?? false;\n\n // Do not generate report for the server build unless explicitly enabled\n if ( target === 'server' && !generateForServer ) {\n return;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', target )\n\n vite.plugins ??= [];\n vite.plugins.push( Sonda( sondaOptions ) );\n }\n }\n };\n}\n"],"mappings":";;;;;AAIe,SAAS,iBAAkBA,UAAyC,CAAE,GAAqB;AACxG,QAAO;EACL,MAAM;EACN,OAAO,EACL,oBAAqB,EAAE,MAAM,QAAQ,EAAG;AACtC,OAAK,QAAQ,YAAY,MACvB;AAGF,WAAQ,WAAW;AACnB,WAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,QAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;GAGnB,MAAM,oBAAoB,QAAQ,UAAU;AAG5C,OAAK,WAAW,aAAa,kBAC3B;GAIF,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,OAAQ;AAEzE,QAAK,YAAY,CAAE;AACnB,QAAK,QAAQ,KAAM,iCAAO,aAAc,CAAE;EAC3C,EACF;CACF;AACF"}
import "../src.mjs";
import { SondaRollupPlugin } from "../rollup.mjs";
//#region src/entrypoints/astro.ts
function SondaAstroPlugin(options = {}) {
return {
name: "sonda/astro",
hooks: { "astro:build:setup"({ vite, target }) {
if (options.enabled === false) return;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaAstroPlugin: The \"filename\" option must include the \"[env]\" token.");
const generateForServer = options.server ?? false;
if (target === "server" && !generateForServer) return;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", target);
vite.plugins ??= [];
vite.plugins.push(SondaRollupPlugin(sondaOptions));
} }
};
}
//#endregion
export { SondaAstroPlugin as default };
//# sourceMappingURL=astro.mjs.map
{"version":3,"file":"astro.mjs","names":["options: Partial<FrameworkUserOptions>"],"sources":["../../src/entrypoints/astro.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { AstroIntegration } from 'astro';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaAstroPlugin( options: Partial<FrameworkUserOptions> = {} ): AstroIntegration {\n return {\n name: 'sonda/astro',\n hooks: {\n 'astro:build:setup'( { vite, target } ) {\n if ( options.enabled === false ) {\n return;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Nuxt runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaAstroPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n const generateForServer = options.server ?? false;\n\n // Do not generate report for the server build unless explicitly enabled\n if ( target === 'server' && !generateForServer ) {\n return;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', target )\n\n vite.plugins ??= [];\n vite.plugins.push( Sonda( sondaOptions ) );\n }\n }\n };\n}\n"],"mappings":";;;;AAIe,SAAS,iBAAkBA,UAAyC,CAAE,GAAqB;AACxG,QAAO;EACL,MAAM;EACN,OAAO,EACL,oBAAqB,EAAE,MAAM,QAAQ,EAAG;AACtC,OAAK,QAAQ,YAAY,MACvB;AAGF,WAAQ,WAAW;AACnB,WAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,QAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;GAGnB,MAAM,oBAAoB,QAAQ,UAAU;AAG5C,OAAK,WAAW,aAAa,kBAC3B;GAIF,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,OAAQ;AAEzE,QAAK,YAAY,CAAE;AACnB,QAAK,QAAQ,KAAM,kBAAO,aAAc,CAAE;EAC3C,EACF;CACF;AACF"}
// CommonJS wrapper for the esbuild entrypoint: marks the module as an
// ES-module interop target, then re-exposes the esbuild bundle's exports.
Object.defineProperty(exports, '__esModule', { value: true });
// Required without binding its exports.
require('../src.cjs');
const require_esbuild = require('../esbuild.cjs');
// Expose the esbuild plugin as the `default` export via a lazy getter.
Object.defineProperty(exports, 'default', {
enumerable: true,
get: function () {
return require_esbuild.SondaEsbuildPlugin;
}
});
// Re-export the metafile helper under its public name.
exports.processEsbuildMetaFile = require_esbuild.processEsbuildMetaFile
import "../src.mjs";
import { SondaEsbuildPlugin, processEsbuildMetaFile } from "../esbuild.mjs";
export { SondaEsbuildPlugin as default, processEsbuildMetaFile };
"use strict";
require('../src.cjs');
const require_webpack = require('../webpack.cjs');
//#region src/entrypoints/next.ts
function SondaNextPlugin(options = {}) {
return function Sonda(nextConfig = {}) {
if (options.enabled === false) return nextConfig;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaNextPlugin: The \"filename\" option must include the \"[env]\" token.");
const generateForServer = options.server ?? false;
return Object.assign({}, nextConfig, { webpack(config, { nextRuntime, isServer }) {
const env = nextRuntime || "client";
if (env === "edge" || isServer && !generateForServer) return config;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", env);
config.plugins.push(new require_webpack.SondaWebpackPlugin(sondaOptions));
return config;
} });
};
}
//#endregion
module.exports = SondaNextPlugin;
//# sourceMappingURL=next.cjs.map
{"version":3,"file":"next.cjs","names":["options: Partial<FrameworkUserOptions>","nextConfig: NextConfig","SondaWebpack"],"sources":["../../src/entrypoints/next.ts"],"sourcesContent":["import SondaWebpack from './webpack';\nimport type { NextConfig } from 'next';\nimport type { FrameworkUserOptions } from '../types.js';\n\nexport default function SondaNextPlugin( options: Partial<FrameworkUserOptions> = {} ) {\n return function Sonda( nextConfig: NextConfig = {} ): NextConfig {\n if ( options.enabled === false ) {\n return nextConfig;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Next.js runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaNextPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n const generateForServer = options.server ?? false;\n\n return Object.assign( {}, nextConfig, {\n webpack( config, { nextRuntime, isServer } ) {\n const env = nextRuntime || 'client';\n\n // Do not generate report for...\n if (\n // ... the `edge` build because none of its files have source maps\n env === 'edge'\n\n // ... 
the server build unless explicitly enabled\n || ( isServer && !generateForServer )\n ) {\n return config;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', env );\n\n // Add the Sonda plugin to the Webpack configuration\n config.plugins.push(\n new SondaWebpack( sondaOptions )\n );\n\n return config;\n }\n } satisfies NextConfig );\n }\n}\n"],"mappings":";;;;;AAIe,SAAS,gBAAiBA,UAAyC,CAAE,GAAG;AACrF,QAAO,SAAS,MAAOC,aAAyB,CAAE,GAAe;AAC/D,MAAK,QAAQ,YAAY,MACvB,QAAO;AAGT,UAAQ,WAAW;AACnB,UAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,OAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;EAGnB,MAAM,oBAAoB,QAAQ,UAAU;AAE5C,SAAO,OAAO,OAAQ,CAAE,GAAE,YAAY,EACpC,QAAS,QAAQ,EAAE,aAAa,UAAU,EAAG;GAC3C,MAAM,MAAM,eAAe;AAG3B,OAEE,QAAQ,UAGH,aAAa,kBAElB,QAAO;GAIT,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,IAAK;AAGtE,UAAO,QAAQ,KACb,IAAIC,mCAAc,cACnB;AAED,UAAO;EACR,EACF,EAAuB;CACzB;AACF"}
import "../src.mjs";
import { SondaWebpackPlugin } from "../webpack.mjs";
//#region src/entrypoints/next.ts
function SondaNextPlugin(options = {}) {
return function Sonda(nextConfig = {}) {
if (options.enabled === false) return nextConfig;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaNextPlugin: The \"filename\" option must include the \"[env]\" token.");
const generateForServer = options.server ?? false;
return Object.assign({}, nextConfig, { webpack(config, { nextRuntime, isServer }) {
const env = nextRuntime || "client";
if (env === "edge" || isServer && !generateForServer) return config;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", env);
config.plugins.push(new SondaWebpackPlugin(sondaOptions));
return config;
} });
};
}
//#endregion
export { SondaNextPlugin as default };
//# sourceMappingURL=next.mjs.map
{"version":3,"file":"next.mjs","names":["options: Partial<FrameworkUserOptions>","nextConfig: NextConfig","SondaWebpack"],"sources":["../../src/entrypoints/next.ts"],"sourcesContent":["import SondaWebpack from './webpack';\nimport type { NextConfig } from 'next';\nimport type { FrameworkUserOptions } from '../types.js';\n\nexport default function SondaNextPlugin( options: Partial<FrameworkUserOptions> = {} ) {\n return function Sonda( nextConfig: NextConfig = {} ): NextConfig {\n if ( options.enabled === false ) {\n return nextConfig;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Next.js runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaNextPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n const generateForServer = options.server ?? false;\n\n return Object.assign( {}, nextConfig, {\n webpack( config, { nextRuntime, isServer } ) {\n const env = nextRuntime || 'client';\n\n // Do not generate report for...\n if (\n // ... the `edge` build because none of its files have source maps\n env === 'edge'\n\n // ... 
the server build unless explicitly enabled\n || ( isServer && !generateForServer )\n ) {\n return config;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', env );\n\n // Add the Sonda plugin to the Webpack configuration\n config.plugins.push(\n new SondaWebpack( sondaOptions )\n );\n\n return config;\n }\n } satisfies NextConfig );\n }\n}\n"],"mappings":";;;;AAIe,SAAS,gBAAiBA,UAAyC,CAAE,GAAG;AACrF,QAAO,SAAS,MAAOC,aAAyB,CAAE,GAAe;AAC/D,MAAK,QAAQ,YAAY,MACvB,QAAO;AAGT,UAAQ,WAAW;AACnB,UAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,OAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;EAGnB,MAAM,oBAAoB,QAAQ,UAAU;AAE5C,SAAO,OAAO,OAAQ,CAAE,GAAE,YAAY,EACpC,QAAS,QAAQ,EAAE,aAAa,UAAU,EAAG;GAC3C,MAAM,MAAM,eAAe;AAG3B,OAEE,QAAQ,UAGH,aAAa,kBAElB,QAAO;GAIT,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,IAAK;AAGtE,UAAO,QAAQ,KACb,IAAIC,mBAAc,cACnB;AAED,UAAO;EACR,EACF,EAAuB;CACzB;AACF"}
"use strict";
require('../src.cjs');
const require_rollup = require('../rollup.cjs');
//#region src/entrypoints/nuxt.ts
function SondaNuxtPlugin(options = {}) {
return function SondaNuxtPlugin$1(_, nuxt) {
if (options.enabled === false) return;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaNuxtPlugin: The \"filename\" option must include the \"[env]\" token.");
nuxt.hook("vite:extendConfig", (config, { isClient, isServer }) => {
const env = isClient ? "client" : "nitro";
const generateForServer = options.server ?? false;
if (isServer && !generateForServer) return;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", env);
config.plugins ??= [];
config.plugins.push(require_rollup.SondaRollupPlugin(sondaOptions));
});
};
}
//#endregion
module.exports = SondaNuxtPlugin;
//# sourceMappingURL=nuxt.cjs.map
{"version":3,"file":"nuxt.cjs","names":["options: Partial<FrameworkUserOptions>","SondaNuxtPlugin","nuxt: Nuxt"],"sources":["../../src/entrypoints/nuxt.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { NuxtModule, Nuxt } from '@nuxt/schema';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaNuxtPlugin( options: Partial<FrameworkUserOptions> = {} ): NuxtModule {\n return function SondaNuxtPlugin( _, nuxt: Nuxt ): void {\n if ( options.enabled === false ) {\n return;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Nuxt runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaNuxtPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n nuxt.hook( 'vite:extendConfig', ( config, { isClient, isServer } ) => {\n const env = isClient ? 'client' : 'nitro';\n const generateForServer = options.server ?? 
false;\n\n // Do not generate report for the server build unless explicitly enabled\n if ( isServer && !generateForServer ) {\n return;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', env )\n\n // Add the Sonda plugin to the Vite configuration\n config.plugins ??= [];\n config.plugins.push( Sonda( sondaOptions ) );\n } )\n }\n}\n"],"mappings":";;;;;AAIe,SAAS,gBAAiBA,UAAyC,CAAE,GAAe;AACjG,QAAO,SAASC,kBAAiB,GAAGC,MAAmB;AACrD,MAAK,QAAQ,YAAY,MACvB;AAGF,UAAQ,WAAW;AACnB,UAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,OAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;AAGnB,OAAK,KAAM,qBAAqB,CAAE,QAAQ,EAAE,UAAU,UAAU,KAAM;GACpE,MAAM,MAAM,WAAW,WAAW;GAClC,MAAM,oBAAoB,QAAQ,UAAU;AAG5C,OAAK,aAAa,kBAChB;GAIF,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,IAAK;AAGtE,UAAO,YAAY,CAAE;AACrB,UAAO,QAAQ,KAAM,iCAAO,aAAc,CAAE;EAC7C,EAAE;CACJ;AACF"}
import "../src.mjs";
import { SondaRollupPlugin } from "../rollup.mjs";
//#region src/entrypoints/nuxt.ts
function SondaNuxtPlugin(options = {}) {
return function SondaNuxtPlugin$1(_, nuxt) {
if (options.enabled === false) return;
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
if (!options.filename.includes("[env]")) throw new Error("SondaNuxtPlugin: The \"filename\" option must include the \"[env]\" token.");
nuxt.hook("vite:extendConfig", (config, { isClient, isServer }) => {
const env = isClient ? "client" : "nitro";
const generateForServer = options.server ?? false;
if (isServer && !generateForServer) return;
const sondaOptions = Object.assign({}, options);
sondaOptions.filename = sondaOptions.filename.replace("[env]", env);
config.plugins ??= [];
config.plugins.push(SondaRollupPlugin(sondaOptions));
});
};
}
//#endregion
export { SondaNuxtPlugin as default };
//# sourceMappingURL=nuxt.mjs.map
{"version":3,"file":"nuxt.mjs","names":["options: Partial<FrameworkUserOptions>","SondaNuxtPlugin","nuxt: Nuxt"],"sources":["../../src/entrypoints/nuxt.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { NuxtModule, Nuxt } from '@nuxt/schema';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaNuxtPlugin( options: Partial<FrameworkUserOptions> = {} ): NuxtModule {\n return function SondaNuxtPlugin( _, nuxt: Nuxt ): void {\n if ( options.enabled === false ) {\n return;\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // Nuxt runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaNuxtPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n nuxt.hook( 'vite:extendConfig', ( config, { isClient, isServer } ) => {\n const env = isClient ? 'client' : 'nitro';\n const generateForServer = options.server ?? 
false;\n\n // Do not generate report for the server build unless explicitly enabled\n if ( isServer && !generateForServer ) {\n return;\n }\n\n // Because this configuration is shared between multiple builds, we need to clone it\n const sondaOptions = Object.assign( {}, options );\n\n // Replace the \"[env]\" token with the current build type\n sondaOptions.filename = sondaOptions.filename!.replace( '[env]', env )\n\n // Add the Sonda plugin to the Vite configuration\n config.plugins ??= [];\n config.plugins.push( Sonda( sondaOptions ) );\n } )\n }\n}\n"],"mappings":";;;;AAIe,SAAS,gBAAiBA,UAAyC,CAAE,GAAe;AACjG,QAAO,SAASC,kBAAiB,GAAGC,MAAmB;AACrD,MAAK,QAAQ,YAAY,MACvB;AAGF,UAAQ,WAAW;AACnB,UAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,OAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;AAGnB,OAAK,KAAM,qBAAqB,CAAE,QAAQ,EAAE,UAAU,UAAU,KAAM;GACpE,MAAM,MAAM,WAAW,WAAW;GAClC,MAAM,oBAAoB,QAAQ,UAAU;AAG5C,OAAK,aAAa,kBAChB;GAIF,MAAM,eAAe,OAAO,OAAQ,CAAE,GAAE,QAAS;AAGjD,gBAAa,WAAW,aAAa,SAAU,QAAS,SAAS,IAAK;AAGtE,UAAO,YAAY,CAAE;AACrB,UAAO,QAAQ,KAAM,kBAAO,aAAc,CAAE;EAC7C,EAAE;CACJ;AACF"}
"use strict";
const require_src = require('../src.cjs');
const path = require_src.__toESM(require("path"));
//#region src/entrypoints/parcel.ts
var Reporter = class {
constructor(opts) {
this[Symbol.for("parcel-plugin-config")] = opts;
}
};
const SondaParcelPlugin = new Reporter({
async loadConfig({ config }) {
const conf = await config.getConfig([
(0, path.resolve)(".sondarc"),
(0, path.resolve)(".sondarc.js"),
(0, path.resolve)("sonda.config.js")
], {});
return conf.contents;
},
async report({ event, options }) {
if (event.type !== "buildSuccess") return;
let inputs = {};
const assets = [];
for (let bundle of event.bundleGraph.getBundles()) {
assets.push(bundle.filePath);
bundle.traverseAssets((asset) => {
const input = {
bytes: asset.stats.size,
format: asset.meta.hasCJSExports ? "cjs" : "esm",
imports: [],
belongsTo: null
};
for (let dep of event.bundleGraph.getDependencies(asset)) {
let resolved = event.bundleGraph.getResolvedAsset(dep, bundle);
if (resolved) input.imports.push(require_src.normalizePath((0, path.relative)(process.cwd(), resolved.filePath)));
}
inputs[require_src.normalizePath((0, path.relative)(process.cwd(), asset.filePath))] = input;
});
}
return require_src.generateReportFromAssets(
assets,
inputs,
// TODO: Use user provided options
{
format: "html",
detailed: true,
sources: true,
gzip: true,
brotli: true,
sourcesPathNormalizer: (path$1) => (0, path.join)(options.projectRoot, path$1)
}
);
}
});
var parcel_default = SondaParcelPlugin;
//#endregion
module.exports = parcel_default;
//# sourceMappingURL=parcel.cjs.map
{"version":3,"file":"parcel.cjs","names":["opts: ReporterOpts","SondaParcelPlugin: Reporter","inputs: JsonReport[ 'inputs' ]","assets: Array<string>","input: ReportInput","path: string","path"],"sources":["../../src/entrypoints/parcel.ts"],"sourcesContent":["import { join, relative, resolve } from 'path';\nimport type { Reporter as ReporterOpts } from '@parcel/types';\nimport {\n generateReportFromAssets,\n normalizePath,\n type JsonReport,\n type ReportInput,\n type UserOptions\n} from '../index.js';\n\nclass Reporter {\n constructor ( opts: ReporterOpts ) {\n // @ts-ignore\n this[ Symbol.for( 'parcel-plugin-config' ) ] = opts;\n }\n}\n\nconst SondaParcelPlugin: Reporter = new Reporter( {\n // @ts-ignore\n async loadConfig( { config } ): Promise<UserOptions> {\n // @ts-ignore\n const conf = await config.getConfig<UserOptions>( [\n resolve( '.sondarc' ),\n resolve( '.sondarc.js' ),\n resolve( 'sonda.config.js' ),\n ], {} );\n\n return conf!.contents;\n },\n async report( { event, options } ) {\n if ( event.type !== 'buildSuccess' ) {\n return;\n }\n\n let inputs: JsonReport[ 'inputs' ] = {};\n const assets: Array<string> = [];\n\n for ( let bundle of event.bundleGraph.getBundles() ) {\n assets.push( bundle.filePath );\n\n // @ts-ignore\n bundle.traverseAssets( asset => {\n const input: ReportInput = {\n bytes: asset.stats.size,\n\n // TODO: What abount 'unknown'?\n format: asset.meta.hasCJSExports ? 
'cjs' : 'esm',\n imports: [],\n belongsTo: null,\n }\n\n for ( let dep of event.bundleGraph.getDependencies( asset ) ) {\n let resolved = event.bundleGraph.getResolvedAsset( dep, bundle )!;\n\n if ( resolved ) {\n input.imports.push( normalizePath( relative( process.cwd(), resolved.filePath ) ) );\n }\n }\n \n inputs[ normalizePath( relative( process.cwd(), asset.filePath ) ) ] = input;\n } );\n }\n\n return generateReportFromAssets(\n assets,\n inputs,\n // TODO: Use user provided options\n {\n format: 'html',\n detailed: true,\n sources: true,\n gzip: true,\n brotli: true,\n // TODO: `resolve` instead of `join`?\n sourcesPathNormalizer: ( path: string ) => join( options.projectRoot, path ),\n }\n );\n }\n} );\n\nexport default SondaParcelPlugin;\n"],"mappings":";;;;;IAUM,WAAN,MAAe;CACb,YAAcA,MAAqB;AAEjC,OAAM,OAAO,IAAK,uBAAwB,IAAK;CAChD;AACF;AAED,MAAMC,oBAA8B,IAAI,SAAU;CAEhD,MAAM,WAAY,EAAE,QAAQ,EAAyB;EAEnD,MAAM,OAAO,MAAM,OAAO,UAAwB;GAChD,kBAAS,WAAY;GACrB,kBAAS,cAAe;GACxB,kBAAS,kBAAmB;EAC7B,GAAE,CAAE,EAAE;AAEP,SAAO,KAAM;CACd;CACD,MAAM,OAAQ,EAAE,OAAO,SAAS,EAAG;AACjC,MAAK,MAAM,SAAS,eAClB;EAGF,IAAIC,SAAiC,CAAE;EACvC,MAAMC,SAAwB,CAAE;AAEhC,OAAM,IAAI,UAAU,MAAM,YAAY,YAAY,EAAG;AACnD,UAAO,KAAM,OAAO,SAAU;AAG9B,UAAO,eAAgB,WAAS;IAC9B,MAAMC,QAAqB;KACzB,OAAO,MAAM,MAAM;KAGnB,QAAQ,MAAM,KAAK,gBAAgB,QAAQ;KAC3C,SAAS,CAAE;KACX,WAAW;IACZ;AAED,SAAM,IAAI,OAAO,MAAM,YAAY,gBAAiB,MAAO,EAAG;KAC5D,IAAI,WAAW,MAAM,YAAY,iBAAkB,KAAK,OAAQ;AAEhE,SAAK,SACH,OAAM,QAAQ,KAAM,0BAAe,mBAAU,QAAQ,KAAK,EAAE,SAAS,SAAU,CAAE,CAAE;IAEtF;AAED,WAAQ,0BAAe,mBAAU,QAAQ,KAAK,EAAE,MAAM,SAAU,CAAE,IAAK;GACxE,EAAE;EACJ;AAED,SAAO;GACL;GACA;;GAEA;IACE,QAAQ;IACR,UAAU;IACV,SAAS;IACT,MAAM;IACN,QAAQ;IAER,uBAAuB,CAAEC,WAAkB,eAAM,QAAQ,aAAaC,OAAM;GAC7E;CACF;CACF;AACF;qBAEc"}
import type { Reporter as ReporterOpts } from '@parcel/types';
/**
 * Wrapper that stores a Parcel reporter configuration object on the instance
 * under the `Symbol.for('parcel-plugin-config')` key used by Parcel's plugin loader.
 */
declare class Reporter {
constructor(opts: ReporterOpts);
}
/** Sonda reporter plugin instance for Parcel. */
declare const SondaParcelPlugin: Reporter;
export default SondaParcelPlugin;
import { generateReportFromAssets, normalizePath } from "../src.mjs";
import { join, relative, resolve } from "path";
//#region src/entrypoints/parcel.ts
var Reporter = class {
constructor(opts) {
this[Symbol.for("parcel-plugin-config")] = opts;
}
};
const SondaParcelPlugin = new Reporter({
async loadConfig({ config }) {
const conf = await config.getConfig([
resolve(".sondarc"),
resolve(".sondarc.js"),
resolve("sonda.config.js")
], {});
return conf.contents;
},
async report({ event, options }) {
if (event.type !== "buildSuccess") return;
let inputs = {};
const assets = [];
for (let bundle of event.bundleGraph.getBundles()) {
assets.push(bundle.filePath);
bundle.traverseAssets((asset) => {
const input = {
bytes: asset.stats.size,
format: asset.meta.hasCJSExports ? "cjs" : "esm",
imports: [],
belongsTo: null
};
for (let dep of event.bundleGraph.getDependencies(asset)) {
let resolved = event.bundleGraph.getResolvedAsset(dep, bundle);
if (resolved) input.imports.push(normalizePath(relative(process.cwd(), resolved.filePath)));
}
inputs[normalizePath(relative(process.cwd(), asset.filePath))] = input;
});
}
return generateReportFromAssets(
assets,
inputs,
// TODO: Use user provided options
{
format: "html",
detailed: true,
sources: true,
gzip: true,
brotli: true,
sourcesPathNormalizer: (path) => join(options.projectRoot, path)
}
);
}
});
var parcel_default = SondaParcelPlugin;
//#endregion
export { parcel_default as default };
//# sourceMappingURL=parcel.mjs.map
{"version":3,"file":"parcel.mjs","names":["opts: ReporterOpts","SondaParcelPlugin: Reporter","inputs: JsonReport[ 'inputs' ]","assets: Array<string>","input: ReportInput","path: string"],"sources":["../../src/entrypoints/parcel.ts"],"sourcesContent":["import { join, relative, resolve } from 'path';\nimport type { Reporter as ReporterOpts } from '@parcel/types';\nimport {\n generateReportFromAssets,\n normalizePath,\n type JsonReport,\n type ReportInput,\n type UserOptions\n} from '../index.js';\n\nclass Reporter {\n constructor ( opts: ReporterOpts ) {\n // @ts-ignore\n this[ Symbol.for( 'parcel-plugin-config' ) ] = opts;\n }\n}\n\nconst SondaParcelPlugin: Reporter = new Reporter( {\n // @ts-ignore\n async loadConfig( { config } ): Promise<UserOptions> {\n // @ts-ignore\n const conf = await config.getConfig<UserOptions>( [\n resolve( '.sondarc' ),\n resolve( '.sondarc.js' ),\n resolve( 'sonda.config.js' ),\n ], {} );\n\n return conf!.contents;\n },\n async report( { event, options } ) {\n if ( event.type !== 'buildSuccess' ) {\n return;\n }\n\n let inputs: JsonReport[ 'inputs' ] = {};\n const assets: Array<string> = [];\n\n for ( let bundle of event.bundleGraph.getBundles() ) {\n assets.push( bundle.filePath );\n\n // @ts-ignore\n bundle.traverseAssets( asset => {\n const input: ReportInput = {\n bytes: asset.stats.size,\n\n // TODO: What abount 'unknown'?\n format: asset.meta.hasCJSExports ? 
'cjs' : 'esm',\n imports: [],\n belongsTo: null,\n }\n\n for ( let dep of event.bundleGraph.getDependencies( asset ) ) {\n let resolved = event.bundleGraph.getResolvedAsset( dep, bundle )!;\n\n if ( resolved ) {\n input.imports.push( normalizePath( relative( process.cwd(), resolved.filePath ) ) );\n }\n }\n \n inputs[ normalizePath( relative( process.cwd(), asset.filePath ) ) ] = input;\n } );\n }\n\n return generateReportFromAssets(\n assets,\n inputs,\n // TODO: Use user provided options\n {\n format: 'html',\n detailed: true,\n sources: true,\n gzip: true,\n brotli: true,\n // TODO: `resolve` instead of `join`?\n sourcesPathNormalizer: ( path: string ) => join( options.projectRoot, path ),\n }\n );\n }\n} );\n\nexport default SondaParcelPlugin;\n"],"mappings":";;;;IAUM,WAAN,MAAe;CACb,YAAcA,MAAqB;AAEjC,OAAM,OAAO,IAAK,uBAAwB,IAAK;CAChD;AACF;AAED,MAAMC,oBAA8B,IAAI,SAAU;CAEhD,MAAM,WAAY,EAAE,QAAQ,EAAyB;EAEnD,MAAM,OAAO,MAAM,OAAO,UAAwB;GAChD,QAAS,WAAY;GACrB,QAAS,cAAe;GACxB,QAAS,kBAAmB;EAC7B,GAAE,CAAE,EAAE;AAEP,SAAO,KAAM;CACd;CACD,MAAM,OAAQ,EAAE,OAAO,SAAS,EAAG;AACjC,MAAK,MAAM,SAAS,eAClB;EAGF,IAAIC,SAAiC,CAAE;EACvC,MAAMC,SAAwB,CAAE;AAEhC,OAAM,IAAI,UAAU,MAAM,YAAY,YAAY,EAAG;AACnD,UAAO,KAAM,OAAO,SAAU;AAG9B,UAAO,eAAgB,WAAS;IAC9B,MAAMC,QAAqB;KACzB,OAAO,MAAM,MAAM;KAGnB,QAAQ,MAAM,KAAK,gBAAgB,QAAQ;KAC3C,SAAS,CAAE;KACX,WAAW;IACZ;AAED,SAAM,IAAI,OAAO,MAAM,YAAY,gBAAiB,MAAO,EAAG;KAC5D,IAAI,WAAW,MAAM,YAAY,iBAAkB,KAAK,OAAQ;AAEhE,SAAK,SACH,OAAM,QAAQ,KAAM,cAAe,SAAU,QAAQ,KAAK,EAAE,SAAS,SAAU,CAAE,CAAE;IAEtF;AAED,WAAQ,cAAe,SAAU,QAAQ,KAAK,EAAE,MAAM,SAAU,CAAE,IAAK;GACxE,EAAE;EACJ;AAED,SAAO;GACL;GACA;;GAEA;IACE,QAAQ;IACR,UAAU;IACV,SAAS;IACT,MAAM;IACN,QAAQ;IAER,uBAAuB,CAAEC,SAAkB,KAAM,QAAQ,aAAa,KAAM;GAC7E;CACF;CACF;AACF;qBAEc"}
require('../src.cjs');
const require_rollup = require('../rollup.cjs');
module.exports = require_rollup.SondaRollupPlugin;
import "../src.mjs";
import { SondaRollupPlugin } from "../rollup.mjs";
export { SondaRollupPlugin as default };
"use strict";
require('../src.cjs');
const require_rollup = require('../rollup.cjs');
//#region src/entrypoints/sveltekit.ts
/**
 * Vite plugin entry for SvelteKit builds. Delegates to the Rollup plugin but
 * produces one report per build environment (client/server).
 */
function SondaSvelteKitPlugin(options = {}) {
// Explicitly disabled: return an inert plugin stub.
if (options.enabled === false) return { name: "sonda" };
options.format ??= "html";
options.filename ??= `sonda-report-[env].${options.format}`;
// SvelteKit runs multiple builds, so each report needs a distinct filename.
if (!options.filename.includes("[env]")) throw new Error("SondaSvelteKitPlugin: The \"filename\" option must include the \"[env]\" token.");
return {
apply: "build",
configResolved(config) {
const env = config.build.ssr ? "server" : "client";
// Server-build reports are opt-in via `options.server`.
const generateForServer = options.server ?? false;
if (env === "server" && !generateForServer) options.enabled = false;
options.filename = options.filename.replace("[env]", env);
},
// Spread in the Rollup plugin hooks; it shares (and sees mutations of) `options`.
...require_rollup.SondaRollupPlugin(options)
};
}
//#endregion
module.exports = SondaSvelteKitPlugin;
//# sourceMappingURL=sveltekit.cjs.map
{"version":3,"file":"sveltekit.cjs","names":["options: Partial<FrameworkUserOptions>"],"sources":["../../src/entrypoints/sveltekit.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { PluginOption } from 'vite';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaSvelteKitPlugin( options: Partial<FrameworkUserOptions> = {} ): PluginOption {\n if ( options.enabled === false ) {\n return { name: 'sonda' };\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // SvelteKit runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaSvelteKitPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n return {\n apply: 'build',\n\n configResolved( config ) {\n const env = config.build.ssr ? 'server' : 'client';\n const generateForServer = options.server ?? false;\n\n if ( env === 'server' && !generateForServer ) {\n options.enabled = false;\n }\n\n options.filename = options.filename!.replace( '[env]', env );\n },\n\n ...Sonda( options )\n };\n}\n"],"mappings":";;;;;AAIe,SAAS,qBAAsBA,UAAyC,CAAE,GAAiB;AACxG,KAAK,QAAQ,YAAY,MACvB,QAAO,EAAE,MAAM,QAAS;AAG1B,SAAQ,WAAW;AACnB,SAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,MAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;AAGnB,QAAO;EACL,OAAO;EAEP,eAAgB,QAAS;GACvB,MAAM,MAAM,OAAO,MAAM,MAAM,WAAW;GAC1C,MAAM,oBAAoB,QAAQ,UAAU;AAE5C,OAAK,QAAQ,aAAa,kBACxB,SAAQ,UAAU;AAGpB,WAAQ,WAAW,QAAQ,SAAU,QAAS,SAAS,IAAK;EAC7D;EAED,GAAG,iCAAO,QAAS;CACpB;AACF"}
import "../src.mjs";
import { SondaRollupPlugin } from "../rollup.mjs";
//#region src/entrypoints/sveltekit.ts
/**
 * Vite plugin entry for SvelteKit builds. Wraps the Rollup plugin and
 * generates a separate report per build environment (client/server).
 */
function SondaSvelteKitPlugin(options = {}) {
  // Explicitly disabled: return an inert plugin stub.
  if (options.enabled === false) {
    return { name: "sonda" };
  }

  options.format ??= "html";
  options.filename ??= `sonda-report-[env].${options.format}`;

  // SvelteKit performs several builds; each must write a distinct report file.
  if (!options.filename.includes("[env]")) {
    throw new Error("SondaSvelteKitPlugin: The \"filename\" option must include the \"[env]\" token.");
  }

  // The Rollup plugin shares this `options` object and observes the
  // mutations performed in `configResolved` below.
  const rollupHooks = SondaRollupPlugin(options);

  return {
    apply: "build",

    configResolved(config) {
      const environment = config.build.ssr ? "server" : "client";
      const wantsServerReport = options.server ?? false;

      // Server-build reports are opt-in via `options.server`.
      if (environment === "server" && !wantsServerReport) {
        options.enabled = false;
      }

      options.filename = options.filename.replace("[env]", environment);
    },

    ...rollupHooks
  };
}
//#endregion
export { SondaSvelteKitPlugin as default };
//# sourceMappingURL=sveltekit.mjs.map
{"version":3,"file":"sveltekit.mjs","names":["options: Partial<FrameworkUserOptions>"],"sources":["../../src/entrypoints/sveltekit.ts"],"sourcesContent":["import Sonda from './rollup';\nimport type { PluginOption } from 'vite';\nimport type { FrameworkUserOptions } from '../types';\n\nexport default function SondaSvelteKitPlugin( options: Partial<FrameworkUserOptions> = {} ): PluginOption {\n if ( options.enabled === false ) {\n return { name: 'sonda' };\n }\n\n options.format ??= 'html';\n options.filename ??= `sonda-report-[env].${ options.format }`;\n\n // SvelteKit runs few builds and each must generate a separate report\n if ( !options.filename.includes( '[env]' ) ) {\n throw new Error( 'SondaSvelteKitPlugin: The \"filename\" option must include the \"[env]\" token.' );\n }\n\n return {\n apply: 'build',\n\n configResolved( config ) {\n const env = config.build.ssr ? 'server' : 'client';\n const generateForServer = options.server ?? false;\n\n if ( env === 'server' && !generateForServer ) {\n options.enabled = false;\n }\n\n options.filename = options.filename!.replace( '[env]', env );\n },\n\n ...Sonda( options )\n };\n}\n"],"mappings":";;;;AAIe,SAAS,qBAAsBA,UAAyC,CAAE,GAAiB;AACxG,KAAK,QAAQ,YAAY,MACvB,QAAO,EAAE,MAAM,QAAS;AAG1B,SAAQ,WAAW;AACnB,SAAQ,cAAc,qBAAsB,QAAQ,OAAQ;AAG5D,MAAM,QAAQ,SAAS,SAAU,QAAS,CACxC,OAAM,IAAI,MAAO;AAGnB,QAAO;EACL,OAAO;EAEP,eAAgB,QAAS;GACvB,MAAM,MAAM,OAAO,MAAM,MAAM,WAAW;GAC1C,MAAM,oBAAoB,QAAQ,UAAU;AAE5C,OAAK,QAAQ,aAAa,kBACxB,SAAQ,UAAU;AAGpB,WAAQ,WAAW,QAAQ,SAAU,QAAS,SAAS,IAAK;EAC7D;EAED,GAAG,kBAAO,QAAS;CACpB;AACF"}
require('../src.cjs');
const require_webpack = require('../webpack.cjs');
module.exports = require_webpack.SondaWebpackPlugin;
import "../src.mjs";
import { SondaWebpackPlugin } from "../webpack.mjs";
export { SondaWebpackPlugin as default };
"use strict";
const require_src = require('./src.cjs');
const path = require_src.__toESM(require("path"));
//#region src/entrypoints/esbuild.ts
/**
 * esbuild plugin: enables metafile generation and produces a Sonda report
 * from the metafile once the build finishes.
 */
function SondaEsbuildPlugin(options = {}) {
return {
name: "sonda",
setup(build) {
if (options.enabled === false) return;
// The report is built entirely from esbuild's metafile.
build.initialOptions.metafile = true;
// esbuild already reads existing source maps, so detailed mode is redundant.
options.detailed = false;
build.onEnd((result) => processEsbuildMetaFile(result.metafile, options));
}
};
}
/**
 * Converts an esbuild metafile into Sonda report inputs and generates the report.
 *
 * @param metafile esbuild `Metafile` produced by the build.
 * @param options  Plugin options forwarded to the report generator.
 */
function processEsbuildMetaFile(metafile, options) {
const cwd = process.cwd();
const inputs = Object.entries(metafile.inputs).reduce((acc, [path$1, data]) => {
acc[path$1] = {
bytes: data.bytes,
format: data.format ?? "unknown",
imports: data.imports.map((data$1) => data$1.path),
belongsTo: null
};
/**
* Because esbuild already reads the existing source maps, there may be
* cases where some report "outputs" include "inputs" that don't exist
* in the main "inputs" object. To avoid this, we parse each esbuild
* input and add its sources to the "inputs" object.
*/
require_src.addSourcesToInputs((0, path.resolve)(cwd, path$1), acc);
return acc;
}, {});
// Output paths in the metafile are cwd-relative; resolve them before reporting.
require_src.generateReportFromAssets(Object.keys(metafile.outputs).map((path$1) => (0, path.resolve)(cwd, path$1)), inputs, options);
}
//#endregion
Object.defineProperty(exports, 'SondaEsbuildPlugin', {
enumerable: true,
get: function () {
return SondaEsbuildPlugin;
}
});
Object.defineProperty(exports, 'processEsbuildMetaFile', {
enumerable: true,
get: function () {
return processEsbuildMetaFile;
}
});
//# sourceMappingURL=esbuild.cjs.map
{"version":3,"file":"esbuild.cjs","names":["options: Partial<UserOptions>","metafile: Metafile","path","data"],"sources":["../src/entrypoints/esbuild.ts"],"sourcesContent":["import { resolve } from 'path';\nimport { generateReportFromAssets, addSourcesToInputs } from '../index.js';\nimport type { Metafile, Plugin } from 'esbuild';\nimport type { JsonReport, UserOptions } from '../types.js';\n\nexport default function SondaEsbuildPlugin( options: Partial<UserOptions> = {} ): Plugin {\n\treturn {\n\t\tname: 'sonda',\n\t\tsetup( build ) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tbuild.initialOptions.metafile = true;\n\n\t\t\t// Esbuild already reads the existing source maps, so there's no need to do it again\n\t\t\toptions.detailed = false;\n\n\t\t\tbuild.onEnd( result => processEsbuildMetaFile( result.metafile!, options ) );\n\t\t}\n\t};\n}\n\nexport function processEsbuildMetaFile( metafile: Metafile, options: Partial<UserOptions> ): void {\n\tconst cwd = process.cwd();\n\tconst inputs = Object\n\t\t.entries( metafile.inputs )\n\t\t.reduce( ( acc, [ path, data ] ) => {\n\t\t\tacc[ path ] = {\n\t\t\t\tbytes: data.bytes,\n\t\t\t\tformat: data.format ?? 'unknown',\n\t\t\t\timports: data.imports.map( data => data.path ),\n\t\t\t\tbelongsTo: null,\n\t\t\t};\n\n\t\t\t/**\n\t\t\t * Because esbuild already reads the existing source maps, there may be\n\t\t\t * cases where some report \"outputs\" include \"inputs\" that don't exist\n\t\t\t * in the main \"inputs\" object. 
To avoid this, we parse each esbuild\n\t\t\t * input and add its sources to the \"inputs\" object.\n\t\t\t */\n\t\t\taddSourcesToInputs(\n\t\t\t\tresolve( cwd, path ),\n\t\t\t\tacc\n\t\t\t);\n\n\t\t\treturn acc;\n\t\t}, {} as JsonReport[ 'inputs' ] );\n\n\tgenerateReportFromAssets(\n\t\tObject.keys( metafile!.outputs ).map( path => resolve( cwd, path ) ),\n\t\tinputs,\n\t\toptions\n\t);\n}\n"],"mappings":";;;;;AAKe,SAAS,mBAAoBA,UAAgC,CAAE,GAAW;AACxF,QAAO;EACN,MAAM;EACN,MAAO,OAAQ;AACd,OAAK,QAAQ,YAAY,MACxB;AAGD,SAAM,eAAe,WAAW;AAGhC,WAAQ,WAAW;AAEnB,SAAM,MAAO,YAAU,uBAAwB,OAAO,UAAW,QAAS,CAAE;EAC5E;CACD;AACD;AAEM,SAAS,uBAAwBC,UAAoBD,SAAsC;CACjG,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,SAAS,OACb,QAAS,SAAS,OAAQ,CAC1B,OAAQ,CAAE,KAAK,CAAEE,QAAM,KAAM,KAAM;AACnC,MAAKA,UAAS;GACb,OAAO,KAAK;GACZ,QAAQ,KAAK,UAAU;GACvB,SAAS,KAAK,QAAQ,IAAK,YAAQC,OAAK,KAAM;GAC9C,WAAW;EACX;;;;;;;AAQD,iCACC,kBAAS,KAAKD,OAAM,EACpB,IACA;AAED,SAAO;CACP,GAAE,CAAE,EAA4B;AAElC,sCACC,OAAO,KAAM,SAAU,QAAS,CAAC,IAAK,YAAQ,kBAAS,KAAKA,OAAM,CAAE,EACpE,QACA,QACA;AACD"}
import { addSourcesToInputs, generateReportFromAssets } from "./src.mjs";
import { resolve } from "path";
//#region src/entrypoints/esbuild.ts
/**
 * esbuild plugin: turns on metafile generation and builds a Sonda report
 * from the metafile when the build ends.
 */
function SondaEsbuildPlugin(options = {}) {
  return {
    name: "sonda",
    setup(build) {
      // Respect an explicit opt-out.
      if (options.enabled === false) {
        return;
      }

      // The report is derived entirely from esbuild's metafile.
      build.initialOptions.metafile = true;

      // esbuild already resolves existing source maps; detailed mode would repeat that work.
      options.detailed = false;

      build.onEnd(function (result) {
        processEsbuildMetaFile(result.metafile, options);
      });
    }
  };
}
/**
 * Converts an esbuild metafile into Sonda report inputs and generates the report.
 *
 * @param metafile esbuild `Metafile` produced by the build.
 * @param options  Plugin options forwarded to the report generator.
 */
function processEsbuildMetaFile(metafile, options) {
  const cwd = process.cwd();
  const inputs = {};

  for (const [inputPath, data] of Object.entries(metafile.inputs)) {
    inputs[inputPath] = {
      bytes: data.bytes,
      format: data.format ?? "unknown",
      imports: data.imports.map((imported) => imported.path),
      belongsTo: null
    };

    /**
     * Because esbuild already reads the existing source maps, there may be
     * cases where some report "outputs" include "inputs" that don't exist
     * in the main "inputs" object. To avoid this, we parse each esbuild
     * input and add its sources to the "inputs" object.
     */
    addSourcesToInputs(resolve(cwd, inputPath), inputs);
  }

  // Metafile output paths are cwd-relative; resolve them before reporting.
  const assets = Object.keys(metafile.outputs).map((outputPath) => resolve(cwd, outputPath));

  generateReportFromAssets(assets, inputs, options);
}
//#endregion
export { SondaEsbuildPlugin, processEsbuildMetaFile };
//# sourceMappingURL=esbuild.mjs.map
{"version":3,"file":"esbuild.mjs","names":["options: Partial<UserOptions>","metafile: Metafile","data"],"sources":["../src/entrypoints/esbuild.ts"],"sourcesContent":["import { resolve } from 'path';\nimport { generateReportFromAssets, addSourcesToInputs } from '../index.js';\nimport type { Metafile, Plugin } from 'esbuild';\nimport type { JsonReport, UserOptions } from '../types.js';\n\nexport default function SondaEsbuildPlugin( options: Partial<UserOptions> = {} ): Plugin {\n\treturn {\n\t\tname: 'sonda',\n\t\tsetup( build ) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tbuild.initialOptions.metafile = true;\n\n\t\t\t// Esbuild already reads the existing source maps, so there's no need to do it again\n\t\t\toptions.detailed = false;\n\n\t\t\tbuild.onEnd( result => processEsbuildMetaFile( result.metafile!, options ) );\n\t\t}\n\t};\n}\n\nexport function processEsbuildMetaFile( metafile: Metafile, options: Partial<UserOptions> ): void {\n\tconst cwd = process.cwd();\n\tconst inputs = Object\n\t\t.entries( metafile.inputs )\n\t\t.reduce( ( acc, [ path, data ] ) => {\n\t\t\tacc[ path ] = {\n\t\t\t\tbytes: data.bytes,\n\t\t\t\tformat: data.format ?? 'unknown',\n\t\t\t\timports: data.imports.map( data => data.path ),\n\t\t\t\tbelongsTo: null,\n\t\t\t};\n\n\t\t\t/**\n\t\t\t * Because esbuild already reads the existing source maps, there may be\n\t\t\t * cases where some report \"outputs\" include \"inputs\" that don't exist\n\t\t\t * in the main \"inputs\" object. 
To avoid this, we parse each esbuild\n\t\t\t * input and add its sources to the \"inputs\" object.\n\t\t\t */\n\t\t\taddSourcesToInputs(\n\t\t\t\tresolve( cwd, path ),\n\t\t\t\tacc\n\t\t\t);\n\n\t\t\treturn acc;\n\t\t}, {} as JsonReport[ 'inputs' ] );\n\n\tgenerateReportFromAssets(\n\t\tObject.keys( metafile!.outputs ).map( path => resolve( cwd, path ) ),\n\t\tinputs,\n\t\toptions\n\t);\n}\n"],"mappings":";;;;AAKe,SAAS,mBAAoBA,UAAgC,CAAE,GAAW;AACxF,QAAO;EACN,MAAM;EACN,MAAO,OAAQ;AACd,OAAK,QAAQ,YAAY,MACxB;AAGD,SAAM,eAAe,WAAW;AAGhC,WAAQ,WAAW;AAEnB,SAAM,MAAO,YAAU,uBAAwB,OAAO,UAAW,QAAS,CAAE;EAC5E;CACD;AACD;AAEM,SAAS,uBAAwBC,UAAoBD,SAAsC;CACjG,MAAM,MAAM,QAAQ,KAAK;CACzB,MAAM,SAAS,OACb,QAAS,SAAS,OAAQ,CAC1B,OAAQ,CAAE,KAAK,CAAE,MAAM,KAAM,KAAM;AACnC,MAAK,QAAS;GACb,OAAO,KAAK;GACZ,QAAQ,KAAK,UAAU;GACvB,SAAS,KAAK,QAAQ,IAAK,YAAQE,OAAK,KAAM;GAC9C,WAAW;EACX;;;;;;;AAQD,qBACC,QAAS,KAAK,KAAM,EACpB,IACA;AAED,SAAO;CACP,GAAE,CAAE,EAA4B;AAElC,0BACC,OAAO,KAAM,SAAU,QAAS,CAAC,IAAK,UAAQ,QAAS,KAAK,KAAM,CAAE,EACpE,QACA,QACA;AACD"}
const require_src = require('./src.cjs');
exports.addSourcesToInputs = require_src.addSourcesToInputs
exports.cjsRegex = require_src.cjsRegex
exports.esmRegex = require_src.esmRegex
exports.generateReportFromAssets = require_src.generateReportFromAssets
exports.jsRegexp = require_src.jsRegexp
exports.normalizePath = require_src.normalizePath
import { addSourcesToInputs, cjsRegex, esmRegex, generateReportFromAssets, jsRegexp, normalizePath } from "./src.mjs";
export { addSourcesToInputs, cjsRegex, esmRegex, generateReportFromAssets, jsRegexp, normalizePath };
import type { JsonReport, ReportInput, PluginOptions } from '../types.js';
/** Builds the JSON report object from the output assets and collected inputs. */
export declare function generateJsonReport(assets: Array<string>, inputs: Record<string, ReportInput>, options: PluginOptions): JsonReport;
/** Renders the report as a self-contained HTML page string. */
export declare function generateHtmlReport(assets: Array<string>, inputs: Record<string, ReportInput>, options: PluginOptions): string;
import type { PluginOptions, JsonReport } from '../types.js';
/** Generates and writes the report file for the given assets and inputs. */
export declare function generateReportFromAssets(assets: string[], inputs: JsonReport['inputs'], pluginOptions: Partial<PluginOptions>): Promise<void>;
"use strict";
const require_src = require('./src.cjs');
const path = require_src.__toESM(require("path"));
//#region src/entrypoints/rollup.ts
/**
 * Rollup plugin: records metadata for every parsed module and generates the
 * Sonda report after the bundle is written to disk.
 */
function SondaRollupPlugin(options = {}) {
// Collected module metadata, keyed by path normalized relative to cwd.
let inputs = {};
return {
name: "sonda",
moduleParsed(module$1) {
if (options.enabled === false) return;
inputs[require_src.normalizePath(module$1.id)] = {
// `code` may be null for externals/virtual modules; count as 0 bytes then.
bytes: module$1.code ? Buffer.byteLength(module$1.code) : 0,
format: getFormat(module$1.id, module$1.meta.commonjs?.isCommonJS),
imports: module$1.importedIds.map((id) => require_src.normalizePath(id)),
belongsTo: null
};
},
writeBundle({ dir, file }, bundle) {
if (options.enabled === false) return;
// `dir` is set for multi-chunk output; otherwise derive it from `file`.
const outputDir = (0, path.resolve)(process.cwd(), dir ?? (0, path.dirname)(file));
const assets = Object.keys(bundle).map((name) => (0, path.resolve)(outputDir, name));
return require_src.generateReportFromAssets(assets, inputs, options);
}
};
}
/**
 * Classifies a module as "cjs", "esm", or "unknown" from the CommonJS flag
 * reported by @rollup/plugin-commonjs and the file extension.
 * NOTE(review): any remaining .js/.ts file falls through to "esm" via
 * `jsRegexp`; `esmRegex` is never consulted here — confirm this is intended.
 */
function getFormat(moduleId, isCommonJS) {
if (isCommonJS === true || require_src.cjsRegex.test(moduleId)) return "cjs";
if (isCommonJS === false || require_src.jsRegexp.test(moduleId)) return "esm";
return "unknown";
}
//#endregion
Object.defineProperty(exports, 'SondaRollupPlugin', {
enumerable: true,
get: function () {
return SondaRollupPlugin;
}
});
//# sourceMappingURL=rollup.cjs.map
{"version":3,"file":"rollup.cjs","names":["options: Partial<UserOptions>","inputs: JsonReport[ 'inputs' ]","module: ModuleInfo","module","bundle: OutputBundle","moduleId: string","isCommonJS: boolean | undefined"],"sources":["../src/entrypoints/rollup.ts"],"sourcesContent":["import { resolve, dirname } from 'path';\nimport {\n\tgenerateReportFromAssets,\n\tcjsRegex,\n\tjsRegexp,\n\tnormalizePath,\n\ttype JsonReport,\n\ttype ModuleFormat,\n\ttype UserOptions\n} from '../index.js';\nimport type { Plugin, ModuleInfo, NormalizedOutputOptions, OutputBundle } from 'rollup';\n\nexport default function SondaRollupPlugin( options: Partial<UserOptions> = {} ): Plugin {\n\tlet inputs: JsonReport[ 'inputs' ] = {};\n\n\treturn {\n\t\tname: 'sonda',\n\n\t\tmoduleParsed( module: ModuleInfo ) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tinputs[ normalizePath( module.id ) ] = {\n\t\t\t\tbytes: module.code ? Buffer.byteLength( module.code ) : 0,\n\t\t\t\tformat: getFormat( module.id, module.meta.commonjs?.isCommonJS ),\n\t\t\t\timports: module.importedIds.map( id => normalizePath( id ) ),\n\t\t\t\tbelongsTo: null,\n\t\t\t};\n\t\t},\n\n\t\twriteBundle(\n\t\t\t{ dir, file }: NormalizedOutputOptions,\n\t\t\tbundle: OutputBundle\n\t\t) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst outputDir = resolve( process.cwd(), dir ?? dirname( file! 
) );\n\t\t\tconst assets = Object.keys( bundle ).map( name => resolve( outputDir, name ) );\n\n\t\t\treturn generateReportFromAssets(\n\t\t\t\tassets,\n\t\t\t\tinputs,\n\t\t\t\toptions\n\t\t\t);\n\t\t}\n\t};\n}\n\nfunction getFormat( moduleId: string, isCommonJS: boolean | undefined ): ModuleFormat {\n\tif ( isCommonJS === true || cjsRegex.test( moduleId ) ) {\n\t\treturn 'cjs';\n\t}\n\n\tif ( isCommonJS === false || jsRegexp.test( moduleId ) ) {\n\t\treturn 'esm';\n\t}\n\n\treturn'unknown';\n}\n"],"mappings":";;;;;AAYe,SAAS,kBAAmBA,UAAgC,CAAE,GAAW;CACvF,IAAIC,SAAiC,CAAE;AAEvC,QAAO;EACN,MAAM;EAEN,aAAcC,UAAqB;AAClC,OAAK,QAAQ,YAAY,MACxB;AAGD,UAAQ,0BAAeC,SAAO,GAAI,IAAK;IACtC,OAAOA,SAAO,OAAO,OAAO,WAAYA,SAAO,KAAM,GAAG;IACxD,QAAQ,UAAWA,SAAO,IAAIA,SAAO,KAAK,UAAU,WAAY;IAChE,SAAS,SAAO,YAAY,IAAK,QAAM,0BAAe,GAAI,CAAE;IAC5D,WAAW;GACX;EACD;EAED,YACC,EAAE,KAAK,MAA+B,EACtCC,QACC;AACD,OAAK,QAAQ,YAAY,MACxB;GAGD,MAAM,YAAY,kBAAS,QAAQ,KAAK,EAAE,OAAO,kBAAS,KAAO,CAAE;GACnE,MAAM,SAAS,OAAO,KAAM,OAAQ,CAAC,IAAK,UAAQ,kBAAS,WAAW,KAAM,CAAE;AAE9E,UAAO,qCACN,QACA,QACA,QACA;EACD;CACD;AACD;AAED,SAAS,UAAWC,UAAkBC,YAAgD;AACrF,KAAK,eAAe,QAAQ,qBAAS,KAAM,SAAU,CACpD,QAAO;AAGR,KAAK,eAAe,SAAS,qBAAS,KAAM,SAAU,CACrD,QAAO;AAGR,QAAM;AACN"}
import { cjsRegex, generateReportFromAssets, jsRegexp, normalizePath } from "./src.mjs";
import { dirname, resolve } from "path";
//#region src/entrypoints/rollup.ts
/**
 * Rollup plugin: records metadata for every parsed module and generates the
 * Sonda report once the bundle has been written to disk.
 */
function SondaRollupPlugin(options = {}) {
  // Collected module metadata, keyed by path normalized relative to cwd.
  let inputs = {};

  return {
    name: "sonda",

    moduleParsed(module) {
      if (options.enabled === false) {
        return;
      }

      const importedPaths = module.importedIds.map((id) => normalizePath(id));
      // `code` may be null for externals/virtual modules; count them as 0 bytes.
      const moduleBytes = module.code ? Buffer.byteLength(module.code) : 0;

      inputs[normalizePath(module.id)] = {
        bytes: moduleBytes,
        format: getFormat(module.id, module.meta.commonjs?.isCommonJS),
        imports: importedPaths,
        belongsTo: null
      };
    },

    writeBundle({ dir, file }, bundle) {
      if (options.enabled === false) {
        return;
      }

      // `dir` is set for multi-chunk output; otherwise derive it from `file`.
      const outputDir = resolve(process.cwd(), dir ?? dirname(file));
      const assets = Object.keys(bundle).map((name) => resolve(outputDir, name));

      return generateReportFromAssets(assets, inputs, options);
    }
  };
}
/**
 * Classifies a module as "cjs", "esm", or "unknown" from the CommonJS flag
 * reported by @rollup/plugin-commonjs and the file extension.
 */
function getFormat(moduleId, isCommonJS) {
  // An explicit CommonJS flag or a .cjs/.cts extension wins first.
  if (isCommonJS === true || cjsRegex.test(moduleId)) {
    return "cjs";
  }

  // Explicitly not CommonJS, or any remaining JS/TS file, counts as ESM.
  if (isCommonJS === false || jsRegexp.test(moduleId)) {
    return "esm";
  }

  return "unknown";
}
//#endregion
export { SondaRollupPlugin };
//# sourceMappingURL=rollup.mjs.map
{"version":3,"file":"rollup.mjs","names":["options: Partial<UserOptions>","inputs: JsonReport[ 'inputs' ]","module: ModuleInfo","bundle: OutputBundle","moduleId: string","isCommonJS: boolean | undefined"],"sources":["../src/entrypoints/rollup.ts"],"sourcesContent":["import { resolve, dirname } from 'path';\nimport {\n\tgenerateReportFromAssets,\n\tcjsRegex,\n\tjsRegexp,\n\tnormalizePath,\n\ttype JsonReport,\n\ttype ModuleFormat,\n\ttype UserOptions\n} from '../index.js';\nimport type { Plugin, ModuleInfo, NormalizedOutputOptions, OutputBundle } from 'rollup';\n\nexport default function SondaRollupPlugin( options: Partial<UserOptions> = {} ): Plugin {\n\tlet inputs: JsonReport[ 'inputs' ] = {};\n\n\treturn {\n\t\tname: 'sonda',\n\n\t\tmoduleParsed( module: ModuleInfo ) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tinputs[ normalizePath( module.id ) ] = {\n\t\t\t\tbytes: module.code ? Buffer.byteLength( module.code ) : 0,\n\t\t\t\tformat: getFormat( module.id, module.meta.commonjs?.isCommonJS ),\n\t\t\t\timports: module.importedIds.map( id => normalizePath( id ) ),\n\t\t\t\tbelongsTo: null,\n\t\t\t};\n\t\t},\n\n\t\twriteBundle(\n\t\t\t{ dir, file }: NormalizedOutputOptions,\n\t\t\tbundle: OutputBundle\n\t\t) {\n\t\t\tif ( options.enabled === false ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst outputDir = resolve( process.cwd(), dir ?? dirname( file! 
) );\n\t\t\tconst assets = Object.keys( bundle ).map( name => resolve( outputDir, name ) );\n\n\t\t\treturn generateReportFromAssets(\n\t\t\t\tassets,\n\t\t\t\tinputs,\n\t\t\t\toptions\n\t\t\t);\n\t\t}\n\t};\n}\n\nfunction getFormat( moduleId: string, isCommonJS: boolean | undefined ): ModuleFormat {\n\tif ( isCommonJS === true || cjsRegex.test( moduleId ) ) {\n\t\treturn 'cjs';\n\t}\n\n\tif ( isCommonJS === false || jsRegexp.test( moduleId ) ) {\n\t\treturn 'esm';\n\t}\n\n\treturn'unknown';\n}\n"],"mappings":";;;;AAYe,SAAS,kBAAmBA,UAAgC,CAAE,GAAW;CACvF,IAAIC,SAAiC,CAAE;AAEvC,QAAO;EACN,MAAM;EAEN,aAAcC,QAAqB;AAClC,OAAK,QAAQ,YAAY,MACxB;AAGD,UAAQ,cAAe,OAAO,GAAI,IAAK;IACtC,OAAO,OAAO,OAAO,OAAO,WAAY,OAAO,KAAM,GAAG;IACxD,QAAQ,UAAW,OAAO,IAAI,OAAO,KAAK,UAAU,WAAY;IAChE,SAAS,OAAO,YAAY,IAAK,QAAM,cAAe,GAAI,CAAE;IAC5D,WAAW;GACX;EACD;EAED,YACC,EAAE,KAAK,MAA+B,EACtCC,QACC;AACD,OAAK,QAAQ,YAAY,MACxB;GAGD,MAAM,YAAY,QAAS,QAAQ,KAAK,EAAE,OAAO,QAAS,KAAO,CAAE;GACnE,MAAM,SAAS,OAAO,KAAM,OAAQ,CAAC,IAAK,UAAQ,QAAS,WAAW,KAAM,CAAE;AAE9E,UAAO,yBACN,QACA,QACA,QACA;EACD;CACD;AACD;AAED,SAAS,UAAWC,UAAkBC,YAAgD;AACrF,KAAK,eAAe,QAAQ,SAAS,KAAM,SAAU,CACpD,QAAO;AAGR,KAAK,eAAe,SAAS,SAAS,KAAM,SAAU,CACrD,QAAO;AAGR,QAAM;AACN"}
import type { DecodedSourceMap } from '@ampproject/remapping';
import type { PluginOptions, Sizes } from '../types.js';
/** Attributes each byte of `code` to its originating source via the decoded map. */
export declare function getBytesPerSource(code: string, map: DecodedSourceMap, assetSizes: Sizes, options: PluginOptions): Map<string, Sizes>;
/** Computes the uncompressed (and optionally gzip/brotli) sizes of `code`. */
export declare function getSizes(code: string, options: PluginOptions): Sizes;
import { type DecodedSourceMap, type EncodedSourceMap } from '@ampproject/remapping';
import type { CodeMap, ReportInput } from '../types.js';
/** Recursively remaps `map` through the source maps of its own sources. */
export declare function mapSourceMap(map: EncodedSourceMap, dirPath: string, inputs: Record<string, ReportInput>): DecodedSourceMap;
/**
 * Loads the source map of a given file and adds its "sources" to the given inputs object.
 */
export declare function addSourcesToInputs(path: string, inputs: Record<string, ReportInput>): CodeMap | null;
"use strict";
//#region rolldown:runtime
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Copies all own properties of `from` onto `to` as live getters, skipping
// `except` and anything `to` already defines. Part of the generated
// rolldown CJS/ESM interop runtime.
var __copyProps = (to, from, except, desc) => {
if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
key = keys[i];
if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
// Live binding: re-reads `from[key]` on every access.
get: ((k) => from[k]).bind(null, key),
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
});
}
return to;
};
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
value: mod,
enumerable: true
}) : target, mod));
//#endregion
const path = __toESM(require("path"));
const fs = __toESM(require("fs"));
const url = __toESM(require("url"));
const __jridgewell_sourcemap_codec = __toESM(require("@jridgewell/sourcemap-codec"));
const __ampproject_remapping = __toESM(require("@ampproject/remapping"));
const zlib = __toESM(require("zlib"));
//#region ../load-source-map/dist/index.mjs
/**
* Strip any JSON XSSI avoidance prefix from the string (as documented in the source maps specification),
* and parses the string as JSON.
*
* https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164
*/
/**
 * Strips the optional ")]}'" XSSI-avoidance prefix line (as documented in the
 * source maps specification) and parses the remainder as JSON.
 */
function parseSourceMapInput(str) {
  const sanitized = str.replace(/^\)]}'[^\n]*\n/, "");
  return JSON.parse(sanitized);
}
/**
sourceMappingURL=data:application/json;charset=utf-8;base64,data
sourceMappingURL=data:application/json;base64,data
sourceMappingURL=data:application/json;uri,data
sourceMappingURL=map-file-comment.css.map
sourceMappingURL=map-file-comment.css.map?query=value
*/
const sourceMappingRegExp = /[@#]\s*sourceMappingURL=(\S+)\b/g;
/**
* Checks if the given path is a file.
*/
/**
 * Checks if the given path exists and is a regular file.
 */
function isFile(candidatePath) {
  // statSync throws for missing/inaccessible paths; treat any failure as "not a file".
  try {
    const stats = (0, fs.statSync)(candidatePath);
    return stats.isFile();
  } catch {
    return false;
  }
}
/**
 * Reads a generated file and, when it carries a `sourceMappingURL` comment,
 * loads and normalizes its source map.
 *
 * @param codePath              Path of the generated file.
 * @param sourcesPathNormalizer Optional mapper applied to each `sources` entry;
 *                              defaults to resolving relative to the map file,
 *                              honoring `sourceRoot`.
 * @returns `null` if the path is not a file; `{ code }` when no map could be
 *          loaded; otherwise `{ code, map }` with normalized `sources` and
 *          populated `sourcesContent`.
 */
function loadCodeAndMap(codePath, sourcesPathNormalizer) {
if (!isFile(codePath)) return null;
const code = (0, fs.readFileSync)(codePath, "utf-8");
// Use the LAST sourceMappingURL comment in the file; the cheap includes()
// check avoids running the regex over files without a map reference.
const extractedComment = code.includes("sourceMappingURL") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);
if (!extractedComment || !extractedComment.length) return { code };
const maybeMap = loadMap(codePath, extractedComment[1]);
if (!maybeMap) return { code };
const { map, mapPath } = maybeMap;
const mapDir = (0, path.dirname)(mapPath);
// Default normalizer: resolve relative sources against the map's directory and sourceRoot.
sourcesPathNormalizer ??= (path$1) => (0, path.isAbsolute)(path$1) ? path$1 : (0, path.resolve)(mapDir, map.sourceRoot ?? ".", path$1);
map.sources = normalizeSourcesPaths(map, sourcesPathNormalizer);
map.sourcesContent = loadMissingSourcesContent(map);
// `sourceRoot` is already folded into the normalized paths above.
delete map.sourceRoot;
return {
code,
map
};
}
/**
 * Resolves a `sourceMappingURL` value to a parsed source map.
 * Supports inline `data:` URLs and external `.map` files next to the code.
 *
 * @returns `{ map, mapPath }`, or `null` when the referenced map file is missing.
 */
function loadMap(codePath, sourceMappingURL) {
if (sourceMappingURL.startsWith("data:")) {
const map = parseDataUrl(sourceMappingURL);
return {
map: parseSourceMapInput(map),
// Inline maps have no file of their own; attribute them to the code file.
mapPath: codePath
};
}
// Parse via URL to strip query strings / fragments from the referenced filename.
const sourceMapFilename = new URL(sourceMappingURL, "file://").pathname;
const mapPath = (0, path.join)((0, path.dirname)(codePath), sourceMapFilename);
if (!(0, fs.existsSync)(mapPath)) return null;
return {
map: parseSourceMapInput((0, fs.readFileSync)(mapPath, "utf-8")),
mapPath
};
}
/**
 * Decodes the payload of an inline source-map data URL
 * ("data:<mediatype>[;<encoding>],<payload>").
 *
 * @throws {Error} When the encoding is neither base64 nor uri.
 */
function parseDataUrl(dataUrl) {
  const [prefix, payload] = dataUrl.split(",");
  const encoding = prefix.split(";").at(-1);

  if (encoding === "base64") {
    return Buffer.from(payload, "base64").toString();
  }

  if (encoding === "uri") {
    return decodeURIComponent(payload);
  }

  throw new Error("Unsupported source map encoding: " + encoding);
}
/**
* Normalize the paths of the sources in the source map to be absolute paths.
*/
/**
 * Applies the path normalizer to every entry of the map's `sources` array.
 * Falsy entries (unknown sources) are passed through as `null`.
 */
function normalizeSourcesPaths(map, sourcesPathNormalizer) {
  return map.sources.map((source) => {
    if (!source) {
      return null;
    }
    return sourcesPathNormalizer(source);
  });
}
/**
* Loop through the sources and try to load missing `sourcesContent` from the file system.
*/
/**
 * Loops through the sources and tries to load missing `sourcesContent`
 * entries from the file system, leaving unresolvable ones as `null`.
 */
function loadMissingSourcesContent(map) {
  return map.sources.map((source, index) => {
    const embedded = map.sourcesContent?.[index];

    // Keep content already embedded in the map.
    if (embedded) {
      return embedded;
    }

    // Otherwise try reading the source file from disk.
    if (source && (0, fs.existsSync)(source)) {
      return (0, fs.readFileSync)(source, "utf-8");
    }

    return null;
  });
}
//#endregion
//#region src/utils.ts
const esmRegex = /\.m[tj]sx?$/;
const cjsRegex = /\.c[tj]sx?$/;
const jsRegexp = /\.[cm]?[tj]s[x]?$/;
/**
 * Merges user options with defaults. The report format defaults to the
 * extension of `filename` (when given), falling back to "html"; the final
 * `filename` is then resolved to an absolute path with a matching extension.
 */
function normalizeOptions(options) {
const format$1 = options?.format || options?.filename?.split(".").at(-1) || "html";
const defaultOptions = {
enabled: true,
format: format$1,
filename: "sonda-report." + format$1,
open: true,
detailed: false,
sources: false,
gzip: false,
brotli: false,
sourcesPathNormalizer: null
};
// User-supplied values override the defaults; the original object is not mutated.
const normalizedOptions = Object.assign({}, defaultOptions, options);
normalizedOptions.filename = normalizeOutputPath(normalizedOptions);
return normalizedOptions;
}
/**
 * Normalizes a module path into a platform-independent report key:
 * strips the "\0" virtual-module prefix, makes the path relative to the
 * current working directory, and forces POSIX separators.
 */
function normalizePath(pathToNormalize) {
  const withoutVirtualPrefix = pathToNormalize.replace(/^\0/, "");
  const relativePath = (0, path.relative)(process.cwd(), withoutVirtualPrefix);
  // split/join is behaviorally identical to replaceAll for separators.
  return relativePath.split(path.win32.sep).join(path.posix.sep);
}
function normalizeOutputPath(options) {
let path$1 = options.filename;
const expectedExtension = "." + options.format;
if (!(0, path.isAbsolute)(path$1)) path$1 = (0, path.join)(process.cwd(), path$1);
if (expectedExtension !== (0, path.extname)(path$1)) {
console.warn("\x1B[0;33m" + `Sonda: The file extension specified in the 'filename' does not match the 'format' option. ` + `The extension will be changed to '${expectedExtension}'.`);
path$1 = (0, path.format)({
...(0, path.parse)(path$1),
base: "",
ext: expectedExtension
});
}
return path$1;
}
//#endregion
//#region src/sourcemap/map.ts
/**
 * Recursively remaps an asset's source map through the maps of its own
 * sources (via @ampproject/remapping), registering every discovered source
 * in `inputs`. Returns a map with decoded mappings.
 */
function mapSourceMap(map, dirPath, inputs) {
// remapping may request the same file multiple times; only load each once.
const alreadyRemapped = new Set();
const remapped = (0, __ampproject_remapping.default)(map, (file, ctx) => {
if (alreadyRemapped.has(file)) return;
alreadyRemapped.add(file);
const codeMap = addSourcesToInputs((0, path.resolve)(dirPath, file), inputs);
if (!codeMap) return;
// Provide the file's content when the parent map did not embed it.
ctx.content ??= codeMap.code;
return codeMap.map;
}, { decodedMappings: true });
return remapped;
}
/**
 * Loads the source map of the given file (if any) and registers each of its
 * "sources" in `inputs`, attributing them to the parent file via `belongsTo`.
 *
 * @returns The loaded code/map pair, or `null` when the file cannot be read.
 */
function addSourcesToInputs(path$1, inputs) {
  const codeMap = loadCodeAndMap(path$1);
  if (!codeMap) return null;
  const parentPath = normalizePath(path$1);
  // Inherit the parent's module format; "unknown" if the parent was never registered.
  const format$1 = inputs[parentPath]?.format ?? "unknown";
  // Fix: iterate the full `sources` array so `index` stays aligned with
  // `sourcesContent`. The previous `filter().forEach()` shifted indices
  // whenever a source entry was null, attributing the wrong content/bytes.
  codeMap.map?.sources.forEach((source, index) => {
    if (source === null) return;
    const normalizedPath = normalizePath(source);
    // A source that is the file itself adds no new information.
    if (parentPath === normalizedPath) return;
    inputs[normalizedPath] = {
      bytes: Buffer.byteLength(codeMap.map.sourcesContent?.[index] ?? ""),
      format: format$1,
      imports: [],
      belongsTo: parentPath
    };
  });
  return codeMap;
}
//#endregion
//#region src/sourcemap/bytes.ts
const UNASSIGNED = "[unassigned]";
/**
 * Attributes each byte of `code` to the source it was generated from, using
 * the decoded mappings, then estimates per-source compressed sizes.
 *
 * @param code       Generated asset content.
 * @param map        Decoded source map of the asset.
 * @param assetSizes Sizes of the asset as a whole (used to scale estimates).
 * @param options    Controls whether gzip/brotli sizes are computed.
 * @returns Map of source path (or "[unassigned]") to its sizes.
 */
function getBytesPerSource(code, map, assetSizes, options) {
const contributions = getContributions(map.sources);
// Split keeping line terminators so byte counts stay exact.
const codeLines = code.split(/(?<=\r?\n)/);
for (let lineIndex = 0; lineIndex < codeLines.length; lineIndex++) {
const lineCode = codeLines[lineIndex];
const mappings = map.mappings[lineIndex] || [];
let currentColumn = 0;
// Iterate one slot past the end so the tail of the line is attributed too.
for (let i = 0; i <= mappings.length; i++) {
const mapping = mappings[i];
const startColumn = mapping?.[0] ?? lineCode.length;
const endColumn = mappings[i + 1]?.[0] ?? lineCode.length;
// Bytes between mapped segments belong to no source.
if (startColumn > currentColumn) contributions.set(UNASSIGNED, contributions.get(UNASSIGNED) + lineCode.slice(currentColumn, startColumn));
if (mapping) {
const sourceIndex = mapping?.[1];
const codeSlice = lineCode.slice(startColumn, endColumn);
const source = sourceIndex !== undefined && map.sources[sourceIndex] || UNASSIGNED;
contributions.set(source, contributions.get(source) + codeSlice);
currentColumn = endColumn;
} else currentColumn = startColumn;
}
}
const sourceSizes = new Map();
const contributionsSum = {
uncompressed: 0,
gzip: 0,
brotli: 0
};
// Compress each source's contribution individually; the totals are later
// scaled against the real asset sizes in adjustSizes().
for (const [source, codeSegment] of contributions) {
const sizes = getSizes(codeSegment, options);
contributionsSum.uncompressed += sizes.uncompressed;
contributionsSum.gzip += sizes.gzip;
contributionsSum.brotli += sizes.brotli;
sourceSizes.set(source, sizes);
}
return adjustSizes(sourceSizes, assetSizes, contributionsSum, options);
}
/**
 * Computes the size of `code` in bytes: always the uncompressed size, and the
 * gzip/brotli sizes only when the corresponding option is enabled (compression
 * is comparatively costly, so it is opt-in).
 */
function getSizes(code, options) {
  const uncompressed = Buffer.byteLength(code);
  const gzip = options.gzip ? (0, zlib.gzipSync)(code).length : 0;
  const brotli = options.brotli ? (0, zlib.brotliCompressSync)(code).length : 0;

  return { uncompressed, gzip, brotli };
}
/**
 * Builds the accumulator map used by getBytesPerSource: one empty string per
 * real source, plus a bucket for generated code that maps back to no source.
 */
function getContributions(sources) {
  const contributions = new Map();

  for (const source of sources) {
    if (source !== null) {
      contributions.set(source, "");
    }
  }

  contributions.set(UNASSIGNED, "");
  return contributions;
}
/**
* Compression efficiency improves with the size of the file.
*
* However, what we have is the compressed size of the entire bundle (`actual`),
* the sum of all files compressed individually (`sum`) and the compressed
* size of a given file (`content`). The last value is essentially a “worst-case”
* scenario, and the actual size of the file in the bundle is likely to be smaller.
*
* We use this information to estimate the actual size of the file in the bundle
* after compression.
*/
/**
 * Scales per-source compressed sizes by the ratio between the asset
 * compressed as a whole (`asset`) and the sum of its individually compressed
 * parts (`sums`), approximating each file's real share of the bundle.
 */
function adjustSizes(sources, asset, sums, options) {
  const gzipDelta = options.gzip ? asset.gzip / sums.gzip : 0;
  const brotliDelta = options.brotli ? asset.brotli / sums.brotli : 0;

  for (const [source, sizes] of sources) {
    const gzip = options.gzip ? Math.round(sizes.gzip * gzipDelta) : 0;
    const brotli = options.brotli ? Math.round(sizes.brotli * brotliDelta) : 0;

    sources.set(source, { uncompressed: sizes.uncompressed, gzip, brotli });
  }

  return sources;
}
//#endregion
//#region src/report/formats.ts
/**
 * Builds the JSON report: keeps only JS/CSS assets, turns each one into an
 * output entry, and returns the inputs and outputs with sorted keys.
 */
function generateJsonReport(assets, inputs, options) {
	const acceptedExtensions = new Set([
		".js",
		".mjs",
		".cjs",
		".css"
	]);
	const outputs = {};
	for (const asset of assets) {
		if (!acceptedExtensions.has((0, path.extname)(asset))) continue;
		const data = processAsset(asset, inputs, options);
		if (data) outputs[normalizePath(asset)] = data;
	}
	return {
		inputs: sortObjectKeys(inputs),
		outputs: sortObjectKeys(outputs)
	};
}
/**
 * Renders the HTML report by embedding the URI-encoded JSON report data
 * into the bundled `index.html` template shipped next to this file.
 */
function generateHtmlReport(assets, inputs, options) {
const json = generateJsonReport(assets, inputs, options);
// Bundler-generated CommonJS shim for `import.meta.url`: resolves to the directory of this file.
const __dirname$1 = (0, path.dirname)((0, url.fileURLToPath)(require("url").pathToFileURL(__filename).href));
const template = (0, fs.readFileSync)((0, path.resolve)(__dirname$1, "./index.html"), "utf-8");
// URI-encode so the JSON payload can sit safely inside the HTML document.
return template.replace("__REPORT_DATA__", encodeURIComponent(JSON.stringify(json)));
}
/**
 * Processes a single output asset: loads its code and source map, decodes
 * (or, in detailed mode, deeply remaps) the mappings, attributes output
 * bytes to their sources, and returns the report entry for the asset.
 * Returns undefined when the asset has no usable code/map pair.
 */
function processAsset(asset, inputs, options) {
	const maybeCodeMap = loadCodeAndMap(asset, options.sourcesPathNormalizer);
	if (!hasCodeAndMap(maybeCodeMap)) return;
	const { code, map } = maybeCodeMap;
	let mapped;
	if (options.detailed) mapped = mapSourceMap(map, (0, path.dirname)(asset), inputs);
	else mapped = {
		...map,
		mappings: (0, __jridgewell_sourcemap_codec.decode)(map.mappings)
	};
	mapped.sources = mapped.sources.map((source) => source && normalizePath(source));
	const assetSizes = getSizes(code, options);
	const outputInputs = {};
	for (const [source, sizes] of getBytesPerSource(code, mapped, assetSizes, options)) {
		outputInputs[normalizePath(source)] = sizes;
	}
	return {
		...assetSizes,
		inputs: sortObjectKeys(outputInputs),
		// The decoded map is only embedded when the user opted into `sources`.
		map: options.sources ? {
			version: 3,
			names: [],
			mappings: mapped.mappings,
			sources: mapped.sources,
			sourcesContent: mapped.sourcesContent
		} : undefined
	};
}
/**
 * Guard: true only when loading produced both the code and its source map.
 */
function hasCodeAndMap(result) {
	if (!result) return false;
	return Boolean(result.code) && Boolean(result.map);
}
/**
 * Returns a shallow copy of the object whose keys are in ascending order.
 */
function sortObjectKeys(object) {
	const sortedKeys = Object.keys(object).sort();
	return Object.fromEntries(sortedKeys.map((key) => [key, object[key]]));
}
//#endregion
//#region src/report/generate.ts
/**
 * Generates the report for the given assets/inputs, writes it to the
 * configured filename (creating the directory if needed), and optionally
 * opens it in the default program for the file extension.
 */
async function generateReportFromAssets(assets, inputs, pluginOptions) {
	const options = normalizeOptions(pluginOptions);
	const serialize = options.format === "html" ? saveHtml : saveJson;
	const report = serialize(assets, inputs, options);
	// Ensure the output directory exists before writing the report.
	const outputDirectory = (0, path.dirname)(options.filename);
	if (!(0, fs.existsSync)(outputDirectory)) (0, fs.mkdirSync)(outputDirectory, { recursive: true });
	(0, fs.writeFileSync)(options.filename, report);
	if (!options.open) return;
	/**
	 * `open` is ESM-only package, so we need to import it
	 * dynamically to make it work in CommonJS environment.
	 */
	const { default: open } = await import("open");
	open(options.filename);
}
/** Serializes the report in HTML format. */
function saveHtml(assets, inputs, options) {
return generateHtmlReport(assets, inputs, options);
}
/** Serializes the report as pretty-printed (2-space indented) JSON. */
function saveJson(assets, inputs, options) {
	return JSON.stringify(generateJsonReport(assets, inputs, options), null, 2);
}
//#endregion
// CommonJS export glue generated by the bundler: each public symbol is
// exposed through an enumerable getter so consumers observe live bindings.
Object.defineProperty(exports, '__toESM', {
enumerable: true,
get: function () {
return __toESM;
}
});
Object.defineProperty(exports, 'addSourcesToInputs', {
enumerable: true,
get: function () {
return addSourcesToInputs;
}
});
Object.defineProperty(exports, 'cjsRegex', {
enumerable: true,
get: function () {
return cjsRegex;
}
});
Object.defineProperty(exports, 'esmRegex', {
enumerable: true,
get: function () {
return esmRegex;
}
});
Object.defineProperty(exports, 'generateReportFromAssets', {
enumerable: true,
get: function () {
return generateReportFromAssets;
}
});
Object.defineProperty(exports, 'jsRegexp', {
enumerable: true,
get: function () {
return jsRegexp;
}
});
Object.defineProperty(exports, 'normalizePath', {
enumerable: true,
get: function () {
return normalizePath;
}
});
//# sourceMappingURL=src.cjs.map
{"version":3,"file":"src.cjs","names":["path","url","esmRegex: RegExp","cjsRegex: RegExp","jsRegexp: RegExp","options?: Partial<PluginOptions>","format","defaultOptions: PluginOptions","pathToNormalize: string","win32","posix","options: PluginOptions","path","map: EncodedSourceMap","dirPath: string","inputs: Record<string, ReportInput>","path: string","path","format","code: string","map: DecodedSourceMap","assetSizes: Sizes","options: PluginOptions","mapping: SourceMapSegment | undefined","contributionsSum: Sizes","sources: Array<string | null>","sources: Map<string, Sizes>","asset: Sizes","sums: Sizes","assets: Array<string>","inputs: Record<string, ReportInput>","options: PluginOptions","__dirname","asset: string","result: MaybeCodeMap","object: Record<string, T>","assets: string[]","inputs: JsonReport[ 'inputs' ]","pluginOptions: Partial<PluginOptions>","options: PluginOptions"],"sources":["../../load-source-map/dist/index.mjs","../src/utils.ts","../src/sourcemap/map.ts","../src/sourcemap/bytes.ts","../src/report/formats.ts","../src/report/generate.ts"],"sourcesContent":["import { existsSync, readFileSync, statSync } from \"fs\";\nimport { dirname, isAbsolute, join, resolve } from \"path\";\n\n//#region src/index.ts\n/**\n* Strip any JSON XSSI avoidance prefix from the string (as documented in the source maps specification),\n* and parses the string as JSON.\n*\n* https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164\n*/\nfunction parseSourceMapInput(str) {\n\treturn JSON.parse(str.replace(/^\\)]}'[^\\n]*\\n/, \"\"));\n}\n/**\nsourceMappingURL=data:application/json;charset=utf-8;base64,data\nsourceMappingURL=data:application/json;base64,data\nsourceMappingURL=data:application/json;uri,data\nsourceMappingURL=map-file-comment.css.map\nsourceMappingURL=map-file-comment.css.map?query=value\n*/\nconst sourceMappingRegExp = /[@#]\\s*sourceMappingURL=(\\S+)\\b/g;\n/**\n* Checks if the given path is a 
file.\n*/\nfunction isFile(path) {\n\ttry {\n\t\treturn statSync(path).isFile();\n\t} catch {\n\t\treturn false;\n\t}\n}\nfunction loadCodeAndMap(codePath, sourcesPathNormalizer) {\n\tif (!isFile(codePath)) return null;\n\tconst code = readFileSync(codePath, \"utf-8\");\n\tconst extractedComment = code.includes(\"sourceMappingURL\") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);\n\tif (!extractedComment || !extractedComment.length) return { code };\n\tconst maybeMap = loadMap(codePath, extractedComment[1]);\n\tif (!maybeMap) return { code };\n\tconst { map, mapPath } = maybeMap;\n\tconst mapDir = dirname(mapPath);\n\tsourcesPathNormalizer ??= (path) => isAbsolute(path) ? path : resolve(mapDir, map.sourceRoot ?? \".\", path);\n\tmap.sources = normalizeSourcesPaths(map, sourcesPathNormalizer);\n\tmap.sourcesContent = loadMissingSourcesContent(map);\n\tdelete map.sourceRoot;\n\treturn {\n\t\tcode,\n\t\tmap\n\t};\n}\nfunction loadMap(codePath, sourceMappingURL) {\n\tif (sourceMappingURL.startsWith(\"data:\")) {\n\t\tconst map = parseDataUrl(sourceMappingURL);\n\t\treturn {\n\t\t\tmap: parseSourceMapInput(map),\n\t\t\tmapPath: codePath\n\t\t};\n\t}\n\tconst sourceMapFilename = new URL(sourceMappingURL, \"file://\").pathname;\n\tconst mapPath = join(dirname(codePath), sourceMapFilename);\n\tif (!existsSync(mapPath)) return null;\n\treturn {\n\t\tmap: parseSourceMapInput(readFileSync(mapPath, \"utf-8\")),\n\t\tmapPath\n\t};\n}\nfunction parseDataUrl(url) {\n\tconst [prefix, payload] = url.split(\",\");\n\tconst encoding = prefix.split(\";\").at(-1);\n\tswitch (encoding) {\n\t\tcase \"base64\": return Buffer.from(payload, \"base64\").toString();\n\t\tcase \"uri\": return decodeURIComponent(payload);\n\t\tdefault: throw new Error(\"Unsupported source map encoding: \" + encoding);\n\t}\n}\n/**\n* Normalize the paths of the sources in the source map to be absolute paths.\n*/\nfunction normalizeSourcesPaths(map, sourcesPathNormalizer) {\n\treturn 
map.sources.map((source) => source ? sourcesPathNormalizer(source) : null);\n}\n/**\n* Loop through the sources and try to load missing `sourcesContent` from the file system.\n*/\nfunction loadMissingSourcesContent(map) {\n\treturn map.sources.map((source, index) => {\n\t\tif (map.sourcesContent?.[index]) return map.sourcesContent[index];\n\t\tif (source && existsSync(source)) return readFileSync(source, \"utf-8\");\n\t\treturn null;\n\t});\n}\n\n//#endregion\nexport { loadCodeAndMap };\n//# sourceMappingURL=index.mjs.map","import { join, relative, win32, posix, extname, isAbsolute, format, parse } from 'path';\nimport type { PluginOptions } from './types.js';\n\nexport const esmRegex: RegExp = /\\.m[tj]sx?$/;\nexport const cjsRegex: RegExp = /\\.c[tj]sx?$/;\nexport const jsRegexp: RegExp = /\\.[cm]?[tj]s[x]?$/;\n\nexport function normalizeOptions( options?: Partial<PluginOptions> ): PluginOptions {\n\tconst format = options?.format\n\t\t|| options?.filename?.split( '.' ).at( -1 ) as PluginOptions['format']\n\t\t|| 'html';\n\n\tconst defaultOptions: PluginOptions = {\n\t\tenabled: true,\n\t\tformat,\n\t\tfilename: 'sonda-report.' 
+ format,\n\t\topen: true,\n\t\tdetailed: false,\n\t\tsources: false,\n\t\tgzip: false,\n\t\tbrotli: false, \n\t\tsourcesPathNormalizer: null,\n\t};\n\n\t// Merge user options with the defaults\n\tconst normalizedOptions = Object.assign( {}, defaultOptions, options ) satisfies PluginOptions;\n\n\tnormalizedOptions.filename = normalizeOutputPath( normalizedOptions );\n\n\treturn normalizedOptions;\n}\n\nexport function normalizePath( pathToNormalize: string ): string {\n\t// Unicode escape sequences used by Rollup and Vite to identify virtual modules\n\tconst normalized = pathToNormalize.replace( /^\\0/, '' )\n\n\t// Transform absolute paths to relative paths\n\tconst relativized = relative( process.cwd(), normalized );\n\n\t// Ensure paths are POSIX-compliant - https://stackoverflow.com/a/63251716/4617687\n\treturn relativized.replaceAll( win32.sep, posix.sep );\n}\n\nfunction normalizeOutputPath( options: PluginOptions ): string {\n\tlet path = options.filename;\n\tconst expectedExtension = '.' + options.format;\n\n\t// Ensure the filename is an absolute path\n\tif ( !isAbsolute( path ) ) {\n\t\tpath = join( process.cwd(), path );\n\t}\n\n\t// Ensure that the `filename` extension matches the `format` option\n\tif ( expectedExtension !== extname( path ) ) {\n\t\tconsole.warn(\n\t\t\t'\\x1b[0;33m' + // Make the message yellow\n\t\t\t`Sonda: The file extension specified in the 'filename' does not match the 'format' option. 
` +\n\t\t\t`The extension will be changed to '${ expectedExtension }'.`\n\t\t);\n\n\t\tpath = format( { ...parse( path ), base: '', ext: expectedExtension } )\n\t}\n\n\treturn path;\n}\n","import { default as remapping, type DecodedSourceMap, type EncodedSourceMap } from '@ampproject/remapping';\nimport { loadCodeAndMap } from 'load-source-map';\nimport { resolve } from 'path';\nimport { normalizePath } from '../utils.js';\nimport type { CodeMap, ReportInput } from '../types.js';\n\nexport function mapSourceMap(\n\tmap: EncodedSourceMap,\n\tdirPath: string,\n\tinputs: Record<string, ReportInput>\n): DecodedSourceMap {\n\tconst alreadyRemapped = new Set<string>();\n\tconst remapped = remapping( map, ( file, ctx ) => {\n\t\tif ( alreadyRemapped.has( file ) ) {\n\t\t\treturn;\n\t\t}\n\n\t\talreadyRemapped.add( file );\n\n\t\tconst codeMap = addSourcesToInputs(\n\t\t\tresolve( dirPath, file ),\n\t\t\tinputs\n\t\t);\n\n\t\tif ( !codeMap ) {\n\t\t\treturn;\n\t\t}\n\n\t\tctx.content ??= codeMap.code;\n\n\t\treturn codeMap.map;\n\t}, { decodedMappings: true } );\n\n\treturn remapped as DecodedSourceMap;\n}\n\n/**\n * Loads the source map of a given file and adds its \"sources\" to the given inputs object.\n */\nexport function addSourcesToInputs(\n\tpath: string,\n\tinputs: Record<string, ReportInput>\n): CodeMap | null {\n\tconst codeMap = loadCodeAndMap( path );\n\n\tif ( !codeMap ) {\n\t\treturn null;\n\t}\n\n\tconst parentPath = normalizePath( path );\n\tconst format = inputs[ parentPath ]?.format ?? 'unknown';\n\n\tcodeMap.map?.sources\n\t\t.filter( source => source !== null )\n\t\t.forEach( ( source, index ) => {\n\t\t\tconst normalizedPath = normalizePath( source );\n\n\t\t\tif ( parentPath === normalizedPath ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tinputs[ normalizedPath ] = {\n\t\t\t\tbytes: Buffer.byteLength( codeMap.map!.sourcesContent?.[ index ] ?? 
'' ),\n\t\t\t\tformat,\n\t\t\t\timports: [],\n\t\t\t\tbelongsTo: parentPath\n\t\t\t};\n\t\t} );\n\t\n\treturn codeMap;\n}\n","import { gzipSync, brotliCompressSync } from 'zlib';\nimport type { DecodedSourceMap, SourceMapSegment } from '@ampproject/remapping';\nimport type { PluginOptions, Sizes } from '../types.js';\n\nconst UNASSIGNED = '[unassigned]';\n\nexport function getBytesPerSource(\n\tcode: string,\n\tmap: DecodedSourceMap,\n\tassetSizes: Sizes,\n\toptions: PluginOptions\n): Map<string, Sizes> {\n\tconst contributions = getContributions( map.sources );\n\n\t// Split the code into lines\n\tconst codeLines = code.split( /(?<=\\r?\\n)/ );\n\n\tfor ( let lineIndex = 0; lineIndex < codeLines.length; lineIndex++ ) {\n\t\tconst lineCode = codeLines[ lineIndex ];\n\t\tconst mappings = map.mappings[ lineIndex ] || [];\n\t\tlet currentColumn = 0;\n\n\t\tfor ( let i = 0; i <= mappings.length; i++ ) {\n\t\t\t// 0: generatedColumn\n\t\t\t// 1: sourceIndex\n\t\t\t// 2: originalLine\n\t\t\t// 3: originalColumn\n\t\t\t// 4: nameIndex\n\n\t\t\tconst mapping: SourceMapSegment | undefined = mappings[ i ];\n\t\t\tconst startColumn = mapping?.[ 0 ] ?? lineCode.length;\n\t\t\tconst endColumn = mappings[ i + 1 ]?.[ 0 ] ?? 
lineCode.length;\n\n\t\t\t// Slice the code from currentColumn to startColumn for unassigned code\n\t\t\tif ( startColumn > currentColumn ) {\n\t\t\t\tcontributions.set( UNASSIGNED, contributions.get( UNASSIGNED ) + lineCode.slice( currentColumn, startColumn ) );\n\t\t\t}\n\n\t\t\tif ( mapping ) {\n\t\t\t\t// Slice the code from startColumn to endColumn for assigned code\n\t\t\t\tconst sourceIndex = mapping?.[ 1 ];\n\t\t\t\tconst codeSlice = lineCode.slice( startColumn, endColumn );\n\t\t\t\tconst source = sourceIndex !== undefined && map.sources[ sourceIndex ] || UNASSIGNED;\n\n\t\t\t\tcontributions.set( source, contributions.get( source ) + codeSlice );\n\t\t\t\tcurrentColumn = endColumn;\n\t\t\t} else {\n\t\t\t\tcurrentColumn = startColumn;\n\t\t\t}\n\t\t}\n\t}\n\n\t// Compute sizes for each source\n\tconst sourceSizes = new Map<string, Sizes>();\n\n\tconst contributionsSum: Sizes = {\n\t\tuncompressed: 0,\n\t\tgzip: 0,\n\t\tbrotli: 0\n\t};\n\n\tfor ( const [ source, codeSegment ] of contributions ) {\n\t\tconst sizes = getSizes( codeSegment, options );\n\n\t\tcontributionsSum.uncompressed += sizes.uncompressed;\n\t\tcontributionsSum.gzip += sizes.gzip;\n\t\tcontributionsSum.brotli += sizes.brotli;\n\n\t\tsourceSizes.set( source, sizes );\n\t}\n\n\treturn adjustSizes( sourceSizes, assetSizes, contributionsSum, options );\n}\n\nexport function getSizes(\n\tcode: string,\n\toptions: PluginOptions\n): Sizes {\n\treturn {\n\t\tuncompressed: Buffer.byteLength( code ),\n\t\tgzip: options.gzip ? gzipSync( code ).length : 0,\n\t\tbrotli: options.brotli ? 
brotliCompressSync( code ).length : 0\n\t};\n}\n\nfunction getContributions( sources: Array<string | null> ): Map<string, string> {\n\tconst contributions = new Map<string, string>();\n\n\t// Populate contributions with sources\n\tsources\n\t\t.filter( source => source !== null )\n\t\t.forEach( source => contributions.set( source, '' ) );\n\n\t// Add entry for the code that is not assigned to any source\n\tcontributions.set( UNASSIGNED, '' );\n\n\treturn contributions;\n}\n\n/**\n * Compression efficiency improves with the size of the file.\n *\n * However, what we have is the compressed size of the entire bundle (`actual`),\n * the sum of all files compressed individually (`sum`) and the compressed\n * size of a given file (`content`). The last value is essentially a “worst-case”\n * scenario, and the actual size of the file in the bundle is likely to be smaller.\n *\n * We use this information to estimate the actual size of the file in the bundle\n * after compression.\n */\nfunction adjustSizes(\n\tsources: Map<string, Sizes>,\n\tasset: Sizes,\n\tsums: Sizes,\n\toptions: PluginOptions\n): Map<string, Sizes> {\n\tconst gzipDelta = options.gzip ? asset.gzip / sums.gzip : 0;\n\tconst brotliDelta = options.brotli ? asset.brotli / sums.brotli : 0;\n\n\tfor ( const [ source, sizes ] of sources ) {\n\t\tsources.set( source, {\n\t\t\tuncompressed: sizes.uncompressed,\n\t\t\tgzip: options.gzip ? Math.round( sizes.gzip * gzipDelta ) : 0,\n\t\t\tbrotli: options.brotli ? 
Math.round( sizes.brotli * brotliDelta ) : 0\n\t\t} );\n\t}\n\n\treturn sources;\n}\n","import { readFileSync } from 'fs';\nimport { fileURLToPath } from 'url';\nimport { dirname, extname, resolve } from 'path';\nimport { loadCodeAndMap } from 'load-source-map';\nimport { decode } from '@jridgewell/sourcemap-codec';\nimport { mapSourceMap } from '../sourcemap/map.js';\nimport { getBytesPerSource, getSizes } from '../sourcemap/bytes.js';\nimport type {\n JsonReport,\n MaybeCodeMap,\n ReportInput,\n ReportOutput,\n CodeMap,\n ReportOutputInput,\n PluginOptions\n} from '../types.js';\nimport { normalizePath } from '../utils.js';\n\nexport function generateJsonReport(\n assets: Array<string>,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): JsonReport {\n const acceptedExtensions = [ '.js', '.mjs', '.cjs', '.css' ];\n\n const outputs = assets\n .filter( asset => acceptedExtensions.includes( extname( asset ) ) )\n .reduce( ( carry, asset ) => {\n const data = processAsset( asset, inputs, options );\n\n if ( data ) {\n carry[ normalizePath( asset ) ] = data;\n }\n\n return carry;\n }, {} as Record<string, ReportOutput> );\n\n return {\n inputs: sortObjectKeys( inputs ),\n outputs: sortObjectKeys( outputs )\n };\n}\n\nexport function generateHtmlReport(\n assets: Array<string>,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): string {\n const json = generateJsonReport( assets, inputs, options );\n // Replace with `import.meta.dirname` after upgrading to Node 20\n const __dirname = dirname( fileURLToPath( import.meta.url ) );\n const template = readFileSync( resolve( __dirname, './index.html' ), 'utf-8' );\n\n return template.replace( '__REPORT_DATA__', encodeURIComponent( JSON.stringify( json ) ) );\n}\n\nfunction processAsset(\n asset: string,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): ReportOutput | void {\n const maybeCodeMap = loadCodeAndMap( asset, options.sourcesPathNormalizer );\n\n if ( !hasCodeAndMap( 
maybeCodeMap ) ) {\n return;\n }\n\n const { code, map } = maybeCodeMap;\n const mapped = options.detailed\n ? mapSourceMap( map, dirname( asset ), inputs )\n : { ...map, mappings: decode( map.mappings ) };\n\n mapped.sources = mapped.sources.map( source => source && normalizePath( source ) );\n\n const assetSizes = getSizes( code, options );\n const bytes = getBytesPerSource( code, mapped, assetSizes, options );\n const outputInputs = Array\n .from( bytes )\n .reduce( ( carry, [ source, sizes ] ) => {\n carry[ normalizePath( source ) ] = sizes;\n\n return carry;\n }, {} as Record<string, ReportOutputInput> );\n\n return {\n ...assetSizes,\n inputs: sortObjectKeys( outputInputs ),\n map: options.sources ? {\n version: 3,\n names: [],\n mappings: mapped.mappings,\n sources: mapped.sources,\n sourcesContent: mapped.sourcesContent,\n } : undefined\n };\n}\n\nfunction hasCodeAndMap( result: MaybeCodeMap ): result is Required<CodeMap> {\n return Boolean( result && result.code && result.map );\n}\n\nfunction sortObjectKeys<T extends unknown>( object: Record<string, T> ): Record<string, T> {\n return Object\n .keys( object )\n .sort()\n .reduce( ( carry, key ) => {\n carry[ key ] = object[ key ];\n\n return carry;\n }, {} as Record<string, T> );\n} \n","import { dirname } from 'path';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { generateHtmlReport, generateJsonReport } from './formats.js';\nimport type { PluginOptions, JsonReport } from '../types.js';\nimport { normalizeOptions } from '../utils.js';\n\nexport async function generateReportFromAssets(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\tpluginOptions: Partial<PluginOptions>\n): Promise<void> {\n\tconst options = normalizeOptions( pluginOptions );\n\tconst handler = options.format === 'html' ? 
saveHtml : saveJson;\n\tconst report = handler( assets, inputs, options );\n\tconst outputDirectory = dirname( options.filename );\n\n\t// Ensure the output directory exists\n\tif ( !existsSync( outputDirectory ) ) {\n\t\tmkdirSync( outputDirectory, { recursive: true } );\n\t}\n\n\t// Write the report to the file system\n\twriteFileSync( options.filename, report );\n\n\tif ( !options.open ) {\n\t\treturn;\n\t}\n\n\t/**\n\t * `open` is ESM-only package, so we need to import it\n\t * dynamically to make it work in CommonJS environment.\n\t */\n\tconst { default: open } = await import( 'open' );\n\n\t// Open the report in the default program for the file extension\n\topen( options.filename );\n}\n\nfunction saveHtml(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\toptions: PluginOptions\n): string {\n\treturn generateHtmlReport( assets, inputs, options );\n}\n\nfunction saveJson(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\toptions: PluginOptions\n): string {\n\tconst report = generateJsonReport( assets, inputs, options );\n\n\treturn JSON.stringify( report, null, 2 
);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAUA,SAAS,oBAAoB,KAAK;AACjC,QAAO,KAAK,MAAM,IAAI,QAAQ,kBAAkB,GAAG,CAAC;AACpD;;;;;;;;AAQD,MAAM,sBAAsB;;;;AAI5B,SAAS,OAAOA,QAAM;AACrB,KAAI;AACH,SAAO,iBAASA,OAAK,CAAC,QAAQ;CAC9B,QAAO;AACP,SAAO;CACP;AACD;AACD,SAAS,eAAe,UAAU,uBAAuB;AACxD,MAAK,OAAO,SAAS,CAAE,QAAO;CAC9B,MAAM,OAAO,qBAAa,UAAU,QAAQ;CAC5C,MAAM,mBAAmB,KAAK,SAAS,mBAAmB,IAAI,MAAM,KAAK,KAAK,SAAS,oBAAoB,CAAC,CAAC,GAAG,GAAG;AACnH,MAAK,qBAAqB,iBAAiB,OAAQ,QAAO,EAAE,KAAM;CAClE,MAAM,WAAW,QAAQ,UAAU,iBAAiB,GAAG;AACvD,MAAK,SAAU,QAAO,EAAE,KAAM;CAC9B,MAAM,EAAE,KAAK,SAAS,GAAG;CACzB,MAAM,SAAS,kBAAQ,QAAQ;AAC/B,2BAA0B,CAACA,WAAS,qBAAWA,OAAK,GAAGA,SAAO,kBAAQ,QAAQ,IAAI,cAAc,KAAKA,OAAK;AAC1G,KAAI,UAAU,sBAAsB,KAAK,sBAAsB;AAC/D,KAAI,iBAAiB,0BAA0B,IAAI;AACnD,QAAO,IAAI;AACX,QAAO;EACN;EACA;CACA;AACD;AACD,SAAS,QAAQ,UAAU,kBAAkB;AAC5C,KAAI,iBAAiB,WAAW,QAAQ,EAAE;EACzC,MAAM,MAAM,aAAa,iBAAiB;AAC1C,SAAO;GACN,KAAK,oBAAoB,IAAI;GAC7B,SAAS;EACT;CACD;CACD,MAAM,oBAAoB,IAAI,IAAI,kBAAkB,WAAW;CAC/D,MAAM,UAAU,eAAK,kBAAQ,SAAS,EAAE,kBAAkB;AAC1D,MAAK,mBAAW,QAAQ,CAAE,QAAO;AACjC,QAAO;EACN,KAAK,oBAAoB,qBAAa,SAAS,QAAQ,CAAC;EACxD;CACA;AACD;AACD,SAAS,aAAaC,OAAK;CAC1B,MAAM,CAAC,QAAQ,QAAQ,GAAG,MAAI,MAAM,IAAI;CACxC,MAAM,WAAW,OAAO,MAAM,IAAI,CAAC,GAAG,GAAG;AACzC,SAAQ,UAAR;AACC,OAAK,SAAU,QAAO,OAAO,KAAK,SAAS,SAAS,CAAC,UAAU;AAC/D,OAAK,MAAO,QAAO,mBAAmB,QAAQ;AAC9C,UAAS,OAAM,IAAI,MAAM,sCAAsC;CAC/D;AACD;;;;AAID,SAAS,sBAAsB,KAAK,uBAAuB;AAC1D,QAAO,IAAI,QAAQ,IAAI,CAAC,WAAW,SAAS,sBAAsB,OAAO,GAAG,KAAK;AACjF;;;;AAID,SAAS,0BAA0B,KAAK;AACvC,QAAO,IAAI,QAAQ,IAAI,CAAC,QAAQ,UAAU;AACzC,MAAI,IAAI,iBAAiB,OAAQ,QAAO,IAAI,eAAe;AAC3D,MAAI,UAAU,mBAAW,OAAO,CAAE,QAAO,qBAAa,QAAQ,QAAQ;AACtE,SAAO;CACP,EAAC;AACF;;;;MCtFYC,WAAmB;MACnBC,WAAmB;MACnBC,WAAmB;AAEzB,SAAS,iBAAkBC,SAAkD;CACnF,MAAMC,WAAS,SAAS,UACpB,SAAS,UAAU,MAAO,IAAK,CAAC,GAAI,GAAI,IACxC;CAEJ,MAAMC,iBAAgC;EACrC,SAAS;EACT;EACA,UAAU,kBAAkBD;EAC5B,MAAM;EACN,UAAU;EACV,SAAS;EACT,MAAM;EACN,QAAQ;EACR,uBAAuB;CACvB;CAGD,MAAM,oBAAoB,OAAO,OAAQ,CAAE,GAAE,gBAAgB,QAAS;AAEtE,mBAAkB,WAAW,oBAAqB,kBAA
mB;AAErE,QAAO;AACP;AAEM,SAAS,cAAeE,iBAAkC;CAEhE,MAAM,aAAa,gBAAgB,QAAS,OAAO,GAAI;CAGvD,MAAM,cAAc,mBAAU,QAAQ,KAAK,EAAE,WAAY;AAGzD,QAAO,YAAY,WAAYC,WAAM,KAAKC,WAAM,IAAK;AACrD;AAED,SAAS,oBAAqBC,SAAiC;CAC9D,IAAIC,SAAO,QAAQ;CACnB,MAAM,oBAAoB,MAAM,QAAQ;AAGxC,MAAM,qBAAYA,OAAM,CACvB,UAAO,eAAM,QAAQ,KAAK,EAAEA,OAAM;AAInC,KAAK,sBAAsB,kBAASA,OAAM,EAAG;AAC5C,UAAQ,KACP,gBACC,+FACA,oCAAqC,kBAAmB,IACzD;AAED,WAAO,iBAAQ;GAAE,GAAG,gBAAOA,OAAM;GAAE,MAAM;GAAI,KAAK;EAAmB,EAAE;CACvE;AAED,QAAOA;AACP;;;;AC1DM,SAAS,aACfC,KACAC,SACAC,QACmB;CACnB,MAAM,kBAAkB,IAAI;CAC5B,MAAM,WAAW,oCAAW,KAAK,CAAE,MAAM,QAAS;AACjD,MAAK,gBAAgB,IAAK,KAAM,CAC/B;AAGD,kBAAgB,IAAK,KAAM;EAE3B,MAAM,UAAU,mBACf,kBAAS,SAAS,KAAM,EACxB,OACA;AAED,OAAM,QACL;AAGD,MAAI,YAAY,QAAQ;AAExB,SAAO,QAAQ;CACf,GAAE,EAAE,iBAAiB,KAAM,EAAE;AAE9B,QAAO;AACP;AAKM,SAAS,mBACfC,QACAD,QACiB;CACjB,MAAM,UAAU,eAAgBE,OAAM;AAEtC,MAAM,QACL,QAAO;CAGR,MAAM,aAAa,cAAeA,OAAM;CACxC,MAAMC,WAAS,OAAQ,aAAc,UAAU;AAE/C,SAAQ,KAAK,QACX,OAAQ,YAAU,WAAW,KAAM,CACnC,QAAS,CAAE,QAAQ,UAAW;EAC9B,MAAM,iBAAiB,cAAe,OAAQ;AAE9C,MAAK,eAAe,eACnB;AAGD,SAAQ,kBAAmB;GAC1B,OAAO,OAAO,WAAY,QAAQ,IAAK,iBAAkB,UAAW,GAAI;GACxE;GACA,SAAS,CAAE;GACX,WAAW;EACX;CACD,EAAE;AAEJ,QAAO;AACP;;;;AClED,MAAM,aAAa;AAEZ,SAAS,kBACfC,MACAC,KACAC,YACAC,SACqB;CACrB,MAAM,gBAAgB,iBAAkB,IAAI,QAAS;CAGrD,MAAM,YAAY,KAAK,MAAO,aAAc;AAE5C,MAAM,IAAI,YAAY,GAAG,YAAY,UAAU,QAAQ,aAAc;EACpE,MAAM,WAAW,UAAW;EAC5B,MAAM,WAAW,IAAI,SAAU,cAAe,CAAE;EAChD,IAAI,gBAAgB;AAEpB,OAAM,IAAI,IAAI,GAAG,KAAK,SAAS,QAAQ,KAAM;GAO5C,MAAMC,UAAwC,SAAU;GACxD,MAAM,cAAc,UAAW,MAAO,SAAS;GAC/C,MAAM,YAAY,SAAU,IAAI,KAAO,MAAO,SAAS;AAGvD,OAAK,cAAc,cAClB,eAAc,IAAK,YAAY,cAAc,IAAK,WAAY,GAAG,SAAS,MAAO,eAAe,YAAa,CAAE;AAGhH,OAAK,SAAU;IAEd,MAAM,cAAc,UAAW;IAC/B,MAAM,YAAY,SAAS,MAAO,aAAa,UAAW;IAC1D,MAAM,SAAS,gBAAgB,aAAa,IAAI,QAAS,gBAAiB;AAE1E,kBAAc,IAAK,QAAQ,cAAc,IAAK,OAAQ,GAAG,UAAW;AACpE,oBAAgB;GAChB,MACA,iBAAgB;EAEjB;CACD;CAGD,MAAM,cAAc,IAAI;CAExB,MAAMC,mBAA0B;EAC/B,cAAc;EACd,MAAM;EACN,QAAQ;CACR;AAED,MAAM,MAAM,CAAE,QAAQ,YAAa,IAAI,eAAgB;EACtD,MAAM,QAAQ,SAAU,aAAa,QAAS;AAE9C,mBAAiB
,gBAAgB,MAAM;AACvC,mBAAiB,QAAQ,MAAM;AAC/B,mBAAiB,UAAU,MAAM;AAEjC,cAAY,IAAK,QAAQ,MAAO;CAChC;AAED,QAAO,YAAa,aAAa,YAAY,kBAAkB,QAAS;AACxE;AAEM,SAAS,SACfL,MACAG,SACQ;AACR,QAAO;EACN,cAAc,OAAO,WAAY,KAAM;EACvC,MAAM,QAAQ,OAAO,mBAAU,KAAM,CAAC,SAAS;EAC/C,QAAQ,QAAQ,SAAS,6BAAoB,KAAM,CAAC,SAAS;CAC7D;AACD;AAED,SAAS,iBAAkBG,SAAqD;CAC/E,MAAM,gBAAgB,IAAI;AAG1B,SACE,OAAQ,YAAU,WAAW,KAAM,CACnC,QAAS,YAAU,cAAc,IAAK,QAAQ,GAAI,CAAE;AAGtD,eAAc,IAAK,YAAY,GAAI;AAEnC,QAAO;AACP;;;;;;;;;;;;AAaD,SAAS,YACRC,SACAC,OACAC,MACAN,SACqB;CACrB,MAAM,YAAY,QAAQ,OAAO,MAAM,OAAO,KAAK,OAAO;CAC1D,MAAM,cAAc,QAAQ,SAAS,MAAM,SAAS,KAAK,SAAS;AAElE,MAAM,MAAM,CAAE,QAAQ,MAAO,IAAI,QAChC,SAAQ,IAAK,QAAQ;EACpB,cAAc,MAAM;EACpB,MAAM,QAAQ,OAAO,KAAK,MAAO,MAAM,OAAO,UAAW,GAAG;EAC5D,QAAQ,QAAQ,SAAS,KAAK,MAAO,MAAM,SAAS,YAAa,GAAG;CACpE,EAAE;AAGJ,QAAO;AACP;;;;AC9GM,SAAS,mBACdO,QACAC,QACAC,SACY;CACZ,MAAM,qBAAqB;EAAE;EAAO;EAAQ;EAAQ;CAAQ;CAE5D,MAAM,UAAU,OACb,OAAQ,WAAS,mBAAmB,SAAU,kBAAS,MAAO,CAAE,CAAE,CAClE,OAAQ,CAAE,OAAO,UAAW;EAC3B,MAAM,OAAO,aAAc,OAAO,QAAQ,QAAS;AAEnD,MAAK,KACH,OAAO,cAAe,MAAO,IAAK;AAGpC,SAAO;CACR,GAAE,CAAE,EAAkC;AAEzC,QAAO;EACL,QAAQ,eAAgB,OAAQ;EAChC,SAAS,eAAgB,QAAS;CACnC;AACF;AAEM,SAAS,mBACdF,QACAC,QACAC,SACQ;CACR,MAAM,OAAO,mBAAoB,QAAQ,QAAQ,QAAS;CAE1D,MAAMC,cAAY,kBAAS,qEAAgC,CAAE;CAC7D,MAAM,WAAW,qBAAc,kBAASA,aAAW,eAAgB,EAAE,QAAS;AAE9E,QAAO,SAAS,QAAS,mBAAmB,mBAAoB,KAAK,UAAW,KAAM,CAAE,CAAE;AAC3F;AAED,SAAS,aACPC,OACAH,QACAC,SACqB;CACrB,MAAM,eAAe,eAAgB,OAAO,QAAQ,sBAAuB;AAE3E,MAAM,cAAe,aAAc,CACjC;CAGF,MAAM,EAAE,MAAM,KAAK,GAAG;CACtB,MAAM,SAAS,QAAQ,WACnB,aAAc,KAAK,kBAAS,MAAO,EAAE,OAAQ,GAC7C;EAAE,GAAG;EAAK,UAAU,yCAAQ,IAAI,SAAU;CAAE;AAEhD,QAAO,UAAU,OAAO,QAAQ,IAAK,YAAU,UAAU,cAAe,OAAQ,CAAE;CAElF,MAAM,aAAa,SAAU,MAAM,QAAS;CAC5C,MAAM,QAAQ,kBAAmB,MAAM,QAAQ,YAAY,QAAS;CACpE,MAAM,eAAe,MAClB,KAAM,MAAO,CACb,OAAQ,CAAE,OAAO,CAAE,QAAQ,MAAO,KAAM;AACvC,QAAO,cAAe,OAAQ,IAAK;AAEnC,SAAO;CACR,GAAE,CAAE,EAAuC;AAE9C,QAAO;EACL,GAAG;EACH,QAAQ,eAAgB,aAAc;EACtC,KAAK,QAAQ,UAAU;GACrB,SAAS;GACT,OAAO,CAAE;GACT,UAAU,OAAO;GACjB,SAAS,OAAO;GAChB,gBAAgB,OAAO;EACxB,IAA
G;CACL;AACF;AAED,SAAS,cAAeG,QAAoD;AAC1E,QAAO,QAAS,UAAU,OAAO,QAAQ,OAAO,IAAK;AACtD;AAED,SAAS,eAAmCC,QAA+C;AACzF,QAAO,OACJ,KAAM,OAAQ,CACd,MAAM,CACN,OAAQ,CAAE,OAAO,QAAS;AACzB,QAAO,OAAQ,OAAQ;AAEvB,SAAO;CACR,GAAE,CAAE,EAAuB;AAC/B;;;;ACxGM,eAAe,yBACrBC,QACAC,QACAC,eACgB;CAChB,MAAM,UAAU,iBAAkB,cAAe;CACjD,MAAM,UAAU,QAAQ,WAAW,SAAS,WAAW;CACvD,MAAM,SAAS,QAAS,QAAQ,QAAQ,QAAS;CACjD,MAAM,kBAAkB,kBAAS,QAAQ,SAAU;AAGnD,MAAM,mBAAY,gBAAiB,CAClC,mBAAW,iBAAiB,EAAE,WAAW,KAAM,EAAE;AAIlD,uBAAe,QAAQ,UAAU,OAAQ;AAEzC,MAAM,QAAQ,KACb;;;;;CAOD,MAAM,EAAE,SAAS,MAAM,GAAG,MAAM,OAAQ;AAGxC,MAAM,QAAQ,SAAU;AACxB;AAED,SAAS,SACRF,QACAC,QACAE,SACS;AACT,QAAO,mBAAoB,QAAQ,QAAQ,QAAS;AACpD;AAED,SAAS,SACRH,QACAC,QACAE,SACS;CACT,MAAM,SAAS,mBAAoB,QAAQ,QAAQ,QAAS;AAE5D,QAAO,KAAK,UAAW,QAAQ,MAAM,EAAG;AACxC"}
import { dirname, extname, format, isAbsolute, join, parse, posix, relative, resolve, win32 } from "path";
import { existsSync, mkdirSync, readFileSync, statSync, writeFileSync } from "fs";
import { fileURLToPath } from "url";
import { decode } from "@jridgewell/sourcemap-codec";
import remapping from "@ampproject/remapping";
import { brotliCompressSync, gzipSync } from "zlib";
//#region ../load-source-map/dist/index.mjs
/**
 * Strip any JSON XSSI avoidance prefix from the string (as documented in the source maps specification),
 * and parses the string as JSON.
 *
 * https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164
 */
function parseSourceMapInput(str) {
	const withoutPrefix = str.replace(/^\)]}'[^\n]*\n/, "");
	return JSON.parse(withoutPrefix);
}
/**
 * Matches a `sourceMappingURL` comment and captures its URL. Supported forms:
 *
 * sourceMappingURL=data:application/json;charset=utf-8;base64,data
 * sourceMappingURL=data:application/json;base64,data
 * sourceMappingURL=data:application/json;uri,data
 * sourceMappingURL=map-file-comment.css.map
 * sourceMappingURL=map-file-comment.css.map?query=value
 */
const sourceMappingRegExp = /[@#]\s*sourceMappingURL=(\S+)\b/g;
/**
 * Checks if the given path is a file. Any filesystem error (missing path,
 * permission problem, ...) is treated as "not a file".
 */
function isFile(path) {
	try {
		const stats = statSync(path);
		return stats.isFile();
	} catch {
		return false;
	}
}
/**
 * Loads a file's code and, when it carries a `sourceMappingURL` comment,
 * its source map. Source paths are normalized (absolute by default) and
 * missing `sourcesContent` entries are read from disk. Returns null when
 * the path is not a file, and `{ code }` alone when no usable map exists.
 */
function loadCodeAndMap(codePath, sourcesPathNormalizer) {
if (!isFile(codePath)) return null;
const code = readFileSync(codePath, "utf-8");
// Only the last sourceMappingURL comment in the file is honored.
const extractedComment = code.includes("sourceMappingURL") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);
if (!extractedComment || !extractedComment.length) return { code };
const maybeMap = loadMap(codePath, extractedComment[1]);
if (!maybeMap) return { code };
const { map, mapPath } = maybeMap;
const mapDir = dirname(mapPath);
// Default normalizer: resolve relative sources against the map's directory and sourceRoot.
sourcesPathNormalizer ??= (path) => isAbsolute(path) ? path : resolve(mapDir, map.sourceRoot ?? ".", path);
map.sources = normalizeSourcesPaths(map, sourcesPathNormalizer);
map.sourcesContent = loadMissingSourcesContent(map);
// sourceRoot has been folded into the normalized source paths above.
delete map.sourceRoot;
return {
code,
map
};
}
/**
 * Resolves a sourceMappingURL to a parsed source map. Inline `data:` URLs
 * are decoded directly (mapPath falls back to the code file); external map
 * files are read from disk, returning null when the file does not exist.
 */
function loadMap(codePath, sourceMappingURL) {
	if (sourceMappingURL.startsWith("data:")) return {
		map: parseSourceMapInput(parseDataUrl(sourceMappingURL)),
		mapPath: codePath
	};
	const sourceMapFilename = new URL(sourceMappingURL, "file://").pathname;
	const mapPath = join(dirname(codePath), sourceMapFilename);
	if (!existsSync(mapPath)) return null;
	const rawMap = readFileSync(mapPath, "utf-8");
	return {
		map: parseSourceMapInput(rawMap),
		mapPath
	};
}
/**
 * Decodes the payload of a `data:` source map URL. Supports `base64` and
 * `uri` encodings; throws for any other encoding.
 */
function parseDataUrl(url) {
	const [prefix, payload] = url.split(",");
	const encoding = prefix.split(";").at(-1);
	if (encoding === "base64") return Buffer.from(payload, "base64").toString();
	if (encoding === "uri") return decodeURIComponent(payload);
	throw new Error("Unsupported source map encoding: " + encoding);
}
/**
 * Normalize the paths of the sources in the source map to be absolute paths.
 * Null (and empty) entries are preserved as null.
 */
function normalizeSourcesPaths(map, sourcesPathNormalizer) {
	const normalized = [];
	for (const source of map.sources) {
		normalized.push(source ? sourcesPathNormalizer(source) : null);
	}
	return normalized;
}
/**
 * Loop through the sources and try to load missing `sourcesContent` from the file system.
 * Entries that already have content keep it; unreadable sources become null.
 */
function loadMissingSourcesContent(map) {
	return map.sources.map((source, index) => {
		const existing = map.sourcesContent?.[index];
		if (existing) return existing;
		if (source && existsSync(source)) return readFileSync(source, "utf-8");
		return null;
	});
}
//#endregion
//#region src/utils.ts
// Matches ESM-flavored extensions: .mjs, .mts, .mjsx, .mtsx
const esmRegex = /\.m[tj]sx?$/;
// Matches CJS-flavored extensions: .cjs, .cts, .cjsx, .ctsx
const cjsRegex = /\.c[tj]sx?$/;
// Matches any JS/TS extension, with optional `c`/`m` prefix and optional `x` suffix
const jsRegexp = /\.[cm]?[tj]s[x]?$/;
/**
 * Merges user-provided plugin options with the defaults and resolves the
 * final report path. The report format falls back to the extension of the
 * user's `filename` option, and finally to "html".
 */
function normalizeOptions(options) {
	// Infer the format: explicit option, then filename extension, then HTML.
	const reportFormat = options?.format || options?.filename?.split(".").at(-1) || "html";
	const normalizedOptions = {
		enabled: true,
		format: reportFormat,
		filename: "sonda-report." + reportFormat,
		open: true,
		detailed: false,
		sources: false,
		gzip: false,
		brotli: false,
		sourcesPathNormalizer: null,
		...options
	};
	normalizedOptions.filename = normalizeOutputPath(normalizedOptions);
	return normalizedOptions;
}
/**
 * Converts a (possibly virtual) module path into a POSIX-style path
 * relative to the current working directory.
 */
function normalizePath(pathToNormalize) {
	// Rollup and Vite prefix virtual modules with a NUL byte — strip it first.
	const withoutVirtualMarker = pathToNormalize.replace(/^\0/, "");
	// Relativize against the working directory, then force POSIX separators.
	const relativized = relative(process.cwd(), withoutVirtualMarker);
	return relativized.replaceAll(win32.sep, posix.sep);
}
/**
 * Resolves the `filename` option to an absolute path and forces its
 * extension to match the `format` option, warning when they disagree.
 */
function normalizeOutputPath(options) {
	const expectedExtension = "." + options.format;
	// Anchor relative filenames to the current working directory.
	const absolutePath = isAbsolute(options.filename) ? options.filename : join(process.cwd(), options.filename);
	if (extname(absolutePath) === expectedExtension) {
		return absolutePath;
	}
	console.warn("\x1B[0;33m" + `Sonda: The file extension specified in the 'filename' does not match the 'format' option. ` + `The extension will be changed to '${expectedExtension}'.`);
	// Rebuild the path with the extension dictated by the format.
	return format({
		...parse(absolutePath),
		base: "",
		ext: expectedExtension
	});
}
//#endregion
//#region src/sourcemap/map.ts
/**
 * Flattens a chain of source maps into a single decoded map, registering
 * every transitively discovered source file in the `inputs` object.
 */
function mapSourceMap(map, dirPath, inputs) {
	const visited = new Set();
	return remapping(map, (file, ctx) => {
		// Each referenced file only needs to be processed once per pass.
		if (visited.has(file)) return;
		visited.add(file);
		const codeMap = addSourcesToInputs(resolve(dirPath, file), inputs);
		if (!codeMap) return;
		// Supply the original code when the parent map did not embed it.
		ctx.content ??= codeMap.code;
		return codeMap.map;
	}, { decodedMappings: true });
}
/**
 * Loads the source map of a given file and registers each of its "sources"
 * in the `inputs` object, attributing every source to the parent file.
 *
 * Returns the loaded code/map pair, or `null` when the file could not be
 * loaded.
 */
function addSourcesToInputs(path, inputs) {
	const codeMap = loadCodeAndMap(path);
	if (!codeMap) return null;
	const parentPath = normalizePath(path);
	const format$1 = inputs[parentPath]?.format ?? "unknown";
	// Iterate the original `sources` array and skip nulls inline. The
	// previous implementation filtered nulls out *before* the forEach,
	// which shifted `index` and made the `sourcesContent?.[index]` lookup
	// read the wrong entry whenever `sources` contained null values.
	codeMap.map?.sources.forEach((source, index) => {
		if (source === null) return;
		const normalizedPath = normalizePath(source);
		// The parent file itself is already registered; don't re-add it.
		if (parentPath === normalizedPath) return;
		inputs[normalizedPath] = {
			bytes: Buffer.byteLength(codeMap.map.sourcesContent?.[index] ?? ""),
			format: format$1,
			imports: [],
			belongsTo: parentPath
		};
	});
	return codeMap;
}
//#endregion
//#region src/sourcemap/bytes.ts
// Bucket key for generated bytes that no source map segment claims.
const UNASSIGNED = "[unassigned]";
/**
 * Estimates how many bytes of the generated `code` each source file
 * contributes, based on the decoded source map `mappings`.
 *
 * The code is walked line by line; each mapping segment claims the slice
 * of the line between its start column and the next segment's start
 * column. Bytes not covered by any segment go into the `[unassigned]`
 * bucket. Per-source compressed sizes are then scaled against the whole
 * asset's sizes in `adjustSizes`.
 */
function getBytesPerSource(code, map, assetSizes, options) {
	const contributions = getContributions(map.sources);
	// Split while keeping each trailing newline, so byte counts stay exact.
	const codeLines = code.split(/(?<=\r?\n)/);
	for (let lineIndex = 0; lineIndex < codeLines.length; lineIndex++) {
		const lineCode = codeLines[lineIndex];
		const mappings = map.mappings[lineIndex] || [];
		let currentColumn = 0;
		// `<=` runs one extra iteration with `mapping === undefined` so the
		// tail of the line after the last segment is also accounted for.
		for (let i = 0; i <= mappings.length; i++) {
			// Segment layout: [generatedColumn, sourceIndex, originalLine, originalColumn, nameIndex]
			const mapping = mappings[i];
			const startColumn = mapping?.[0] ?? lineCode.length;
			const endColumn = mappings[i + 1]?.[0] ?? lineCode.length;
			// Bytes between the previous segment's end and this one's start are unassigned.
			if (startColumn > currentColumn) contributions.set(UNASSIGNED, contributions.get(UNASSIGNED) + lineCode.slice(currentColumn, startColumn));
			if (mapping) {
				const sourceIndex = mapping?.[1];
				const codeSlice = lineCode.slice(startColumn, endColumn);
				// Segments without a source index also count as unassigned.
				const source = sourceIndex !== undefined && map.sources[sourceIndex] || UNASSIGNED;
				contributions.set(source, contributions.get(source) + codeSlice);
				currentColumn = endColumn;
			} else currentColumn = startColumn;
		}
	}
	// Compress each source's contribution individually and track the sum,
	// so adjustSizes can scale parts against the asset's real compressed size.
	const sourceSizes = new Map();
	const contributionsSum = {
		uncompressed: 0,
		gzip: 0,
		brotli: 0
	};
	for (const [source, codeSegment] of contributions) {
		const sizes = getSizes(codeSegment, options);
		contributionsSum.uncompressed += sizes.uncompressed;
		contributionsSum.gzip += sizes.gzip;
		contributionsSum.brotli += sizes.brotli;
		sourceSizes.set(source, sizes);
	}
	return adjustSizes(sourceSizes, assetSizes, contributionsSum, options);
}
/**
 * Measures the byte size of `code` uncompressed and — when the respective
 * options are enabled — after GZIP and Brotli compression.
 */
function getSizes(code, options) {
	const uncompressed = Buffer.byteLength(code);
	const gzip = options.gzip ? gzipSync(code).length : 0;
	const brotli = options.brotli ? brotliCompressSync(code).length : 0;
	return { uncompressed, gzip, brotli };
}
/**
 * Builds the initial contributions map: one empty-string accumulator per
 * non-null source, plus one bucket for code not assigned to any source.
 */
function getContributions(sources) {
	const contributions = new Map();
	for (const source of sources) {
		if (source !== null) contributions.set(source, "");
	}
	contributions.set(UNASSIGNED, "");
	return contributions;
}
/**
* Compression efficiency improves with the size of the file.
*
* However, what we have is the compressed size of the entire bundle (`actual`),
* the sum of all files compressed individually (`sum`) and the compressed
* size of a given file (`content`). The last value is essentially a “worst-case”
* scenario, and the actual size of the file in the bundle is likely to be smaller.
*
* We use this information to estimate the actual size of the file in the bundle
* after compression.
*/
function adjustSizes(sources, asset, sums, options) {
	// Ratio between the asset's real compressed size and the sum of the
	// individually compressed parts — used to scale each part's estimate.
	const gzipDelta = options.gzip ? asset.gzip / sums.gzip : 0;
	const brotliDelta = options.brotli ? asset.brotli / sums.brotli : 0;
	for (const [source, sizes] of sources) {
		const adjusted = {
			uncompressed: sizes.uncompressed,
			gzip: options.gzip ? Math.round(sizes.gzip * gzipDelta) : 0,
			brotli: options.brotli ? Math.round(sizes.brotli * brotliDelta) : 0
		};
		sources.set(source, adjusted);
	}
	return sources;
}
//#endregion
//#region src/report/formats.ts
/**
 * Builds the JSON report: processes every asset with a supported file
 * extension and returns the `inputs` and `outputs` objects with keys
 * sorted alphabetically.
 */
function generateJsonReport(assets, inputs, options) {
	const acceptedExtensions = new Set([".js", ".mjs", ".cjs", ".css"]);
	const outputs = {};
	for (const asset of assets) {
		if (!acceptedExtensions.has(extname(asset))) continue;
		const data = processAsset(asset, inputs, options);
		if (data) outputs[normalizePath(asset)] = data;
	}
	return {
		inputs: sortObjectKeys(inputs),
		outputs: sortObjectKeys(outputs)
	};
}
/**
 * Builds the HTML report by embedding the URI-encoded JSON report data
 * into the bundled `index.html` template shipped next to this module.
 */
function generateHtmlReport(assets, inputs, options) {
	const report = generateJsonReport(assets, inputs, options);
	// The template lives alongside the compiled module file.
	const moduleDirectory = dirname(fileURLToPath(import.meta.url));
	const template = readFileSync(resolve(moduleDirectory, "./index.html"), "utf-8");
	const payload = encodeURIComponent(JSON.stringify(report));
	return template.replace("__REPORT_DATA__", payload);
}
/**
 * Builds the report entry for a single output asset: loads its code and
 * source map, computes per-source byte contributions, and optionally
 * embeds the (stripped-down) source map itself.
 *
 * Returns `undefined` when the asset lacks either code or a source map.
 */
function processAsset(asset, inputs, options) {
	const maybeCodeMap = loadCodeAndMap(asset, options.sourcesPathNormalizer);
	if (!hasCodeAndMap(maybeCodeMap)) return;
	const { code, map } = maybeCodeMap;
	// Detailed mode flattens the map through the maps of bundled
	// dependencies; otherwise only the mappings string is decoded.
	const mapped = options.detailed ? mapSourceMap(map, dirname(asset), inputs) : {
		...map,
		mappings: decode(map.mappings)
	};
	mapped.sources = mapped.sources.map((source) => source && normalizePath(source));
	const assetSizes = getSizes(code, options);
	const bytes = getBytesPerSource(code, mapped, assetSizes, options);
	// Convert the Map of per-source sizes into a plain object keyed by normalized path.
	const outputInputs = Array.from(bytes).reduce((carry, [source, sizes]) => {
		carry[normalizePath(source)] = sizes;
		return carry;
	}, {});
	return {
		...assetSizes,
		inputs: sortObjectKeys(outputInputs),
		// Embedding sources greatly increases report size, so it is opt-in.
		map: options.sources ? {
			version: 3,
			names: [],
			mappings: mapped.mappings,
			sources: mapped.sources,
			sourcesContent: mapped.sourcesContent
		} : undefined
	};
}
/**
 * Guard verifying that both the code and the source map were loaded.
 */
function hasCodeAndMap(result) {
	if (!result) return false;
	return Boolean(result.code) && Boolean(result.map);
}
/**
 * Returns a shallow copy of `object` with its keys inserted in sorted order.
 */
function sortObjectKeys(object) {
	const sorted = {};
	for (const key of Object.keys(object).sort()) {
		sorted[key] = object[key];
	}
	return sorted;
}
//#endregion
//#region src/report/generate.ts
/**
 * Generates the report in the configured format, writes it to disk and —
 * unless disabled via `open: false` — opens it in the default program.
 */
async function generateReportFromAssets(assets, inputs, pluginOptions) {
	const options = normalizeOptions(pluginOptions);
	const serialize = options.format === "html" ? saveHtml : saveJson;
	const report = serialize(assets, inputs, options);
	// Make sure the target directory exists before writing the report.
	const outputDirectory = dirname(options.filename);
	if (!existsSync(outputDirectory)) mkdirSync(outputDirectory, { recursive: true });
	writeFileSync(options.filename, report);
	if (!options.open) return;
	/**
	 * `open` is ESM-only package, so we need to import it
	 * dynamically to make it work in CommonJS environment.
	 */
	const { default: open } = await import("open");
	open(options.filename);
}
/**
 * Serializes the report as an HTML document.
 */
function saveHtml(assets, inputs, options) {
	const html = generateHtmlReport(assets, inputs, options);
	return html;
}
/**
 * Serializes the report as pretty-printed (2-space indented) JSON.
 */
function saveJson(assets, inputs, options) {
	const json = generateJsonReport(assets, inputs, options);
	const serialized = JSON.stringify(json, null, 2);
	return serialized;
}
//#endregion
export { addSourcesToInputs, cjsRegex, esmRegex, generateReportFromAssets, jsRegexp, normalizePath };
//# sourceMappingURL=src.mjs.map
{"version":3,"file":"src.mjs","names":["esmRegex: RegExp","cjsRegex: RegExp","jsRegexp: RegExp","options?: Partial<PluginOptions>","format","defaultOptions: PluginOptions","pathToNormalize: string","options: PluginOptions","map: EncodedSourceMap","dirPath: string","inputs: Record<string, ReportInput>","path: string","format","code: string","map: DecodedSourceMap","assetSizes: Sizes","options: PluginOptions","mapping: SourceMapSegment | undefined","contributionsSum: Sizes","sources: Array<string | null>","sources: Map<string, Sizes>","asset: Sizes","sums: Sizes","assets: Array<string>","inputs: Record<string, ReportInput>","options: PluginOptions","asset: string","result: MaybeCodeMap","object: Record<string, T>","assets: string[]","inputs: JsonReport[ 'inputs' ]","pluginOptions: Partial<PluginOptions>","options: PluginOptions"],"sources":["../../load-source-map/dist/index.mjs","../src/utils.ts","../src/sourcemap/map.ts","../src/sourcemap/bytes.ts","../src/report/formats.ts","../src/report/generate.ts"],"sourcesContent":["import { existsSync, readFileSync, statSync } from \"fs\";\nimport { dirname, isAbsolute, join, resolve } from \"path\";\n\n//#region src/index.ts\n/**\n* Strip any JSON XSSI avoidance prefix from the string (as documented in the source maps specification),\n* and parses the string as JSON.\n*\n* https://github.com/mozilla/source-map/blob/3cb92cc3b73bfab27c146bae4ef2bc09dbb4e5ed/lib/util.js#L162-L164\n*/\nfunction parseSourceMapInput(str) {\n\treturn JSON.parse(str.replace(/^\\)]}'[^\\n]*\\n/, \"\"));\n}\n/**\nsourceMappingURL=data:application/json;charset=utf-8;base64,data\nsourceMappingURL=data:application/json;base64,data\nsourceMappingURL=data:application/json;uri,data\nsourceMappingURL=map-file-comment.css.map\nsourceMappingURL=map-file-comment.css.map?query=value\n*/\nconst sourceMappingRegExp = /[@#]\\s*sourceMappingURL=(\\S+)\\b/g;\n/**\n* Checks if the given path is a file.\n*/\nfunction isFile(path) {\n\ttry {\n\t\treturn 
statSync(path).isFile();\n\t} catch {\n\t\treturn false;\n\t}\n}\nfunction loadCodeAndMap(codePath, sourcesPathNormalizer) {\n\tif (!isFile(codePath)) return null;\n\tconst code = readFileSync(codePath, \"utf-8\");\n\tconst extractedComment = code.includes(\"sourceMappingURL\") && Array.from(code.matchAll(sourceMappingRegExp)).at(-1);\n\tif (!extractedComment || !extractedComment.length) return { code };\n\tconst maybeMap = loadMap(codePath, extractedComment[1]);\n\tif (!maybeMap) return { code };\n\tconst { map, mapPath } = maybeMap;\n\tconst mapDir = dirname(mapPath);\n\tsourcesPathNormalizer ??= (path) => isAbsolute(path) ? path : resolve(mapDir, map.sourceRoot ?? \".\", path);\n\tmap.sources = normalizeSourcesPaths(map, sourcesPathNormalizer);\n\tmap.sourcesContent = loadMissingSourcesContent(map);\n\tdelete map.sourceRoot;\n\treturn {\n\t\tcode,\n\t\tmap\n\t};\n}\nfunction loadMap(codePath, sourceMappingURL) {\n\tif (sourceMappingURL.startsWith(\"data:\")) {\n\t\tconst map = parseDataUrl(sourceMappingURL);\n\t\treturn {\n\t\t\tmap: parseSourceMapInput(map),\n\t\t\tmapPath: codePath\n\t\t};\n\t}\n\tconst sourceMapFilename = new URL(sourceMappingURL, \"file://\").pathname;\n\tconst mapPath = join(dirname(codePath), sourceMapFilename);\n\tif (!existsSync(mapPath)) return null;\n\treturn {\n\t\tmap: parseSourceMapInput(readFileSync(mapPath, \"utf-8\")),\n\t\tmapPath\n\t};\n}\nfunction parseDataUrl(url) {\n\tconst [prefix, payload] = url.split(\",\");\n\tconst encoding = prefix.split(\";\").at(-1);\n\tswitch (encoding) {\n\t\tcase \"base64\": return Buffer.from(payload, \"base64\").toString();\n\t\tcase \"uri\": return decodeURIComponent(payload);\n\t\tdefault: throw new Error(\"Unsupported source map encoding: \" + encoding);\n\t}\n}\n/**\n* Normalize the paths of the sources in the source map to be absolute paths.\n*/\nfunction normalizeSourcesPaths(map, sourcesPathNormalizer) {\n\treturn map.sources.map((source) => source ? 
sourcesPathNormalizer(source) : null);\n}\n/**\n* Loop through the sources and try to load missing `sourcesContent` from the file system.\n*/\nfunction loadMissingSourcesContent(map) {\n\treturn map.sources.map((source, index) => {\n\t\tif (map.sourcesContent?.[index]) return map.sourcesContent[index];\n\t\tif (source && existsSync(source)) return readFileSync(source, \"utf-8\");\n\t\treturn null;\n\t});\n}\n\n//#endregion\nexport { loadCodeAndMap };\n//# sourceMappingURL=index.mjs.map","import { join, relative, win32, posix, extname, isAbsolute, format, parse } from 'path';\nimport type { PluginOptions } from './types.js';\n\nexport const esmRegex: RegExp = /\\.m[tj]sx?$/;\nexport const cjsRegex: RegExp = /\\.c[tj]sx?$/;\nexport const jsRegexp: RegExp = /\\.[cm]?[tj]s[x]?$/;\n\nexport function normalizeOptions( options?: Partial<PluginOptions> ): PluginOptions {\n\tconst format = options?.format\n\t\t|| options?.filename?.split( '.' ).at( -1 ) as PluginOptions['format']\n\t\t|| 'html';\n\n\tconst defaultOptions: PluginOptions = {\n\t\tenabled: true,\n\t\tformat,\n\t\tfilename: 'sonda-report.' 
+ format,\n\t\topen: true,\n\t\tdetailed: false,\n\t\tsources: false,\n\t\tgzip: false,\n\t\tbrotli: false, \n\t\tsourcesPathNormalizer: null,\n\t};\n\n\t// Merge user options with the defaults\n\tconst normalizedOptions = Object.assign( {}, defaultOptions, options ) satisfies PluginOptions;\n\n\tnormalizedOptions.filename = normalizeOutputPath( normalizedOptions );\n\n\treturn normalizedOptions;\n}\n\nexport function normalizePath( pathToNormalize: string ): string {\n\t// Unicode escape sequences used by Rollup and Vite to identify virtual modules\n\tconst normalized = pathToNormalize.replace( /^\\0/, '' )\n\n\t// Transform absolute paths to relative paths\n\tconst relativized = relative( process.cwd(), normalized );\n\n\t// Ensure paths are POSIX-compliant - https://stackoverflow.com/a/63251716/4617687\n\treturn relativized.replaceAll( win32.sep, posix.sep );\n}\n\nfunction normalizeOutputPath( options: PluginOptions ): string {\n\tlet path = options.filename;\n\tconst expectedExtension = '.' + options.format;\n\n\t// Ensure the filename is an absolute path\n\tif ( !isAbsolute( path ) ) {\n\t\tpath = join( process.cwd(), path );\n\t}\n\n\t// Ensure that the `filename` extension matches the `format` option\n\tif ( expectedExtension !== extname( path ) ) {\n\t\tconsole.warn(\n\t\t\t'\\x1b[0;33m' + // Make the message yellow\n\t\t\t`Sonda: The file extension specified in the 'filename' does not match the 'format' option. 
` +\n\t\t\t`The extension will be changed to '${ expectedExtension }'.`\n\t\t);\n\n\t\tpath = format( { ...parse( path ), base: '', ext: expectedExtension } )\n\t}\n\n\treturn path;\n}\n","import { default as remapping, type DecodedSourceMap, type EncodedSourceMap } from '@ampproject/remapping';\nimport { loadCodeAndMap } from 'load-source-map';\nimport { resolve } from 'path';\nimport { normalizePath } from '../utils.js';\nimport type { CodeMap, ReportInput } from '../types.js';\n\nexport function mapSourceMap(\n\tmap: EncodedSourceMap,\n\tdirPath: string,\n\tinputs: Record<string, ReportInput>\n): DecodedSourceMap {\n\tconst alreadyRemapped = new Set<string>();\n\tconst remapped = remapping( map, ( file, ctx ) => {\n\t\tif ( alreadyRemapped.has( file ) ) {\n\t\t\treturn;\n\t\t}\n\n\t\talreadyRemapped.add( file );\n\n\t\tconst codeMap = addSourcesToInputs(\n\t\t\tresolve( dirPath, file ),\n\t\t\tinputs\n\t\t);\n\n\t\tif ( !codeMap ) {\n\t\t\treturn;\n\t\t}\n\n\t\tctx.content ??= codeMap.code;\n\n\t\treturn codeMap.map;\n\t}, { decodedMappings: true } );\n\n\treturn remapped as DecodedSourceMap;\n}\n\n/**\n * Loads the source map of a given file and adds its \"sources\" to the given inputs object.\n */\nexport function addSourcesToInputs(\n\tpath: string,\n\tinputs: Record<string, ReportInput>\n): CodeMap | null {\n\tconst codeMap = loadCodeAndMap( path );\n\n\tif ( !codeMap ) {\n\t\treturn null;\n\t}\n\n\tconst parentPath = normalizePath( path );\n\tconst format = inputs[ parentPath ]?.format ?? 'unknown';\n\n\tcodeMap.map?.sources\n\t\t.filter( source => source !== null )\n\t\t.forEach( ( source, index ) => {\n\t\t\tconst normalizedPath = normalizePath( source );\n\n\t\t\tif ( parentPath === normalizedPath ) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tinputs[ normalizedPath ] = {\n\t\t\t\tbytes: Buffer.byteLength( codeMap.map!.sourcesContent?.[ index ] ?? 
'' ),\n\t\t\t\tformat,\n\t\t\t\timports: [],\n\t\t\t\tbelongsTo: parentPath\n\t\t\t};\n\t\t} );\n\t\n\treturn codeMap;\n}\n","import { gzipSync, brotliCompressSync } from 'zlib';\nimport type { DecodedSourceMap, SourceMapSegment } from '@ampproject/remapping';\nimport type { PluginOptions, Sizes } from '../types.js';\n\nconst UNASSIGNED = '[unassigned]';\n\nexport function getBytesPerSource(\n\tcode: string,\n\tmap: DecodedSourceMap,\n\tassetSizes: Sizes,\n\toptions: PluginOptions\n): Map<string, Sizes> {\n\tconst contributions = getContributions( map.sources );\n\n\t// Split the code into lines\n\tconst codeLines = code.split( /(?<=\\r?\\n)/ );\n\n\tfor ( let lineIndex = 0; lineIndex < codeLines.length; lineIndex++ ) {\n\t\tconst lineCode = codeLines[ lineIndex ];\n\t\tconst mappings = map.mappings[ lineIndex ] || [];\n\t\tlet currentColumn = 0;\n\n\t\tfor ( let i = 0; i <= mappings.length; i++ ) {\n\t\t\t// 0: generatedColumn\n\t\t\t// 1: sourceIndex\n\t\t\t// 2: originalLine\n\t\t\t// 3: originalColumn\n\t\t\t// 4: nameIndex\n\n\t\t\tconst mapping: SourceMapSegment | undefined = mappings[ i ];\n\t\t\tconst startColumn = mapping?.[ 0 ] ?? lineCode.length;\n\t\t\tconst endColumn = mappings[ i + 1 ]?.[ 0 ] ?? 
lineCode.length;\n\n\t\t\t// Slice the code from currentColumn to startColumn for unassigned code\n\t\t\tif ( startColumn > currentColumn ) {\n\t\t\t\tcontributions.set( UNASSIGNED, contributions.get( UNASSIGNED ) + lineCode.slice( currentColumn, startColumn ) );\n\t\t\t}\n\n\t\t\tif ( mapping ) {\n\t\t\t\t// Slice the code from startColumn to endColumn for assigned code\n\t\t\t\tconst sourceIndex = mapping?.[ 1 ];\n\t\t\t\tconst codeSlice = lineCode.slice( startColumn, endColumn );\n\t\t\t\tconst source = sourceIndex !== undefined && map.sources[ sourceIndex ] || UNASSIGNED;\n\n\t\t\t\tcontributions.set( source, contributions.get( source ) + codeSlice );\n\t\t\t\tcurrentColumn = endColumn;\n\t\t\t} else {\n\t\t\t\tcurrentColumn = startColumn;\n\t\t\t}\n\t\t}\n\t}\n\n\t// Compute sizes for each source\n\tconst sourceSizes = new Map<string, Sizes>();\n\n\tconst contributionsSum: Sizes = {\n\t\tuncompressed: 0,\n\t\tgzip: 0,\n\t\tbrotli: 0\n\t};\n\n\tfor ( const [ source, codeSegment ] of contributions ) {\n\t\tconst sizes = getSizes( codeSegment, options );\n\n\t\tcontributionsSum.uncompressed += sizes.uncompressed;\n\t\tcontributionsSum.gzip += sizes.gzip;\n\t\tcontributionsSum.brotli += sizes.brotli;\n\n\t\tsourceSizes.set( source, sizes );\n\t}\n\n\treturn adjustSizes( sourceSizes, assetSizes, contributionsSum, options );\n}\n\nexport function getSizes(\n\tcode: string,\n\toptions: PluginOptions\n): Sizes {\n\treturn {\n\t\tuncompressed: Buffer.byteLength( code ),\n\t\tgzip: options.gzip ? gzipSync( code ).length : 0,\n\t\tbrotli: options.brotli ? 
brotliCompressSync( code ).length : 0\n\t};\n}\n\nfunction getContributions( sources: Array<string | null> ): Map<string, string> {\n\tconst contributions = new Map<string, string>();\n\n\t// Populate contributions with sources\n\tsources\n\t\t.filter( source => source !== null )\n\t\t.forEach( source => contributions.set( source, '' ) );\n\n\t// Add entry for the code that is not assigned to any source\n\tcontributions.set( UNASSIGNED, '' );\n\n\treturn contributions;\n}\n\n/**\n * Compression efficiency improves with the size of the file.\n *\n * However, what we have is the compressed size of the entire bundle (`actual`),\n * the sum of all files compressed individually (`sum`) and the compressed\n * size of a given file (`content`). The last value is essentially a “worst-case”\n * scenario, and the actual size of the file in the bundle is likely to be smaller.\n *\n * We use this information to estimate the actual size of the file in the bundle\n * after compression.\n */\nfunction adjustSizes(\n\tsources: Map<string, Sizes>,\n\tasset: Sizes,\n\tsums: Sizes,\n\toptions: PluginOptions\n): Map<string, Sizes> {\n\tconst gzipDelta = options.gzip ? asset.gzip / sums.gzip : 0;\n\tconst brotliDelta = options.brotli ? asset.brotli / sums.brotli : 0;\n\n\tfor ( const [ source, sizes ] of sources ) {\n\t\tsources.set( source, {\n\t\t\tuncompressed: sizes.uncompressed,\n\t\t\tgzip: options.gzip ? Math.round( sizes.gzip * gzipDelta ) : 0,\n\t\t\tbrotli: options.brotli ? 
Math.round( sizes.brotli * brotliDelta ) : 0\n\t\t} );\n\t}\n\n\treturn sources;\n}\n","import { readFileSync } from 'fs';\nimport { fileURLToPath } from 'url';\nimport { dirname, extname, resolve } from 'path';\nimport { loadCodeAndMap } from 'load-source-map';\nimport { decode } from '@jridgewell/sourcemap-codec';\nimport { mapSourceMap } from '../sourcemap/map.js';\nimport { getBytesPerSource, getSizes } from '../sourcemap/bytes.js';\nimport type {\n JsonReport,\n MaybeCodeMap,\n ReportInput,\n ReportOutput,\n CodeMap,\n ReportOutputInput,\n PluginOptions\n} from '../types.js';\nimport { normalizePath } from '../utils.js';\n\nexport function generateJsonReport(\n assets: Array<string>,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): JsonReport {\n const acceptedExtensions = [ '.js', '.mjs', '.cjs', '.css' ];\n\n const outputs = assets\n .filter( asset => acceptedExtensions.includes( extname( asset ) ) )\n .reduce( ( carry, asset ) => {\n const data = processAsset( asset, inputs, options );\n\n if ( data ) {\n carry[ normalizePath( asset ) ] = data;\n }\n\n return carry;\n }, {} as Record<string, ReportOutput> );\n\n return {\n inputs: sortObjectKeys( inputs ),\n outputs: sortObjectKeys( outputs )\n };\n}\n\nexport function generateHtmlReport(\n assets: Array<string>,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): string {\n const json = generateJsonReport( assets, inputs, options );\n // Replace with `import.meta.dirname` after upgrading to Node 20\n const __dirname = dirname( fileURLToPath( import.meta.url ) );\n const template = readFileSync( resolve( __dirname, './index.html' ), 'utf-8' );\n\n return template.replace( '__REPORT_DATA__', encodeURIComponent( JSON.stringify( json ) ) );\n}\n\nfunction processAsset(\n asset: string,\n inputs: Record<string, ReportInput>,\n options: PluginOptions\n): ReportOutput | void {\n const maybeCodeMap = loadCodeAndMap( asset, options.sourcesPathNormalizer );\n\n if ( !hasCodeAndMap( 
maybeCodeMap ) ) {\n return;\n }\n\n const { code, map } = maybeCodeMap;\n const mapped = options.detailed\n ? mapSourceMap( map, dirname( asset ), inputs )\n : { ...map, mappings: decode( map.mappings ) };\n\n mapped.sources = mapped.sources.map( source => source && normalizePath( source ) );\n\n const assetSizes = getSizes( code, options );\n const bytes = getBytesPerSource( code, mapped, assetSizes, options );\n const outputInputs = Array\n .from( bytes )\n .reduce( ( carry, [ source, sizes ] ) => {\n carry[ normalizePath( source ) ] = sizes;\n\n return carry;\n }, {} as Record<string, ReportOutputInput> );\n\n return {\n ...assetSizes,\n inputs: sortObjectKeys( outputInputs ),\n map: options.sources ? {\n version: 3,\n names: [],\n mappings: mapped.mappings,\n sources: mapped.sources,\n sourcesContent: mapped.sourcesContent,\n } : undefined\n };\n}\n\nfunction hasCodeAndMap( result: MaybeCodeMap ): result is Required<CodeMap> {\n return Boolean( result && result.code && result.map );\n}\n\nfunction sortObjectKeys<T extends unknown>( object: Record<string, T> ): Record<string, T> {\n return Object\n .keys( object )\n .sort()\n .reduce( ( carry, key ) => {\n carry[ key ] = object[ key ];\n\n return carry;\n }, {} as Record<string, T> );\n} \n","import { dirname } from 'path';\nimport { existsSync, mkdirSync, writeFileSync } from 'fs';\nimport { generateHtmlReport, generateJsonReport } from './formats.js';\nimport type { PluginOptions, JsonReport } from '../types.js';\nimport { normalizeOptions } from '../utils.js';\n\nexport async function generateReportFromAssets(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\tpluginOptions: Partial<PluginOptions>\n): Promise<void> {\n\tconst options = normalizeOptions( pluginOptions );\n\tconst handler = options.format === 'html' ? 
saveHtml : saveJson;\n\tconst report = handler( assets, inputs, options );\n\tconst outputDirectory = dirname( options.filename );\n\n\t// Ensure the output directory exists\n\tif ( !existsSync( outputDirectory ) ) {\n\t\tmkdirSync( outputDirectory, { recursive: true } );\n\t}\n\n\t// Write the report to the file system\n\twriteFileSync( options.filename, report );\n\n\tif ( !options.open ) {\n\t\treturn;\n\t}\n\n\t/**\n\t * `open` is ESM-only package, so we need to import it\n\t * dynamically to make it work in CommonJS environment.\n\t */\n\tconst { default: open } = await import( 'open' );\n\n\t// Open the report in the default program for the file extension\n\topen( options.filename );\n}\n\nfunction saveHtml(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\toptions: PluginOptions\n): string {\n\treturn generateHtmlReport( assets, inputs, options );\n}\n\nfunction saveJson(\n\tassets: string[],\n\tinputs: JsonReport[ 'inputs' ],\n\toptions: PluginOptions\n): string {\n\tconst report = generateJsonReport( assets, inputs, options );\n\n\treturn JSON.stringify( report, null, 2 
);\n}\n"],"mappings":";;;;;;;;;;;;;;AAUA,SAAS,oBAAoB,KAAK;AACjC,QAAO,KAAK,MAAM,IAAI,QAAQ,kBAAkB,GAAG,CAAC;AACpD;;;;;;;;AAQD,MAAM,sBAAsB;;;;AAI5B,SAAS,OAAO,MAAM;AACrB,KAAI;AACH,SAAO,SAAS,KAAK,CAAC,QAAQ;CAC9B,QAAO;AACP,SAAO;CACP;AACD;AACD,SAAS,eAAe,UAAU,uBAAuB;AACxD,MAAK,OAAO,SAAS,CAAE,QAAO;CAC9B,MAAM,OAAO,aAAa,UAAU,QAAQ;CAC5C,MAAM,mBAAmB,KAAK,SAAS,mBAAmB,IAAI,MAAM,KAAK,KAAK,SAAS,oBAAoB,CAAC,CAAC,GAAG,GAAG;AACnH,MAAK,qBAAqB,iBAAiB,OAAQ,QAAO,EAAE,KAAM;CAClE,MAAM,WAAW,QAAQ,UAAU,iBAAiB,GAAG;AACvD,MAAK,SAAU,QAAO,EAAE,KAAM;CAC9B,MAAM,EAAE,KAAK,SAAS,GAAG;CACzB,MAAM,SAAS,QAAQ,QAAQ;AAC/B,2BAA0B,CAAC,SAAS,WAAW,KAAK,GAAG,OAAO,QAAQ,QAAQ,IAAI,cAAc,KAAK,KAAK;AAC1G,KAAI,UAAU,sBAAsB,KAAK,sBAAsB;AAC/D,KAAI,iBAAiB,0BAA0B,IAAI;AACnD,QAAO,IAAI;AACX,QAAO;EACN;EACA;CACA;AACD;AACD,SAAS,QAAQ,UAAU,kBAAkB;AAC5C,KAAI,iBAAiB,WAAW,QAAQ,EAAE;EACzC,MAAM,MAAM,aAAa,iBAAiB;AAC1C,SAAO;GACN,KAAK,oBAAoB,IAAI;GAC7B,SAAS;EACT;CACD;CACD,MAAM,oBAAoB,IAAI,IAAI,kBAAkB,WAAW;CAC/D,MAAM,UAAU,KAAK,QAAQ,SAAS,EAAE,kBAAkB;AAC1D,MAAK,WAAW,QAAQ,CAAE,QAAO;AACjC,QAAO;EACN,KAAK,oBAAoB,aAAa,SAAS,QAAQ,CAAC;EACxD;CACA;AACD;AACD,SAAS,aAAa,KAAK;CAC1B,MAAM,CAAC,QAAQ,QAAQ,GAAG,IAAI,MAAM,IAAI;CACxC,MAAM,WAAW,OAAO,MAAM,IAAI,CAAC,GAAG,GAAG;AACzC,SAAQ,UAAR;AACC,OAAK,SAAU,QAAO,OAAO,KAAK,SAAS,SAAS,CAAC,UAAU;AAC/D,OAAK,MAAO,QAAO,mBAAmB,QAAQ;AAC9C,UAAS,OAAM,IAAI,MAAM,sCAAsC;CAC/D;AACD;;;;AAID,SAAS,sBAAsB,KAAK,uBAAuB;AAC1D,QAAO,IAAI,QAAQ,IAAI,CAAC,WAAW,SAAS,sBAAsB,OAAO,GAAG,KAAK;AACjF;;;;AAID,SAAS,0BAA0B,KAAK;AACvC,QAAO,IAAI,QAAQ,IAAI,CAAC,QAAQ,UAAU;AACzC,MAAI,IAAI,iBAAiB,OAAQ,QAAO,IAAI,eAAe;AAC3D,MAAI,UAAU,WAAW,OAAO,CAAE,QAAO,aAAa,QAAQ,QAAQ;AACtE,SAAO;CACP,EAAC;AACF;;;;MCtFYA,WAAmB;MACnBC,WAAmB;MACnBC,WAAmB;AAEzB,SAAS,iBAAkBC,SAAkD;CACnF,MAAMC,WAAS,SAAS,UACpB,SAAS,UAAU,MAAO,IAAK,CAAC,GAAI,GAAI,IACxC;CAEJ,MAAMC,iBAAgC;EACrC,SAAS;EACT;EACA,UAAU,kBAAkBD;EAC5B,MAAM;EACN,UAAU;EACV,SAAS;EACT,MAAM;EACN,QAAQ;EACR,uBAAuB;CACvB;CAGD,MAAM,oBAAoB,OAAO,OAAQ,CAAE,GAAE,gBAAgB,QAAS;AAEtE,mBAAkB,WAAW,oBAAqB,kBAAmB;AAErE,QAAO;AACP;AAEM,SAAS,cAAeE,iBAAkC
;CAEhE,MAAM,aAAa,gBAAgB,QAAS,OAAO,GAAI;CAGvD,MAAM,cAAc,SAAU,QAAQ,KAAK,EAAE,WAAY;AAGzD,QAAO,YAAY,WAAY,MAAM,KAAK,MAAM,IAAK;AACrD;AAED,SAAS,oBAAqBC,SAAiC;CAC9D,IAAI,OAAO,QAAQ;CACnB,MAAM,oBAAoB,MAAM,QAAQ;AAGxC,MAAM,WAAY,KAAM,CACvB,QAAO,KAAM,QAAQ,KAAK,EAAE,KAAM;AAInC,KAAK,sBAAsB,QAAS,KAAM,EAAG;AAC5C,UAAQ,KACP,gBACC,+FACA,oCAAqC,kBAAmB,IACzD;AAED,SAAO,OAAQ;GAAE,GAAG,MAAO,KAAM;GAAE,MAAM;GAAI,KAAK;EAAmB,EAAE;CACvE;AAED,QAAO;AACP;;;;AC1DM,SAAS,aACfC,KACAC,SACAC,QACmB;CACnB,MAAM,kBAAkB,IAAI;CAC5B,MAAM,WAAW,UAAW,KAAK,CAAE,MAAM,QAAS;AACjD,MAAK,gBAAgB,IAAK,KAAM,CAC/B;AAGD,kBAAgB,IAAK,KAAM;EAE3B,MAAM,UAAU,mBACf,QAAS,SAAS,KAAM,EACxB,OACA;AAED,OAAM,QACL;AAGD,MAAI,YAAY,QAAQ;AAExB,SAAO,QAAQ;CACf,GAAE,EAAE,iBAAiB,KAAM,EAAE;AAE9B,QAAO;AACP;AAKM,SAAS,mBACfC,MACAD,QACiB;CACjB,MAAM,UAAU,eAAgB,KAAM;AAEtC,MAAM,QACL,QAAO;CAGR,MAAM,aAAa,cAAe,KAAM;CACxC,MAAME,WAAS,OAAQ,aAAc,UAAU;AAE/C,SAAQ,KAAK,QACX,OAAQ,YAAU,WAAW,KAAM,CACnC,QAAS,CAAE,QAAQ,UAAW;EAC9B,MAAM,iBAAiB,cAAe,OAAQ;AAE9C,MAAK,eAAe,eACnB;AAGD,SAAQ,kBAAmB;GAC1B,OAAO,OAAO,WAAY,QAAQ,IAAK,iBAAkB,UAAW,GAAI;GACxE;GACA,SAAS,CAAE;GACX,WAAW;EACX;CACD,EAAE;AAEJ,QAAO;AACP;;;;AClED,MAAM,aAAa;AAEZ,SAAS,kBACfC,MACAC,KACAC,YACAC,SACqB;CACrB,MAAM,gBAAgB,iBAAkB,IAAI,QAAS;CAGrD,MAAM,YAAY,KAAK,MAAO,aAAc;AAE5C,MAAM,IAAI,YAAY,GAAG,YAAY,UAAU,QAAQ,aAAc;EACpE,MAAM,WAAW,UAAW;EAC5B,MAAM,WAAW,IAAI,SAAU,cAAe,CAAE;EAChD,IAAI,gBAAgB;AAEpB,OAAM,IAAI,IAAI,GAAG,KAAK,SAAS,QAAQ,KAAM;GAO5C,MAAMC,UAAwC,SAAU;GACxD,MAAM,cAAc,UAAW,MAAO,SAAS;GAC/C,MAAM,YAAY,SAAU,IAAI,KAAO,MAAO,SAAS;AAGvD,OAAK,cAAc,cAClB,eAAc,IAAK,YAAY,cAAc,IAAK,WAAY,GAAG,SAAS,MAAO,eAAe,YAAa,CAAE;AAGhH,OAAK,SAAU;IAEd,MAAM,cAAc,UAAW;IAC/B,MAAM,YAAY,SAAS,MAAO,aAAa,UAAW;IAC1D,MAAM,SAAS,gBAAgB,aAAa,IAAI,QAAS,gBAAiB;AAE1E,kBAAc,IAAK,QAAQ,cAAc,IAAK,OAAQ,GAAG,UAAW;AACpE,oBAAgB;GAChB,MACA,iBAAgB;EAEjB;CACD;CAGD,MAAM,cAAc,IAAI;CAExB,MAAMC,mBAA0B;EAC/B,cAAc;EACd,MAAM;EACN,QAAQ;CACR;AAED,MAAM,MAAM,CAAE,QAAQ,YAAa,IAAI,eAAgB;EACtD,MAAM,QAAQ,SAAU,aAAa,QAAS;AAE9C,mBAAiB,gBAAgB,MAAM;AACvC,mBAAiB,QAAQ,MAAM;AAC/B,mBAAiB,UAAU,MAAM
;AAEjC,cAAY,IAAK,QAAQ,MAAO;CAChC;AAED,QAAO,YAAa,aAAa,YAAY,kBAAkB,QAAS;AACxE;AAEM,SAAS,SACfL,MACAG,SACQ;AACR,QAAO;EACN,cAAc,OAAO,WAAY,KAAM;EACvC,MAAM,QAAQ,OAAO,SAAU,KAAM,CAAC,SAAS;EAC/C,QAAQ,QAAQ,SAAS,mBAAoB,KAAM,CAAC,SAAS;CAC7D;AACD;AAED,SAAS,iBAAkBG,SAAqD;CAC/E,MAAM,gBAAgB,IAAI;AAG1B,SACE,OAAQ,YAAU,WAAW,KAAM,CACnC,QAAS,YAAU,cAAc,IAAK,QAAQ,GAAI,CAAE;AAGtD,eAAc,IAAK,YAAY,GAAI;AAEnC,QAAO;AACP;;;;;;;;;;;;AAaD,SAAS,YACRC,SACAC,OACAC,MACAN,SACqB;CACrB,MAAM,YAAY,QAAQ,OAAO,MAAM,OAAO,KAAK,OAAO;CAC1D,MAAM,cAAc,QAAQ,SAAS,MAAM,SAAS,KAAK,SAAS;AAElE,MAAM,MAAM,CAAE,QAAQ,MAAO,IAAI,QAChC,SAAQ,IAAK,QAAQ;EACpB,cAAc,MAAM;EACpB,MAAM,QAAQ,OAAO,KAAK,MAAO,MAAM,OAAO,UAAW,GAAG;EAC5D,QAAQ,QAAQ,SAAS,KAAK,MAAO,MAAM,SAAS,YAAa,GAAG;CACpE,EAAE;AAGJ,QAAO;AACP;;;;AC9GM,SAAS,mBACdO,QACAC,QACAC,SACY;CACZ,MAAM,qBAAqB;EAAE;EAAO;EAAQ;EAAQ;CAAQ;CAE5D,MAAM,UAAU,OACb,OAAQ,WAAS,mBAAmB,SAAU,QAAS,MAAO,CAAE,CAAE,CAClE,OAAQ,CAAE,OAAO,UAAW;EAC3B,MAAM,OAAO,aAAc,OAAO,QAAQ,QAAS;AAEnD,MAAK,KACH,OAAO,cAAe,MAAO,IAAK;AAGpC,SAAO;CACR,GAAE,CAAE,EAAkC;AAEzC,QAAO;EACL,QAAQ,eAAgB,OAAQ;EAChC,SAAS,eAAgB,QAAS;CACnC;AACF;AAEM,SAAS,mBACdF,QACAC,QACAC,SACQ;CACR,MAAM,OAAO,mBAAoB,QAAQ,QAAQ,QAAS;CAE1D,MAAM,YAAY,QAAS,cAAe,OAAO,KAAK,IAAK,CAAE;CAC7D,MAAM,WAAW,aAAc,QAAS,WAAW,eAAgB,EAAE,QAAS;AAE9E,QAAO,SAAS,QAAS,mBAAmB,mBAAoB,KAAK,UAAW,KAAM,CAAE,CAAE;AAC3F;AAED,SAAS,aACPC,OACAF,QACAC,SACqB;CACrB,MAAM,eAAe,eAAgB,OAAO,QAAQ,sBAAuB;AAE3E,MAAM,cAAe,aAAc,CACjC;CAGF,MAAM,EAAE,MAAM,KAAK,GAAG;CACtB,MAAM,SAAS,QAAQ,WACnB,aAAc,KAAK,QAAS,MAAO,EAAE,OAAQ,GAC7C;EAAE,GAAG;EAAK,UAAU,OAAQ,IAAI,SAAU;CAAE;AAEhD,QAAO,UAAU,OAAO,QAAQ,IAAK,YAAU,UAAU,cAAe,OAAQ,CAAE;CAElF,MAAM,aAAa,SAAU,MAAM,QAAS;CAC5C,MAAM,QAAQ,kBAAmB,MAAM,QAAQ,YAAY,QAAS;CACpE,MAAM,eAAe,MAClB,KAAM,MAAO,CACb,OAAQ,CAAE,OAAO,CAAE,QAAQ,MAAO,KAAM;AACvC,QAAO,cAAe,OAAQ,IAAK;AAEnC,SAAO;CACR,GAAE,CAAE,EAAuC;AAE9C,QAAO;EACL,GAAG;EACH,QAAQ,eAAgB,aAAc;EACtC,KAAK,QAAQ,UAAU;GACrB,SAAS;GACT,OAAO,CAAE;GACT,UAAU,OAAO;GACjB,SAAS,OAAO;GAChB,gBAAgB,OAAO;EACxB,IAAG;CACL;AACF;AAED,SAAS,cAAeE,QAAoD;AAC1E,QAAO,QAAS,UAAU
,OAAO,QAAQ,OAAO,IAAK;AACtD;AAED,SAAS,eAAmCC,QAA+C;AACzF,QAAO,OACJ,KAAM,OAAQ,CACd,MAAM,CACN,OAAQ,CAAE,OAAO,QAAS;AACzB,QAAO,OAAQ,OAAQ;AAEvB,SAAO;CACR,GAAE,CAAE,EAAuB;AAC/B;;;;ACxGM,eAAe,yBACrBC,QACAC,QACAC,eACgB;CAChB,MAAM,UAAU,iBAAkB,cAAe;CACjD,MAAM,UAAU,QAAQ,WAAW,SAAS,WAAW;CACvD,MAAM,SAAS,QAAS,QAAQ,QAAQ,QAAS;CACjD,MAAM,kBAAkB,QAAS,QAAQ,SAAU;AAGnD,MAAM,WAAY,gBAAiB,CAClC,WAAW,iBAAiB,EAAE,WAAW,KAAM,EAAE;AAIlD,eAAe,QAAQ,UAAU,OAAQ;AAEzC,MAAM,QAAQ,KACb;;;;;CAOD,MAAM,EAAE,SAAS,MAAM,GAAG,MAAM,OAAQ;AAGxC,MAAM,QAAQ,SAAU;AACxB;AAED,SAAS,SACRF,QACAC,QACAE,SACS;AACT,QAAO,mBAAoB,QAAQ,QAAQ,QAAS;AACpD;AAED,SAAS,SACRH,QACAC,QACAE,SACS;CACT,MAAM,SAAS,mBAAoB,QAAQ,QAAQ,QAAS;AAE5D,QAAO,KAAK,UAAW,QAAQ,MAAM,EAAG;AACxC"}
import type { DecodedSourceMap, EncodedSourceMap } from '@ampproject/remapping';
/**
 * Public options accepted by every Sonda integration (webpack, Angular, …).
 * All fields are required here; the consuming integrations accept a
 * `Partial<UserOptions>` and fill in the documented defaults.
 */
export interface UserOptions {
/**
* Specifies whether the plugin is enabled.
*
* @default true
*/
enabled: boolean;
/**
* Specifies the output format of the report.
*
* @default 'html'
*/
format: 'html' | 'json';
/**
* Specifies the path of the generated report. This can be a filename, a relative path,
* or an absolute path. By default, the report is saved in the current working directory.
*
* @default 'sonda-report.html' or 'sonda-report.json' depending on the `format` option
*/
filename: string;
/**
* Specifies whether to automatically open the report in the default program for
* the given file extension (`.html` or `.json`, depending on the `format` option)
* after the build process.
*
* @default true
*/
open: boolean;
/**
* Specifies whether to read the source maps of imported modules.
*
* By default, external dependencies bundled into a single file appear as a single
* asset in the report. When this option is enabled, the report includes the source
* files of imported modules, if source maps are available.
*
* Enabling this option may increase the time needed to generate the report and reduce
* the accuracy of estimated GZIP and Brotli sizes for individual files.
*
* @default false
*/
detailed: boolean;
/**
* Specifies whether to include source maps of the assets in the report to visualize
* which parts of the code contribute to the final asset size.
*
* ⚠️ This option significantly increases the size of the report and embeds the
* **source code** of the assets. If you are working with proprietary code, ensure
* you share the report responsibly. ⚠️
*
* @default false
*/
sources: boolean;
/**
* Specifies whether to calculate the sizes of assets after compression with GZIP.
*
* The report includes estimated compressed sizes for each file within an asset.
* However, these estimates are approximate and should be used as a general reference.
*
* Enabling this option may increase the time required to generate the report.
*
* @default false
*/
gzip: boolean;
/**
* Specifies whether to calculate the sizes of assets after compression with Brotli.
*
* The report includes estimated compressed sizes for each file within an asset.
* However, these estimates are approximate and should be used as a general reference.
*
* Enabling this option may increase the time required to generate the report.
*
* @default false
*/
brotli: boolean;
}
/**
 * User options for framework-level integrations (e.g. Angular), whose builds
 * can produce both a browser bundle and a server bundle.
 */
export interface FrameworkUserOptions extends UserOptions {
/**
* Specifies whether the plugin generates a report for the server build.
*
* @default false
*/
server: boolean;
}
/**
 * Options used internally by the bundler plugins; extends the public user
 * options with hooks that are not part of the user-facing configuration.
 */
export interface PluginOptions extends UserOptions {
/**
* Maps a source path before it is stored in the report (e.g. the Angular
* integration resolves paths against the current working directory);
* `null` leaves paths untouched.
*/
sourcesPathNormalizer: ((path: string) => string) | null;
}
/** Entry describing a single input (source module) of the report. */
export interface ReportInput {
bytes: number;
format: ModuleFormat;
/** Normalized paths of the modules that import this one. */
imports: Array<string>;
/* NOTE(review): semantics of `belongsTo` are not visible in this file — confirm in the report generator. */
belongsTo: string | null;
}
/** Entry describing a single output asset: its sizes plus the inputs that contributed to it. */
export interface ReportOutput extends Sizes {
inputs: Record<string, ReportOutputInput>;
map?: DecodedSourceMap;
}
/** Size breakdown of one input's contribution to an output asset. */
export interface ReportOutputInput extends Sizes {
}
/** Top-level shape of the generated JSON report. */
export interface JsonReport {
inputs: Record<string, ReportInput>;
outputs: Record<string, ReportOutput>;
}
/** Byte sizes of an artifact: raw and (estimated) after GZIP / Brotli compression. */
export interface Sizes {
uncompressed: number;
gzip: number;
brotli: number;
}
/** A piece of code together with its optional encoded source map. */
export interface CodeMap {
code: string;
map?: EncodedSourceMap;
}
/** Module system detected for an input file. */
export type ModuleFormat = 'esm' | 'cjs' | 'unknown';
/** Result of a code lookup that may fail. */
export type MaybeCodeMap = CodeMap | null;
import type { PluginOptions } from './types.js';
/* NOTE(review): patterns of esmRegex/cjsRegex are defined in the implementation — confirm there. */
export declare const esmRegex: RegExp;
export declare const cjsRegex: RegExp;
/** Matches JavaScript-like file names; the webpack plugin uses it to mark non-JS modules as 'unknown' format. */
export declare const jsRegexp: RegExp;
/** Completes a partial options object into a full `PluginOptions` — presumably by applying defaults; verify in the implementation. */
export declare function normalizeOptions(options?: Partial<PluginOptions>): PluginOptions;
/** Normalizes a file system path; used as the canonical key for report inputs. */
export declare function normalizePath(pathToNormalize: string): string;
"use strict";
const require_src = require('./src.cjs');
const path = require_src.__toESM(require("path"));
//#region src/entrypoints/webpack.ts
var SondaWebpackPlugin = class {
	/** Partial user options; unspecified values are defaulted downstream. */
	options;

	constructor(options = {}) {
		this.options = options;
	}

	/**
	 * Webpack plugin entry point: registers an `afterEmit` hook that collects
	 * module statistics and forwards them to the Sonda report generator.
	 */
	apply(compiler) {
		// Explicit opt-out disables the whole plugin.
		if (this.options.enabled === false) return;
		// Emit absolute module paths in source maps so report inputs can be
		// matched against files on disk.
		compiler.options.output.devtoolModuleFilenameTemplate = "[absolute-resource-path]";
		compiler.hooks.afterEmit.tapPromise("SondaWebpackPlugin", (compilation) => {
			const stats = compilation.getStats().toJson({
				modules: true,
				providedExports: true
			});
			const outputPath = stats.outputPath || compiler.outputPath;
			// Flatten concatenated modules, keep only on-disk non-generated ones,
			// then de-duplicate by resource path (first occurrence wins).
			const flattened = stats.modules?.flatMap((entry) => entry.modules ? [entry, ...entry.modules] : entry) || [];
			const withPath = flattened.filter((entry) => entry.nameForCondition && !entry.codeGenerated);
			const seenPaths = new Set();
			const modules = withPath.filter((entry) => {
				if (seenPaths.has(entry.nameForCondition)) return false;
				seenPaths.add(entry.nameForCondition);
				return true;
			});
			const inputs = {};
			for (const current of modules) {
				// Collect every module that imports `current`.
				const imports = [];
				for (const { nameForCondition, issuerName, reasons } of modules) {
					const importsCurrent = issuerName === current.name || reasons?.some((reason) => reason.resolvedModule === current.name);
					if (importsCurrent) imports.push(require_src.normalizePath(nameForCondition));
				}
				inputs[require_src.normalizePath(current.nameForCondition)] = {
					bytes: current.size || 0,
					format: getFormat(current),
					imports,
					belongsTo: null
				};
			}
			const assetPaths = (stats.assets || []).map((asset) => (0, path.resolve)(outputPath, asset.name));
			return require_src.generateReportFromAssets(assetPaths, inputs, this.options);
		});
	}
};
/**
 * Classifies a webpack stats module as 'esm', 'cjs', or 'unknown'
 * (non-JavaScript files).
 */
function getFormat(statsModule) {
	// Files that do not look like JavaScript have no module format.
	if (!require_src.jsRegexp.test(statsModule.nameForCondition)) return "unknown";
	/**
	 * Sometimes ESM modules have `moduleType` set as `javascript/auto`, so we
	 * also need to check if the module has exports to determine if it's ESM.
	 */
	const looksLikeEsm = statsModule.moduleType === "javascript/esm" || !!statsModule.providedExports?.length;
	return looksLikeEsm ? "esm" : "cjs";
}
//#endregion
// CommonJS interop shim emitted by the bundler: exposes `SondaWebpackPlugin`
// on `exports` as an enumerable getter so the binding stays live.
Object.defineProperty(exports, 'SondaWebpackPlugin', {
enumerable: true,
get: function () {
return SondaWebpackPlugin;
}
});
//# sourceMappingURL=webpack.cjs.map
{"version":3,"file":"webpack.cjs","names":["options: Partial<UserOptions>","compiler: Compiler","inputs: JsonReport[ 'inputs' ]","modules: Array<StatsModule>","module","module: StatsModule"],"sources":["../src/entrypoints/webpack.ts"],"sourcesContent":["import { resolve } from 'path';\nimport {\n\tgenerateReportFromAssets,\n\tjsRegexp,\n\tnormalizePath,\n\ttype JsonReport,\n\ttype ModuleFormat,\n\ttype UserOptions\n} from '../index.js';\nimport type { Compiler, StatsModule } from 'webpack';\n\nexport default class SondaWebpackPlugin {\n\toptions: Partial<UserOptions>;\n\n\tconstructor ( options: Partial<UserOptions> = {} ) {\n\t\tthis.options = options;\n\t}\n\n\tapply( compiler: Compiler ): void {\n\t\tif (this.options.enabled === false ) {\n\t\t\treturn;\n\t\t}\n\n\t\tcompiler.options.output.devtoolModuleFilenameTemplate = '[absolute-resource-path]';\n\n\t\tcompiler.hooks.afterEmit.tapPromise( 'SondaWebpackPlugin', compilation => {\n\t\t\tconst inputs: JsonReport[ 'inputs' ] = {};\n\t\t\tconst stats = compilation.getStats().toJson( {\n\t\t\t\tmodules: true,\n\t\t\t\tprovidedExports: true,\n\t\t\t} );\n\n\t\t\tconst outputPath = stats.outputPath || compiler.outputPath;\n\t\t\tconst modules: Array<StatsModule> = stats.modules\n\t\t\t\t?.flatMap( mod => mod.modules ? [ mod, ...mod.modules ] : mod )\n\t\t\t\t.filter( mod => mod.nameForCondition && !mod.codeGenerated )\n\t\t\t\t.filter( ( mod, index, self ) => self.findIndex( m => m.nameForCondition === mod.nameForCondition ) === index )\n\t\t\t\t|| [];\n\n\t\t\tmodules.forEach( module => {\n\t\t\t\tconst imports = modules.reduce( ( acc, { nameForCondition, issuerName, reasons } ) => {\n\t\t\t\t\tif ( issuerName === module.name || reasons?.some( reason => reason.resolvedModule === module.name ) ) {\n\t\t\t\t\t\tacc.push( normalizePath( nameForCondition! ) );\n\t\t\t\t\t}\n\n\t\t\t\t\treturn acc;\n\t\t\t\t}, [] as Array<string> );\n\n\t\t\t\tinputs[ normalizePath( module.nameForCondition! 
) ] = {\n\t\t\t\t\tbytes: module.size || 0,\n\t\t\t\t\tformat: getFormat( module ),\n\t\t\t\t\timports,\n\t\t\t\t\tbelongsTo: null\n\t\t\t\t};\n\t\t\t} );\n\n\t\t\treturn generateReportFromAssets(\n\t\t\t\tstats.assets?.map( asset => resolve( outputPath, asset.name ) ) || [],\n\t\t\t\tinputs,\n\t\t\t\tthis.options\n\t\t\t);\n\t\t} );\n\t}\n}\n\nfunction getFormat( module: StatsModule ): ModuleFormat {\n\tif ( !jsRegexp.test( module.nameForCondition! ) ) {\n\t\treturn 'unknown';\n\t}\n\n\t/**\n\t * Sometimes ESM modules have `moduleType` set as `javascript/auto`, so we\n\t * also need to check if the module has exports to determine if it's ESM.\n\t */\n\tif ( module.moduleType === 'javascript/esm' || !!module.providedExports?.length ) {\n\t\treturn 'esm';\n\t}\n\n\treturn 'cjs';\n}\n"],"mappings":";;;;;IAWqB,qBAAN,MAAyB;CACvC;CAEA,YAAcA,UAAgC,CAAE,GAAG;AAClD,OAAK,UAAU;CACf;CAED,MAAOC,UAA2B;AACjC,MAAI,KAAK,QAAQ,YAAY,MAC5B;AAGD,WAAS,QAAQ,OAAO,gCAAgC;AAExD,WAAS,MAAM,UAAU,WAAY,sBAAsB,iBAAe;GACzE,MAAMC,SAAiC,CAAE;GACzC,MAAM,QAAQ,YAAY,UAAU,CAAC,OAAQ;IAC5C,SAAS;IACT,iBAAiB;GACjB,EAAE;GAEH,MAAM,aAAa,MAAM,cAAc,SAAS;GAChD,MAAMC,UAA8B,MAAM,SACvC,QAAS,SAAO,IAAI,UAAU,CAAE,KAAK,GAAG,IAAI,OAAS,IAAG,IAAK,CAC9D,OAAQ,SAAO,IAAI,qBAAqB,IAAI,cAAe,CAC3D,OAAQ,CAAE,KAAK,OAAO,SAAU,KAAK,UAAW,OAAK,EAAE,qBAAqB,IAAI,iBAAkB,KAAK,MAAO,IAC5G,CAAE;AAEN,WAAQ,QAAS,cAAU;IAC1B,MAAM,UAAU,QAAQ,OAAQ,CAAE,KAAK,EAAE,kBAAkB,YAAY,SAAS,KAAM;AACrF,SAAK,eAAeC,SAAO,QAAQ,SAAS,KAAM,YAAU,OAAO,mBAAmBA,SAAO,KAAM,CAClG,KAAI,KAAM,0BAAe,iBAAmB,CAAE;AAG/C,YAAO;IACP,GAAE,CAAE,EAAmB;AAExB,WAAQ,0BAAeA,SAAO,iBAAmB,IAAK;KACrD,OAAOA,SAAO,QAAQ;KACtB,QAAQ,UAAWA,SAAQ;KAC3B;KACA,WAAW;IACX;GACD,EAAE;AAEH,UAAO,qCACN,MAAM,QAAQ,IAAK,WAAS,kBAAS,YAAY,MAAM,KAAM,CAAE,IAAI,CAAE,GACrE,QACA,KAAK,QACL;EACD,EAAE;CACH;AACD;AAED,SAAS,UAAWC,UAAoC;AACvD,MAAM,qBAAS,KAAMD,SAAO,iBAAmB,CAC9C,QAAO;;;;;AAOR,KAAKA,SAAO,eAAe,sBAAsBA,SAAO,iBAAiB,OACxE,QAAO;AAGR,QAAO;AACP"}
import { generateReportFromAssets, jsRegexp, normalizePath } from "./src.mjs";
import { resolve } from "path";
//#region src/entrypoints/webpack.ts
var SondaWebpackPlugin = class {
	/** Partial user options; unspecified values are defaulted downstream. */
	options;

	constructor(options = {}) {
		this.options = options;
	}

	/**
	 * Webpack plugin entry point: registers an `afterEmit` hook that collects
	 * module statistics and forwards them to the Sonda report generator.
	 */
	apply(compiler) {
		// Explicit opt-out disables the whole plugin.
		if (this.options.enabled === false) return;
		// Emit absolute module paths in source maps so report inputs can be
		// matched against files on disk.
		compiler.options.output.devtoolModuleFilenameTemplate = "[absolute-resource-path]";
		compiler.hooks.afterEmit.tapPromise("SondaWebpackPlugin", (compilation) => {
			const stats = compilation.getStats().toJson({
				modules: true,
				providedExports: true
			});
			const outputPath = stats.outputPath || compiler.outputPath;
			// Flatten concatenated modules, keep only on-disk non-generated ones,
			// then de-duplicate by resource path (first occurrence wins).
			const flattened = stats.modules?.flatMap((entry) => entry.modules ? [entry, ...entry.modules] : entry) || [];
			const withPath = flattened.filter((entry) => entry.nameForCondition && !entry.codeGenerated);
			const seenPaths = new Set();
			const allModules = withPath.filter((entry) => {
				if (seenPaths.has(entry.nameForCondition)) return false;
				seenPaths.add(entry.nameForCondition);
				return true;
			});
			const inputs = {};
			for (const current of allModules) {
				// Collect every module that imports `current`.
				const imports = [];
				for (const { nameForCondition, issuerName, reasons } of allModules) {
					const importsCurrent = issuerName === current.name || reasons?.some((reason) => reason.resolvedModule === current.name);
					if (importsCurrent) imports.push(normalizePath(nameForCondition));
				}
				inputs[normalizePath(current.nameForCondition)] = {
					bytes: current.size || 0,
					format: getFormat(current),
					imports,
					belongsTo: null
				};
			}
			const assetPaths = (stats.assets || []).map((asset) => resolve(outputPath, asset.name));
			return generateReportFromAssets(assetPaths, inputs, this.options);
		});
	}
};
/**
 * Classifies a webpack stats module as 'esm', 'cjs', or 'unknown'
 * (non-JavaScript files).
 */
function getFormat(statsModule) {
	// Files that do not look like JavaScript have no module format.
	if (!jsRegexp.test(statsModule.nameForCondition)) return "unknown";
	/**
	 * Sometimes ESM modules have `moduleType` set as `javascript/auto`, so we
	 * also need to check if the module has exports to determine if it's ESM.
	 */
	const looksLikeEsm = statsModule.moduleType === "javascript/esm" || !!statsModule.providedExports?.length;
	return looksLikeEsm ? "esm" : "cjs";
}
//#endregion
export { SondaWebpackPlugin };
//# sourceMappingURL=webpack.mjs.map
{"version":3,"file":"webpack.mjs","names":["options: Partial<UserOptions>","compiler: Compiler","inputs: JsonReport[ 'inputs' ]","modules: Array<StatsModule>","module: StatsModule"],"sources":["../src/entrypoints/webpack.ts"],"sourcesContent":["import { resolve } from 'path';\nimport {\n\tgenerateReportFromAssets,\n\tjsRegexp,\n\tnormalizePath,\n\ttype JsonReport,\n\ttype ModuleFormat,\n\ttype UserOptions\n} from '../index.js';\nimport type { Compiler, StatsModule } from 'webpack';\n\nexport default class SondaWebpackPlugin {\n\toptions: Partial<UserOptions>;\n\n\tconstructor ( options: Partial<UserOptions> = {} ) {\n\t\tthis.options = options;\n\t}\n\n\tapply( compiler: Compiler ): void {\n\t\tif (this.options.enabled === false ) {\n\t\t\treturn;\n\t\t}\n\n\t\tcompiler.options.output.devtoolModuleFilenameTemplate = '[absolute-resource-path]';\n\n\t\tcompiler.hooks.afterEmit.tapPromise( 'SondaWebpackPlugin', compilation => {\n\t\t\tconst inputs: JsonReport[ 'inputs' ] = {};\n\t\t\tconst stats = compilation.getStats().toJson( {\n\t\t\t\tmodules: true,\n\t\t\t\tprovidedExports: true,\n\t\t\t} );\n\n\t\t\tconst outputPath = stats.outputPath || compiler.outputPath;\n\t\t\tconst modules: Array<StatsModule> = stats.modules\n\t\t\t\t?.flatMap( mod => mod.modules ? [ mod, ...mod.modules ] : mod )\n\t\t\t\t.filter( mod => mod.nameForCondition && !mod.codeGenerated )\n\t\t\t\t.filter( ( mod, index, self ) => self.findIndex( m => m.nameForCondition === mod.nameForCondition ) === index )\n\t\t\t\t|| [];\n\n\t\t\tmodules.forEach( module => {\n\t\t\t\tconst imports = modules.reduce( ( acc, { nameForCondition, issuerName, reasons } ) => {\n\t\t\t\t\tif ( issuerName === module.name || reasons?.some( reason => reason.resolvedModule === module.name ) ) {\n\t\t\t\t\t\tacc.push( normalizePath( nameForCondition! ) );\n\t\t\t\t\t}\n\n\t\t\t\t\treturn acc;\n\t\t\t\t}, [] as Array<string> );\n\n\t\t\t\tinputs[ normalizePath( module.nameForCondition! 
) ] = {\n\t\t\t\t\tbytes: module.size || 0,\n\t\t\t\t\tformat: getFormat( module ),\n\t\t\t\t\timports,\n\t\t\t\t\tbelongsTo: null\n\t\t\t\t};\n\t\t\t} );\n\n\t\t\treturn generateReportFromAssets(\n\t\t\t\tstats.assets?.map( asset => resolve( outputPath, asset.name ) ) || [],\n\t\t\t\tinputs,\n\t\t\t\tthis.options\n\t\t\t);\n\t\t} );\n\t}\n}\n\nfunction getFormat( module: StatsModule ): ModuleFormat {\n\tif ( !jsRegexp.test( module.nameForCondition! ) ) {\n\t\treturn 'unknown';\n\t}\n\n\t/**\n\t * Sometimes ESM modules have `moduleType` set as `javascript/auto`, so we\n\t * also need to check if the module has exports to determine if it's ESM.\n\t */\n\tif ( module.moduleType === 'javascript/esm' || !!module.providedExports?.length ) {\n\t\treturn 'esm';\n\t}\n\n\treturn 'cjs';\n}\n"],"mappings":";;;;IAWqB,qBAAN,MAAyB;CACvC;CAEA,YAAcA,UAAgC,CAAE,GAAG;AAClD,OAAK,UAAU;CACf;CAED,MAAOC,UAA2B;AACjC,MAAI,KAAK,QAAQ,YAAY,MAC5B;AAGD,WAAS,QAAQ,OAAO,gCAAgC;AAExD,WAAS,MAAM,UAAU,WAAY,sBAAsB,iBAAe;GACzE,MAAMC,SAAiC,CAAE;GACzC,MAAM,QAAQ,YAAY,UAAU,CAAC,OAAQ;IAC5C,SAAS;IACT,iBAAiB;GACjB,EAAE;GAEH,MAAM,aAAa,MAAM,cAAc,SAAS;GAChD,MAAMC,UAA8B,MAAM,SACvC,QAAS,SAAO,IAAI,UAAU,CAAE,KAAK,GAAG,IAAI,OAAS,IAAG,IAAK,CAC9D,OAAQ,SAAO,IAAI,qBAAqB,IAAI,cAAe,CAC3D,OAAQ,CAAE,KAAK,OAAO,SAAU,KAAK,UAAW,OAAK,EAAE,qBAAqB,IAAI,iBAAkB,KAAK,MAAO,IAC5G,CAAE;AAEN,WAAQ,QAAS,YAAU;IAC1B,MAAM,UAAU,QAAQ,OAAQ,CAAE,KAAK,EAAE,kBAAkB,YAAY,SAAS,KAAM;AACrF,SAAK,eAAe,OAAO,QAAQ,SAAS,KAAM,YAAU,OAAO,mBAAmB,OAAO,KAAM,CAClG,KAAI,KAAM,cAAe,iBAAmB,CAAE;AAG/C,YAAO;IACP,GAAE,CAAE,EAAmB;AAExB,WAAQ,cAAe,OAAO,iBAAmB,IAAK;KACrD,OAAO,OAAO,QAAQ;KACtB,QAAQ,UAAW,OAAQ;KAC3B;KACA,WAAW;IACX;GACD,EAAE;AAEH,UAAO,yBACN,MAAM,QAAQ,IAAK,WAAS,QAAS,YAAY,MAAM,KAAM,CAAE,IAAI,CAAE,GACrE,QACA,KAAK,QACL;EACD,EAAE;CACH;AACD;AAED,SAAS,UAAWC,QAAoC;AACvD,MAAM,SAAS,KAAM,OAAO,iBAAmB,CAC9C,QAAO;;;;;AAOR,KAAK,OAAO,eAAe,sBAAsB,OAAO,iBAAiB,OACxE,QAAO;AAGR,QAAO;AACP"}

Sorry, the diff of this file is too big to display