You're Invited: Meet the Socket Team at RSAC and BSidesSF 2026, March 23–26. RSVP
Socket
Book a DemoSign in
Socket

@flakiness/sdk

Package Overview
Dependencies
Maintainers
1
Versions
90
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@flakiness/sdk - npm Package Compare versions

Comparing version
0.148.0
to
0.149.0
+156
lib/_internalUtils.js
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import crypto from "crypto";
import fs from "fs";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
/**
 * Compresses text with Brotli (quality 6, text mode).
 * @param {string} text - UTF-8 text to compress.
 * @returns {Promise<Buffer>} Brotli-compressed bytes.
 */
async function compressTextAsync(text) {
  const brotliOptions = {
    chunkSize: 32 * 1024,
    params: {
      [zlib.constants.BROTLI_PARAM_QUALITY]: 6,
      [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT
    }
  };
  return await asyncBrotliCompress(text, brotliOptions);
}
// Debug flag, captured once at module load: when FLAKINESS_DBG is set in the
// environment, error logging includes full stack traces.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/** Renders an Error for log output: stack trace in debug mode, message otherwise. */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
/**
 * Runs `job`, retrying after each failure with the given backoff delays.
 * Makes `backoff.length + 1` total attempts; intermediate failures are
 * logged, and the final attempt's error propagates to the caller.
 * @param {() => Promise<T>} job - Async operation to attempt.
 * @param {number[]} backoff - Delays in milliseconds between attempts.
 * @returns {Promise<T>} Result of the first successful attempt.
 */
async function retryWithBackoff(job, backoff = []) {
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (failure) {
      if (failure instanceof AggregateError) {
        console.error(`[flakiness.io err]`, errorText(failure.errors[0]));
      } else if (failure instanceof Error) {
        console.error(`[flakiness.io err]`, errorText(failure));
      } else {
        console.error(`[flakiness.io err]`, failure);
      }
      await new Promise((wake) => setTimeout(wake, delayMs));
    }
  }
  return await job();
}
// Minimal HTTP(S) helper namespace built directly on node's http/https
// modules. All helpers resolve with the response body Buffer on a 2xx
// status and reject otherwise.
var httpUtils;
((httpUtils2) => {
// Creates (but does not send) a request. The caller writes any body and
// calls request.end(); responseDataPromise settles once the response ends.
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
// Pick transport by scheme prefix; anything not starting with "https" uses http.
const protocol = url.startsWith("https") ? https : http;
// Drop headers whose value is undefined so they are not serialized as "undefined".
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
// Only 2xx counts as success; everything else rejects with the status code.
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
// Connection-level errors (DNS, refused, reset) also reject the promise.
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
// GET a URL and resolve with the raw response Buffer, retrying per `backoff`.
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
// GET a URL and resolve with the body decoded as UTF-8 text.
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
// GET a URL and resolve with the body parsed as JSON (no retry backoff passed).
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
// POST `text` as application/json, retrying per `backoff`.
// Content-Length uses the UTF-8 byte length, not the string length.
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
// POST `json` (stringified) and resolve with the response parsed as JSON.
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
/**
 * Runs a command synchronously and returns its trimmed stdout.
 * Returns undefined when the process exits non-zero or fails to spawn
 * (the spawn error is logged).
 * @param {string} command - Executable to run.
 * @param {string[]} args - Command-line arguments.
 * @param {object} [options] - Extra spawnSync options (e.g. cwd); merged over utf-8 encoding.
 * @returns {string | undefined} Trimmed stdout, or undefined on failure.
 */
function shell(command, args, options) {
  try {
    const outcome = spawnSync(command, args, { encoding: "utf-8", ...options });
    return outcome.status === 0 ? outcome.stdout.trim() : void 0;
  } catch (spawnError) {
    console.error(spawnError);
    return void 0;
  }
}
/**
 * Computes the SHA-1 digest of a string or Buffer.
 * @param {string | Buffer} data - Data to hash.
 * @returns {string} Lowercase hex-encoded SHA-1 digest.
 */
function sha1Text(data) {
  return crypto.createHash("sha1").update(data).digest("hex");
}
/**
 * Computes the SHA-1 digest of a file by streaming its contents
 * (constant memory, suitable for large files).
 * @param {string} filePath - Path of the file to hash.
 * @returns {Promise<string>} Lowercase hex-encoded SHA-1 digest.
 */
function sha1File(filePath) {
  const hash = crypto.createHash("sha1");
  const stream = fs.createReadStream(filePath);
  return new Promise((resolve, reject) => {
    stream.on("data", (chunk) => hash.update(chunk));
    stream.on("end", () => resolve(hash.digest("hex")));
    stream.on("error", reject);
  });
}
/**
 * Generates a random identifier: a UUIDv4's 128 random bits encoded in
 * base62 (digits, then lowercase, then uppercase).
 * @returns {string} Base62-encoded random id (at most 22 characters).
 */
function randomUUIDBase62() {
  const BASE62_CHARSET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
  let value = BigInt("0x" + crypto.randomUUID().replaceAll("-", ""));
  if (value === 0n)
    return BASE62_CHARSET[0];
  let encoded = "";
  while (value > 0n) {
    encoded = BASE62_CHARSET[Number(value % 62n)] + encoded;
    value /= 62n;
  }
  return encoded;
}
export {
compressTextAsync,
errorText,
httpUtils,
randomUUIDBase62,
retryWithBackoff,
sha1File,
sha1Text,
shell
};
//# sourceMappingURL=_internalUtils.js.map
//# sourceMappingURL=_stable-hash.d.js.map
var __defProp = Object.defineProperty;
// Bundler helper: defines each entry of `all` on `target` as an enumerable
// getter, giving the namespace object live (lazy) bindings.
var __export = (target, all) => {
  for (const exportName of Object.keys(all))
    __defProp(target, exportName, { get: all[exportName], enumerable: true });
};
// src/browser.ts
import { FlakinessReport } from "@flakiness/flakiness-report";
// src/reportUtilsBrowser.ts
// Namespace object re-exported below as `ReportUtils`; populated with
// live getters for the browser-safe report utilities defined in this file.
var reportUtilsBrowser_exports = {};
__export(reportUtilsBrowser_exports, {
normalizeReport: () => normalizeReport,
stripAnsi: () => stripAnsi,
visitTests: () => visitTests
});
// src/normalizeReport.ts
import stableObjectHash from "stable-hash";
// Map from key to a Set of values; insertion-ordered, deduplicating per key.
var Multimap = class {
  _map = new Map();
  /** Adds `value` to the set stored under `key`, creating the set on first use. */
  set(key, value) {
    let bucket = this._map.get(key);
    if (!bucket) {
      bucket = new Set();
      this._map.set(key, bucket);
    }
    bucket.add(value);
  }
  /** Returns all values stored under `key` as an array (empty when absent). */
  getAll(key) {
    const bucket = this._map.get(key);
    return bucket ? [...bucket] : [];
  }
};
// Deduplicates and compacts a Flakiness report. Suites and tests are keyed
// by stable content hashes (computeSuiteId / computeTestId); entries that
// hash identically are merged with their attempts concatenated, and the
// environments array is rewritten to contain only environments referenced
// by at least one attempt (re-indexing attempt.environmentIdx accordingly).
function normalizeReport(report) {
// id -> entity and entity -> id lookup tables built during the visit pass.
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
// Environment ids referenced by at least one attempt; the rest are dropped.
const usedEnvIds = /* @__PURE__ */ new Set();
// Indexes `tests` under `suiteId` and records which environments their
// attempts reference.
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
// Recursively indexes a suite subtree, threading the parent suite id into
// each child's identity hash.
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
// Rebuilds a test list with duplicates merged: one entry per unique test id,
// tags and attempts concatenated across duplicates, and each attempt's
// environmentIdx remapped into the compacted environments array.
// NOTE: reads `envIdToIndex`, which is assigned near the bottom of this
// function — safe because transformTests only runs after that assignment.
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
// Rebuilds a suite list with duplicates merged, recursing into children.
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
// Pass 1: index everything. Top-level tests are grouped under the sentinel
// suite id "suiteless".
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
// Compact the environment list and build the env-id -> new-index mapping
// that transformTests relies on.
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
// Pass 2: emit the merged report.
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
// Stable identity hash of an environment object (stable-hash is
// insensitive to property order).
function computeEnvId(env) {
return stableObjectHash(env);
}
// Stable identity hash of a suite. Only the parent chain, type, file, and
// title participate — location line/column are not part of the hash.
function computeSuiteId(suite, parentSuiteId) {
return stableObjectHash({
parentSuiteId: parentSuiteId ?? "",
type: suite.type,
file: suite.location?.file ?? "",
title: suite.title
});
}
// Stable identity hash of a test: owning suite id, file, and title.
function computeTestId(test, suiteId) {
return stableObjectHash({
suiteId,
file: test.location?.file ?? "",
title: test.title
});
}
// src/stripAnsi.ts
// Matches ANSI/VT escape sequences: BEL-terminated sequences (e.g. OSC)
// and CSI-style sequences, in both ESC (\u001B) and single-byte CSI
// (\u009B) introducer forms.
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
// Removes all ANSI escape sequences (colors, cursor control) from a string.
function stripAnsi(str) {
return str.replace(ansiRegex, "");
}
// src/visitTests.ts
/**
 * Depth-first traversal of every test in a report. Invokes
 * `testVisitor(test, parentSuites)` for top-level tests (empty parents
 * array) and for nested tests (parents ordered outermost-first). The
 * parents array is reused and mutated during traversal.
 */
function visitTests(report, testVisitor) {
  const walk = (suite, ancestry) => {
    ancestry.push(suite);
    for (const test of suite.tests ?? [])
      testVisitor(test, ancestry);
    for (const child of suite.suites ?? [])
      walk(child, ancestry);
    ancestry.pop();
  };
  for (const topLevelTest of report.tests ?? [])
    testVisitor(topLevelTest, []);
  for (const topLevelSuite of report.suites)
    walk(topLevelSuite, []);
}
export {
FlakinessReport,
reportUtilsBrowser_exports as ReportUtils
};
//# sourceMappingURL=browser.js.map
// src/ciUtils.ts
var CIUtils;
((CIUtils2) => {
// Best-effort URL of the current CI run, probing providers in order:
// GitHub Actions, Azure Pipelines, then GitLab CI (CI_JOB_URL) and
// Jenkins (BUILD_URL) environment variables. Undefined outside CI.
function runUrl() {
return githubActions() ?? azure() ?? process.env.CI_JOB_URL ?? process.env.BUILD_URL;
}
CIUtils2.runUrl = runUrl;
})(CIUtils || (CIUtils = {}));
/**
 * Builds the GitHub Actions run URL from the standard CI environment
 * variables. Returns undefined outside GitHub Actions or when the URL
 * cannot be constructed.
 * @returns {string | undefined} e.g. `https://github.com/org/repo/actions/runs/123?check_suite_focus=true`
 */
function githubActions() {
  const server = process.env.GITHUB_SERVER_URL || "https://github.com";
  const repository = process.env.GITHUB_REPOSITORY;
  const run = process.env.GITHUB_RUN_ID;
  if (!repository || !run)
    return void 0;
  try {
    const runUrl = new URL(`${server}/${repository}/actions/runs/${run}`);
    const runAttempt = process.env.GITHUB_RUN_ATTEMPT;
    if (runAttempt)
      runUrl.searchParams.set("attempt", runAttempt);
    runUrl.searchParams.set("check_suite_focus", "true");
    return runUrl.toString();
  } catch {
    return void 0;
  }
}
/**
 * Builds the Azure Pipelines build-results URL from the standard CI
 * environment variables. Returns undefined outside Azure Pipelines or
 * when the URL cannot be constructed.
 * @returns {string | undefined} e.g. `https://dev.azure.com/org/proj/_build/results?buildId=42`
 */
function azure() {
  const { SYSTEM_TEAMFOUNDATIONCOLLECTIONURI: collection, SYSTEM_TEAMPROJECT: project, BUILD_BUILDID: buildId } = process.env;
  if (!collection || !project || !buildId)
    return void 0;
  try {
    const base = collection.endsWith("/") ? collection : `${collection}/`;
    const resultsUrl = new URL(`${base}${project}/_build/results`);
    resultsUrl.searchParams.set("buildId", buildId);
    return resultsUrl.toString();
  } catch {
    return void 0;
  }
}
export {
CIUtils
};
//# sourceMappingURL=ciUtils.js.map
// src/gitWorktree.ts
import assert from "assert";
import { exec } from "child_process";
import debug from "debug";
import { posix as posixPath, win32 as win32Path } from "path";
import { promisify } from "util";
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
}
return await job();
}
var httpUtils;
((httpUtils2) => {
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
const protocol = url.startsWith("https") ? https : http;
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {
try {
const result = spawnSync(command, args, { encoding: "utf-8", ...options });
if (result.status !== 0) {
return void 0;
}
return result.stdout.trim();
} catch (e) {
console.error(e);
return void 0;
}
}
// src/gitWorktree.ts
var log = debug("fk:git");
var execAsync = promisify(exec);
// Detects native Windows drive paths ("C:\...") and forward-slash Windows
// drive paths ("C:/..."), respectively.
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
/**
 * Normalizes an absolute path to POSIX form. Windows drive paths become
 * `/<drive>/rest` with forward slashes; POSIX paths pass through unchanged.
 * @param {string} absolutePath - Absolute path in Windows or POSIX form.
 * @returns {string} POSIX-style absolute path.
 */
function toPosixAbsolutePath(absolutePath) {
  let normalized = absolutePath;
  if (IS_WIN32_PATH.test(normalized))
    normalized = normalized.split(win32Path.sep).join(posixPath.sep);
  if (IS_ALMOST_POSIX_PATH.test(normalized))
    return "/" + normalized[0] + normalized.substring(2);
  return normalized;
}
/**
 * Converts an absolute POSIX path to the platform-native form: returned
 * unchanged on POSIX systems; on Windows, `/c/dir/file` becomes `C:\dir\file`.
 * @param {string} posix - Absolute POSIX path.
 * @returns {string} Native absolute path.
 * @throws {AssertionError} On Windows, if the path is not absolute or not `/<drive>/...`.
 */
function toNativeAbsolutePath(posix) {
  if (process.platform !== "win32")
    return posix;
  assert(posix.startsWith("/"), "The path must be absolute");
  const match = posix.match(/^\/([a-zA-Z])(\/.*)?$/);
  assert(match, `Invalid POSIX path: ${posix}`);
  const [, driveLetter, tail = ""] = match;
  const nativeTail = tail.split(posixPath.sep).join(win32Path.sep);
  return `${driveLetter.toUpperCase()}:${nativeTail}`;
}
/**
 * Thin wrapper over a git worktree: resolves the repository root via git
 * and converts between native absolute paths and git-relative POSIX paths
 * (the form used in Flakiness reports).
 */
var GitWorktree = class _GitWorktree {
  // POSIX-normalized copy of the git root, used for all relative-path math.
  _posixGitRoot;
  constructor(_gitRoot) {
    this._gitRoot = _gitRoot;
    this._posixGitRoot = toPosixAbsolutePath(this._gitRoot);
  }
  /**
   * Creates a GitWorktree from any path inside a git repository by
   * resolving the repository root with `git rev-parse --show-toplevel`.
   * @param {string} somePathInsideGitRepo - Any file or directory inside the repo.
   * @returns {GitWorktree} Instance bound to the discovered git root.
   * @throws {Error} If the path is not inside a git repository or git fails.
   */
  static create(somePathInsideGitRepo) {
    const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
      cwd: somePathInsideGitRepo,
      encoding: "utf-8"
    });
    assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
    return new _GitWorktree(root);
  }
  /** @returns {string} Native absolute path of the git repository root. */
  rootPath() {
    return this._gitRoot;
  }
  /**
   * @returns {FlakinessReport.CommitId} Full 40-character SHA-1 of the current HEAD commit.
   * @throws {Error} If the git command fails.
   */
  headCommitId() {
    const sha = shell(`git`, ["rev-parse", "HEAD"], {
      cwd: this._gitRoot,
      encoding: "utf-8"
    });
    assert(sha, `FAILED: git rev-parse HEAD @ ${this._gitRoot}`);
    return sha.trim();
  }
  /**
   * Converts a native absolute path (Windows or POSIX) into a POSIX path
   * relative to the git root.
   * @param {string} absolutePath - Native absolute path.
   * @returns {FlakinessReport.GitFilePath} e.g. `src/test.ts`; empty string for the root itself.
   */
  gitPath(absolutePath) {
    return posixPath.relative(this._posixGitRoot, toPosixAbsolutePath(absolutePath));
  }
  /**
   * Inverse of {@link gitPath}: converts a git-relative POSIX path into a
   * native absolute path for the current platform.
   * @param {FlakinessReport.GitFilePath} relativePath - POSIX path relative to the git root.
   * @returns {string} Native absolute path.
   */
  absolutePath(relativePath) {
    return toNativeAbsolutePath(posixPath.join(this._posixGitRoot, relativePath));
  }
  /**
   * Lists up to `count` commits reachable from HEAD, most recent first.
   * Note: shallow CI checkouts may return fewer commits than requested.
   * @param {number} count - Maximum number of commits to retrieve.
   * @returns {Promise<GitCommit[]>} Commit metadata (id, timestamp, author, message, parents).
   */
  async listCommits(count) {
    return await listCommits(this._gitRoot, "HEAD", count);
  }
};
// Reads up to `count` commits reachable from `head` via `git log`.
// Records are NUL-separated (-z); fields within a record are joined with
// "|~|". NOTE(review): a commit subject containing "|~|" would corrupt that
// record's parse — acceptable for this best-effort listing.
async function listCommits(gitRoot, head, count) {
const FIELD_SEPARATOR = "|~|";
const RECORD_SEPARATOR = "\0";
const prettyFormat = [
"%H",
// Full commit hash
"%ct",
// Commit timestamp (Unix seconds)
"%an",
// Author name
"%s",
// Subject line
"%P"
// Parent hashes (space-separated)
].join(FIELD_SEPARATOR);
const command = `git log ${head} -n ${count} --pretty=format:"${prettyFormat}" -z`;
try {
const { stdout } = await execAsync(command, { cwd: gitRoot });
if (!stdout) {
return [];
}
return stdout.trim().split(RECORD_SEPARATOR).filter((record) => record).map((record) => {
const [commitId, timestampStr, author, message, parentsStr] = record.split(FIELD_SEPARATOR);
// Root commits have no parents; merge commits have several.
const parents = parentsStr ? parentsStr.split(" ").filter((p) => p) : [];
return {
commitId,
// git reports seconds; convert to milliseconds for the report format.
timestamp: parseInt(timestampStr, 10) * 1e3,
author,
message,
parents,
walkIndex: 0
};
});
} catch (error) {
// Best-effort: failures (e.g. shallow clone, missing ref) yield an empty list.
log(`Failed to list commits for repository at ${gitRoot}:`, error);
return [];
}
}
export {
GitWorktree
};
//# sourceMappingURL=gitWorktree.js.map
// src/normalizeReport.ts
import stableObjectHash from "stable-hash";
var Multimap = class {
_map = /* @__PURE__ */ new Map();
set(key, value) {
const set = this._map.get(key) ?? /* @__PURE__ */ new Set();
this._map.set(key, set);
set.add(value);
}
getAll(key) {
return Array.from(this._map.get(key) ?? []);
}
};
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests(suite.tests ?? [], suiteId);
}
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
visitTests(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
function computeEnvId(env) {
return stableObjectHash(env);
}
function computeSuiteId(suite, parentSuiteId) {
return stableObjectHash({
parentSuiteId: parentSuiteId ?? "",
type: suite.type,
file: suite.location?.file ?? "",
title: suite.title
});
}
function computeTestId(test, suiteId) {
return stableObjectHash({
suiteId,
file: test.location?.file ?? "",
title: test.title
});
}
export {
normalizeReport
};
//# sourceMappingURL=normalizeReport.js.map
// src/normalizeReport.ts
import stableObjectHash from "stable-hash";
var Multimap = class {
_map = /* @__PURE__ */ new Map();
set(key, value) {
const set = this._map.get(key) ?? /* @__PURE__ */ new Set();
this._map.set(key, set);
set.add(value);
}
getAll(key) {
return Array.from(this._map.get(key) ?? []);
}
};
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
function computeEnvId(env) {
return stableObjectHash(env);
}
function computeSuiteId(suite, parentSuiteId) {
return stableObjectHash({
parentSuiteId: parentSuiteId ?? "",
type: suite.type,
file: suite.location?.file ?? "",
title: suite.title
});
}
function computeTestId(test, suiteId) {
return stableObjectHash({
suiteId,
file: test.location?.file ?? "",
title: test.title
});
}
// src/stripAnsi.ts
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
function stripAnsi(str) {
return str.replace(ansiRegex, "");
}
// src/visitTests.ts
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
}
export {
normalizeReport,
stripAnsi,
visitTests
};
//# sourceMappingURL=reportUtilsBrowser.js.map
// src/stripAnsi.ts
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
function stripAnsi(str) {
return str.replace(ansiRegex, "");
}
export {
stripAnsi
};
//# sourceMappingURL=stripAnsi.js.map
// src/uploadReport.ts
import assert from "assert";
import fs2 from "fs";
import { URL } from "url";
// src/_internalUtils.ts
import crypto from "crypto";
import fs from "fs";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
async function compressTextAsync(text) {
return asyncBrotliCompress(text, {
chunkSize: 32 * 1024,
params: {
[zlib.constants.BROTLI_PARAM_QUALITY]: 6,
[zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT
}
});
}
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
}
return await job();
}
var httpUtils;
((httpUtils2) => {
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
const protocol = url.startsWith("https") ? https : http;
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function sha1Text(data) {
const hash = crypto.createHash("sha1");
hash.update(data);
return hash.digest("hex");
}
function sha1File(filePath) {
return new Promise((resolve, reject) => {
const hash = crypto.createHash("sha1");
const stream = fs.createReadStream(filePath);
stream.on("data", (chunk) => {
hash.update(chunk);
});
stream.on("end", () => {
resolve(hash.digest("hex"));
});
stream.on("error", (err) => {
reject(err);
});
});
}
// src/uploadReport.ts
/**
 * Builds a file-backed attachment descriptor. The id is the SHA-1 of the
 * file contents, so identical files share an id.
 * @param {string} contentType - MIME type of the attachment.
 * @param {string} filePath - Path of the file to attach.
 * @returns {Promise<object>} `{ type: "file", contentType, id, path }`.
 */
async function createFileAttachment(contentType, filePath) {
  const id = await sha1File(filePath);
  return { type: "file", contentType, id, path: filePath };
}
/**
 * Builds an in-memory attachment descriptor. The id is the SHA-1 of `data`.
 * @param {string} contentType - MIME type of the attachment.
 * @param {string | Buffer} data - Attachment payload.
 * @returns {Promise<object>} `{ type: "buffer", contentType, id, body }`.
 */
async function createDataAttachment(contentType, data) {
  const id = sha1Text(data);
  return { type: "buffer", contentType, id, body: data };
}
// Uploads a Flakiness report plus attachments to the configured endpoint.
// Resolves with a { status: "success" | "skipped" | "failed", ... } result
// and only throws when options.throwOnFailure is set.
async function uploadReport(report, attachments, options) {
// Options take precedence over environment variables; endpoint defaults to flakiness.io.
const flakinessAccessToken = options?.flakinessAccessToken ?? process.env["FLAKINESS_ACCESS_TOKEN"];
const flakinessEndpoint = options?.flakinessEndpoint ?? process.env["FLAKINESS_ENDPOINT"] ?? "https://flakiness.io";
const logger = options?.logger ?? console;
// Without a token the upload is skipped; only warn on CI to keep local runs quiet.
if (!flakinessAccessToken) {
const reason = "No FLAKINESS_ACCESS_TOKEN found";
if (process.env.CI)
logger.warn(`[flakiness.io] \u26A0 Skipping upload: ${reason}`);
return { status: "skipped", reason };
}
try {
const upload = new ReportUpload(report, attachments, { flakinessAccessToken, flakinessEndpoint });
const uploadResult = await upload.upload();
if (!uploadResult.success) {
const errorMessage = uploadResult.message || "Unknown upload error";
logger.error(`[flakiness.io] \u2715 Failed to upload: ${errorMessage}`);
// NOTE(review): this throw is caught by the catch below, which logs a
// second "Unexpected error" line before rethrowing — consider restructuring.
if (options?.throwOnFailure)
throw new Error(`Flakiness upload failed: ${errorMessage}`);
return { status: "failed", error: errorMessage };
}
logger.log(`[flakiness.io] \u2713 Uploaded to ${uploadResult.reportUrl}`);
return { status: "success", reportUrl: uploadResult.reportUrl };
} catch (e) {
const errorMessage = e.message || String(e);
logger.error(`[flakiness.io] \u2715 Unexpected error during upload: ${errorMessage}`);
if (options?.throwOnFailure)
throw e;
return { status: "failed", error: errorMessage };
}
}
var HTTP_BACKOFF = [100, 500, 1e3, 1e3, 1e3, 1e3];
// Drives the three-step upload handshake with the Flakiness API:
//   1. POST /api/upload/start       -> upload token, presigned report URL, web URL
//   2. PUT report + attachments     -> to presigned storage URLs, in parallel
//   3. POST /api/upload/finish      -> finalize the upload
// NOTE(review): `fs2` and `assert` are bound elsewhere in this bundle chunk —
// presumably node's fs and assert modules; verify in the full bundle.
var ReportUpload = class {
// Report object to serialize and upload.
_report;
// Attachment descriptors ({ type: "file" | "buffer", contentType, id, ... }).
_attachments;
// { flakinessAccessToken, flakinessEndpoint }.
_options;
constructor(report, attachments, options) {
this._options = options;
this._report = report;
this._attachments = attachments;
}
// POSTs an optional JSON body to `pathname` on the configured endpoint.
// Never rejects: resolves to { result } on 2xx, or { error } on an HTTP error
// status or a network failure (the trailing .catch).
async _api(pathname, token, body) {
const url = new URL(this._options.flakinessEndpoint);
url.pathname = pathname;
return await fetch(url, {
method: "POST",
headers: {
"Authorization": `Bearer ${token}`,
"Content-Type": "application/json"
},
body: body ? JSON.stringify(body) : void 0
}).then(async (response) => !response.ok ? {
result: void 0,
error: response.status + " " + url.href + " " + await response.text()
} : {
result: await response.json(),
error: void 0
}).catch((error) => ({
result: void 0,
error
}));
}
// Runs the full handshake.
// Returns { success: true, reportUrl } or { success: false, message }.
async upload() {
const response = await this._api("/api/upload/start", this._options.flakinessAccessToken);
if (response?.error || !response.result)
return { success: false, message: response.error };
// webUrl may be relative; resolve it against the endpoint.
const webUrl = new URL(response.result.webUrl, this._options.flakinessEndpoint).toString();
// Exchange attachment ids for presigned upload URLs.
const attachmentsPresignedUrls = await this._api("/api/upload/attachments", response.result.uploadToken, {
attachmentIds: this._attachments.map((a) => a.id)
});
if (attachmentsPresignedUrls?.error || !attachmentsPresignedUrls.result)
return { success: false, message: attachmentsPresignedUrls.error };
const attachments = new Map(attachmentsPresignedUrls.result.map((a) => [a.attachmentId, a.presignedUrl]));
// Upload the report and all attachments concurrently; any failure rejects here.
await Promise.all([
this._uploadReport(JSON.stringify(this._report), response.result.presignedReportUrl),
...this._attachments.map((attachment) => {
const uploadURL = attachments.get(attachment.id);
if (!uploadURL)
throw new Error("Internal error: missing upload URL for attachment!");
return this._uploadAttachment(attachment, uploadURL);
})
]);
await this._api("/api/upload/finish", response.result.uploadToken);
return { success: true, reportUrl: webUrl };
}
// PUTs the brotli-compressed report JSON to its presigned URL, with retries.
async _uploadReport(data, uploadUrl) {
const compressed = await compressTextAsync(data);
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(compressed) + "",
"Content-Encoding": "br"
};
await retryWithBackoff(async () => {
const { request, responseDataPromise } = httpUtils.createRequest({
url: uploadUrl,
headers,
method: "put"
});
request.write(compressed);
request.end();
await responseDataPromise;
}, HTTP_BACKOFF);
}
// PUTs one attachment to its presigned URL, with retries.
// Text-like content types are brotli-compressed in memory; other file-backed
// attachments are streamed straight from disk to avoid buffering large binaries.
async _uploadAttachment(attachment, uploadUrl) {
const mimeType = attachment.contentType.toLocaleLowerCase().trim();
const compressable = mimeType.startsWith("text/") || mimeType.endsWith("+json") || mimeType.endsWith("+text") || mimeType.endsWith("+xml");
if (!compressable && attachment.type === "file") {
// Streaming path: pipe the file into the request; Content-Length from stat.
await retryWithBackoff(async () => {
const { request, responseDataPromise } = httpUtils.createRequest({
url: uploadUrl,
headers: {
"Content-Type": attachment.contentType,
"Content-Length": (await fs2.promises.stat(attachment.path)).size + ""
},
method: "put"
});
fs2.createReadStream(attachment.path).pipe(request);
await responseDataPromise;
}, HTTP_BACKOFF);
return;
}
// Buffered path: load content into memory, optionally compress, then upload.
let buffer = attachment.type === "buffer" ? attachment.body : await fs2.promises.readFile(attachment.path);
assert(buffer);
const encoding = compressable ? "br" : void 0;
if (compressable)
buffer = await compressTextAsync(buffer);
const headers = {
"Content-Type": attachment.contentType,
"Content-Length": Buffer.byteLength(buffer) + "",
"Content-Encoding": encoding
};
await retryWithBackoff(async () => {
const { request, responseDataPromise } = httpUtils.createRequest({
url: uploadUrl,
headers,
method: "put"
});
request.write(buffer);
request.end();
await responseDataPromise;
}, HTTP_BACKOFF);
}
};
export {
createDataAttachment,
createFileAttachment,
uploadReport
};
//# sourceMappingURL=uploadReport.js.map
// src/visitTests.ts
/**
 * Depth-first traversal of all tests in a report.
 *
 * Calls `testVisitor(test, parents)` for every test, where `parents` is the
 * chain of enclosing suites (outermost first; empty for suiteless top-level
 * tests). The same `parents` array is mutated in place during the walk, so
 * visitors must copy it if they need to keep it.
 */
function visitTests(report, testVisitor) {
  const descend = (suite, parents) => {
    parents.push(suite);
    (suite.tests ?? []).forEach((test) => testVisitor(test, parents));
    (suite.suites ?? []).forEach((child) => descend(child, parents));
    parents.pop();
  };
  (report.tests ?? []).forEach((test) => testVisitor(test, []));
  for (const rootSuite of report.suites)
    descend(rootSuite, []);
}
export {
visitTests
};
//# sourceMappingURL=visitTests.js.map
// src/writeReport.ts
import fs from "fs";
import path from "path";
/**
 * Serializes a report and its attachments into `outputFolder`.
 *
 * Wipes `outputFolder` first, then writes `report.json` and copies every
 * attachment into an `attachments/` subfolder, named by attachment id.
 *
 * @returns {Promise<Array>} File-backed attachment descriptors that point at
 *   the copies inside `outputFolder`.
 */
async function writeReport(report, attachments, outputFolder) {
  await fs.promises.rm(outputFolder, { recursive: true, force: true });
  await fs.promises.mkdir(outputFolder, { recursive: true });
  await fs.promises.writeFile(path.join(outputFolder, "report.json"), JSON.stringify(report), "utf-8");
  const attachmentsFolder = path.join(outputFolder, "attachments");
  if (attachments.length)
    await fs.promises.mkdir(attachmentsFolder);
  const movedAttachments = [];
  for (const attachment of attachments) {
    const targetPath = path.join(attachmentsFolder, attachment.id);
    if (attachment.type === "file")
      await fs.promises.cp(attachment.path, targetPath);
    else if (attachment.type === "buffer")
      await fs.promises.writeFile(targetPath, attachment.body);
    movedAttachments.push({
      type: "file",
      contentType: attachment.contentType,
      id: attachment.id,
      path: targetPath
    });
  }
  return movedAttachments;
}
export {
writeReport
};
//# sourceMappingURL=writeReport.js.map
+92
-1
// src/createEnvironment.ts
import { spawnSync } from "child_process";
import fs from "fs";
import os from "os";
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
// Debug flag: when FLAKINESS_DBG is set, error logs include full stack traces.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/** Formats an Error for logging: stack trace in debug mode, message otherwise. */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
/**
 * Runs `job`, retrying after each failure once per entry in `backoff` (each
 * entry is the delay in ms before the next attempt). Intermediate failures
 * are logged; the final attempt's result (or rejection) propagates as-is.
 */
async function retryWithBackoff(job, backoff = []) {
  const logFailure = (e) => {
    if (e instanceof AggregateError)
      console.error(`[flakiness.io err]`, errorText(e.errors[0]));
    else if (e instanceof Error)
      console.error(`[flakiness.io err]`, errorText(e));
    else
      console.error(`[flakiness.io err]`, e);
  };
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (e) {
      logFailure(e);
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  return await job();
}
// Minimal HTTP(S) helpers built on node's http/https modules.
var httpUtils;
((httpUtils2) => {
// Opens a request to `url` and returns the request object plus a promise for
// the response body. The caller writes any payload and calls request.end().
// The promise resolves with the concatenated body Buffer on a 2xx status and
// rejects on non-2xx statuses or request/response stream errors.
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
const protocol = url.startsWith("https") ? https : http;
// Drop undefined header values so they are not serialized as "undefined".
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
// GETs `url`, retrying per `backoff`; resolves with the raw response Buffer.
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
// GETs `url` and decodes the response as UTF-8 text.
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
// GETs `url` and parses the response as JSON. Note: no backoff parameter here.
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
// POSTs `text` as application/json, retrying per `backoff`;
// resolves with the raw response Buffer.
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
// POSTs `json` to `url` and parses the JSON response.
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {

@@ -17,2 +106,4 @@ try {

}
// src/createEnvironment.ts
function readLinuxOSRelease() {

@@ -19,0 +110,0 @@ const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");

+17
-122
// src/createTestStepSnippets.ts
import { codeFrameColumns } from "@babel/code-frame";
import fs from "fs";
import { posix as posixPath } from "path";
// src/reportUtils.ts
import { Multimap } from "@flakiness/shared/common/multimap.js";
import { xxHash, xxHashObject } from "@flakiness/shared/common/utils.js";
var ReportUtils;
((ReportUtils2) => {
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
// src/visitTests.ts
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
ReportUtils2.visitTests = visitTests;
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
ReportUtils2.normalizeReport = normalizeReport;
function computeEnvId(env) {
return xxHashObject(env);
}
function computeSuiteId(suite, parentSuiteId) {
return xxHash([
parentSuiteId ?? "",
suite.type,
suite.location?.file ?? "",
suite.title
]);
}
function computeTestId(test, suiteId) {
return xxHash([
suiteId,
test.location?.file ?? "",
test.title
]);
}
})(ReportUtils || (ReportUtils = {}));
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
}
// src/createTestStepSnippets.ts
function createTestStepSnippetsInplace(report, gitRoot) {
function createTestStepSnippetsInplace(worktree, report) {
const allSteps = /* @__PURE__ */ new Map();
ReportUtils.visitTests(report, (test) => {
visitTests(report, (test) => {
for (const attempt of test.attempts) {

@@ -146,3 +41,3 @@ for (const step of attempt.steps ?? []) {

try {
source = fs.readFileSync(posixPath.join(gitRoot, gitFilePath), "utf-8");
source = fs.readFileSync(worktree.absolutePath(gitFilePath), "utf-8");
} catch (e) {

@@ -149,0 +44,0 @@ continue;

@@ -5,22 +5,98 @@ // src/flakinessProjectConfig.ts

// src/git.ts
// src/gitWorktree.ts
import assert from "assert";
// src/pathutils.ts
import { exec } from "child_process";
import debug from "debug";
import { posix as posixPath, win32 as win32Path } from "path";
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
function normalizePath(aPath) {
if (IS_WIN32_PATH.test(aPath)) {
aPath = aPath.split(win32Path.sep).join(posixPath.sep);
}
if (IS_ALMOST_POSIX_PATH.test(aPath))
return "/" + aPath[0] + aPath.substring(2);
return aPath;
}
import { promisify } from "util";
// src/utils.ts
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
}
return await job();
}
var httpUtils;
((httpUtils2) => {
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
const protocol = url.startsWith("https") ? https : http;
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {

@@ -39,11 +115,201 @@ try {

// src/git.ts
function computeGitRoot(somePathInsideGitRepo) {
const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
cwd: somePathInsideGitRepo,
encoding: "utf-8"
});
assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
return normalizePath(root);
// src/gitWorktree.ts
var log = debug("fk:git");
var execAsync = promisify(exec);
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
function toPosixAbsolutePath(absolutePath) {
if (IS_WIN32_PATH.test(absolutePath)) {
absolutePath = absolutePath.split(win32Path.sep).join(posixPath.sep);
}
if (IS_ALMOST_POSIX_PATH.test(absolutePath))
return "/" + absolutePath[0] + absolutePath.substring(2);
return absolutePath;
}
function toNativeAbsolutePath(posix) {
if (process.platform !== "win32")
return posix;
assert(posix.startsWith("/"), "The path must be absolute");
const m = posix.match(/^\/([a-zA-Z])(\/.*)?$/);
assert(m, `Invalid POSIX path: ${posix}`);
const drive = m[1];
const rest = (m[2] ?? "").split(posixPath.sep).join(win32Path.sep);
return drive.toUpperCase() + ":" + rest;
}
// Wraps a git checkout rooted at `_gitRoot`, providing conversions between
// native and git-relative POSIX paths plus basic git queries (HEAD, history).
var GitWorktree = class _GitWorktree {
// `_gitRoot` is the native absolute path of the repository root.
constructor(_gitRoot) {
this._gitRoot = _gitRoot;
// Cache the POSIX form of the root; gitPath()/absolutePath() convert against it.
this._posixGitRoot = toPosixAbsolutePath(this._gitRoot);
}
/**
 * Creates a GitWorktree instance from any path inside a git repository.
 *
 * @param {string} somePathInsideGitRepo - Any path (file or directory) within a git repository.
 * Can be absolute or relative. The function will locate the git root directory.
 *
 * @returns {GitWorktree} A new GitWorktree instance bound to the discovered git root.
 *
 * @throws {Error} Throws if the path is not inside a git repository or if git commands fail.
 *
 * @example
 * ```typescript
 * const worktree = GitWorktree.create('./src/my-test.ts');
 * const gitRoot = worktree.rootPath();
 * ```
 */
static create(somePathInsideGitRepo) {
const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
cwd: somePathInsideGitRepo,
encoding: "utf-8"
});
assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
// NOTE(review): `root` is used untrimmed here while headCommitId() trims its
// output — presumably shell() already trims trailing newlines; verify.
return new _GitWorktree(root);
}
// POSIX form of _gitRoot (e.g. "/C/project" for "C:\project"), set in the constructor.
_posixGitRoot;
/**
 * Returns the native absolute path of the git repository root directory.
 *
 * @returns {string} Native absolute path to the git root. Format matches the current platform
 * (Windows or POSIX).
 *
 * @example
 * ```typescript
 * const root = worktree.rootPath();
 * // On Windows: 'D:\project'
 * // On Unix: '/project'
 * ```
 */
rootPath() {
return this._gitRoot;
}
/**
 * Returns the commit ID (SHA-1 hash) of the current HEAD commit.
 *
 * @returns {FlakinessReport.CommitId} Full 40-character commit hash of the HEAD commit.
 *
 * @throws {Error} Throws if git command fails or repository is in an invalid state.
 *
 * @example
 * ```typescript
 * const commitId = worktree.headCommitId();
 * // Returns: 'a1b2c3d4e5f6...' (40-character SHA-1)
 * ```
 */
headCommitId() {
const sha = shell(`git`, ["rev-parse", "HEAD"], {
cwd: this._gitRoot,
encoding: "utf-8"
});
assert(sha, `FAILED: git rev-parse HEAD @ ${this._gitRoot}`);
return sha.trim();
}
/**
 * Converts a native absolute path to a git-relative POSIX path.
 *
 * Takes any absolute path (Windows or POSIX format) and converts it to a POSIX path
 * relative to the git repository root. This is essential for Flakiness reports where
 * all file paths must be git-relative and use POSIX separators.
 *
 * @param {string} absolutePath - Native absolute path to convert. Can be in Windows format
 * (e.g., `D:\project\src\test.ts`) or POSIX format (e.g., `/project/src/test.ts`).
 *
 * @returns {FlakinessReport.GitFilePath} POSIX path relative to git root (e.g., `src/test.ts`).
 * Returns an empty string if the path is the git root itself.
 *
 * @example
 * ```typescript
 * const gitPath = worktree.gitPath('/Users/project/src/test.ts');
 * // Returns: 'src/test.ts'
 * ```
 */
gitPath(absolutePath) {
return posixPath.relative(this._posixGitRoot, toPosixAbsolutePath(absolutePath));
}
/**
 * Converts a git-relative POSIX path to a native absolute path.
 *
 * Takes a POSIX path relative to the git root and converts it to the native absolute path
 * format for the current platform (Windows or POSIX). This is the inverse of `gitPath()`.
 *
 * @param {FlakinessReport.GitFilePath} relativePath - POSIX path relative to git root
 * (e.g., `src/test.ts`).
 *
 * @returns {string} Native absolute path. On Windows, returns Windows format (e.g., `D:\project\src\test.ts`).
 * On POSIX systems, returns POSIX format (e.g., `/project/src/test.ts`).
 *
 * @example
 * ```typescript
 * const absolutePath = worktree.absolutePath('src/test.ts');
 * // On Windows: 'D:\project\src\test.ts'
 * // On Unix: '/project/src/test.ts'
 * ```
 */
absolutePath(relativePath) {
return toNativeAbsolutePath(posixPath.join(this._posixGitRoot, relativePath));
}
/**
 * Lists recent commits from the repository.
 *
 * Retrieves commit information including commit ID, timestamp, author, message, and parent commits.
 * Note: CI environments often have shallow checkouts with limited history, which may affect
 * the number of commits returned.
 *
 * @param {number} count - Maximum number of commits to retrieve, starting from HEAD.
 *
 * @returns {Promise<GitCommit[]>} Promise that resolves to an array of commit objects, ordered
 * from most recent to oldest. Each commit includes:
 * - `commitId` - Full commit hash
 * - `timestamp` - Commit timestamp in milliseconds since Unix epoch
 * - `message` - Commit message (subject line)
 * - `author` - Author name
 * - `parents` - Array of parent commit IDs
 *
 * @example
 * ```typescript
 * const commits = await worktree.listCommits(10);
 * console.log(`Latest commit: ${commits[0].message}`);
 * ```
 */
async listCommits(count) {
return await listCommits(this._gitRoot, "HEAD", count);
}
};
/**
 * Returns up to `count` commits reachable from `head` in the repo at `gitRoot`,
 * newest first. Resolves to [] on any git failure (e.g. not a repository, or
 * a shallow clone with no history for `head`).
 */
async function listCommits(gitRoot, head, count) {
  const FIELD_SEPARATOR = "|~|";
  const RECORD_SEPARATOR = "\0";
  // %H hash, %ct unix commit time, %an author, %s subject, %P parent hashes.
  const prettyFormat = ["%H", "%ct", "%an", "%s", "%P"].join(FIELD_SEPARATOR);
  const command = `git log ${head} -n ${count} --pretty=format:"${prettyFormat}" -z`;
  try {
    const { stdout } = await execAsync(command, { cwd: gitRoot });
    if (!stdout) {
      return [];
    }
    const records = stdout.trim().split(RECORD_SEPARATOR).filter((record) => record);
    return records.map((record) => {
      const [commitId, timestampStr, author, message, parentsStr] = record.split(FIELD_SEPARATOR);
      const parents = parentsStr ? parentsStr.split(" ").filter((p) => p) : [];
      return {
        commitId,
        timestamp: parseInt(timestampStr, 10) * 1e3,
        author,
        message,
        parents,
        walkIndex: 0
      };
    });
  } catch (error) {
    log(`Failed to list commits for repository at ${gitRoot}:`, error);
    return [];
  }
}

@@ -67,4 +333,4 @@ // src/flakinessProjectConfig.ts

try {
const gitRoot = computeGitRoot(process.cwd());
return createConfigPath(gitRoot);
const worktree = GitWorktree.create(process.cwd());
return createConfigPath(worktree.rootPath());
} catch (e) {

@@ -79,2 +345,18 @@ return createConfigPath(process.cwd());

}
/**
* Loads the Flakiness project configuration from disk.
*
* Searches for an existing `.flakiness/config.json` file starting from the current working
* directory and walking up the directory tree. If no config exists, it determines the
* appropriate location (git root or current directory) for future saves.
*
* @returns {Promise<FlakinessProjectConfig>} Promise that resolves to a FlakinessProjectConfig
* instance. If no config file exists, returns an instance with default/empty values.
*
* @example
* ```typescript
* const config = await FlakinessProjectConfig.load();
* const projectId = config.projectPublicId();
* ```
*/
static async load() {

@@ -86,14 +368,53 @@ const configPath = ensureConfigPath();

}
/**
* Creates a new empty Flakiness project configuration.
*
* Creates a configuration instance with no values set. Use this when you want to build
* a configuration from scratch. Call `save()` to persist it to disk.
*
* @returns {FlakinessProjectConfig} A new empty configuration instance.
*
* @example
* ```typescript
* const config = FlakinessProjectConfig.createEmpty();
* config.setProjectPublicId('my-project-id');
* await config.save();
* ```
*/
static createEmpty() {
return new _FlakinessProjectConfig(ensureConfigPath(), {});
}
/**
* Returns the absolute path to the configuration file.
*
* @returns {string} Absolute path to `.flakiness/config.json`.
*/
path() {
return this._configPath;
}
/**
* Returns the project's public ID, if configured.
*
* The project public ID is used to associate reports with a specific Flakiness.io project.
*
* @returns {string | undefined} Project public ID, or `undefined` if not set.
*/
projectPublicId() {
return this._config.projectPublicId;
}
/**
* Returns the report viewer URL, either custom or default.
*
* @returns {string} Custom report viewer URL if configured, otherwise the default
* `https://report.flakiness.io`.
*/
reportViewerUrl() {
return this._config.customReportViewerUrl ?? "https://report.flakiness.io";
}
/**
* Sets or clears the custom report viewer URL.
*
* @param {string | undefined} url - Custom report viewer URL to use, or `undefined` to
* clear and use the default URL.
*/
setCustomReportViewerUrl(url) {

@@ -105,5 +426,27 @@ if (url)

}
/**
* Sets the project's public ID.
*
* @param {string | undefined} projectId - Project public ID to set, or `undefined` to clear.
*/
setProjectPublicId(projectId) {
this._config.projectPublicId = projectId;
}
/**
* Saves the configuration to disk.
*
* Writes the current configuration values to `.flakiness/config.json`. Creates the
* `.flakiness` directory if it doesn't exist.
*
* @returns {Promise<void>} Promise that resolves when the file has been written.
*
* @throws {Error} Throws if unable to create directories or write the file.
*
* @example
* ```typescript
* const config = await FlakinessProjectConfig.load();
* config.setProjectPublicId('my-project');
* await config.save();
* ```
*/
async save() {

@@ -110,0 +453,0 @@ await fs.promises.mkdir(path.dirname(this._configPath), { recursive: true });

+820
-507

@@ -8,10 +8,150 @@ var __defProp = Object.defineProperty;

// src/index.ts
import fs6 from "fs";
import path3 from "path";
import { FlakinessReport } from "@flakiness/flakiness-report";
// src/createEnvironment.ts
// src/ciUtils.ts
// CI helpers: best-effort detection of the current CI run's web URL.
var CIUtils;
((CIUtils2) => {
  /**
   * Returns the URL of the current CI run, probing providers in order:
   * GitHub Actions, Azure Pipelines, GitLab (CI_JOB_URL), Jenkins (BUILD_URL).
   * Undefined when no provider is detected.
   */
  function runUrl() {
    const candidates = [githubActions(), azure(), process.env.CI_JOB_URL, process.env.BUILD_URL];
    return candidates.find((url) => url != null);
  }
  CIUtils2.runUrl = runUrl;
})(CIUtils || (CIUtils = {}));
/**
 * Builds the GitHub Actions run URL from the standard env vars
 * (GITHUB_SERVER_URL, GITHUB_REPOSITORY, GITHUB_RUN_ID, GITHUB_RUN_ATTEMPT).
 * Returns undefined outside of GitHub Actions or if URL construction fails.
 */
function githubActions() {
  const { GITHUB_REPOSITORY: repo, GITHUB_RUN_ID: runId, GITHUB_RUN_ATTEMPT: attempt } = process.env;
  const serverUrl = process.env.GITHUB_SERVER_URL || "https://github.com";
  if (!repo || !runId) return void 0;
  try {
    const url = new URL(`${serverUrl}/${repo}/actions/runs/${runId}`);
    if (attempt) url.searchParams.set("attempt", attempt);
    url.searchParams.set("check_suite_focus", "true");
    return url.toString();
  } catch (error) {
    return void 0;
  }
}
/**
 * Builds the Azure Pipelines build-results URL from the standard env vars
 * (SYSTEM_TEAMFOUNDATIONCOLLECTIONURI, SYSTEM_TEAMPROJECT, BUILD_BUILDID).
 * Returns undefined outside of Azure Pipelines or if URL construction fails.
 */
function azure() {
  const { SYSTEM_TEAMFOUNDATIONCOLLECTIONURI: collectionUri, SYSTEM_TEAMPROJECT: project, BUILD_BUILDID: buildId } = process.env;
  if (!collectionUri || !project || !buildId)
    return void 0;
  try {
    // Normalize the collection URI to end with a single slash before appending.
    const base = collectionUri.endsWith("/") ? collectionUri : `${collectionUri}/`;
    const resultsUrl = new URL(`${base}${project}/_build/results`);
    resultsUrl.searchParams.set("buildId", buildId);
    return resultsUrl.toString();
  } catch (error) {
    return void 0;
  }
}
// src/gitWorktree.ts
import assert from "assert";
import { exec } from "child_process";
import debug from "debug";
import { posix as posixPath, win32 as win32Path } from "path";
import { promisify } from "util";
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import crypto from "crypto";
import fs from "fs";
import os from "os";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
// Promisified zlib.brotliCompress for async/await use.
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
/**
 * Brotli-compresses text for upload payloads: quality 6 trades a little
 * ratio for speed, and TEXT mode tunes the encoder for UTF-8 content.
 *
 * @param {string|Buffer} text - Content to compress.
 * @returns {Promise<Buffer>} Compressed bytes.
 */
async function compressTextAsync(text) {
  const options = {
    chunkSize: 32 * 1024,
    params: {
      [zlib.constants.BROTLI_PARAM_QUALITY]: 6,
      [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT
    }
  };
  return await asyncBrotliCompress(text, options);
}
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
}
return await job();
}
// Minimal HTTP helpers built directly on node's http/https modules.
var httpUtils;
((httpUtils2) => {
  /**
   * Creates an http(s) request together with a promise for the full response
   * body. The promise resolves with the concatenated Buffer on 2xx responses
   * and rejects on any other status code or transport error. The caller must
   * write any payload and call `request.end()`.
   * @param {{url: string, method?: string, headers?: Object}} options
   * @returns {{request: import('http').ClientRequest, responseDataPromise: Promise<Buffer>}}
   */
  function createRequest({ url, method = "get", headers = {} }) {
    let resolve2;
    let reject;
    const responseDataPromise = new Promise((a, b) => {
      resolve2 = a;
      reject = b;
    });
    const protocol = url.startsWith("https") ? https : http;
    // Strip undefined header values so node does not reject the request.
    headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
    const request = protocol.request(url, { method, headers }, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
          resolve2(Buffer.concat(chunks));
        else
          reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
      });
      res.on("error", (error) => reject(error));
    });
    request.on("error", reject);
    return { request, responseDataPromise };
  }
  httpUtils2.createRequest = createRequest;
  /** GETs `url`, retrying per `backoff`; resolves with the raw response Buffer. */
  async function getBuffer(url, backoff) {
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url });
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.getBuffer = getBuffer;
  /** GETs `url` and resolves with the response decoded as UTF-8 text. */
  async function getText(url, backoff) {
    const buffer = await getBuffer(url, backoff);
    return buffer.toString("utf-8");
  }
  httpUtils2.getText = getText;
  /**
   * GETs `url` and parses the response as JSON.
   * Fix: now accepts an optional `backoff` retry schedule and forwards it to
   * getText, matching getBuffer/getText/postText (which already supported
   * retries). Omitting `backoff` preserves the old single-attempt behavior.
   */
  async function getJSON(url, backoff) {
    return JSON.parse(await getText(url, backoff));
  }
  httpUtils2.getJSON = getJSON;
  /** POSTs `text` as application/json; resolves with the response Buffer. */
  async function postText(url, text, backoff) {
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(text) + ""
    };
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
      request.write(text);
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.postText = postText;
  /** POSTs `json` (stringified) and parses the JSON response body. */
  async function postJSON(url, json, backoff) {
    const buffer = await postText(url, JSON.stringify(json), backoff);
    return JSON.parse(buffer.toString("utf-8"));
  }
  httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {

@@ -29,4 +169,253 @@ try {

}
/**
 * Computes the hex-encoded SHA-1 digest of a string or Buffer.
 * @param {string|Buffer} data
 * @returns {string} 40-character lowercase hex digest.
 */
function sha1Text(data) {
  return crypto.createHash("sha1").update(data).digest("hex");
}
/**
 * Streams a file from disk and resolves with its hex-encoded SHA-1 digest.
 * Rejects if the file cannot be read.
 * @param {string} filePath
 * @returns {Promise<string>} 40-character lowercase hex digest.
 */
function sha1File(filePath) {
  return new Promise((onDone, onError) => {
    const digest = crypto.createHash("sha1");
    fs.createReadStream(filePath)
      .on("data", (chunk) => digest.update(chunk))
      .on("end", () => onDone(digest.digest("hex")))
      .on("error", onError);
  });
}
/**
 * Generates a random 128-bit UUID and encodes it as base62 ([0-9a-zA-Z]),
 * producing a compact identifier of at most 22 characters.
 * @returns {string}
 */
function randomUUIDBase62() {
  const ALPHABET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
  let value = BigInt("0x" + crypto.randomUUID().replaceAll("-", ""));
  if (value === 0n)
    return ALPHABET[0];
  let encoded = "";
  while (value > 0n) {
    encoded = ALPHABET[Number(value % 62n)] + encoded;
    value /= 62n;
  }
  return encoded;
}
// src/gitWorktree.ts
var log = debug("fk:git");
var execAsync = promisify(exec);
// Matches a Windows drive-letter path using backslashes, e.g. "C:\foo".
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
// Matches a drive-letter path that already uses forward slashes, e.g. "C:/foo".
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
/**
 * Converts a native absolute path into a POSIX-style absolute path.
 * Windows drive paths like "C:\a\b" become "/C/a/b"; POSIX paths pass through.
 * @param {string} absolutePath
 * @returns {string}
 */
function toPosixAbsolutePath(absolutePath) {
  let result = absolutePath;
  if (IS_WIN32_PATH.test(result))
    result = result.split(win32Path.sep).join(posixPath.sep);
  if (IS_ALMOST_POSIX_PATH.test(result))
    result = "/" + result[0] + result.substring(2);
  return result;
}
function toNativeAbsolutePath(posix) {
if (process.platform !== "win32")
return posix;
assert(posix.startsWith("/"), "The path must be absolute");
const m = posix.match(/^\/([a-zA-Z])(\/.*)?$/);
assert(m, `Invalid POSIX path: ${posix}`);
const drive = m[1];
const rest = (m[2] ?? "").split(posixPath.sep).join(win32Path.sep);
return drive.toUpperCase() + ":" + rest;
}
/**
 * Thin wrapper around a git checkout: resolves the repository root,
 * translates between native and git-relative POSIX paths, and queries
 * commit metadata.
 */
var GitWorktree = class _GitWorktree {
  // POSIX-style mirror of `_gitRoot`, used for all path arithmetic.
  _posixGitRoot;
  constructor(_gitRoot) {
    this._gitRoot = _gitRoot;
    this._posixGitRoot = toPosixAbsolutePath(this._gitRoot);
  }
  /**
   * Builds a GitWorktree for the repository containing the given path.
   *
   * @param {string} somePathInsideGitRepo - Any path within a git checkout;
   *   `git rev-parse --show-toplevel` is used to locate the root.
   * @returns {GitWorktree}
   * @throws {Error} If the path is not inside a git repository or git fails.
   *
   * @example
   * ```typescript
   * const worktree = GitWorktree.create('./src/my-test.ts');
   * const gitRoot = worktree.rootPath();
   * ```
   */
  static create(somePathInsideGitRepo) {
    const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
      cwd: somePathInsideGitRepo,
      encoding: "utf-8"
    });
    assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
    return new _GitWorktree(root);
  }
  /**
   * @returns {string} Native absolute path of the repository root
   *   (platform-specific separators).
   */
  rootPath() {
    return this._gitRoot;
  }
  /**
   * @returns {FlakinessReport.CommitId} Full 40-character SHA-1 of the HEAD commit.
   * @throws {Error} If the git command fails.
   */
  headCommitId() {
    const sha = shell(`git`, ["rev-parse", "HEAD"], {
      cwd: this._gitRoot,
      encoding: "utf-8"
    });
    assert(sha, `FAILED: git rev-parse HEAD @ ${this._gitRoot}`);
    return sha.trim();
  }
  /**
   * Converts a native absolute path (Windows or POSIX) to a POSIX path
   * relative to the git root — the form Flakiness reports require.
   * Returns an empty string for the git root itself.
   * @param {string} absolutePath
   * @returns {FlakinessReport.GitFilePath} e.g. `src/test.ts`
   */
  gitPath(absolutePath) {
    return posixPath.relative(this._posixGitRoot, toPosixAbsolutePath(absolutePath));
  }
  /**
   * Converts a git-relative POSIX path back to a native absolute path for
   * the current platform. Inverse of `gitPath()`.
   * @param {FlakinessReport.GitFilePath} relativePath e.g. `src/test.ts`
   * @returns {string} Native absolute path.
   */
  absolutePath(relativePath) {
    return toNativeAbsolutePath(posixPath.join(this._posixGitRoot, relativePath));
  }
  /**
   * Lists up to `count` commits reachable from HEAD, newest first. Each entry
   * carries `commitId`, `timestamp` (ms since epoch), `author`, `message`,
   * and `parents`. Note: shallow CI checkouts may yield fewer commits.
   * @param {number} count
   * @returns {Promise<GitCommit[]>}
   */
  async listCommits(count) {
    return await listCommits(this._gitRoot, "HEAD", count);
  }
};
/**
 * Reads up to `count` commits reachable from `head` via `git log`.
 * Returns an empty array when git fails (e.g. not a repository) or when
 * there is no output.
 * @param {string} gitRoot - Directory of the git checkout.
 * @param {string} head - Ref to start walking from (e.g. "HEAD").
 * @param {number} count - Maximum number of commits to return.
 * @returns {Promise<GitCommit[]>} Newest first; timestamps in milliseconds.
 */
async function listCommits(gitRoot, head, count) {
  // NUL-separated records (-z) keep subjects from colliding with newlines;
  // "|~|" separates fields within one record.
  const FIELD_SEPARATOR = "|~|";
  const RECORD_SEPARATOR = "\0";
  // Field order: hash, commit timestamp (unix seconds), author name,
  // subject line, space-separated parent hashes.
  const fields = ["%H", "%ct", "%an", "%s", "%P"];
  const prettyFormat = fields.join(FIELD_SEPARATOR);
  const command = `git log ${head} -n ${count} --pretty=format:"${prettyFormat}" -z`;
  try {
    const { stdout } = await execAsync(command, { cwd: gitRoot });
    if (!stdout)
      return [];
    const records = stdout.trim().split(RECORD_SEPARATOR).filter((record) => record);
    return records.map((record) => {
      const [commitId, timestampStr, author, message, parentsStr] = record.split(FIELD_SEPARATOR);
      return {
        commitId,
        timestamp: parseInt(timestampStr, 10) * 1e3,
        author,
        message,
        parents: parentsStr ? parentsStr.split(" ").filter((p) => p) : [],
        walkIndex: 0
      };
    });
  } catch (error) {
    log(`Failed to list commits for repository at ${gitRoot}:`, error);
    return [];
  }
}
// src/reportUtils.ts
// Bundler-generated export map: `__export` (a bundler helper defined earlier
// in this file — NOTE(review): not visible in this chunk, presumably esbuild's
// getter-installing helper) re-exports the reportUtils public API lazily.
var reportUtils_exports = {};
__export(reportUtils_exports, {
createDataAttachment: () => createDataAttachment,
createEnvironment: () => createEnvironment,
createFileAttachment: () => createFileAttachment,
createTestStepSnippetsInplace: () => createTestStepSnippetsInplace,
normalizeReport: () => normalizeReport,
stripAnsi: () => stripAnsi,
visitTests: () => visitTests
});
// src/createEnvironment.ts
import fs2 from "fs";
import os from "os";
function readLinuxOSRelease() {
const osReleaseText = fs.readFileSync("/etc/os-release", "utf-8");
const osReleaseText = fs2.readFileSync("/etc/os-release", "utf-8");
return new Map(osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {

@@ -91,129 +480,24 @@ line = line.trim();

import { codeFrameColumns } from "@babel/code-frame";
import fs2 from "fs";
import { posix as posixPath } from "path";
import fs3 from "fs";
// src/reportUtils.ts
import { Multimap } from "@flakiness/shared/common/multimap.js";
import { xxHash, xxHashObject } from "@flakiness/shared/common/utils.js";
var ReportUtils;
((ReportUtils2) => {
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
// src/visitTests.ts
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
ReportUtils2.visitTests = visitTests;
// Deduplicates a report: merges suites/tests that hash to the same identity,
// drops environments no attempt references, and reindexes environmentIdx.
// NOTE: the helpers below close over `envIdToIndex`, which is only assigned
// near the end of this function — they must not be invoked before that point.
function normalizeReport(report) {
// Identity -> object maps built during the first (collection) pass.
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
// Reverse maps: object -> computed identity.
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
// Parent suite id -> child suites / tests (possibly from duplicate suites).
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
// Environments actually referenced by at least one attempt.
const usedEnvIds = /* @__PURE__ */ new Set();
// Collection pass: index tests under their (deduplicated) suite id and
// record which environments their attempts reference.
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
// Collection pass for suites; suite ids incorporate the parent suite id, so
// identical suites under different parents stay distinct.
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
// Rebuild pass: merge all tests sharing an id into one entry, concatenating
// tags and attempts, and remap environmentIdx into the compacted env list.
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
// Rebuild pass: collapse duplicate suites and recurse into their children.
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
// Top-level tests live under the sentinel "suiteless" suite id.
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
// Compact the environment list to only the used ones; the index map is what
// transformTests consults when rewriting attempt.environmentIdx.
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
ReportUtils2.normalizeReport = normalizeReport;
function computeEnvId(env) {
return xxHashObject(env);
}
function computeSuiteId(suite, parentSuiteId) {
return xxHash([
parentSuiteId ?? "",
suite.type,
suite.location?.file ?? "",
suite.title
]);
}
function computeTestId(test, suiteId) {
return xxHash([
suiteId,
test.location?.file ?? "",
test.title
]);
}
})(ReportUtils || (ReportUtils = {}));
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
}
// src/createTestStepSnippets.ts
function createTestStepSnippetsInplace(report, gitRoot) {
function createTestStepSnippetsInplace(worktree, report) {
const allSteps = /* @__PURE__ */ new Map();
ReportUtils.visitTests(report, (test) => {
visitTests(report, (test) => {
for (const attempt of test.attempts) {

@@ -232,6 +516,6 @@ for (const step of attempt.steps ?? []) {

});
for (const [gitFilePath2, steps] of allSteps) {
for (const [gitFilePath, steps] of allSteps) {
let source;
try {
source = fs2.readFileSync(posixPath.join(gitRoot, gitFilePath2), "utf-8");
source = fs3.readFileSync(worktree.absolutePath(gitFilePath), "utf-8");
} catch (e) {

@@ -258,240 +542,126 @@ continue;

// src/flakinessProjectConfig.ts
import fs3 from "fs";
import path from "path";
// src/git.ts
import assert from "assert";
// src/pathutils.ts
var pathutils_exports = {};
__export(pathutils_exports, {
gitFilePath: () => gitFilePath,
normalizePath: () => normalizePath
});
import { posix as posixPath2, win32 as win32Path } from "path";
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
function normalizePath(aPath) {
if (IS_WIN32_PATH.test(aPath)) {
aPath = aPath.split(win32Path.sep).join(posixPath2.sep);
// src/normalizeReport.ts
import stableObjectHash from "stable-hash";
var Multimap = class {
_map = /* @__PURE__ */ new Map();
set(key, value) {
const set = this._map.get(key) ?? /* @__PURE__ */ new Set();
this._map.set(key, set);
set.add(value);
}
if (IS_ALMOST_POSIX_PATH.test(aPath))
return "/" + aPath[0] + aPath.substring(2);
return aPath;
}
/**
 * Computes the path of `absolutePath` relative to `gitRoot`, using POSIX
 * separators. Both arguments are expected to be POSIX-style paths.
 * @param {string} gitRoot
 * @param {string} absolutePath
 * @returns {string} e.g. "src/test.ts"
 */
function gitFilePath(gitRoot, absolutePath) {
  return posixPath2.relative(gitRoot, absolutePath);
}
// src/utils.ts
import { spawnSync as spawnSync2 } from "child_process";
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
getAll(key) {
return Array.from(this._map.get(key) ?? []);
}
};
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
return await job();
}
// Matches ANSI/VT escape sequences (CSI + OSC forms), global flag for
// whole-string removal.
var ansiRegex = new RegExp(
  "[\\u001B\\u009B][[\\]()#;?]*" +
  "(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)" +
  "|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))",
  "g"
);
/**
 * Removes all ANSI escape sequences (colors, cursor movement, OSC titles)
 * from a string.
 * @param {string} str
 * @returns {string}
 */
function stripAnsi(str) {
  return str.replaceAll(ansiRegex, "");
}
function shell2(command, args, options) {
try {
const result = spawnSync2(command, args, { encoding: "utf-8", ...options });
if (result.status !== 0) {
return void 0;
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
return result.stdout.trim();
} catch (e) {
console.error(e);
return void 0;
visitTests2(suite.tests ?? [], suiteId);
}
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
// src/git.ts
function gitCommitInfo(gitRepo) {
const sha = shell2(`git`, ["rev-parse", "HEAD"], {
cwd: gitRepo,
encoding: "utf-8"
function computeEnvId(env) {
return stableObjectHash(env);
}
function computeSuiteId(suite, parentSuiteId) {
return stableObjectHash({
parentSuiteId: parentSuiteId ?? "",
type: suite.type,
file: suite.location?.file ?? "",
title: suite.title
});
assert(sha, `FAILED: git rev-parse HEAD @ ${gitRepo}`);
return sha.trim();
}
function computeGitRoot(somePathInsideGitRepo) {
const root = shell2(`git`, ["rev-parse", "--show-toplevel"], {
cwd: somePathInsideGitRepo,
encoding: "utf-8"
function computeTestId(test, suiteId) {
return stableObjectHash({
suiteId,
file: test.location?.file ?? "",
title: test.title
});
assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
return normalizePath(root);
}
// src/flakinessProjectConfig.ts
function createConfigPath(dir) {
return path.join(dir, ".flakiness", "config.json");
// src/stripAnsi.ts
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
function stripAnsi(str) {
return str.replace(ansiRegex, "");
}
var gConfigPath;
function ensureConfigPath() {
if (!gConfigPath)
gConfigPath = computeConfigPath();
return gConfigPath;
}
function computeConfigPath() {
for (let p = process.cwd(); p !== path.resolve(p, ".."); p = path.resolve(p, "..")) {
const configPath = createConfigPath(p);
if (fs3.existsSync(configPath))
return configPath;
}
try {
const gitRoot = computeGitRoot(process.cwd());
return createConfigPath(gitRoot);
} catch (e) {
return createConfigPath(process.cwd());
}
}
// Read/write accessor for the project's `.flakiness/config.json` file.
var FlakinessProjectConfig = class _FlakinessProjectConfig {
// `_configPath`: absolute location of config.json; `_config`: parsed JSON contents.
constructor(_configPath, _config) {
this._configPath = _configPath;
this._config = _config;
}
// Loads the config from disk; a missing or unreadable file yields an empty
// config bound to the resolved path (read errors are deliberately swallowed).
static async load() {
const configPath = ensureConfigPath();
const data = await fs3.promises.readFile(configPath, "utf-8").catch((e) => void 0);
const json = data ? JSON.parse(data) : {};
return new _FlakinessProjectConfig(configPath, json);
}
// Creates an in-memory empty config at the resolved path without touching disk.
static createEmpty() {
return new _FlakinessProjectConfig(ensureConfigPath(), {});
}
// Absolute path of the backing config.json file.
path() {
return this._configPath;
}
// Public project id, or undefined when not yet configured.
projectPublicId() {
return this._config.projectPublicId;
}
// Viewer URL override, falling back to the hosted report viewer.
reportViewerUrl() {
return this._config.customReportViewerUrl ?? "https://report.flakiness.io";
}
// Sets or (for a falsy argument) removes the custom viewer URL.
setCustomReportViewerUrl(url) {
if (url)
this._config.customReportViewerUrl = url;
else
delete this._config.customReportViewerUrl;
}
setProjectPublicId(projectId) {
this._config.projectPublicId = projectId;
}
// Persists the config, creating the `.flakiness` directory if needed.
async save() {
await fs3.promises.mkdir(path.dirname(this._configPath), { recursive: true });
await fs3.promises.writeFile(this._configPath, JSON.stringify(this._config, null, 2));
}
};
// src/reportUploader.ts
import { compressTextAsync, compressTextSync } from "@flakiness/shared/node/compression.js";
// src/uploadReport.ts
import assert2 from "assert";
import crypto from "crypto";
import fs4 from "fs";
import { URL as URL2 } from "url";
// src/httpUtils.ts
import http from "http";
import https from "https";
var FLAKINESS_DBG2 = !!process.env.FLAKINESS_DBG;
var httpUtils;
((httpUtils2) => {
function createRequest({ url, method = "get", headers = {} }) {
let resolve2;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve2 = a;
reject = b;
});
const protocol = url.startsWith("https") ? https : http;
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve2(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
// src/reportUploader.ts
function sha1File(filePath) {
return new Promise((resolve2, reject) => {
const hash = crypto.createHash("sha1");
const stream = fs4.createReadStream(filePath);
stream.on("data", (chunk) => {
hash.update(chunk);
});
stream.on("end", () => {
resolve2(hash.digest("hex"));
});
stream.on("error", (err) => {
reject(err);
});
});
}
async function createFileAttachment(contentType, filePath) {
return {
type: "file",
contentType,

@@ -503,46 +673,39 @@ id: await sha1File(filePath),

async function createDataAttachment(contentType, data) {
const hash = crypto.createHash("sha1");
hash.update(data);
const id = hash.digest("hex");
return {
type: "buffer",
contentType,
id,
id: sha1Text(data),
body: data
};
}
var ReportUploader = class _ReportUploader {
static optionsFromEnv(overrides) {
const flakinessAccessToken = overrides?.flakinessAccessToken ?? process.env["FLAKINESS_ACCESS_TOKEN"];
if (!flakinessAccessToken)
return void 0;
const flakinessEndpoint = overrides?.flakinessEndpoint ?? process.env["FLAKINESS_ENDPOINT"] ?? "https://flakiness.io";
return { flakinessAccessToken, flakinessEndpoint };
async function uploadReport(report, attachments, options) {
const flakinessAccessToken = options?.flakinessAccessToken ?? process.env["FLAKINESS_ACCESS_TOKEN"];
const flakinessEndpoint = options?.flakinessEndpoint ?? process.env["FLAKINESS_ENDPOINT"] ?? "https://flakiness.io";
const logger = options?.logger ?? console;
if (!flakinessAccessToken) {
const reason = "No FLAKINESS_ACCESS_TOKEN found";
if (process.env.CI)
logger.warn(`[flakiness.io] \u26A0 Skipping upload: ${reason}`);
return { status: "skipped", reason };
}
static async upload(options) {
const uploaderOptions = _ReportUploader.optionsFromEnv(options);
if (!uploaderOptions) {
if (process.env.CI)
options.log?.(`[flakiness.io] Uploading skipped since no FLAKINESS_ACCESS_TOKEN is specified`);
return void 0;
}
const uploader = new _ReportUploader(uploaderOptions);
const upload = uploader.createUpload(options.report, options.attachments);
try {
const upload = new ReportUpload(report, attachments, { flakinessAccessToken, flakinessEndpoint });
const uploadResult = await upload.upload();
if (!uploadResult.success) {
options.log?.(`[flakiness.io] X Failed to upload to ${uploaderOptions.flakinessEndpoint}: ${uploadResult.message}`);
return { errorMessage: uploadResult.message };
const errorMessage = uploadResult.message || "Unknown upload error";
logger.error(`[flakiness.io] \u2715 Failed to upload: ${errorMessage}`);
if (options?.throwOnFailure)
throw new Error(`Flakiness upload failed: ${errorMessage}`);
return { status: "failed", error: errorMessage };
}
options.log?.(`[flakiness.io] \u2713 Report uploaded ${uploadResult.message ?? ""}`);
if (uploadResult.reportUrl)
options.log?.(`[flakiness.io] ${uploadResult.reportUrl}`);
logger.log(`[flakiness.io] \u2713 Uploaded to ${uploadResult.reportUrl}`);
return { status: "success", reportUrl: uploadResult.reportUrl };
} catch (e) {
const errorMessage = e.message || String(e);
logger.error(`[flakiness.io] \u2715 Unexpected error during upload: ${errorMessage}`);
if (options?.throwOnFailure)
throw e;
return { status: "failed", error: errorMessage };
}
_options;
constructor(options) {
this._options = options;
}
createUpload(report, attachments) {
const upload = new ReportUpload(this._options, report, attachments);
return upload;
}
};
}
var HTTP_BACKOFF = [100, 500, 1e3, 1e3, 1e3, 1e3];

@@ -553,3 +716,3 @@ var ReportUpload = class {

_options;
constructor(options, report, attachments) {
constructor(report, attachments, options) {
this._options = options;

@@ -580,3 +743,3 @@ this._report = report;

}
async upload(options) {
async upload() {
const response = await this._api("/api/upload/start", this._options.flakinessAccessToken);

@@ -593,3 +756,3 @@ if (response?.error || !response.result)

await Promise.all([
this._uploadReport(JSON.stringify(this._report), response.result.presignedReportUrl, options?.syncCompression ?? false),
this._uploadReport(JSON.stringify(this._report), response.result.presignedReportUrl),
...this._attachments.map((attachment) => {

@@ -599,3 +762,3 @@ const uploadURL = attachments.get(attachment.id);

throw new Error("Internal error: missing upload URL for attachment!");
return this._uploadAttachment(attachment, uploadURL, options?.syncCompression ?? false);
return this._uploadAttachment(attachment, uploadURL);
})

@@ -606,4 +769,4 @@ ]);

}
async _uploadReport(data, uploadUrl, syncCompression) {
const compressed = syncCompression ? compressTextSync(data) : await compressTextAsync(data);
async _uploadReport(data, uploadUrl) {
const compressed = await compressTextAsync(data);
const headers = {

@@ -625,7 +788,6 @@ "Content-Type": "application/json",

}
async _uploadAttachment(attachment, uploadUrl, syncCompression) {
async _uploadAttachment(attachment, uploadUrl) {
const mimeType = attachment.contentType.toLocaleLowerCase().trim();
const compressable = mimeType.startsWith("text/") || mimeType.endsWith("+json") || mimeType.endsWith("+text") || mimeType.endsWith("+xml");
if (!compressable && attachment.path) {
const attachmentPath = attachment.path;
if (!compressable && attachment.type === "file") {
await retryWithBackoff(async () => {

@@ -636,7 +798,7 @@ const { request, responseDataPromise } = httpUtils.createRequest({

"Content-Type": attachment.contentType,
"Content-Length": (await fs4.promises.stat(attachmentPath)).size + ""
"Content-Length": (await fs4.promises.stat(attachment.path)).size + ""
},
method: "put"
});
fs4.createReadStream(attachmentPath).pipe(request);
fs4.createReadStream(attachment.path).pipe(request);
await responseDataPromise;

@@ -646,7 +808,7 @@ }, HTTP_BACKOFF);

}
let buffer = attachment.body ? attachment.body : attachment.path ? await fs4.promises.readFile(attachment.path) : void 0;
let buffer = attachment.type === "buffer" ? attachment.body : await fs4.promises.readFile(attachment.path);
assert2(buffer);
const encoding = compressable ? "br" : void 0;
if (compressable)
buffer = syncCompression ? compressTextSync(buffer) : await compressTextAsync(buffer);
buffer = await compressTextAsync(buffer);
const headers = {

@@ -670,13 +832,241 @@ "Content-Type": attachment.contentType,

// src/systemUtilizationSampler.ts
import { spawnSync as spawnSync2 } from "child_process";
import os2 from "os";
/**
 * Estimates available memory on macOS (in bytes) by parsing `vm_stat`:
 * (free + inactive + speculative pages) * page size.
 * Returns 0 and warns when the output cannot be parsed.
 * @returns {number} Available memory in bytes, or 0 on parse failure.
 */
function getAvailableMemMacOS() {
  const lines = spawnSync2("vm_stat", { encoding: "utf8" }).stdout.trim().split("\n");
  // Fix: the original indexed `.match(...)[1]` unconditionally, which throws
  // a TypeError (bypassing the NaN guard below) whenever the header line is
  // missing or malformed. Guard the match so the fallback path is reachable.
  const pageSizeMatch = (lines[0] ?? "").match(/page size of (\d+) bytes/);
  const pageSize = pageSizeMatch ? parseInt(pageSizeMatch[1], 10) : NaN;
  if (isNaN(pageSize)) {
    console.warn("[flakiness.io] Error detecting macos page size");
    return 0;
  }
  let totalFree = 0;
  for (const line of lines) {
    if (/Pages (free|inactive|speculative):/.test(line)) {
      const match = line.match(/\d+/);
      if (match)
        totalFree += parseInt(match[0], 10);
    }
  }
  return totalFree * pageSize;
}
/**
 * Captures a point-in-time snapshot of aggregate CPU tick counters and free
 * memory. Two snapshots can be diffed via `toFKUtilization`.
 * @returns {{idleTicks: number, totalTicks: number, timestamp: number, freeBytes: number}}
 */
function getSystemUtilization() {
  let idleTicks = 0;
  let totalTicks = 0;
  for (const { times } of os2.cpus()) {
    idleTicks += times.idle;
    totalTicks += times.user + times.nice + times.sys + times.irq + times.idle;
  }
  // macOS `os.freemem()` undercounts; use vm_stat-based estimation there.
  const freeBytes = os2.platform() === "darwin" ? getAvailableMemMacOS() : os2.freemem();
  return { idleTicks, totalTicks, timestamp: Date.now(), freeBytes };
}
/**
 * Converts two consecutive utilization snapshots into a Flakiness report
 * sample: CPU and memory utilization as percentages floored to two decimal
 * places, plus the elapsed wall time between the snapshots.
 * @param {{idleTicks: number, totalTicks: number, timestamp: number, freeBytes: number}} sample - Newer snapshot.
 * @param {{idleTicks: number, totalTicks: number, timestamp: number, freeBytes: number}} previous - Older snapshot.
 * @returns {{cpuUtilization: number, memoryUtilization: number, dts: number}}
 */
function toFKUtilization(sample, previous) {
  const deltaIdle = sample.idleTicks - previous.idleTicks;
  const deltaTotal = sample.totalTicks - previous.totalTicks;
  const cpuUtilization = Math.floor((1 - deltaIdle / deltaTotal) * 1e4) / 100;
  const memoryUtilization = Math.floor((1 - sample.freeBytes / os2.totalmem()) * 1e4) / 100;
  return {
    cpuUtilization,
    memoryUtilization,
    dts: sample.timestamp - previous.timestamp
  };
}
/**
 * Periodically samples system CPU/memory utilization in the background.
 * Construction starts sampling immediately (first sample after 50ms, then
 * once per second); call `dispose()` to stop.
 */
var SystemUtilizationSampler = class {
  /**
   * Accumulated utilization data, suitable for direct inclusion in a
   * Flakiness report:
   * - `samples` - per-interval CPU/memory percentages and durations
   * - `startTimestamp` - when sampling began
   * - `totalMemoryBytes` - total system memory in bytes
   * Remains readable after `dispose()`.
   */
  result;
  _lastSample = getSystemUtilization();
  _timer;
  constructor() {
    this.result = {
      samples: [],
      startTimestamp: this._lastSample.timestamp,
      totalMemoryBytes: os2.totalmem()
    };
    this._timer = setTimeout(() => this._addSample(), 50);
  }
  // Diffs the current snapshot against the previous one, records the sample,
  // and schedules the next tick.
  _addSample() {
    const current = getSystemUtilization();
    this.result.samples.push(toFKUtilization(current, this._lastSample));
    this._lastSample = current;
    this._timer = setTimeout(() => this._addSample(), 1e3);
  }
  /**
   * Stops the sampling timer to release resources; `result` stays accessible.
   */
  dispose() {
    clearTimeout(this._timer);
  }
};
// src/showReport.ts
import { randomUUIDBase62 } from "@flakiness/shared/node/nodeutils.js";
import chalk from "chalk";
import open from "open";
// src/flakinessProjectConfig.ts
import fs5 from "fs";
import path from "path";
// The project config always lives at <dir>/.flakiness/config.json.
function createConfigPath(dir) {
  const configDir = path.join(dir, ".flakiness");
  return path.join(configDir, "config.json");
}
var gConfigPath;
// Lazily computes the config path once and memoizes it for the process lifetime.
function ensureConfigPath() {
  if (gConfigPath)
    return gConfigPath;
  gConfigPath = computeConfigPath();
  return gConfigPath;
}
// Walks up from cwd looking for an existing `.flakiness/config.json`; when none
// exists, picks where one SHOULD live: the git root if inside a repo, else cwd.
function computeConfigPath() {
  let dir = process.cwd();
  let parent = path.resolve(dir, "..");
  while (dir !== parent) {
    const candidate = createConfigPath(dir);
    if (fs5.existsSync(candidate))
      return candidate;
    dir = parent;
    parent = path.resolve(dir, "..");
  }
  try {
    const worktree = GitWorktree.create(process.cwd());
    return createConfigPath(worktree.rootPath());
  } catch (e) {
    // Not a git repository; default to the current directory.
    return createConfigPath(process.cwd());
  }
}
var FlakinessProjectConfig = class _FlakinessProjectConfig {
  /**
   * @param {string} _configPath - Absolute path to `.flakiness/config.json`.
   * @param {object} _config - Parsed configuration values (may be empty).
   */
  constructor(_configPath, _config) {
    this._configPath = _configPath;
    this._config = _config;
  }
  /**
   * Loads the Flakiness project configuration from disk.
   *
   * Searches for an existing `.flakiness/config.json` starting from the current
   * working directory and walking up the directory tree. If no config exists,
   * it determines the appropriate location (git root or cwd) for future saves.
   *
   * @returns {Promise<FlakinessProjectConfig>} Resolves to a config instance;
   * if no config file exists, the instance has default/empty values.
   *
   * @example
   * ```typescript
   * const config = await FlakinessProjectConfig.load();
   * const projectId = config.projectPublicId();
   * ```
   */
  static async load() {
    const configPath = ensureConfigPath();
    // A missing file is expected on first run; fall back to an empty config.
    const data = await fs5.promises.readFile(configPath, "utf-8").catch((e) => void 0);
    const json = data ? JSON.parse(data) : {};
    return new _FlakinessProjectConfig(configPath, json);
  }
  /**
   * Creates a new empty configuration at the default location.
   * Call `save()` to persist it to disk.
   *
   * @returns {FlakinessProjectConfig} A new empty configuration instance.
   */
  static createEmpty() {
    return new _FlakinessProjectConfig(ensureConfigPath(), {});
  }
  /**
   * Returns the absolute path to the configuration file.
   *
   * @returns {string} Absolute path to `.flakiness/config.json`.
   */
  path() {
    return this._configPath;
  }
  /**
   * Returns the project's public ID used to associate reports with a
   * Flakiness.io project, if configured.
   *
   * @returns {string | undefined} Project public ID, or `undefined` if not set.
   */
  projectPublicId() {
    return this._config.projectPublicId;
  }
  /**
   * Returns the report viewer URL, either custom or the default
   * `https://report.flakiness.io`.
   *
   * @returns {string} Report viewer URL.
   */
  reportViewerUrl() {
    return this._config.customReportViewerUrl ?? "https://report.flakiness.io";
  }
  /**
   * Sets or clears the custom report viewer URL.
   *
   * @param {string | undefined} url - Custom URL, or `undefined` to clear
   * and fall back to the default.
   */
  setCustomReportViewerUrl(url) {
    if (url)
      this._config.customReportViewerUrl = url;
    else
      delete this._config.customReportViewerUrl;
  }
  /**
   * Sets or clears the project's public ID.
   *
   * @param {string | undefined} projectId - Project public ID, or `undefined`
   * to clear it.
   */
  setProjectPublicId(projectId) {
    // Delete the key when clearing (instead of storing `undefined`) so the
    // behavior mirrors setCustomReportViewerUrl and the in-memory object
    // matches what JSON.stringify would round-trip.
    if (projectId !== undefined)
      this._config.projectPublicId = projectId;
    else
      delete this._config.projectPublicId;
  }
  /**
   * Saves the configuration to `.flakiness/config.json`, creating the
   * `.flakiness` directory if needed.
   *
   * @returns {Promise<void>} Resolves when the file has been written.
   * @throws {Error} If directories cannot be created or the file written.
   */
  async save() {
    await fs5.promises.mkdir(path.dirname(this._configPath), { recursive: true });
    await fs5.promises.writeFile(this._configPath, JSON.stringify(this._config, null, 2));
  }
};
// src/staticServer.ts
import debug from "debug";
import * as fs5 from "fs";
import debug2 from "debug";
import * as fs6 from "fs";
import * as http2 from "http";
import * as path2 from "path";
var log = debug("fk:static_server");
var log2 = debug2("fk:static_server");
var StaticServer = class {

@@ -732,3 +1122,3 @@ _server;

await result;
log('Serving "%s" on "%s"', this._absoluteFolderPath, this.address());
log2('Serving "%s" on "%s"', this._absoluteFolderPath, this.address());
}

@@ -746,3 +1136,3 @@ async start(port, host = "127.0.0.1") {

throw err;
log("Port %d is busy (EADDRINUSE). Trying next port...", port);
log2("Port %d is busy (EADDRINUSE). Trying next port...", port);
port = port + 1;

@@ -752,3 +1142,3 @@ if (port > 65535)

}
log("All sequential ports busy. Falling back to random port.");
log2("All sequential ports busy. Falling back to random port.");
await this._startServer(0, host);

@@ -761,6 +1151,6 @@ return this.address();

if (err) {
log("Error stopping server: %o", err);
log2("Error stopping server: %o", err);
reject(err);
} else {
log("Server stopped.");
log2("Server stopped.");
resolve2();

@@ -774,3 +1164,3 @@ }

res.end(text);
log(`[${code}] ${req.method} ${req.url}`);
log2(`[${code}] ${req.method} ${req.url}`);
}

@@ -793,5 +1183,5 @@ _handleRequest(req, res) {

}
req.on("aborted", () => log(`ABORTED ${req.method} ${req.url}`));
req.on("aborted", () => log2(`ABORTED ${req.method} ${req.url}`));
res.on("close", () => {
if (!res.headersSent) log(`CLOSED BEFORE SEND ${req.method} ${req.url}`);
if (!res.headersSent) log2(`CLOSED BEFORE SEND ${req.method} ${req.url}`);
});

@@ -809,3 +1199,3 @@ if (!url || !url.startsWith(this._pathPrefix)) {

}
fs5.stat(filePath, (err, stats) => {
fs6.stat(filePath, (err, stats) => {
if (err || !stats.isFile()) {

@@ -818,7 +1208,7 @@ this._errorResponse(req, res, 404, "File Not Found");

res.writeHead(200, { "Content-Type": contentType });
log(`[200] ${req.method} ${req.url} -> ${filePath}`);
const readStream = fs5.createReadStream(filePath);
log2(`[200] ${req.method} ${req.url} -> ${filePath}`);
const readStream = fs6.createReadStream(filePath);
readStream.pipe(res);
readStream.on("error", (err2) => {
log("Stream error: %o", err2);
log2("Stream error: %o", err2);
res.end();

@@ -851,92 +1241,22 @@ });

// src/systemUtilizationSampler.ts
import { spawnSync as spawnSync3 } from "child_process";
import os2 from "os";
/**
 * Estimates available memory on macOS by parsing `vm_stat` output:
 * (free + inactive + speculative pages) * page size, in bytes.
 * Returns 0 (with a warning) on any parse or spawn failure — memory sampling
 * is best-effort telemetry and must never throw.
 */
function getAvailableMemMacOS() {
  const vmStat = spawnSync3("vm_stat", { encoding: "utf8" });
  // When the command fails to spawn (e.g. not macOS), stdout is null —
  // guard before calling .trim()/.match() on it.
  if (typeof vmStat.stdout !== "string" || !vmStat.stdout.trim()) {
    console.warn("[flakiness.io] Error detecting macos page size");
    return 0;
  }
  const lines = vmStat.stdout.trim().split("\n");
  // First line looks like: "Mach Virtual Memory Statistics: (page size of 16384 bytes)".
  const pageSizeMatch = lines[0].match(/page size of (\d+) bytes/);
  const pageSize = pageSizeMatch ? parseInt(pageSizeMatch[1], 10) : NaN;
  if (isNaN(pageSize)) {
    console.warn("[flakiness.io] Error detecting macos page size");
    return 0;
  }
  let totalFree = 0;
  for (const line of lines) {
    if (/Pages (free|inactive|speculative):/.test(line)) {
      const match = line.match(/\d+/);
      if (match)
        totalFree += parseInt(match[0], 10);
    }
  }
  return totalFree * pageSize;
}
// Snapshot of cumulative CPU ticks (summed over all cores), wall-clock time,
// and free memory. On macOS, free memory comes from vm_stat page counts.
function getSystemUtilization() {
  const totals = os2.cpus().reduce(
    (acc, cpu) => {
      acc.idleTicks += cpu.times.idle;
      acc.totalTicks += cpu.times.user + cpu.times.nice + cpu.times.sys + cpu.times.irq + cpu.times.idle;
      return acc;
    },
    { idleTicks: 0, totalTicks: 0 }
  );
  return {
    idleTicks: totals.idleTicks,
    totalTicks: totals.totalTicks,
    timestamp: Date.now(),
    freeBytes: os2.platform() === "darwin" ? getAvailableMemMacOS() : os2.freemem()
  };
}
// Diffs two snapshots into CPU/memory utilization percentages (truncated to
// two decimals) and the elapsed time `dts` in milliseconds.
function toFKUtilization(sample, previous) {
  const pct = (fraction) => Math.floor(fraction * 1e4) / 100;
  return {
    cpuUtilization: pct(1 - (sample.idleTicks - previous.idleTicks) / (sample.totalTicks - previous.totalTicks)),
    memoryUtilization: pct(1 - sample.freeBytes / os2.totalmem()),
    dts: sample.timestamp - previous.timestamp
  };
}
var SystemUtilizationSampler = class {
  // Accumulated samples plus start timestamp and total memory, ready to embed
  // in a Flakiness report.
  result;
  // Previous snapshot; each new sample is expressed as a delta against it.
  _lastSample = getSystemUtilization();
  _timer;
  constructor() {
    this.result = {
      samples: [],
      startTimestamp: this._lastSample.timestamp,
      totalMemoryBytes: os2.totalmem()
    };
    // First sample after 50ms, then once a second until dispose().
    this._timer = setTimeout(() => this._addSample(), 50);
  }
  _addSample() {
    const current = getSystemUtilization();
    this.result.samples.push(toFKUtilization(current, this._lastSample));
    this._lastSample = current;
    this._timer = setTimeout(() => this._addSample(), 1e3);
  }
  // Stops the sampling timer; `result` stays readable afterwards.
  dispose() {
    clearTimeout(this._timer);
  }
};
// src/index.ts
// Infers the CI run URL; only GitHub Actions is auto-detected, and both
// environment variables must be non-empty.
function inferRunUrl() {
  const { GITHUB_REPOSITORY, GITHUB_RUN_ID } = process.env;
  if (!GITHUB_REPOSITORY || !GITHUB_RUN_ID)
    return void 0;
  return `https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}`;
}
async function saveReport(report, attachments, outputFolder) {
// src/writeReport.ts
import fs7 from "fs";
import path3 from "path";
async function writeReport(report, attachments, outputFolder) {
const reportPath = path3.join(outputFolder, "report.json");
const attachmentsFolder = path3.join(outputFolder, "attachments");
await fs6.promises.rm(outputFolder, { recursive: true, force: true });
await fs6.promises.mkdir(outputFolder, { recursive: true });
await fs6.promises.writeFile(reportPath, JSON.stringify(report), "utf-8");
await fs7.promises.rm(outputFolder, { recursive: true, force: true });
await fs7.promises.mkdir(outputFolder, { recursive: true });
await fs7.promises.writeFile(reportPath, JSON.stringify(report), "utf-8");
if (attachments.length)
await fs6.promises.mkdir(attachmentsFolder);
await fs7.promises.mkdir(attachmentsFolder);
const movedAttachments = [];
for (const attachment of attachments) {
const attachmentPath = path3.join(attachmentsFolder, attachment.id);
if (attachment.path)
await fs6.promises.cp(attachment.path, attachmentPath);
else if (attachment.body)
await fs6.promises.writeFile(attachmentPath, attachment.body);
if (attachment.type === "file")
await fs7.promises.cp(attachment.path, attachmentPath);
else if (attachment.type === "buffer")
await fs7.promises.writeFile(attachmentPath, attachment.body);
movedAttachments.push({
type: "file",
contentType: attachment.contentType,

@@ -950,19 +1270,12 @@ id: attachment.id,

export {
CIUtils,
FlakinessProjectConfig,
FlakinessReport,
ReportUploader,
ReportUtils,
GitWorktree,
reportUtils_exports as ReportUtils,
SystemUtilizationSampler,
computeGitRoot,
createDataAttachment,
createEnvironment,
createFileAttachment,
createTestStepSnippetsInplace,
gitCommitInfo,
inferRunUrl,
pathutils_exports as pathutils,
saveReport,
showReport,
stripAnsi
uploadReport,
writeReport
};
//# sourceMappingURL=index.js.map

@@ -1,123 +0,394 @@

// src/reportUtils.ts
import { Multimap } from "@flakiness/shared/common/multimap.js";
import { xxHash, xxHashObject } from "@flakiness/shared/common/utils.js";
var ReportUtils;
((ReportUtils2) => {
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
// src/createEnvironment.ts
import fs2 from "fs";
import os from "os";
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import crypto from "crypto";
import fs from "fs";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
}
ReportUtils2.visitTests = visitTests;
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
return await job();
}
var httpUtils;
((httpUtils2) => {
  /**
   * Creates an HTTP(S) request for `url` and returns the request object
   * (the caller must call `request.end()`) together with a promise for the
   * response body. The promise resolves with the concatenated body Buffer on
   * 2xx status codes and rejects otherwise.
   */
  function createRequest({ url, method = "get", headers = {} }) {
    let resolve;
    let reject;
    const responseDataPromise = new Promise((a, b) => {
      resolve = a;
      reject = b;
    });
    const protocol = url.startsWith("https") ? https : http;
    // Drop undefined header values so they are not serialized literally.
    headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
    const request = protocol.request(url, { method, headers }, (res) => {
      const chunks = [];
      res.on("data", (chunk) => chunks.push(chunk));
      res.on("end", () => {
        if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
          resolve(Buffer.concat(chunks));
        else
          reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
      });
      res.on("error", (error) => reject(error));
    });
    request.on("error", reject);
    return { request, responseDataPromise };
  }
  httpUtils2.createRequest = createRequest;
  /** GETs `url` with retry backoff; resolves with the raw response Buffer. */
  async function getBuffer(url, backoff) {
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url });
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.getBuffer = getBuffer;
  /** GETs `url` with retry backoff; resolves with the body as UTF-8 text. */
  async function getText(url, backoff) {
    const buffer = await getBuffer(url, backoff);
    return buffer.toString("utf-8");
  }
  httpUtils2.getText = getText;
  /**
   * GETs `url` and parses the body as JSON. Accepts an optional `backoff`
   * retry schedule, consistent with the other helpers in this namespace
   * (omitting it preserves the previous no-retry behavior).
   */
  async function getJSON(url, backoff) {
    return JSON.parse(await getText(url, backoff));
  }
  httpUtils2.getJSON = getJSON;
  /** POSTs `text` as application/json to `url`, with retry backoff. */
  async function postText(url, text, backoff) {
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(text) + ""
    };
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
      request.write(text);
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.postText = postText;
  /** POSTs `json` to `url` and parses the response body as JSON. */
  async function postJSON(url, json, backoff) {
    const buffer = await postText(url, JSON.stringify(json), backoff);
    return JSON.parse(buffer.toString("utf-8"));
  }
  httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {
try {
const result = spawnSync(command, args, { encoding: "utf-8", ...options });
if (result.status !== 0) {
return void 0;
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
return result.stdout.trim();
} catch (e) {
console.error(e);
return void 0;
}
}
// One-shot SHA-1 of an in-memory string/buffer, returned as lowercase hex.
function sha1Text(data) {
  return crypto.createHash("sha1").update(data).digest("hex");
}
// SHA-1 of a file's contents, streamed so large files are never fully
// buffered in memory. Resolves with lowercase hex; rejects on stream errors.
function sha1File(filePath) {
  return new Promise((resolve, reject) => {
    const hash = crypto.createHash("sha1");
    fs.createReadStream(filePath)
      .on("data", (chunk) => hash.update(chunk))
      .on("end", () => resolve(hash.digest("hex")))
      .on("error", reject);
  });
}
// src/createEnvironment.ts
/**
 * Parses /etc/os-release (KEY=VALUE lines) into a Map with everything
 * lowercased for case-insensitive lookups. Surrounding double quotes on
 * values are stripped.
 *
 * @throws If /etc/os-release does not exist (non-Linux systems).
 */
function readLinuxOSRelease() {
  const osReleaseText = fs2.readFileSync("/etc/os-release", "utf-8");
  const entries = osReleaseText.toLowerCase().split("\n").filter((line) => line.includes("=")).map((line) => {
    line = line.trim();
    // Split on the FIRST '=' only: quoted values may themselves contain '='
    // (e.g. VERSION="a=b"), which a plain split("=") would truncate.
    const eq = line.indexOf("=");
    const key = line.slice(0, eq);
    let value = line.slice(eq + 1);
    if (value.startsWith('"') && value.endsWith('"'))
      value = value.substring(1, value.length - 1);
    return [key, value];
  });
  return new Map(entries);
}
// OS descriptor for Linux: prefers /etc/os-release metadata, falling back to
// `uname` for the distribution name.
function osLinuxInfo() {
  const osReleaseMap = readLinuxOSRelease();
  return {
    name: osReleaseMap.get("name") ?? shell(`uname`),
    arch: shell(`uname`, [`-m`]),
    version: osReleaseMap.get("version_id")
  };
}
// OS descriptor for macOS: arch via `uname -m`, version via `sw_vers`.
function osDarwinInfo() {
  return {
    name: "macos",
    arch: shell(`uname`, [`-m`]),
    version: shell(`sw_vers`, [`-productVersion`])
  };
}
// OS descriptor for Windows, built entirely from process/os globals.
function osWinInfo() {
  return {
    name: "win",
    arch: process.arch,
    version: os.release()
  };
}
// Dispatches to the platform-specific OS descriptor; anything that is not
// darwin or win32 is treated as Linux.
function getOSInfo() {
  switch (process.platform) {
    case "darwin":
      return osDarwinInfo();
    case "win32":
      return osWinInfo();
    default:
      return osLinuxInfo();
  }
}
// Collects FK_ENV_* environment variables (prefix matched case-insensitively)
// into an object keyed by the lowercased suffix, with values trimmed and
// lowercased. E.g. FK_ENV_BROWSER=' Chrome ' -> { browser: 'chrome' }.
function extractEnvConfiguration() {
  const ENV_PREFIX = "FK_ENV_";
  const entries = Object.entries(process.env)
    .filter(([key]) => key.toUpperCase().startsWith(ENV_PREFIX.toUpperCase()))
    .map(([key, value]) => [key.substring(ENV_PREFIX.length).toLowerCase(), (value ?? "").trim().toLowerCase()]);
  return Object.fromEntries(entries);
}
// Builds a Flakiness environment record: OS metadata plus user-supplied data,
// where explicit options override values harvested from FK_ENV_* variables.
function createEnvironment(options) {
  const { name, arch, version } = getOSInfo();
  return {
    name: options.name,
    systemData: {
      osArch: arch,
      osName: name,
      osVersion: version
    },
    userSuppliedData: {
      ...extractEnvConfiguration(),
      ...(options.userSuppliedData ?? {})
    },
    opaqueData: options.opaqueData
  };
}
// src/createTestStepSnippets.ts
import { codeFrameColumns } from "@babel/code-frame";
import fs3 from "fs";
// src/visitTests.ts
// Depth-first walk over every test in a report. The visitor receives the test
// and the (live, mutated in place) chain of ancestor suites; top-level tests
// get an empty chain.
function visitTests(report, testVisitor) {
  const walkSuite = (suite, parents) => {
    parents.push(suite);
    for (const test of suite.tests ?? [])
      testVisitor(test, parents);
    for (const child of suite.suites ?? [])
      walkSuite(child, parents);
    parents.pop();
  };
  for (const test of report.tests ?? [])
    testVisitor(test, []);
  for (const suite of report.suites)
    walkSuite(suite, []);
}
// src/createTestStepSnippets.ts
function createTestStepSnippetsInplace(worktree, report) {
const allSteps = /* @__PURE__ */ new Map();
visitTests(report, (test) => {
for (const attempt of test.attempts) {
for (const step of attempt.steps ?? []) {
if (!step.location)
continue;
let fileSteps = allSteps.get(step.location.file);
if (!fileSteps) {
fileSteps = /* @__PURE__ */ new Set();
allSteps.set(step.location.file, fileSteps);
}
fileSteps.add(step);
}
}
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
});
for (const [gitFilePath, steps] of allSteps) {
let source;
try {
source = fs3.readFileSync(worktree.absolutePath(gitFilePath), "utf-8");
} catch (e) {
continue;
}
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
const lines = source.split("\n").length;
const highlighted = codeFrameColumns(source, { start: { line: lines, column: 1 } }, { highlightCode: true, linesAbove: lines, linesBelow: 0 });
const highlightedLines = highlighted.split("\n");
const lineWithArrow = highlightedLines[highlightedLines.length - 1];
for (const step of steps) {
if (!step.location)
continue;
if (step.location.line < 2 || step.location.line >= lines)
continue;
const snippetLines = highlightedLines.slice(step.location.line - 2, step.location.line + 1);
const index = lineWithArrow.indexOf("^");
const shiftedArrow = lineWithArrow.slice(0, index) + " ".repeat(step.location.column - 1) + lineWithArrow.slice(index);
snippetLines.splice(2, 0, shiftedArrow);
step.snippet = snippetLines.join("\n");
}
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
}
// src/normalizeReport.ts
import stableObjectHash from "stable-hash";
var Multimap = class {
  // key -> Set of values; Set both dedupes and preserves insertion order.
  _map = new Map();
  set(key, value) {
    let bucket = this._map.get(key);
    if (!bucket) {
      bucket = new Set();
      this._map.set(key, bucket);
    }
    bucket.add(value);
  }
  // Returns the values for `key` in insertion order; [] when absent.
  getAll(key) {
    const bucket = this._map.get(key);
    return bucket ? [...bucket] : [];
  }
};
function normalizeReport(report) {
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
ReportUtils2.normalizeReport = normalizeReport;
function computeEnvId(env) {
return xxHashObject(env);
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
function computeSuiteId(suite, parentSuiteId) {
return xxHash([
parentSuiteId ?? "",
suite.type,
suite.location?.file ?? "",
suite.title
]);
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
function computeTestId(test, suiteId) {
return xxHash([
suiteId,
test.location?.file ?? "",
test.title
]);
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
})(ReportUtils || (ReportUtils = {}));
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
// Environment identity: a stable content hash of the whole environment object,
// insensitive to property ordering (stable-hash).
function computeEnvId(env) {
  return stableObjectHash(env);
}
// Suite identity chains the parent's id with the suite's own distinguishing
// fields (type, source file, title), hashed order-independently.
function computeSuiteId(suite, parentSuiteId) {
  const identity = {
    parentSuiteId: parentSuiteId ?? "",
    type: suite.type,
    file: suite.location?.file ?? "",
    title: suite.title
  };
  return stableObjectHash(identity);
}
// Test identity: the enclosing suite's id plus the test's source file and title.
function computeTestId(test, suiteId) {
  const identity = {
    suiteId,
    file: test.location?.file ?? "",
    title: test.title
  };
  return stableObjectHash(identity);
}
// src/stripAnsi.ts
// Matches ANSI/VT100 escape sequences: CSI codes (colors, cursor movement)
// and BEL-terminated OSC sequences. Global flag so replace() removes all.
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
// Strips all ANSI escape sequences from terminal output.
function stripAnsi(str) {
  return str.replace(ansiRegex, "");
}
// src/uploadReport.ts
// Builds a file-backed attachment descriptor; the id is the SHA-1 of the
// file's contents, making attachments content-addressed.
async function createFileAttachment(contentType, filePath) {
  const id = await sha1File(filePath);
  return { type: "file", contentType, id, path: filePath };
}
// Builds an in-memory attachment descriptor; the id is the SHA-1 of the data.
async function createDataAttachment(contentType, data) {
  const id = sha1Text(data);
  return { type: "buffer", contentType, id, body: data };
}
export {
ReportUtils
createDataAttachment,
createEnvironment,
createFileAttachment,
createTestStepSnippetsInplace,
normalizeReport,
stripAnsi,
visitTests
};
//# sourceMappingURL=reportUtils.js.map
// src/showReport.ts
import { randomUUIDBase62 } from "@flakiness/shared/node/nodeutils.js";
import chalk from "chalk";
import open from "open";
// src/flakinessProjectConfig.ts
import fs from "fs";
import path from "path";
// src/git.ts
import assert from "assert";
// src/pathutils.ts
import { posix as posixPath, win32 as win32Path } from "path";
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
function normalizePath(aPath) {
if (IS_WIN32_PATH.test(aPath)) {
aPath = aPath.split(win32Path.sep).join(posixPath.sep);
// src/_internalUtils.ts
import { spawnSync } from "child_process";
import crypto from "crypto";
import http from "http";
import https from "https";
import util from "util";
import zlib from "zlib";
var asyncBrotliCompress = util.promisify(zlib.brotliCompress);
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
function errorText(error) {
return FLAKINESS_DBG ? error.stack : error.message;
}
async function retryWithBackoff(job, backoff = []) {
for (const timeout of backoff) {
try {
return await job();
} catch (e) {
if (e instanceof AggregateError)
console.error(`[flakiness.io err]`, errorText(e.errors[0]));
else if (e instanceof Error)
console.error(`[flakiness.io err]`, errorText(e));
else
console.error(`[flakiness.io err]`, e);
await new Promise((x) => setTimeout(x, timeout));
}
}
if (IS_ALMOST_POSIX_PATH.test(aPath))
return "/" + aPath[0] + aPath.substring(2);
return aPath;
return await job();
}
// src/utils.ts
import { spawnSync } from "child_process";
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
var httpUtils;
((httpUtils2) => {
  /**
   * Opens an HTTP(S) request and returns it alongside a promise for the
   * response body. The caller must finish the request (`request.end()`);
   * the promise resolves with the body Buffer for 2xx responses and rejects
   * for everything else.
   */
  function createRequest({ url, method = "get", headers = {} }) {
    let resolveResponse;
    let rejectResponse;
    const responseDataPromise = new Promise((res, rej) => {
      resolveResponse = res;
      rejectResponse = rej;
    });
    const protocol = url.startsWith("https") ? https : http;
    // Remove undefined header values before handing headers to node.
    headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
    const request = protocol.request(url, { method, headers }, (res) => {
      const parts = [];
      res.on("data", (part) => parts.push(part));
      res.on("end", () => {
        const ok = res.statusCode && res.statusCode >= 200 && res.statusCode < 300;
        if (ok)
          resolveResponse(Buffer.concat(parts));
        else
          rejectResponse(new Error(`Request to ${url} failed with ${res.statusCode}`));
      });
      res.on("error", (error) => rejectResponse(error));
    });
    request.on("error", rejectResponse);
    return { request, responseDataPromise };
  }
  httpUtils2.createRequest = createRequest;
  /** GETs `url`, retrying per `backoff`; resolves with the raw body Buffer. */
  async function getBuffer(url, backoff) {
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url });
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.getBuffer = getBuffer;
  /** GETs `url` and decodes the response body as UTF-8 text. */
  async function getText(url, backoff) {
    return (await getBuffer(url, backoff)).toString("utf-8");
  }
  httpUtils2.getText = getText;
  /** GETs `url` and parses the response body as JSON (no retries). */
  async function getJSON(url) {
    return JSON.parse(await getText(url));
  }
  httpUtils2.getJSON = getJSON;
  /** POSTs `text` as application/json, retrying per `backoff`. */
  async function postText(url, text, backoff) {
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(text) + ""
    };
    return await retryWithBackoff(async () => {
      const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
      request.write(text);
      request.end();
      return await responseDataPromise;
    }, backoff);
  }
  httpUtils2.postText = postText;
  /** POSTs `json` and parses the response body as JSON. */
  async function postJSON(url, json, backoff) {
    const buffer = await postText(url, JSON.stringify(json), backoff);
    return JSON.parse(buffer.toString("utf-8"));
  }
  httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
function shell(command, args, options) {

@@ -42,12 +107,224 @@ try {

}
// Generates a random 128-bit UUID and re-encodes it in base62 (0-9a-zA-Z),
// most significant digit first — a compact, URL-safe random identifier.
function randomUUIDBase62() {
  const BASE62_CHARSET = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
  let num = BigInt("0x" + crypto.randomUUID().replace(/-/g, ""));
  if (num === 0n)
    return BASE62_CHARSET[0];
  let encoded = "";
  while (num > 0n) {
    encoded = BASE62_CHARSET[Number(num % 62n)] + encoded;
    num /= 62n;
  }
  return encoded;
}
// src/git.ts
function computeGitRoot(somePathInsideGitRepo) {
const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
cwd: somePathInsideGitRepo,
encoding: "utf-8"
});
assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
return normalizePath(root);
// src/flakinessProjectConfig.ts
import fs from "fs";
import path from "path";
// src/gitWorktree.ts
import assert from "assert";
import { exec } from "child_process";
import debug from "debug";
import { posix as posixPath, win32 as win32Path } from "path";
import { promisify } from "util";
var log = debug("fk:git");
var execAsync = promisify(exec);
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
// Converts a native absolute path to POSIX form: backslashes become forward
// slashes, and a drive prefix like `C:` becomes `/C`. Paths already in POSIX
// form pass through unchanged.
function toPosixAbsolutePath(absolutePath) {
  let result = absolutePath;
  if (IS_WIN32_PATH.test(result))
    result = result.split(win32Path.sep).join(posixPath.sep);
  if (IS_ALMOST_POSIX_PATH.test(result))
    result = "/" + result[0] + result.substring(2);
  return result;
}
// Converts a POSIX absolute path back to platform-native form. On non-Windows
// this is the identity; on Windows `/C/foo` becomes `C:\foo`.
function toNativeAbsolutePath(posix) {
  if (process.platform !== "win32")
    return posix;
  assert(posix.startsWith("/"), "The path must be absolute");
  const m = posix.match(/^\/([a-zA-Z])(\/.*)?$/);
  assert(m, `Invalid POSIX path: ${posix}`);
  const [, drive, rest = ""] = m;
  return drive.toUpperCase() + ":" + rest.split(posixPath.sep).join(win32Path.sep);
}
var GitWorktree = class _GitWorktree {
  constructor(_gitRoot) {
    this._gitRoot = _gitRoot;
    // Precompute the POSIX form of the root for git-relative path computations.
    this._posixGitRoot = toPosixAbsolutePath(this._gitRoot);
  }
/**
* Creates a GitWorktree instance from any path inside a git repository.
*
* @param {string} somePathInsideGitRepo - Any path (file or directory) within a git repository.
* Can be absolute or relative. The function will locate the git root directory.
*
* @returns {GitWorktree} A new GitWorktree instance bound to the discovered git root.
*
* @throws {Error} Throws if the path is not inside a git repository or if git commands fail.
*
* @example
* ```typescript
* const worktree = GitWorktree.create('./src/my-test.ts');
* const gitRoot = worktree.rootPath();
* ```
*/
static create(somePathInsideGitRepo) {
const root = shell(`git`, ["rev-parse", "--show-toplevel"], {
cwd: somePathInsideGitRepo,
encoding: "utf-8"
});
assert(root, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
return new _GitWorktree(root);
}
_posixGitRoot;
/**
* Returns the native absolute path of the git repository root directory.
*
* @returns {string} Native absolute path to the git root. Format matches the current platform
* (Windows or POSIX).
*
* @example
* ```typescript
* const root = worktree.rootPath();
* // On Windows: 'D:\project'
* // On Unix: '/project'
* ```
*/
rootPath() {
return this._gitRoot;
}
/**
* Returns the commit ID (SHA-1 hash) of the current HEAD commit.
*
* @returns {FlakinessReport.CommitId} Full 40-character commit hash of the HEAD commit.
*
* @throws {Error} Throws if git command fails or repository is in an invalid state.
*
* @example
* ```typescript
* const commitId = worktree.headCommitId();
* // Returns: 'a1b2c3d4e5f6...' (40-character SHA-1)
* ```
*/
headCommitId() {
const sha = shell(`git`, ["rev-parse", "HEAD"], {
cwd: this._gitRoot,
encoding: "utf-8"
});
assert(sha, `FAILED: git rev-parse HEAD @ ${this._gitRoot}`);
return sha.trim();
}
/**
* Converts a native absolute path to a git-relative POSIX path.
*
* Takes any absolute path (Windows or POSIX format) and converts it to a POSIX path
* relative to the git repository root. This is essential for Flakiness reports where
* all file paths must be git-relative and use POSIX separators.
*
* @param {string} absolutePath - Native absolute path to convert. Can be in Windows format
* (e.g., `D:\project\src\test.ts`) or POSIX format (e.g., `/project/src/test.ts`).
*
* @returns {FlakinessReport.GitFilePath} POSIX path relative to git root (e.g., `src/test.ts`).
* Returns an empty string if the path is the git root itself.
*
* @example
* ```typescript
* const gitPath = worktree.gitPath('/Users/project/src/test.ts');
* // Returns: 'src/test.ts'
* ```
*/
gitPath(absolutePath) {
return posixPath.relative(this._posixGitRoot, toPosixAbsolutePath(absolutePath));
}
/**
* Converts a git-relative POSIX path to a native absolute path.
*
* Takes a POSIX path relative to the git root and converts it to the native absolute path
* format for the current platform (Windows or POSIX). This is the inverse of `gitPath()`.
*
* @param {FlakinessReport.GitFilePath} relativePath - POSIX path relative to git root
* (e.g., `src/test.ts`).
*
* @returns {string} Native absolute path. On Windows, returns Windows format (e.g., `D:\project\src\test.ts`).
* On POSIX systems, returns POSIX format (e.g., `/project/src/test.ts`).
*
* @example
* ```typescript
* const absolutePath = worktree.absolutePath('src/test.ts');
* // On Windows: 'D:\project\src\test.ts'
* // On Unix: '/project/src/test.ts'
* ```
*/
absolutePath(relativePath) {
return toNativeAbsolutePath(posixPath.join(this._posixGitRoot, relativePath));
}
/**
* Lists recent commits from the repository.
*
* Retrieves commit information including commit ID, timestamp, author, message, and parent commits.
* Note: CI environments often have shallow checkouts with limited history, which may affect
* the number of commits returned.
*
* @param {number} count - Maximum number of commits to retrieve, starting from HEAD.
*
* @returns {Promise<GitCommit[]>} Promise that resolves to an array of commit objects, ordered
* from most recent to oldest. Each commit includes:
* - `commitId` - Full commit hash
* - `timestamp` - Commit timestamp in milliseconds since Unix epoch
* - `message` - Commit message (subject line)
* - `author` - Author name
* - `parents` - Array of parent commit IDs
*
* @example
* ```typescript
* const commits = await worktree.listCommits(10);
* console.log(`Latest commit: ${commits[0].message}`);
* ```
*/
async listCommits(count) {
return await listCommits(this._gitRoot, "HEAD", count);
}
};
/**
 * Lists up to `count` commits reachable from `head` in the repository at
 * `gitRoot`, newest first.
 *
 * Records are NUL-separated (`-z`); fields within a record are separated by
 * `|~|`. The subject line is parsed defensively: if it itself contains the
 * field separator, everything between the author field and the trailing
 * parents field is treated as the message (commit hashes in the parents
 * field cannot contain the separator).
 *
 * @param {string} gitRoot - Repository root to run `git log` in.
 * @param {string} head - Ref or commit to start walking from (e.g. "HEAD").
 * @param {number} count - Maximum number of commits to return.
 * @returns {Promise<GitCommit[]>} Parsed commits; empty array when git fails
 * (e.g. not a repository).
 */
async function listCommits(gitRoot, head, count) {
  const FIELD_SEPARATOR = "|~|";
  const RECORD_SEPARATOR = "\0";
  const prettyFormat = [
    "%H",
    // Full commit hash
    "%ct",
    // Commit timestamp (Unix seconds)
    "%an",
    // Author name
    "%s",
    // Subject line
    "%P"
    // Parent hashes (space-separated)
  ].join(FIELD_SEPARATOR);
  const command = `git log ${head} -n ${count} --pretty=format:"${prettyFormat}" -z`;
  try {
    const { stdout } = await execAsync(command, { cwd: gitRoot });
    if (!stdout) {
      return [];
    }
    return stdout.trim().split(RECORD_SEPARATOR).filter((record) => record).map((record) => {
      const fields = record.split(FIELD_SEPARATOR);
      const [commitId, timestampStr, author] = fields;
      // A subject containing "|~|" splits into extra fields; rejoin the
      // middle fields so the message survives intact.
      const message = fields.length >= 5 ? fields.slice(3, -1).join(FIELD_SEPARATOR) : fields[3];
      const parentsStr = fields.length >= 5 ? fields[fields.length - 1] : void 0;
      const parents = parentsStr ? parentsStr.split(" ").filter((p) => p) : [];
      return {
        commitId,
        timestamp: parseInt(timestampStr, 10) * 1e3,
        author,
        message,
        parents,
        walkIndex: 0
      };
    });
  } catch (error) {
    log(`Failed to list commits for repository at ${gitRoot}:`, error);
    return [];
  }
}

@@ -71,4 +348,4 @@ // src/flakinessProjectConfig.ts

try {
const gitRoot = computeGitRoot(process.cwd());
return createConfigPath(gitRoot);
const worktree = GitWorktree.create(process.cwd());
return createConfigPath(worktree.rootPath());
} catch (e) {

@@ -83,2 +360,18 @@ return createConfigPath(process.cwd());

}
/**
* Loads the Flakiness project configuration from disk.
*
* Searches for an existing `.flakiness/config.json` file starting from the current working
* directory and walking up the directory tree. If no config exists, it determines the
* appropriate location (git root or current directory) for future saves.
*
* @returns {Promise<FlakinessProjectConfig>} Promise that resolves to a FlakinessProjectConfig
* instance. If no config file exists, returns an instance with default/empty values.
*
* @example
* ```typescript
* const config = await FlakinessProjectConfig.load();
* const projectId = config.projectPublicId();
* ```
*/
static async load() {

@@ -90,14 +383,53 @@ const configPath = ensureConfigPath();

}
/**
* Creates a new empty Flakiness project configuration.
*
* Creates a configuration instance with no values set. Use this when you want to build
* a configuration from scratch. Call `save()` to persist it to disk.
*
* @returns {FlakinessProjectConfig} A new empty configuration instance.
*
* @example
* ```typescript
* const config = FlakinessProjectConfig.createEmpty();
* config.setProjectPublicId('my-project-id');
* await config.save();
* ```
*/
static createEmpty() {
return new _FlakinessProjectConfig(ensureConfigPath(), {});
}
/**
* Returns the absolute path to the configuration file.
*
* The path is fixed when the instance is constructed (see `createEmpty`,
* which resolves it via `ensureConfigPath`) and does not change afterwards.
*
* @returns {string} Absolute path to `.flakiness/config.json`.
*/
path() {
return this._configPath;
}
/**
* Returns the project's public ID, if configured.
*
* The project public ID is used to associate reports with a specific Flakiness.io project.
* No validation is performed here; the raw configured value is returned.
*
* @returns {string | undefined} Project public ID, or `undefined` if not set.
*/
projectPublicId() {
return this._config.projectPublicId;
}
/**
* Returns the report viewer URL, either custom or default.
*
* @returns {string} Custom report viewer URL if configured, otherwise the default
* `https://report.flakiness.io`.
*/
reportViewerUrl() {
return this._config.customReportViewerUrl ?? "https://report.flakiness.io";
}
/**
* Sets or clears the custom report viewer URL.
*
* @param {string | undefined} url - Custom report viewer URL to use, or `undefined` to
* clear and use the default URL.
*/
setCustomReportViewerUrl(url) {

@@ -109,5 +441,27 @@ if (url)

}
/**
* Sets the project's public ID.
*
* Only updates the in-memory configuration; call `save()` to persist the
* change to `.flakiness/config.json`.
*
* @param {string | undefined} projectId - Project public ID to set, or `undefined` to clear.
*/
setProjectPublicId(projectId) {
this._config.projectPublicId = projectId;
}
/**
* Saves the configuration to disk.
*
* Writes the current configuration values to `.flakiness/config.json`. Creates the
* `.flakiness` directory if it doesn't exist.
*
* @returns {Promise<void>} Promise that resolves when the file has been written.
*
* @throws {Error} Throws if unable to create directories or write the file.
*
* @example
* ```typescript
* const config = await FlakinessProjectConfig.load();
* config.setProjectPublicId('my-project');
* await config.save();
* ```
*/
async save() {

@@ -120,7 +474,7 @@ await fs.promises.mkdir(path.dirname(this._configPath), { recursive: true });

// src/staticServer.ts
import debug from "debug";
import debug2 from "debug";
import * as fs2 from "fs";
import * as http from "http";
import * as http2 from "http";
import * as path2 from "path";
var log = debug("fk:static_server");
var log2 = debug2("fk:static_server");
var StaticServer = class {

@@ -147,3 +501,3 @@ _server;

this._cors = cors;
this._server = http.createServer((req, res) => this._handleRequest(req, res));
this._server = http2.createServer((req, res) => this._handleRequest(req, res));
}

@@ -177,3 +531,3 @@ port() {

await result;
log('Serving "%s" on "%s"', this._absoluteFolderPath, this.address());
log2('Serving "%s" on "%s"', this._absoluteFolderPath, this.address());
}

@@ -191,3 +545,3 @@ async start(port, host = "127.0.0.1") {

throw err;
log("Port %d is busy (EADDRINUSE). Trying next port...", port);
log2("Port %d is busy (EADDRINUSE). Trying next port...", port);
port = port + 1;

@@ -197,3 +551,3 @@ if (port > 65535)

}
log("All sequential ports busy. Falling back to random port.");
log2("All sequential ports busy. Falling back to random port.");
await this._startServer(0, host);

@@ -206,6 +560,6 @@ return this.address();

if (err) {
log("Error stopping server: %o", err);
log2("Error stopping server: %o", err);
reject(err);
} else {
log("Server stopped.");
log2("Server stopped.");
resolve2();

@@ -219,3 +573,3 @@ }

res.end(text);
log(`[${code}] ${req.method} ${req.url}`);
log2(`[${code}] ${req.method} ${req.url}`);
}

@@ -238,5 +592,5 @@ _handleRequest(req, res) {

}
req.on("aborted", () => log(`ABORTED ${req.method} ${req.url}`));
req.on("aborted", () => log2(`ABORTED ${req.method} ${req.url}`));
res.on("close", () => {
if (!res.headersSent) log(`CLOSED BEFORE SEND ${req.method} ${req.url}`);
if (!res.headersSent) log2(`CLOSED BEFORE SEND ${req.method} ${req.url}`);
});

@@ -262,7 +616,7 @@ if (!url || !url.startsWith(this._pathPrefix)) {

res.writeHead(200, { "Content-Type": contentType });
log(`[200] ${req.method} ${req.url} -> ${filePath}`);
log2(`[200] ${req.method} ${req.url} -> ${filePath}`);
const readStream = fs2.createReadStream(filePath);
readStream.pipe(res);
readStream.on("error", (err2) => {
log("Stream error: %o", err2);
log2("Stream error: %o", err2);
res.end();

@@ -269,0 +623,0 @@ });

@@ -47,5 +47,20 @@ // src/systemUtilizationSampler.ts

var SystemUtilizationSampler = class {
/**
* The accumulated system utilization data.
*
* This object is populated as samples are collected and can be directly included in
* Flakiness reports. It contains:
* - `samples` - Array of utilization samples with CPU/memory percentages and durations
* - `startTimestamp` - Timestamp when sampling began
* - `totalMemoryBytes` - Total system memory in bytes
*/
result;
_lastSample = getSystemUtilization();
_timer;
/**
* Creates a new SystemUtilizationSampler and starts sampling immediately.
*
* The first sample is collected after 50ms, and subsequent samples are collected
* every 1000ms. Call `dispose()` to stop sampling and clean up resources.
*/
constructor() {

@@ -65,2 +80,8 @@ this.result = {

}
/**
* Stops sampling and cleans up resources.
*
* Call this method when you're done collecting utilization data to stop the sampling
* timer and prevent memory leaks. The `result` object remains accessible after disposal.
*/
dispose() {

@@ -67,0 +88,0 @@ clearTimeout(this._timer);

{
"name": "@flakiness/sdk",
"version": "0.148.0",
"version": "0.149.0",
"private": false,

@@ -12,5 +12,5 @@ "exports": {

"./browser": {
"types": "./types/src/browser/index.d.ts",
"import": "./lib/browser/index.js",
"require": "./lib/browser/index.js"
"types": "./types/src/browser.d.ts",
"import": "./lib/browser.js",
"require": "./lib/browser.js"
}

@@ -26,3 +26,2 @@ },

"devDependencies": {
"@playwright/test": "^1.57.0",
"@types/babel__code-frame": "^7.0.6"

@@ -33,8 +32,7 @@ },

"@flakiness/flakiness-report": "^0.16.0",
"@flakiness/shared": "0.148.0",
"chalk": "^5.6.2",
"debug": "^4.3.7",
"open": "^10.2.0",
"zod": "^3.25.23"
"stable-hash": "^0.0.6"
}
}
# Flakiness SDK
The Flakiness SDK provides a comprehensive set of tools for creating and managing Flakiness Reports in Node.js.
Read docs at https://flakiness.io/docs/integrations/custom/
## Quick Start
Here's a minimal example of creating a Flakiness report:
```typescript
import {
FlakinessReport,
GitWorktree,
ReportUtils,
writeReport,
uploadReport,
CIUtils
} from '@flakiness/sdk';
// Initialize git worktree and environment
const worktree = GitWorktree.create(process.cwd());
const env = ReportUtils.createEnvironment({ name: 'CI' });
// Create a simple test report
const report: FlakinessReport.Report = {
category: 'testreport',
commitId: worktree.headCommitId(),
url: CIUtils.runUrl(),
environments: [env],
suites: [{
title: 'My Test Suite',
type: 'describe',
tests: [{
title: 'My Test',
location: { file: 'test.spec.ts', line: 10, column: 1 },
attempts: [{
environmentIdx: 0,
expectedStatus: 'passed',
actualStatus: 'passed',
duration: 100 as FlakinessReport.DurationMS,
}],
}],
}],
startTimestamp: Date.now() as FlakinessReport.UnixTimestampMS,
duration: 100 as FlakinessReport.DurationMS,
};
// Write report to disk or upload to Flakiness.io
await writeReport(report, [], './flakiness-report');
// Or: await uploadReport(report, [], { flakinessAccessToken: 'your-token' });
```
## Entry Points
The SDK provides two entry points:
### `@flakiness/sdk`
The main entry point for Node.js environments. Provides full access to all SDK functionality including:
- Git repository utilities
- File system operations
- System resource monitoring
- Report upload/download
- Local report viewing
### `@flakiness/sdk/browser`
A browser-compatible entry point with a subset of utilities that work in browser environments. Exports:
- `FlakinessReport` - Type definitions for the report format
- `ReportUtils` - Browser-safe utilities (normalizeReport, stripAnsi, visitTests)
Use this entry point when you need to process or manipulate reports in browser-based tools or web applications.
## Top-Level Exports
### Report Type & Validation
- **`FlakinessReport`** - Type definitions and validation for the Flakiness JSON Report format
### Building Reports
- **`CIUtils`** - Utilities to extract CI/CD information (run URLs, environment detection)
- **`GitWorktree`** - Git repository utilities for path conversion and commit information
- **`ReportUtils`** - Namespace with utilities for report creation and manipulation:
- `createEnvironment()` - Create environment objects with system information
- `normalizeReport()` - Deduplicate environments, suites, and tests
- `createTestStepSnippetsInplace()` - Generate code snippets for test steps
- `stripAnsi()` - Remove ANSI escape codes from strings
- `visitTests()` - Recursively visit all tests in a report
- `createFileAttachment()` / `createDataAttachment()` - Create report attachments
- **`SystemUtilizationSampler`** - Monitor and record CPU/memory utilization during test runs
### Working with Reports
- **`showReport()`** - Start a local server and open the report in your browser
- **`uploadReport()`** - Upload reports and attachments to Flakiness.io
- **`writeReport()`** - Write reports to disk in the standard Flakiness report format
### Project Configuration
- **`FlakinessProjectConfig`** - Manage project configuration stored in `.flakiness/config.json`
// src/browser/index.ts
import { FlakinessReport } from "@flakiness/flakiness-report";
// src/reportUtils.ts
import { Multimap } from "@flakiness/shared/common/multimap.js";
import { xxHash, xxHashObject } from "@flakiness/shared/common/utils.js";
var ReportUtils;
((ReportUtils2) => {
// Depth-first traversal of every test in the report. The visitor receives
// each test together with the chain of enclosing suites (outermost first);
// top-level tests (report.tests) get an empty parents array. The `parents`
// array is mutated during the walk, so visitors must copy it to retain it.
function visitTests(report, testVisitor) {
function visitSuite(suite, parents) {
parents.push(suite);
for (const test of suite.tests ?? [])
testVisitor(test, parents);
for (const childSuite of suite.suites ?? [])
visitSuite(childSuite, parents);
parents.pop();
}
for (const test of report.tests ?? [])
testVisitor(test, []);
for (const suite of report.suites)
visitSuite(suite, []);
}
ReportUtils2.visitTests = visitTests;
// Deduplicates environments, suites, and tests in a report.
// Identity is content-based: environments hash the whole object, suites hash
// (parent id, type, file, title), tests hash (suite id, file, title).
// Duplicate tests are merged (attempts concatenated, tags concatenated), and
// environments that no attempt references are dropped, with each attempt's
// environmentIdx remapped to the compacted environment list.
function normalizeReport(report) {
// id -> canonical environment/suite; gTests keeps every duplicate per id.
const gEnvs = /* @__PURE__ */ new Map();
const gSuites = /* @__PURE__ */ new Map();
const gTests = new Multimap();
// Reverse maps: original object -> computed id.
const gSuiteIds = /* @__PURE__ */ new Map();
const gTestIds = /* @__PURE__ */ new Map();
const gEnvIds = /* @__PURE__ */ new Map();
// suite id -> child suites / tests collected across duplicate suites.
const gSuiteChildren = new Multimap();
const gSuiteTests = new Multimap();
for (const env of report.environments) {
const envId = computeEnvId(env);
gEnvs.set(envId, env);
gEnvIds.set(env, envId);
}
// Environment ids referenced by at least one attempt; all others are dropped.
const usedEnvIds = /* @__PURE__ */ new Set();
function visitTests2(tests, suiteId) {
for (const test of tests ?? []) {
const testId = computeTestId(test, suiteId);
gTests.set(testId, test);
gTestIds.set(test, testId);
gSuiteTests.set(suiteId, test);
for (const attempt of test.attempts) {
const env = report.environments[attempt.environmentIdx];
const envId = gEnvIds.get(env);
usedEnvIds.add(envId);
}
}
}
function visitSuite(suite, parentSuiteId) {
const suiteId = computeSuiteId(suite, parentSuiteId);
gSuites.set(suiteId, suite);
gSuiteIds.set(suite, suiteId);
for (const childSuite of suite.suites ?? []) {
visitSuite(childSuite, suiteId);
gSuiteChildren.set(suiteId, childSuite);
}
visitTests2(suite.tests ?? [], suiteId);
}
// Merges all duplicates of each test id into one entry. NOTE: closes over
// `envIdToIndex`, declared further down — safe only because the transforms
// run from the return statement, after that map exists.
function transformTests(tests) {
const testIds = new Set(tests.map((test) => gTestIds.get(test)));
return [...testIds].map((testId) => {
const tests2 = gTests.getAll(testId);
const tags = tests2.map((test) => test.tags ?? []).flat();
return {
location: tests2[0].location,
title: tests2[0].title,
tags: tags.length ? tags : void 0,
attempts: tests2.map((t) => t.attempts).flat().map((attempt) => ({
...attempt,
environmentIdx: envIdToIndex.get(gEnvIds.get(report.environments[attempt.environmentIdx]))
}))
};
});
}
// Rebuilds the suite tree with one node per unique suite id, recursing into
// the children and tests accumulated across duplicates.
function transformSuites(suites) {
const suiteIds = new Set(suites.map((suite) => gSuiteIds.get(suite)));
return [...suiteIds].map((suiteId) => {
const suite = gSuites.get(suiteId);
return {
location: suite.location,
title: suite.title,
type: suite.type,
suites: transformSuites(gSuiteChildren.getAll(suiteId)),
tests: transformTests(gSuiteTests.getAll(suiteId))
};
});
}
// Index the whole report; top-level tests live under the synthetic
// "suiteless" suite id.
visitTests2(report.tests ?? [], "suiteless");
for (const suite of report.suites)
visitSuite(suite);
// Keep only referenced environments and map ids to their new indices.
const newEnvironments = [...usedEnvIds];
const envIdToIndex = new Map(newEnvironments.map((envId, index) => [envId, index]));
return {
...report,
environments: newEnvironments.map((envId) => gEnvs.get(envId)),
suites: transformSuites(report.suites),
tests: transformTests(report.tests ?? [])
};
}
ReportUtils2.normalizeReport = normalizeReport;
// Content hash of the full environment object.
function computeEnvId(env) {
return xxHashObject(env);
}
// Suite identity: parent chain + type + file + title.
function computeSuiteId(suite, parentSuiteId) {
return xxHash([
parentSuiteId ?? "",
suite.type,
suite.location?.file ?? "",
suite.title
]);
}
// Test identity: owning suite + file + title.
function computeTestId(test, suiteId) {
return xxHash([
suiteId,
test.location?.file ?? "",
test.title
]);
}
})(ReportUtils || (ReportUtils = {}));
export {
FlakinessReport,
ReportUtils
};
//# sourceMappingURL=index.js.map
// src/git.ts
import assert from "assert";
// src/pathutils.ts
import { posix as posixPath, win32 as win32Path } from "path";
// "C:\..." — a fully Windows-style absolute path.
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
// "C:/..." — a drive-letter path that already uses forward slashes.
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
/**
 * Normalizes an absolute path to POSIX notation: "C:\a\b" -> "/C/a/b".
 * Paths that are already in POSIX form are returned untouched.
 */
function normalizePath(aPath) {
  let candidate = aPath;
  if (IS_WIN32_PATH.test(candidate))
    candidate = candidate.split(win32Path.sep).join(posixPath.sep);
  if (!IS_ALMOST_POSIX_PATH.test(candidate))
    return candidate;
  return `/${candidate[0]}${candidate.substring(2)}`;
}
// src/utils.ts
import { spawnSync } from "child_process";
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
/**
 * Runs a command synchronously and returns its trimmed stdout.
 *
 * @param {string} command - Executable to run.
 * @param {string[]} args - Command-line arguments.
 * @param {object} options - Extra options forwarded to `spawnSync`
 * (callers may override `encoding`, `cwd`, etc.).
 * @returns {string | undefined} Trimmed stdout on a zero exit status;
 * `undefined` when the process exits non-zero, fails to spawn, or throws.
 */
function shell(command, args, options) {
  try {
    const spawnResult = spawnSync(command, args, { encoding: "utf-8", ...options });
    return spawnResult.status === 0 ? spawnResult.stdout.trim() : void 0;
  } catch (error) {
    console.error(error);
    return void 0;
  }
}
// src/git.ts
/**
 * Returns the full SHA of the HEAD commit of the given repository.
 *
 * @param {string} gitRepo - Path inside the repository to query.
 * @returns {string} Trimmed 40-character commit hash.
 * @throws {Error} When `git rev-parse HEAD` fails (e.g. not a repository).
 */
function gitCommitInfo(gitRepo) {
  const headSha = shell(`git`, ["rev-parse", "HEAD"], {
    cwd: gitRepo,
    encoding: "utf-8"
  });
  assert(headSha, `FAILED: git rev-parse HEAD @ ${gitRepo}`);
  return headSha.trim();
}
/**
 * Discovers the git repository root containing the given path and returns
 * it normalized to POSIX notation.
 *
 * @param {string} somePathInsideGitRepo - Any path within a git repository.
 * @returns {string} POSIX-normalized absolute path of the repository root.
 * @throws {Error} When the path is not inside a git repository.
 */
function computeGitRoot(somePathInsideGitRepo) {
  const topLevel = shell(`git`, ["rev-parse", "--show-toplevel"], {
    cwd: somePathInsideGitRepo,
    encoding: "utf-8"
  });
  assert(topLevel, `FAILED: git rev-parse --show-toplevel HEAD @ ${somePathInsideGitRepo}`);
  return normalizePath(topLevel);
}
export {
computeGitRoot,
gitCommitInfo
};
//# sourceMappingURL=git.js.map
// src/httpUtils.ts
import http from "http";
import https from "https";
// src/utils.ts
// Debug mode is toggled once, at module load, via the FLAKINESS_DBG env var.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/** Formats an error for logging: full stack in debug mode, message otherwise. */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
/**
 * Runs `job`, retrying after each timeout in `backoff` when it throws.
 * Each failure is logged (first inner error for AggregateError). After the
 * backoff schedule is exhausted, one final attempt is made and its result
 * or error propagates to the caller.
 */
async function retryWithBackoff(job, backoff = []) {
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (error) {
      if (error instanceof AggregateError)
        console.error(`[flakiness.io err]`, errorText(error.errors[0]));
      else if (error instanceof Error)
        console.error(`[flakiness.io err]`, errorText(error));
      else
        console.error(`[flakiness.io err]`, error);
      await new Promise((done) => setTimeout(done, delayMs));
    }
  }
  return await job();
}
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
// src/httpUtils.ts
// Debug flag captured once at module load from the FLAKINESS_DBG env var.
var FLAKINESS_DBG2 = !!process.env.FLAKINESS_DBG;
/** Formats an error for logging: stack trace in debug mode, message otherwise. */
function errorText2(error) {
  if (FLAKINESS_DBG2)
    return error.stack;
  return error.message;
}
var httpUtils;
((httpUtils2) => {
// Opens an HTTP(S) request and pairs it with a promise for the response
// body. The caller writes the body (if any) and calls `request.end()`;
// `responseDataPromise` resolves with the concatenated response buffer on a
// 2xx status and rejects on any other status or transport error.
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
// Pick the transport module from the URL scheme.
const protocol = url.startsWith("https") ? https : http;
// Drop headers whose value is undefined so they are not serialized.
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
// GET a URL and return the raw response buffer, retrying per `backoff`.
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
// GET a URL and decode the response as UTF-8 text.
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
// GET a URL and parse the response as JSON (single attempt: no backoff
// is forwarded, so retryWithBackoff defaults to an empty schedule).
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
// POST a pre-serialized JSON text body; returns the raw response buffer.
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
// byteLength (not .length) so multi-byte UTF-8 is counted correctly.
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
// POST a value as JSON and parse the JSON response.
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
export {
errorText2 as errorText,
httpUtils
};
//# sourceMappingURL=httpUtils.js.map
// src/localGit.ts
import { exec } from "child_process";
import debug from "debug";
import { promisify } from "util";
var log = debug("fk:git");
var execAsync = promisify(exec);
/**
 * Lists up to `count` commits reachable from `head` in the repository at
 * `gitRoot`, newest first.
 *
 * Records are NUL-separated (`-z`); fields within a record are separated by
 * `|~|`. The subject line is parsed defensively: if it itself contains the
 * field separator, everything between the author field and the trailing
 * parents field is treated as the message (commit hashes in the parents
 * field cannot contain the separator).
 *
 * @param {string} gitRoot - Repository root to run `git log` in.
 * @param {string} head - Ref or commit to start walking from (e.g. "HEAD").
 * @param {number} count - Maximum number of commits to return.
 * @returns {Promise<object[]>} Parsed commits; empty array when git fails.
 */
async function listLocalCommits(gitRoot, head, count) {
  const FIELD_SEPARATOR = "|~|";
  const RECORD_SEPARATOR = "\0";
  const prettyFormat = [
    "%H",
    // %H: Full commit hash
    "%at",
    // %at: Author date as a Unix timestamp (seconds since epoch)
    "%an",
    // %an: Author name
    "%s",
    // %s: Subject (the first line of the commit message)
    "%P"
    // %P: Parent hashes (space-separated)
  ].join(FIELD_SEPARATOR);
  const command = `git log ${head} -n ${count} --pretty=format:"${prettyFormat}" -z`;
  try {
    const { stdout } = await execAsync(command, { cwd: gitRoot });
    if (!stdout) {
      return [];
    }
    return stdout.trim().split(RECORD_SEPARATOR).filter((record) => record).map((record) => {
      const fields = record.split(FIELD_SEPARATOR);
      const [commitId, timestampStr, author] = fields;
      // A subject containing "|~|" splits into extra fields; rejoin the
      // middle fields so the message survives intact.
      const message = fields.length >= 5 ? fields.slice(3, -1).join(FIELD_SEPARATOR) : fields[3];
      const parentsStr = fields.length >= 5 ? fields[fields.length - 1] : void 0;
      const parents = parentsStr ? parentsStr.split(" ").filter((p) => p) : [];
      return {
        commitId,
        timestamp: parseInt(timestampStr, 10) * 1e3,
        author,
        message,
        parents,
        walkIndex: 0
      };
    });
  } catch (error) {
    log(`Failed to list commits for repository at ${gitRoot}:`, error);
    return [];
  }
}
export {
listLocalCommits
};
//# sourceMappingURL=localGit.js.map
// src/pathutils.ts
import { posix as posixPath, win32 as win32Path } from "path";
// Recognizes a Windows absolute path ("C:\...").
var IS_WIN32_PATH = new RegExp("^[a-zA-Z]:\\\\", "i");
// Recognizes a drive-letter path that already uses forward slashes ("C:/...").
var IS_ALMOST_POSIX_PATH = new RegExp("^[a-zA-Z]:/", "i");
/**
 * Normalizes an absolute path to POSIX notation ("C:\a\b" -> "/C/a/b");
 * POSIX input is returned unchanged.
 */
function normalizePath(aPath) {
  let normalized = aPath;
  if (IS_WIN32_PATH.test(normalized))
    normalized = normalized.split(win32Path.sep).join(posixPath.sep);
  if (!IS_ALMOST_POSIX_PATH.test(normalized))
    return normalized;
  return `/${normalized[0]}${normalized.substring(2)}`;
}
/**
 * Converts an absolute POSIX path into a path relative to the git root.
 *
 * @param {string} gitRoot - POSIX absolute path of the repository root.
 * @param {string} absolutePath - POSIX absolute path inside the repository.
 * @returns {string} Git-relative POSIX path (empty string for the root itself).
 */
function gitFilePath(gitRoot, absolutePath) {
  const relativePath = posixPath.relative(gitRoot, absolutePath);
  return relativePath;
}
export {
gitFilePath,
normalizePath
};
//# sourceMappingURL=pathutils.js.map
// src/reportUploader.ts
import { compressTextAsync, compressTextSync } from "@flakiness/shared/node/compression.js";
import assert from "assert";
import crypto from "crypto";
import fs from "fs";
import { URL } from "url";
// src/httpUtils.ts
import http from "http";
import https from "https";
// src/utils.ts
// Debug mode is toggled once, at module load, via the FLAKINESS_DBG env var.
var FLAKINESS_DBG = !!process.env.FLAKINESS_DBG;
/** Formats an error for logging: full stack in debug mode, message otherwise. */
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
/**
 * Runs `job`, retrying after each timeout in `backoff` when it throws.
 * Each failure is logged (first inner error for AggregateError). After the
 * backoff schedule is exhausted, one final attempt is made and its result
 * or error propagates to the caller.
 */
async function retryWithBackoff(job, backoff = []) {
  for (const delayMs of backoff) {
    try {
      return await job();
    } catch (error) {
      if (error instanceof AggregateError)
        console.error(`[flakiness.io err]`, errorText(error.errors[0]));
      else if (error instanceof Error)
        console.error(`[flakiness.io err]`, errorText(error));
      else
        console.error(`[flakiness.io err]`, error);
      await new Promise((done) => setTimeout(done, delayMs));
    }
  }
  return await job();
}
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
// src/httpUtils.ts
// Debug flag captured at module load (not referenced in this bundled copy).
var FLAKINESS_DBG2 = !!process.env.FLAKINESS_DBG;
var httpUtils;
((httpUtils2) => {
// Opens an HTTP(S) request paired with a promise for the response body.
// Caller writes the body (if any) and calls `request.end()`; the promise
// resolves with the concatenated buffer on 2xx, rejects otherwise.
function createRequest({ url, method = "get", headers = {} }) {
let resolve;
let reject;
const responseDataPromise = new Promise((a, b) => {
resolve = a;
reject = b;
});
// Transport chosen from the URL scheme.
const protocol = url.startsWith("https") ? https : http;
// Strip headers with undefined values before serialization.
headers = Object.fromEntries(Object.entries(headers).filter(([key, value]) => value !== void 0));
const request = protocol.request(url, { method, headers }, (res) => {
const chunks = [];
res.on("data", (chunk) => chunks.push(chunk));
res.on("end", () => {
if (res.statusCode && res.statusCode >= 200 && res.statusCode < 300)
resolve(Buffer.concat(chunks));
else
reject(new Error(`Request to ${url} failed with ${res.statusCode}`));
});
res.on("error", (error) => reject(error));
});
request.on("error", reject);
return { request, responseDataPromise };
}
httpUtils2.createRequest = createRequest;
// GET returning the raw response buffer, retrying per `backoff`.
async function getBuffer(url, backoff) {
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url });
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.getBuffer = getBuffer;
// GET decoded as UTF-8 text.
async function getText(url, backoff) {
const buffer = await getBuffer(url, backoff);
return buffer.toString("utf-8");
}
httpUtils2.getText = getText;
// GET parsed as JSON (no backoff is forwarded, so a single attempt).
async function getJSON(url) {
return JSON.parse(await getText(url));
}
httpUtils2.getJSON = getJSON;
// POST a pre-serialized JSON text body; returns the raw response buffer.
async function postText(url, text, backoff) {
const headers = {
"Content-Type": "application/json",
// byteLength (not .length) so multi-byte UTF-8 is counted correctly.
"Content-Length": Buffer.byteLength(text) + ""
};
return await retryWithBackoff(async () => {
const { request, responseDataPromise } = createRequest({ url, headers, method: "post" });
request.write(text);
request.end();
return await responseDataPromise;
}, backoff);
}
httpUtils2.postText = postText;
// POST a value as JSON and parse the JSON response.
async function postJSON(url, json, backoff) {
const buffer = await postText(url, JSON.stringify(json), backoff);
return JSON.parse(buffer.toString("utf-8"));
}
httpUtils2.postJSON = postJSON;
})(httpUtils || (httpUtils = {}));
// src/reportUploader.ts
/**
 * Computes the SHA-1 digest of a file by streaming its contents.
 *
 * @param {string} filePath - File to hash.
 * @returns {Promise<string>} Hex-encoded SHA-1 of the file contents;
 * rejects with the stream error if the file cannot be read.
 */
function sha1File(filePath) {
  return new Promise((resolve, reject) => {
    const digest = crypto.createHash("sha1");
    fs.createReadStream(filePath)
      .on("data", (chunk) => digest.update(chunk))
      .on("end", () => resolve(digest.digest("hex")))
      .on("error", reject);
  });
}
/**
 * Builds a file-backed attachment whose id is the SHA-1 of the file contents.
 *
 * @param {string} contentType - MIME type of the attachment.
 * @param {string} filePath - Path of the file to attach.
 * @returns {Promise<object>} Attachment descriptor { contentType, id, path }.
 */
async function createFileAttachment(contentType, filePath) {
  const id = await sha1File(filePath);
  return { contentType, id, path: filePath };
}
/**
 * Builds an in-memory attachment whose id is the SHA-1 of its body.
 *
 * @param {string} contentType - MIME type of the attachment.
 * @param {Buffer | string} data - Attachment payload.
 * @returns {Promise<object>} Attachment descriptor { contentType, id, body }.
 */
async function createDataAttachment(contentType, data) {
  const digest = crypto.createHash("sha1").update(data).digest("hex");
  return { contentType, id: digest, body: data };
}
var ReportUploader = class _ReportUploader {
  /** Resolved options ({ flakinessAccessToken, flakinessEndpoint }). */
  _options;
  /**
   * Resolves uploader options from explicit overrides, falling back to the
   * FLAKINESS_ACCESS_TOKEN / FLAKINESS_ENDPOINT environment variables and
   * finally to the default endpoint.
   *
   * @param {object} [overrides] - Optional explicit token/endpoint values.
   * @returns {object | undefined} Resolved options, or `undefined` when no
   * access token is available (uploading should then be skipped).
   */
  static optionsFromEnv(overrides) {
    const token = overrides?.flakinessAccessToken ?? process.env["FLAKINESS_ACCESS_TOKEN"];
    if (!token)
      return void 0;
    const endpoint = overrides?.flakinessEndpoint ?? process.env["FLAKINESS_ENDPOINT"] ?? "https://flakiness.io";
    return { flakinessAccessToken: token, flakinessEndpoint: endpoint };
  }
  /**
   * One-shot helper: resolves options, uploads the report with attachments,
   * and logs progress via `options.log` when provided.
   *
   * @returns {Promise<object | undefined>} `{ errorMessage }` on failure,
   * otherwise `undefined` (including when uploading is skipped).
   */
  static async upload(options) {
    const resolved = _ReportUploader.optionsFromEnv(options);
    if (!resolved) {
      // Only nag about the missing token when running on CI.
      if (process.env.CI)
        options.log?.(`[flakiness.io] Uploading skipped since no FLAKINESS_ACCESS_TOKEN is specified`);
      return void 0;
    }
    const uploader = new _ReportUploader(resolved);
    const pendingUpload = uploader.createUpload(options.report, options.attachments);
    const uploadResult = await pendingUpload.upload();
    if (!uploadResult.success) {
      options.log?.(`[flakiness.io] X Failed to upload to ${resolved.flakinessEndpoint}: ${uploadResult.message}`);
      return { errorMessage: uploadResult.message };
    }
    options.log?.(`[flakiness.io] \u2713 Report uploaded ${uploadResult.message ?? ""}`);
    if (uploadResult.reportUrl)
      options.log?.(`[flakiness.io] ${uploadResult.reportUrl}`);
  }
  constructor(options) {
    this._options = options;
  }
  /** Creates a ReportUpload bound to this uploader's options. */
  createUpload(report, attachments) {
    return new ReportUpload(this._options, report, attachments);
  }
};
var HTTP_BACKOFF = [100, 500, 1e3, 1e3, 1e3, 1e3];
var ReportUpload = class {
  // One upload session against the flakiness.io endpoint. Drives the
  // three-step protocol: /api/upload/start -> presigned PUTs -> /api/upload/finish.
  _report;
  _attachments;
  _options;
  constructor(options, report, attachments) {
    this._options = options;
    this._report = report;
    this._attachments = attachments;
  }
  // POST a JSON body to `pathname` on the configured endpoint with a bearer
  // `token`. Never rejects: resolves to { result, error } with exactly one set
  // (HTTP errors become a "status url body" string; network errors pass through).
  async _api(pathname, token, body) {
    const url = new URL(this._options.flakinessEndpoint);
    url.pathname = pathname;
    return await fetch(url, {
      method: "POST",
      headers: {
        "Authorization": `Bearer ${token}`,
        "Content-Type": "application/json"
      },
      body: body ? JSON.stringify(body) : void 0
    }).then(async (response) => !response.ok ? {
      result: void 0,
      error: response.status + " " + url.href + " " + await response.text()
    } : {
      result: await response.json(),
      error: void 0
    }).catch((error) => ({
      result: void 0,
      error
    }));
  }
  // Run the full upload: start a session, fetch presigned attachment URLs,
  // PUT the report and all attachments in parallel, then finish the session.
  // Returns { success: true, reportUrl } or { success: false, message }.
  async upload(options) {
    const response = await this._api("/api/upload/start", this._options.flakinessAccessToken);
    if (response?.error || !response.result)
      return { success: false, message: response.error };
    const webUrl = new URL(response.result.webUrl, this._options.flakinessEndpoint).toString();
    // Exchange attachment ids for per-attachment presigned upload URLs.
    const attachmentsPresignedUrls = await this._api("/api/upload/attachments", response.result.uploadToken, {
      attachmentIds: this._attachments.map((a) => a.id)
    });
    if (attachmentsPresignedUrls?.error || !attachmentsPresignedUrls.result)
      return { success: false, message: attachmentsPresignedUrls.error };
    const attachments = new Map(attachmentsPresignedUrls.result.map((a) => [a.attachmentId, a.presignedUrl]));
    // All PUTs run concurrently; any single failure rejects the whole upload.
    await Promise.all([
      this._uploadReport(JSON.stringify(this._report), response.result.presignedReportUrl, options?.syncCompression ?? false),
      ...this._attachments.map((attachment) => {
        const uploadURL = attachments.get(attachment.id);
        if (!uploadURL)
          throw new Error("Internal error: missing upload URL for attachment!");
        return this._uploadAttachment(attachment, uploadURL, options?.syncCompression ?? false);
      })
    ]);
    await this._api("/api/upload/finish", response.result.uploadToken);
    return { success: true, reportUrl: webUrl };
  }
  // Brotli-compress the serialized report and PUT it to the presigned URL,
  // retrying per HTTP_BACKOFF. `syncCompression` selects compressTextSync
  // over the async zlib path.
  async _uploadReport(data, uploadUrl, syncCompression) {
    const compressed = syncCompression ? compressTextSync(data) : await compressTextAsync(data);
    const headers = {
      "Content-Type": "application/json",
      "Content-Length": Buffer.byteLength(compressed) + "",
      "Content-Encoding": "br"
    };
    await retryWithBackoff(async () => {
      const { request, responseDataPromise } = httpUtils.createRequest({
        url: uploadUrl,
        headers,
        method: "put"
      });
      request.write(compressed);
      request.end();
      await responseDataPromise;
    }, HTTP_BACKOFF);
  }
  // PUT one attachment to its presigned URL, retrying per HTTP_BACKOFF.
  // Text-like MIME types are brotli-compressed in memory; other on-disk
  // attachments are streamed uncompressed (avoids buffering large binaries).
  async _uploadAttachment(attachment, uploadUrl, syncCompression) {
    // NOTE(review): toLocaleLowerCase is locale-sensitive (e.g. Turkish
    // dotless i); plain toLowerCase would be safer for MIME comparison.
    const mimeType = attachment.contentType.toLocaleLowerCase().trim();
    const compressable = mimeType.startsWith("text/") || mimeType.endsWith("+json") || mimeType.endsWith("+text") || mimeType.endsWith("+xml");
    if (!compressable && attachment.path) {
      // Binary file on disk: stream it as-is with an explicit length.
      const attachmentPath = attachment.path;
      await retryWithBackoff(async () => {
        const { request, responseDataPromise } = httpUtils.createRequest({
          url: uploadUrl,
          headers: {
            "Content-Type": attachment.contentType,
            "Content-Length": (await fs.promises.stat(attachmentPath)).size + ""
          },
          method: "put"
        });
        fs.createReadStream(attachmentPath).pipe(request);
        await responseDataPromise;
      }, HTTP_BACKOFF);
      return;
    }
    // In-memory path: prefer an explicit body, else read the file from disk.
    let buffer = attachment.body ? attachment.body : attachment.path ? await fs.promises.readFile(attachment.path) : void 0;
    assert(buffer);
    const encoding = compressable ? "br" : void 0;
    if (compressable)
      buffer = syncCompression ? compressTextSync(buffer) : await compressTextAsync(buffer);
    // An undefined Content-Encoding is filtered out by httpUtils.createRequest.
    const headers = {
      "Content-Type": attachment.contentType,
      "Content-Length": Buffer.byteLength(buffer) + "",
      "Content-Encoding": encoding
    };
    await retryWithBackoff(async () => {
      const { request, responseDataPromise } = httpUtils.createRequest({
        url: uploadUrl,
        headers,
        method: "put"
      });
      request.write(buffer);
      request.end();
      await responseDataPromise;
    }, HTTP_BACKOFF);
  }
};
// Public API of the reportUploader module.
export {
  ReportUploader,
  createDataAttachment,
  createFileAttachment
};
//# sourceMappingURL=reportUploader.js.map
// src/utils.ts
import { spawnSync } from "child_process";
// Debug flag: set the FLAKINESS_DBG environment variable for verbose errors.
var FLAKINESS_DBG = Boolean(process.env.FLAKINESS_DBG);
// Render an Error for logging: full stack in debug mode, message otherwise.
function errorText(error) {
  if (FLAKINESS_DBG)
    return error.stack;
  return error.message;
}
// Run `job`, retrying once per entry in `backoff` and sleeping that many ms
// after each failure. The final attempt runs after the loop so its error
// propagates to the caller; intermediate failures are only logged.
async function retryWithBackoff(job, backoff = []) {
  for (let attempt = 0; attempt < backoff.length; attempt++) {
    try {
      return await job();
    } catch (e) {
      const detail = e instanceof AggregateError ? errorText(e.errors[0]) : e instanceof Error ? errorText(e) : e;
      console.error(`[flakiness.io err]`, detail);
      await new Promise((wake) => setTimeout(wake, backoff[attempt]));
    }
  }
  return await job();
}
// Matches ANSI escape and control sequences (CSI, OSC, colors, cursor moves).
var ansiRegex = new RegExp("[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))", "g");
// Remove every ANSI escape sequence from `str`.
function stripAnsi(str) {
  return str.split(ansiRegex).join("");
}
// Run `command` synchronously and return its trimmed stdout.
// Returns undefined when the command exits non-zero or spawning throws.
function shell(command, args, options) {
  try {
    const proc = spawnSync(command, args, { encoding: "utf-8", ...options });
    return proc.status === 0 ? proc.stdout.trim() : void 0;
  } catch (e) {
    console.error(e);
    return void 0;
  }
}
// Public API of the utils module.
export {
  retryWithBackoff,
  shell,
  stripAnsi
};
//# sourceMappingURL=utils.js.map

Sorry, the diff of this file is not supported yet