@replayio/test-utils: npm package version comparison

Comparing version 0.0.0-pr549-20240625155921 to 0.0.0-pr577-20240701135511

.turbo/turbo-build.log


CHANGELOG.md
# @replayio/test-utils
## 0.0.0-pr549-20240625155921
## 0.0.0-pr577-20240701135511
### Patch Changes
- New build system
- [#573](https://github.com/replayio/replay-cli/pull/573) [`9494138`](https://github.com/replayio/replay-cli/commit/9494138fe6235fd365ce952be384524d30415f21) Thanks [@hbenl](https://github.com/hbenl)! - Log broken entries when reading the recordings.log file
## 3.0.5
### Patch Changes
- Updated dependencies [[`40beb19`](https://github.com/replayio/replay-cli/commit/40beb199c1d1dec640611fec0e04e911e24b5fe3)]:
- @replayio/replay@0.22.10
## 3.0.4
### Patch Changes
- Updated dependencies []:
- @replayio/replay@0.0.0-pr549-20240625155921
- @replayio/replay@0.22.9

@@ -12,0 +23,0 @@ ## 3.0.3


dist/index.d.ts

@@ -1,8 +0,11 @@

export { PendingWork, default as ReplayReporter, ReplayReporterConfig, ReporterError } from './reporter.js';
export { buildTestId } from './testId.js';
export { pingTestMetrics } from './metrics.js';
export { removeAnsiCodes } from './terminal.js';
export { TestMetadataV1 } from './legacy-cli/metadata/test/v1.js';
export { TestMetadataV2 } from './legacy-cli/metadata/test/v2.js';
export { fetchWorkspaceConfig } from './config.js';
export { getAccessToken } from './getAccessToken.js';
export { log, warn } from './logging.js';
export { getMetadataFilePath, initMetadataFile } from './metadata.js';
export { TestMetadataV1, TestMetadataV2 } from '@replayio/replay/metadata/test';
export { pingTestMetrics } from './metrics.js';
export { PendingWork, default as ReplayReporter, ReporterError } from './reporter.js';
export { removeAnsiCodes } from './terminal.js';
export { buildTestId } from './testId.js';
export { RecordingEntry, ReplayReporterConfig } from './types.js';
'use strict';
var reporter = require('./reporter.js');
var testId = require('./testId.js');
var metrics = require('./metrics.js');
var terminal = require('./terminal.js');
var config = require('./config.js');
var getAccessToken = require('./getAccessToken.js');
var logging = require('./logging.js');
var metadata = require('./metadata.js');
var metrics = require('./metrics.js');
var reporter = require('./reporter.js');
var terminal = require('./terminal.js');
var testId = require('./testId.js');
exports.ReplayReporter = reporter.default;
exports.ReporterError = reporter.ReporterError;
exports.buildTestId = testId.buildTestId;
exports.pingTestMetrics = metrics.pingTestMetrics;
exports.removeAnsiCodes = terminal.removeAnsiCodes;
exports.fetchWorkspaceConfig = config.fetchWorkspaceConfig;
exports.getAccessToken = getAccessToken.getAccessToken;
exports.log = logging.log;

@@ -23,1 +20,6 @@ exports.warn = logging.warn;

exports.initMetadataFile = metadata.initMetadataFile;
exports.pingTestMetrics = metrics.pingTestMetrics;
exports.ReplayReporter = reporter.default;
exports.ReporterError = reporter.ReporterError;
exports.removeAnsiCodes = terminal.removeAnsiCodes;
exports.buildTestId = testId.buildTestId;
'use strict';
var utils = require('@replayio/replay/utils');
var getReplayPath = require('./shared/dist/getReplayPath.js');
var fs = require('fs');
var path = require('path');
var logging = require('./logging.js');
function getMetadataFilePath(base, workerIndex = 0) {
return process.env.RECORD_REPLAY_METADATA_FILE || path.join(utils.getDirectory(), `${base.toUpperCase()}_METADATA_${workerIndex}`);
return process.env.RECORD_REPLAY_METADATA_FILE || getReplayPath.getReplayPath(`${base.toUpperCase()}_METADATA_${workerIndex}`);
}
function initMetadataFile(path2) {
function initMetadataFile(path) {
try {
if (!fs.existsSync(path2)) {
fs.writeFileSync(path2, "{}");
if (!fs.existsSync(path)) {
fs.writeFileSync(path, "{}");
}
return path2;
return path;
} catch (e) {
logging.warn(`Failed to initialize metadata file${path2 ? ` at ${path2}` : ""}`, e);
logging.warn(`Failed to initialize metadata file${path ? ` at ${path}` : ""}`, e);
}

@@ -20,0 +19,0 @@ return void 0;
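
As a quick usage sketch for the two metadata helpers above (the base name and worker index are arbitrary examples; the fallback directory differs between the two versions, as the diff shows):

```ts
import { getMetadataFilePath, initMetadataFile } from "@replayio/test-utils";

// Honors RECORD_REPLAY_METADATA_FILE when set; otherwise derives a path like
// <replay directory>/PLAYWRIGHT_METADATA_0 from the upper-cased base name and worker index.
const metadataFilePath = getMetadataFilePath("playwright", 0);

// Writes "{}" to the file if it does not already exist and returns the path;
// on failure it logs a warning and returns undefined.
initMetadataFile(metadataFilePath);
```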

@@ -1,2 +0,2 @@

import { TestMetadataV2 } from '@replayio/replay/metadata/test/v2';
import { TestMetadataV2 } from './legacy-cli/metadata/test/v2.js';

@@ -3,0 +3,0 @@ declare function pingTestMetrics(recordingId: string | undefined, runId: string, test: {

'use strict';
var dbg = require('debug');
var os = require('os');
var fetch = require('node-fetch');
var require$$0 = require('os');

@@ -20,3 +20,3 @@ const debug = dbg("replay:test-utils:metrics");

...test,
platform: os.platform(),
platform: require$$0.platform(),
runId,

@@ -23,0 +23,0 @@ env: {

@@ -1,25 +0,7 @@

import * as _replayio_replay from '@replayio/replay';
import { UnstructuredMetadata, RecordingEntry } from '@replayio/replay';
import { TestMetadataV2 } from '@replayio/replay/metadata/test';
export { TestMetadataV1, TestMetadataV2 } from '@replayio/replay/metadata/test';
import { ExternalRecordingEntry } from './legacy-cli/types.js';
import { Properties } from './node_modules/@replay-cli/shared/dist/mixpanel/mixpanelAPI.js';
import { UnstructuredMetadata } from './node_modules/@replay-cli/shared/dist/recording/types.js';
import { TestMetadataV2 } from './legacy-cli/metadata/test/v2.js';
import { ReplayReporterConfig, RecordingEntry } from './types.js';
type UploadStatusThreshold = "all" | "failed-and-flaky" | "failed";
type UploadOption = boolean | {
/**
* Minimize the number of recordings uploaded for a test attempt (within a shard).
* e.g. Only one recording would be uploaded for a failing test attempt, regardless of retries.
* e.g. Two recordings would be uploaded for a flaky test attempt (the passing test and one of the failures).
*/
minimizeUploads?: boolean;
statusThreshold?: UploadStatusThreshold;
};
interface ReplayReporterConfig<TRecordingMetadata extends UnstructuredMetadata = UnstructuredMetadata> {
runTitle?: string;
metadata?: Record<string, any> | string;
metadataKey?: string;
upload?: UploadOption;
apiKey?: string;
/** @deprecated Use `upload.minimizeUploads` and `upload.statusThreshold` instead */
filter?: (r: RecordingEntry<TRecordingMetadata>) => boolean;
}
interface TestRunner {

@@ -84,3 +66,2 @@ name: string;

private _cacheAuthIdsPromise;
private _logger;
private _uploadedRecordings;

@@ -93,3 +74,3 @@ constructor(runner: TestRunner, schemaVersion: string, config?: ReplayReporterConfig<TRecordingMetadata>);

private _parseConfig;
addError(err: Error | ReporterError): void;
addError(error: Error | ReporterError, context?: Properties): void;
setDiagnosticMetadata(metadata: Record<string, unknown>): void;

@@ -111,3 +92,3 @@ onTestSuiteBegin(config?: ReplayReporterConfig<TRecordingMetadata>, metadataKey?: string): void;

executionId: string;
}[]): _replayio_replay.ExternalRecordingEntry[];
}[]): ExternalRecordingEntry[];
private _buildTestMetadata;

@@ -122,2 +103,2 @@ private _setRecordingMetadata;

export { type PendingWork, type PendingWorkError, type ReplayReporterConfig, ReporterError, type Test, type TestRunner, type UploadOption, type UploadStatusThreshold, ReplayReporter as default };
export { type PendingWork, type PendingWorkError, ReporterError, type Test, type TestRun, type TestRunner, ReplayReporter as default };
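
The reporter typings above describe the `upload` option that supersedes the deprecated `filter` callback. A minimal sketch of a consumer-side configuration using it, assuming only the option names from the type definitions above (the concrete values are illustrative, not taken from this diff):

```ts
import type { ReplayReporterConfig } from "@replayio/test-utils";

// Illustrative configuration only; the option names come from the type
// definitions above, the values are made up for the example.
const config: ReplayReporterConfig = {
  apiKey: process.env.REPLAY_API_KEY,
  runTitle: "nightly-suite",
  upload: {
    // Keep at most one recording per failed attempt (two for a flaky attempt).
    minimizeUploads: true,
    // Upload recordings only for failed and flaky tests.
    statusThreshold: "failed-and-flaky",
  },
};
```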

@@ -5,4 +5,8 @@ 'use strict';

var replay = require('@replayio/replay');
var metadata = require('@replayio/replay/metadata');
var retryOnFailure = require('./shared/dist/async/retryOnFailure.js');
var getAuthInfo = require('./shared/dist/graphql/getAuthInfo.js');
var queryGraphQL = require('./shared/dist/graphql/queryGraphQL.js');
var logger = require('./shared/dist/logger.js');
var mixpanelAPI = require('./shared/dist/mixpanel/mixpanelAPI.js');
var userAgent = require('./shared/dist/userAgent.js');
var child_process = require('child_process');

@@ -13,8 +17,12 @@ var fs = require('fs');

var uuid = require('uuid');
var _package = require('./test-utils/package.json.js');
var getAccessToken = require('./getAccessToken.js');
var main = require('./legacy-cli/main.js');
var index$1 = require('./legacy-cli/metadata/index.js');
var logging = require('./logging.js');
var metadata$1 = require('./metadata.js');
var metadata = require('./metadata.js');
var metrics = require('./metrics.js');
var testId = require('./testId.js');
var logger = require('./logger/logger.js');
var getAuthIds = require('./logger/graphql/getAuthIds.js');
var source = require('./legacy-cli/metadata/source.js');
var index = require('./legacy-cli/metadata/test/index.js');

@@ -57,5 +65,5 @@ function last(arr) {

}
function createGraphqlError(operation, errors, logger) {
function createGraphqlError(operation, errors) {
const errorMessages = errors.map(getErrorMessage);
logger.error("GraphQlOperationFailed", { operation, errors: errors.map(getErrorMessage) });
logger.logger.error("GraphQlOperationFailed", { operation, errors: errors.map(getErrorMessage) });
for (const error of errors) {

@@ -107,3 +115,3 @@ switch (error.extensions?.code) {

class ReplayReporter {
_baseId = metadata.source.getTestRunIdFromEnvironment(process.env) || uuid.v4();
_baseId = source.getTestRunIdFromEnvironment(process.env) || uuid.v4();
_testRunShardId = null;

@@ -124,8 +132,7 @@ _baseMetadata = null;

_cacheAuthIdsPromise = null;
_logger;
_uploadedRecordings = /* @__PURE__ */ new Set();
constructor(runner, schemaVersion, config) {
userAgent.setUserAgent(`${_package.name}/${_package.version}`);
this._runner = runner;
this._schemaVersion = schemaVersion;
this._logger = new logger.Logger(this._runner.name);
if (config) {

@@ -136,7 +143,7 @@ const { metadataKey, ...rest } = config;

if (this._apiKey) {
this._cacheAuthIdsPromise = getAuthIds.getAuthIds(this._apiKey).then((ids) => {
this._logger.identify(ids);
this._logger.info("ReplayReporter:LoggerIdentificationAdded");
this._cacheAuthIdsPromise = getAuthInfo.getAuthInfo(this._apiKey).then((authInfo) => {
logger.logger.identify(authInfo);
logger.logger.info("ReplayReporter:LoggerIdentificationAdded");
}).catch(
(e) => this._logger.info("ReplayReporter:LoggerIdentificationFailed", {
(e) => logger.logger.info("ReplayReporter:LoggerIdentificationFailed", {
errorMessage: getErrorMessage(e)

@@ -207,3 +214,3 @@ })

_parseConfig(config = {}, metadataKey) {
this._apiKey = config.apiKey || process.env.REPLAY_API_KEY || process.env.RECORD_REPLAY_API_KEY;
this._apiKey = getAccessToken.getAccessToken(config);
this._upload = "upload" in config ? !!config.upload : !!process.env.REPLAY_UPLOAD;

@@ -243,7 +250,8 @@ if (this._upload && !this._apiKey) {

}
addError(err) {
if (err.name === "ReporterError") {
this._errors.push(err);
addError(error, context) {
mixpanelAPI.mixpanelAPI.trackEvent(`${this._runner.name}.error.${error.name}`, { context, error });
if (error.name === "ReporterError") {
this._errors.push(error);
} else {
this._errors.push(new ReporterError(-1, "Unexpected error", err));
this._errors.push(new ReporterError(-1, "Unexpected error", error));
}

@@ -256,2 +264,3 @@ }

};
mixpanelAPI.mixpanelAPI.appendAdditionalProperties({ baseMetadata: this._baseMetadata });
}

@@ -262,3 +271,3 @@ onTestSuiteBegin(config, metadataKey) {

}
this._logger.info("OnTestSuiteBegin:ReporterConfiguration", {
logger.logger.info("OnTestSuiteBegin:ReporterConfiguration", {
baseId: this._baseId,

@@ -272,4 +281,11 @@ runTitle: this._runTitle,

});
mixpanelAPI.mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.begin`, {
baseId: this._baseId,
runTitle: this._runTitle,
upload: this._upload,
hasFilter: !!this._filter
});
if (!this._apiKey) {
this._logger.info("OnTestSuiteBegin:NoApiKey");
logger.logger.info("OnTestSuiteBegin:NoApiKey");
mixpanelAPI.mixpanelAPI.trackEvent(`${this._runner.name}.no-api-key`);
return;

@@ -284,8 +300,8 @@ }

async _startTestRunShard() {
this._logger.info("StartTestRunShard:Started");
let metadata$1 = {};
logger.logger.info("StartTestRunShard:Started");
let metadata = {};
try {
metadata$1 = await metadata.source.init();
metadata = await source.init();
} catch (e) {
this._logger.error("StartTestRunShard:InitMetadataFailed", {
logger.logger.error("StartTestRunShard:InitMetadataFailed", {
errorMessage: getErrorMessage(e)

@@ -298,19 +314,19 @@ });

runnerVersion: this._runner.version,
repository: metadata$1.source?.repository ?? null,
repository: metadata.source?.repository ?? null,
title: this._runTitle ?? null,
mode: REPLAY_METADATA_TEST_RUN_MODE ?? RECORD_REPLAY_METADATA_TEST_RUN_MODE ?? null,
branch: metadata$1.source?.branch ?? null,
pullRequestId: metadata$1.source?.merge?.id ?? null,
pullRequestTitle: metadata$1.source?.merge?.title ?? null,
commitId: metadata$1.source?.commit?.id ?? null,
commitTitle: metadata$1.source?.commit?.title ?? null,
commitUser: metadata$1.source?.commit?.user ?? null,
triggerUrl: metadata$1.source?.trigger?.url ?? null,
triggerUser: metadata$1.source?.trigger?.user ?? null,
triggerReason: metadata$1.source?.trigger?.workflow ?? null
branch: metadata.source?.branch ?? null,
pullRequestId: metadata.source?.merge?.id ?? null,
pullRequestTitle: metadata.source?.merge?.title ?? null,
commitId: metadata.source?.commit?.id ?? null,
commitTitle: metadata.source?.commit?.title ?? null,
commitUser: metadata.source?.commit?.user ?? null,
triggerUrl: metadata.source?.trigger?.url ?? null,
triggerUser: metadata.source?.trigger?.user ?? null,
triggerReason: metadata.source?.trigger?.workflow ?? null
};
this._logger.info("StartTestRunShard:WillCreateShard", { baseId: this._baseId });
logger.logger.info("StartTestRunShard:WillCreateShard", { baseId: this._baseId });
try {
return replay.exponentialBackoffRetry(async () => {
const resp = await replay.query(
return retryOnFailure.retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL.queryGraphQL(
"CreateTestRunShard",

@@ -337,3 +353,3 @@ `

type: "test-run",
error: createGraphqlError("CreateTestRunShard", resp.errors, this._logger)
error: createGraphqlError("CreateTestRunShard", resp.errors)
};

@@ -348,3 +364,3 @@ }

}
this._logger.info("StartTestRunShard:CreatedShard", {
logger.logger.info("StartTestRunShard:CreatedShard", {
testRunShardId,

@@ -361,3 +377,3 @@ baseId: this._baseId

} catch (e) {
this._logger.error("StartTestRunShardFailed", {
logger.logger.error("StartTestRunShardFailed", {
errorMessage: getErrorMessage(e)

@@ -372,3 +388,3 @@ });

async _addTestsToShard(tests) {
this._logger.info("AddTestsToSharded", { testsLength: tests.length });
logger.logger.info("AddTestsToSharded", { testsLength: tests.length });
let testRunShardId = this._testRunShardId;

@@ -382,3 +398,3 @@ if (!testRunShardId) {

}
this._logger.info("AddTestsToShard:WillAddTests", {
logger.logger.info("AddTestsToShard:WillAddTests", {
testsLength: tests.length,

@@ -388,4 +404,4 @@ testRunShardId

try {
await replay.exponentialBackoffRetry(async () => {
const resp = await replay.query(
await retryOnFailure.retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL.queryGraphQL(
"AddTestsToShard",

@@ -411,7 +427,7 @@ `

type: "test-run-tests",
error: createGraphqlError("AddTestsToShard", resp.errors, this._logger)
error: createGraphqlError("AddTestsToShard", resp.errors)
};
}
});
this._logger.info("AddTestsToShard:AddedTests", { testRunShardId });
logger.logger.info("AddTestsToShard:AddedTests", { testRunShardId });
return {

@@ -421,3 +437,3 @@ type: "test-run-tests"

} catch (e) {
this._logger.error("AddTestsToShard:Failed", { errorMessage: getErrorMessage(e) });
logger.logger.error("AddTestsToShard:Failed", { errorMessage: getErrorMessage(e) });
return {

@@ -430,3 +446,3 @@ type: "test-run-tests",

async _completeTestRunShard() {
this._logger.info("CompleteTestRunShard:Started");
logger.logger.info("CompleteTestRunShard:Started");
let testRunShardId = this._testRunShardId;

@@ -440,6 +456,6 @@ if (!testRunShardId) {

}
this._logger.info("CompleteTestRunShard:WillMarkCompleted", { testRunShardId });
logger.logger.info("CompleteTestRunShard:WillMarkCompleted", { testRunShardId });
try {
await replay.exponentialBackoffRetry(async () => {
const resp = await replay.query(
await retryOnFailure.retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL.queryGraphQL(
"CompleteTestRunShard",

@@ -463,7 +479,7 @@ `

type: "test-run",
error: createGraphqlError("CompleteTestRunShard", resp.errors, this._logger)
error: createGraphqlError("CompleteTestRunShard", resp.errors)
};
}
});
this._logger.info("CompleteTestRunShard:MarkedComplete", { testRunShardId });
logger.logger.info("CompleteTestRunShard:MarkedComplete", { testRunShardId });
return {

@@ -475,3 +491,3 @@ type: "test-run",

} catch (e) {
this._logger.error("CompleteTestRunShard:Failed", {
logger.logger.error("CompleteTestRunShard:Failed", {
errorMessage: getErrorMessage(e),

@@ -486,4 +502,4 @@ testRunShardId

}
onTestBegin(testExecutionId, metadataFilePath = metadata$1.getMetadataFilePath("REPLAY_TEST", 0)) {
this._logger.info("OnTestBegin:Started", { testExecutionId });
onTestBegin(testExecutionId, metadataFilePath = metadata.getMetadataFilePath("REPLAY_TEST", 0)) {
logger.logger.info("OnTestBegin:Started", { testExecutionId });
this._errors = [];

@@ -496,3 +512,3 @@ const metadata = {

};
this._logger.info("OnTestBegin:WillWriteMetadata", { metadataFilePath, metadata });
logger.logger.info("OnTestBegin:WillWriteMetadata", { metadataFilePath, metadata });
try {

@@ -502,3 +518,3 @@ fs.mkdirSync(path.dirname(metadataFilePath), { recursive: true });

} catch (e) {
this._logger.error("OnTestBegin:InitReplayMetadataFailed", {
logger.logger.error("OnTestBegin:InitReplayMetadataFailed", {
errorMessage: getErrorMessage(e)

@@ -515,5 +531,5 @@ });

}) {
this._logger.info("OnTestEnd:Started", { specFile });
logger.logger.info("OnTestEnd:Started", { specFile });
if (tests.length === 0) {
this._logger.info("OnTestEnd:NoTestsFound", { specFile });
logger.logger.info("OnTestEnd:NoTestsFound", { specFile });
return;

@@ -527,3 +543,3 @@ }

if (this._uploadedRecordings.has(recording.id)) {
this._logger.info("UploadRecording:AlreadyScheduled", {
logger.logger.info("UploadRecording:AlreadyScheduled", {
recordingId: recording.id

@@ -534,5 +550,5 @@ });

this._uploadedRecordings.add(recording.id);
this._logger.info("UploadRecording:Started", { recordingId: recording.id });
logger.logger.info("UploadRecording:Started", { recordingId: recording.id });
try {
await replay.uploadRecording(recording.id, {
await main.uploadRecording(recording.id, {
apiKey: this._apiKey,

@@ -546,4 +562,4 @@ // Per TT-941, we want to throw on any error so it can be caught below

});
this._logger.info("UploadRecording:Succeeded", { recording: recording.id });
const recordings = replay.listAllRecordings({ filter: (r) => r.id === recording.id, all: true });
logger.logger.info("UploadRecording:Succeeded", { recording: recording.id });
const recordings = main.listAllRecordings({ filter: (r) => r.id === recording.id, all: true });
return {

@@ -554,3 +570,3 @@ type: "upload",

} catch (e) {
this._logger.error("UploadRecording:Failed", {
logger.logger.error("UploadRecording:Failed", {
errorMessage: getErrorMessage(e),

@@ -572,7 +588,7 @@ recordingId: recording.id,

])} and $not($exists($v.metadata.test)) }`;
const recordings = replay.listAllRecordings({
const recordings = main.listAllRecordings({
all: false,
filter
});
this._logger.info("GetRecordingsForTest:FoundRecordings", {
logger.logger.info("GetRecordingsForTest:FoundRecordings", {
recoridngsLength: recordings.length,

@@ -614,7 +630,7 @@ filter

async _setRecordingMetadata(recordings, testRun, replayTitle, extraMetadata) {
this._logger.info("SetRecordingMetadata:Started", {
logger.logger.info("SetRecordingMetadata:Started", {
recordingIds: recordings.map((r) => r.id),
errorLength: this._errors.length
});
const validatedTestMetadata = metadata.test.init({
const validatedTestMetadata = index.init({
...testRun,

@@ -629,3 +645,3 @@ schemaVersion: this._schemaVersion

try {
const validatedSourceMetadata = await metadata.source.init();
const validatedSourceMetadata = await source.init();
mergedMetadata = {

@@ -636,8 +652,8 @@ ...mergedMetadata,

} catch (e) {
this._logger.error("SetRecordingMetadata:GenerateSourceMetadataFailed", {
logger.logger.error("SetRecordingMetadata:GenerateSourceMetadataFailed", {
errorMessage: getErrorMessage(e)
});
}
recordings.forEach((rec) => metadata.add(rec.id, mergedMetadata));
const allRecordings = replay.listAllRecordings({ all: true });
recordings.forEach((rec) => index$1.add(rec.id, mergedMetadata));
const allRecordings = main.listAllRecordings({ all: true });
return allRecordings.filter(

@@ -674,3 +690,3 @@ (recordingWithMetadata) => recordings.some((r) => r.id === recordingWithMetadata.id)

} else {
this._logger.info("EnqueuePostTestWork:WillSkipAddTests");
logger.logger.info("EnqueuePostTestWork:WillSkipAddTests");
}

@@ -719,3 +735,3 @@ const testRun = this._buildTestMetadata(tests, specFile);

} catch (e) {
this._logger.error("EnqueuePostTestWork:Failed");
logger.logger.error("EnqueuePostTestWork:Failed");
return {

@@ -850,108 +866,115 @@ type: "post-test",

async onEnd() {
try {
this._logger.info("OnEnd:Started");
await this._cacheAuthIdsPromise?.catch((e) => {
this._logger.error("OnEnd:AddingLoggerAuthFailed", {
errorMessage: getErrorMessage(e)
});
logger.logger.info("OnEnd:Started");
mixpanelAPI.mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.ending`, {
numPendingWork: this._pendingWork.length
});
await this._cacheAuthIdsPromise?.catch((e) => {
logger.logger.error("OnEnd:AddingLoggerAuthFailed", {
errorMessage: getErrorMessage(e)
});
const output = [];
let completedWork = [];
if (this._pendingWork.length) {
logging.log("\u{1F551} Completing some outstanding work ...");
}
while (this._pendingWork.length) {
const pendingWork = this._pendingWork;
this._logger.info("OnEnd:PendingWork", { pendingWorkLength: pendingWork.length });
this._pendingWork = [];
completedWork.push(...await Promise.allSettled(pendingWork));
}
if (this._apiKey) {
const postSettledWork = await Promise.allSettled([this._completeTestRunShard()]);
completedWork.push(...postSettledWork);
});
const output = [];
let completedWork = [];
if (this._pendingWork.length) {
logging.log("\u{1F551} Completing some outstanding work ...");
}
while (this._pendingWork.length) {
const pendingWork = this._pendingWork;
logger.logger.info("OnEnd:PendingWork", { pendingWorkLength: pendingWork.length });
this._pendingWork = [];
completedWork.push(...await Promise.allSettled(pendingWork));
}
if (this._apiKey) {
const postSettledWork = await Promise.allSettled([this._completeTestRunShard()]);
completedWork.push(...postSettledWork);
} else {
logger.logger.info("OnEnd:WillSkipCompletingTestRun");
}
const failures = completedWork.filter((r) => r.status === "rejected");
if (failures.length > 0) {
output.push("Encountered unexpected errors while processing replays");
failures.forEach((f) => output.push(` ${f.reason}`));
}
const results = completedWork.map((r) => r.status === "fulfilled" && r.value).filter((r) => !!r);
const errors = {
"post-test": [],
"test-run": [],
"test-run-tests": [],
upload: []
};
let uploads = [];
for (const r of results) {
if ("error" in r) {
errors[r.type].push(r);
} else {
this._logger.info("OnEnd:WillSkipCompletingTestRun");
}
const failures = completedWork.filter((r) => r.status === "rejected");
if (failures.length > 0) {
output.push("Encountered unexpected errors while processing replays");
failures.forEach((f) => output.push(` ${f.reason}`));
}
const results = completedWork.map((r) => r.status === "fulfilled" && r.value).filter((r) => !!r);
const errors = {
"post-test": [],
"test-run": [],
"test-run-tests": [],
upload: []
};
let uploads = [];
for (const r of results) {
if ("error" in r) {
errors[r.type].push(r);
} else {
if (r.type === "upload") {
uploads.push(r.recording);
}
if (r.type === "upload") {
uploads.push(r.recording);
}
}
if (errors["post-test"].length > 0) {
output.push(`
}
if (errors["post-test"].length > 0) {
output.push(`
\u274C We encountered some unexpected errors processing your recordings`);
output.push(...logPendingWorkErrors(errors["post-test"]));
}
if (errors["test-run-tests"].length > 0 || errors["test-run"].length > 0) {
output.push("\n\u274C We encountered some unexpected errors creating your tests on replay.io");
output.push(...logPendingWorkErrors(errors["test-run-tests"]));
output.push(...logPendingWorkErrors(errors["test-run"]));
}
if (errors["upload"].length > 0) {
output.push(`
output.push(...logPendingWorkErrors(errors["post-test"]));
}
if (errors["test-run-tests"].length > 0 || errors["test-run"].length > 0) {
output.push("\n\u274C We encountered some unexpected errors creating your tests on replay.io");
output.push(...logPendingWorkErrors(errors["test-run-tests"]));
output.push(...logPendingWorkErrors(errors["test-run"]));
}
if (errors["upload"].length > 0) {
output.push(`
\u274C Failed to upload ${errors["upload"].length} recordings:
`);
errors["upload"].forEach((err) => {
if ("recording" in err) {
const r = err.recording;
output.push(` ${r.metadata.title || "Unknown"}`);
output.push(` ${getErrorMessage(err.error)}
errors["upload"].forEach((err) => {
if ("recording" in err) {
const r = err.recording;
output.push(` ${r.metadata.title || "Unknown"}`);
output.push(` ${getErrorMessage(err.error)}
`);
}
});
}
});
}
let numCrashed = 0;
let numUploaded = 0;
if (uploads.length > 0) {
const recordingIds = uploads.map((u) => u.recordingId).filter(isNonNullable);
for (const recordingId of recordingIds) {
main.removeRecording(recordingId);
}
if (uploads.length > 0) {
const recordingIds = uploads.map((u) => u.recordingId).filter(isNonNullable);
for (const recordingId of recordingIds) {
replay.removeRecording(recordingId);
}
const uploaded = uploads.filter((u) => u.status === "uploaded");
const crashed = uploads.filter((u) => u.status === "crashUploaded");
if (uploaded.length > 0) {
output.push(`
const uploaded = uploads.filter((u) => u.status === "uploaded");
const crashed = uploads.filter((u) => u.status === "crashUploaded");
numCrashed = crashed.length;
numUploaded = uploaded.length;
if (uploaded.length > 0) {
output.push(`
\u{1F680} Successfully uploaded ${uploads.length} recordings:
`);
const sortedUploads = sortRecordingsByResult(uploads);
sortedUploads.forEach((r) => {
output.push(
` ${getTestResultEmoji(r)} ${r.metadata.title || "Unknown"}`
);
output.push(
` ${process.env.REPLAY_VIEW_HOST || "https://app.replay.io"}/recording/${r.id}
`
);
});
}
if (crashed.length > 0) {
const sortedUploads = sortRecordingsByResult(uploads);
sortedUploads.forEach((r) => {
output.push(
`
\u2757\uFE0F ${crashed.length} crash reports were generated for tests that crashed while recording.
` ${getTestResultEmoji(r)} ${r.metadata.title || "Unknown"}`
);
output.push(
` ${process.env.REPLAY_VIEW_HOST || "https://app.replay.io"}/recording/${r.id}
`
);
output.push(` The Replay team has been notified.`);
}
});
}
logging.log(output.join("\n"));
return results;
} finally {
await this._logger.close().catch(() => {
});
if (crashed.length > 0) {
output.push(
`
\u2757\uFE0F ${crashed.length} crash reports were generated for tests that crashed while recording.
`
);
output.push(` The Replay team has been notified.`);
}
}
mixpanelAPI.mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.results`, {
errors,
numCrashed,
numUploaded
});
logging.log(output.join("\n"));
return results;
}

@@ -958,0 +981,0 @@ }

{
"name": "@replayio/test-utils",
"version": "0.0.0-pr549-20240625155921",
"version": "0.0.0-pr577-20240701135511",
"description": "Utilities for recording tests with replay.io",

@@ -16,3 +16,3 @@ "main": "./dist/index.js",

"prepare": "yarn run build",
"build": "rm -rf dist/ tsconfig.tsbuildinfo && tsc",
"build": "pkg-build",
"test": "echo \"Error: no test specified\"",

@@ -32,13 +32,22 @@ "typecheck": "tsc --noEmit"

"dependencies": {
"@replayio/replay": "^0.0.0-pr549-20240625155921",
"debug": "^4.3.4",
"fs-extra": "^11.2.0",
"jsonata": "^1.8.6",
"launchdarkly-node-client-sdk": "^3.2.1",
"mixpanel": "^0.18.0",
"node-fetch": "^2.6.7",
"p-map": "^4.0.0",
"query-registry": "^2.6.0",
"semver": "^7.5.4",
"sha-1": "^1.0.0",
"superstruct": "^1.0.4",
"undici": "^5.28.4",
"uuid": "^8.3.2",
"winston": "^3.13.0",
"winston-loki": "^6.1.2"
"winston-loki": "^6.1.2",
"ws": "^7.5.0"
},
"devDependencies": {
"@replay-cli/pkg-build": "^0.0.0",
"@replay-cli/shared": "^0.0.0",
"@replay-cli/tsconfig": "^0.0.0",

@@ -45,0 +54,0 @@ "@types/debug": "^4.1.7",

@@ -1,11 +0,13 @@

import ReplayReporter from "./reporter";
export type { TestMetadataV1, TestMetadataV2, ReplayReporterConfig, PendingWork } from "./reporter";
export { buildTestId } from "./testId";
export type { TestMetadataV1, TestMetadataV2 } from "./legacy-cli/metadata/test";
export { fetchWorkspaceConfig } from "./config";
export { getAccessToken } from "./getAccessToken";
export * from "./logging";
export { getMetadataFilePath, initMetadataFile } from "./metadata";
export { pingTestMetrics } from "./metrics";
export { ReporterError } from "./reporter";
export { pingTestMetrics } from "./metrics";
export type { PendingWork } from "./reporter";
export { removeAnsiCodes } from "./terminal";
export { fetchWorkspaceConfig } from "./config";
export * from "./logging";
export { buildTestId } from "./testId";
export type { RecordingEntry, ReplayReporterConfig } from "./types";
export { ReplayReporter };
export { getMetadataFilePath, initMetadataFile } from "./metadata";
import ReplayReporter from "./reporter";

@@ -1,4 +0,3 @@

import { getDirectory } from "@replayio/replay/utils";
import { getReplayPath } from "@replay-cli/shared/getReplayPath";
import { existsSync, writeFileSync } from "fs";
import path from "path";
import { warn } from "./logging";

@@ -9,3 +8,3 @@

process.env.RECORD_REPLAY_METADATA_FILE ||
path.join(getDirectory(), `${base.toUpperCase()}_METADATA_${workerIndex}`)
getReplayPath(`${base.toUpperCase()}_METADATA_${workerIndex}`)
);

@@ -12,0 +11,0 @@ }

import dbg from "debug";
import fetch from "node-fetch";
import os from "os";
import fetch from "node-fetch";
import { TestMetadataV2 } from "@replayio/replay/metadata/test/v2";
import { TestMetadataV2 } from "./legacy-cli/metadata/test/v2";

@@ -6,0 +6,0 @@ const debug = dbg("replay:test-utils:metrics");

@@ -1,13 +0,9 @@

import {
RecordingEntry,
exponentialBackoffRetry,
listAllRecordings,
query,
removeRecording,
uploadRecording,
} from "@replayio/replay";
import { add, source as sourceMetadata, test as testMetadata } from "@replayio/replay/metadata";
import type { TestMetadataV1, TestMetadataV2 } from "@replayio/replay/metadata/test";
import { retryWithExponentialBackoff } from "@replay-cli/shared/async/retryOnFailure";
import { getAuthInfo } from "@replay-cli/shared/graphql/getAuthInfo";
import { queryGraphQL } from "@replay-cli/shared/graphql/queryGraphQL";
import { logger } from "@replay-cli/shared/logger";
import { Properties, mixpanelAPI } from "@replay-cli/shared/mixpanel/mixpanelAPI";
import { UnstructuredMetadata } from "@replay-cli/shared/recording/types";
import { setUserAgent } from "@replay-cli/shared/userAgent";
import { spawnSync } from "child_process";
import dbg from "debug";
import { mkdirSync, writeFileSync } from "fs";

@@ -17,9 +13,12 @@ import assert from "node:assert/strict";

import { v4 as uuid } from "uuid";
import { UnstructuredMetadata } from "@replayio/replay";
import { log, warn } from "./logging";
import * as pkgJson from "../package.json";
import { getAccessToken } from "./getAccessToken";
import { listAllRecordings, removeRecording, uploadRecording } from "./legacy-cli";
import { add, source as sourceMetadata, test as testMetadata } from "./legacy-cli/metadata";
import type { TestMetadataV2 } from "./legacy-cli/metadata/test";
import { log } from "./logging";
import { getMetadataFilePath } from "./metadata";
import { pingTestMetrics } from "./metrics";
import { buildTestId, generateOpaqueId } from "./testId";
import { Logger, getAuthIds } from "./logger";
import { RecordingEntry, ReplayReporterConfig, UploadStatusThreshold } from "./types";

@@ -44,18 +43,4 @@ function last<T>(arr: T[]): T | undefined {

export type UploadStatusThreshold = "all" | "failed-and-flaky" | "failed";
type UploadStatusThresholdInternal = UploadStatusThreshold | "none";
export type UploadOption =
| boolean
| {
/**
* Minimize the number of recordings uploaded for a test attempt (within a shard).
* e.g. Only one recording would be uploaded for a failing test attempt, regardless of retries.
* e.g. Two recordings would be uploaded for a flaky test attempt (the passing test and one of the failures).
*/
minimizeUploads?: boolean;
statusThreshold?: UploadStatusThreshold;
};
interface UploadableTestExecutionResult<TRecordingMetadata extends UnstructuredMetadata> {

@@ -79,14 +64,2 @@ executionGroupId: string;

export interface ReplayReporterConfig<
TRecordingMetadata extends UnstructuredMetadata = UnstructuredMetadata
> {
runTitle?: string;
metadata?: Record<string, any> | string;
metadataKey?: string;
upload?: UploadOption;
apiKey?: string;
/** @deprecated Use `upload.minimizeUploads` and `upload.statusThreshold` instead */
filter?: (r: RecordingEntry<TRecordingMetadata>) => boolean;
}
export interface TestRunner {

@@ -98,7 +71,7 @@ name: string;

type UserActionEvent = TestMetadataV2.UserActionEvent;
type Test = TestMetadataV2.Test;
type TestResult = TestMetadataV2.TestResult;
type TestError = TestMetadataV2.TestError;
type TestRun = TestMetadataV2.TestRun;
export type UserActionEvent = TestMetadataV2.UserActionEvent;
export type Test = TestMetadataV2.Test;
export type TestResult = TestMetadataV2.TestResult;
export type TestError = TestMetadataV2.TestError;
export type TestRun = TestMetadataV2.TestRun;

@@ -188,3 +161,3 @@ type PendingWorkType = "test-run" | "test-run-tests" | "post-test" | "upload";

function createGraphqlError(operation: string, errors: any, logger: Logger) {
function createGraphqlError(operation: string, errors: any) {
const errorMessages = errors.map(getErrorMessage);

@@ -251,3 +224,5 @@ logger.error("GraphQlOperationFailed", { operation, errors: errors.map(getErrorMessage) });

class ReplayReporter<TRecordingMetadata extends UnstructuredMetadata = UnstructuredMetadata> {
export default class ReplayReporter<
TRecordingMetadata extends UnstructuredMetadata = UnstructuredMetadata
> {
private _baseId = sourceMetadata.getTestRunIdFromEnvironment(process.env) || uuid();

@@ -269,3 +244,2 @@ private _testRunShardId: string | null = null;

private _cacheAuthIdsPromise: Promise<void> | null = null;
private _logger: Logger;
private _uploadedRecordings = new Set<string>();

@@ -278,5 +252,5 @@

) {
setUserAgent(`${pkgJson.name}/${pkgJson.version}`);
this._runner = runner;
this._schemaVersion = schemaVersion;
this._logger = new Logger(this._runner.name);

@@ -289,9 +263,9 @@ if (config) {

if (this._apiKey) {
this._cacheAuthIdsPromise = getAuthIds(this._apiKey)
.then(ids => {
this._logger.identify(ids);
this._logger.info("ReplayReporter:LoggerIdentificationAdded");
this._cacheAuthIdsPromise = getAuthInfo(this._apiKey)
.then(authInfo => {
logger.identify(authInfo);
logger.info("ReplayReporter:LoggerIdentificationAdded");
})
.catch(e =>
this._logger.info("ReplayReporter:LoggerIdentificationFailed", {
logger.info("ReplayReporter:LoggerIdentificationFailed", {
errorMessage: getErrorMessage(e),

@@ -374,3 +348,3 @@ })

) {
this._apiKey = config.apiKey || process.env.REPLAY_API_KEY || process.env.RECORD_REPLAY_API_KEY;
this._apiKey = getAccessToken(config);
this._upload = "upload" in config ? !!config.upload : !!process.env.REPLAY_UPLOAD;

@@ -437,7 +411,9 @@ if (this._upload && !this._apiKey) {

addError(err: Error | ReporterError) {
if (err.name === "ReporterError") {
this._errors.push(err as ReporterError);
addError(error: Error | ReporterError, context?: Properties) {
mixpanelAPI.trackEvent(`${this._runner.name}.error.${error.name}`, { context, error });
if (error.name === "ReporterError") {
this._errors.push(error as ReporterError);
} else {
this._errors.push(new ReporterError(-1, "Unexpected error", err));
this._errors.push(new ReporterError(-1, "Unexpected error", error));
}

@@ -451,2 +427,4 @@ }

};
mixpanelAPI.appendAdditionalProperties({ baseMetadata: this._baseMetadata });
}

@@ -459,3 +437,3 @@

this._logger.info("OnTestSuiteBegin:ReporterConfiguration", {
logger.info("OnTestSuiteBegin:ReporterConfiguration", {
baseId: this._baseId,

@@ -470,4 +448,14 @@ runTitle: this._runTitle,

mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.begin`, {
baseId: this._baseId,
runTitle: this._runTitle,
upload: this._upload,
hasFilter: !!this._filter,
});
if (!this._apiKey) {
this._logger.info("OnTestSuiteBegin:NoApiKey");
logger.info("OnTestSuiteBegin:NoApiKey");
mixpanelAPI.trackEvent(`${this._runner.name}.no-api-key`);
return;

@@ -485,3 +473,3 @@ }

private async _startTestRunShard(): Promise<TestRunPendingWork> {
this._logger.info("StartTestRunShard:Started");
logger.info("StartTestRunShard:Started");

@@ -492,3 +480,3 @@ let metadata: any = {};

} catch (e) {
this._logger.error("StartTestRunShard:InitMetadataFailed", {
logger.error("StartTestRunShard:InitMetadataFailed", {
errorMessage: getErrorMessage(e),

@@ -517,7 +505,7 @@ });

this._logger.info("StartTestRunShard:WillCreateShard", { baseId: this._baseId });
logger.info("StartTestRunShard:WillCreateShard", { baseId: this._baseId });
try {
return exponentialBackoffRetry(async () => {
const resp = await query(
return retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL(
"CreateTestRunShard",

@@ -545,3 +533,3 @@ `

type: "test-run",
error: createGraphqlError("CreateTestRunShard", resp.errors, this._logger),
error: createGraphqlError("CreateTestRunShard", resp.errors),
};

@@ -559,3 +547,3 @@ }

this._logger.info("StartTestRunShard:CreatedShard", {
logger.info("StartTestRunShard:CreatedShard", {
testRunShardId,

@@ -573,3 +561,3 @@ baseId: this._baseId,

} catch (e) {
this._logger.error("StartTestRunShardFailed", {
logger.error("StartTestRunShardFailed", {
errorMessage: getErrorMessage(e),

@@ -588,3 +576,3 @@ });

): Promise<TestRunTestsPendingWork | undefined> {
this._logger.info("AddTestsToSharded", { testsLength: tests.length });
logger.info("AddTestsToSharded", { testsLength: tests.length });

@@ -599,3 +587,3 @@ let testRunShardId = this._testRunShardId;

}
this._logger.info("AddTestsToShard:WillAddTests", {
logger.info("AddTestsToShard:WillAddTests", {
testsLength: tests.length,

@@ -606,4 +594,4 @@ testRunShardId,

try {
await exponentialBackoffRetry(async () => {
const resp = await query(
await retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL(
"AddTestsToShard",

@@ -630,3 +618,3 @@ `

type: "test-run-tests",
error: createGraphqlError("AddTestsToShard", resp.errors, this._logger),
error: createGraphqlError("AddTestsToShard", resp.errors),
};

@@ -636,3 +624,3 @@ }

this._logger.info("AddTestsToShard:AddedTests", { testRunShardId });
logger.info("AddTestsToShard:AddedTests", { testRunShardId });

@@ -643,3 +631,3 @@ return {

} catch (e) {
this._logger.error("AddTestsToShard:Failed", { errorMessage: getErrorMessage(e) });
logger.error("AddTestsToShard:Failed", { errorMessage: getErrorMessage(e) });
return {

@@ -653,3 +641,3 @@ type: "test-run-tests",

private async _completeTestRunShard(): Promise<TestRunPendingWork | undefined> {
this._logger.info("CompleteTestRunShard:Started");
logger.info("CompleteTestRunShard:Started");

@@ -665,7 +653,7 @@ let testRunShardId = this._testRunShardId;

this._logger.info("CompleteTestRunShard:WillMarkCompleted", { testRunShardId });
logger.info("CompleteTestRunShard:WillMarkCompleted", { testRunShardId });
try {
await exponentialBackoffRetry(async () => {
const resp = await query(
await retryWithExponentialBackoff(async () => {
const resp = await queryGraphQL(
"CompleteTestRunShard",

@@ -690,3 +678,3 @@ `

type: "test-run",
error: createGraphqlError("CompleteTestRunShard", resp.errors, this._logger),
error: createGraphqlError("CompleteTestRunShard", resp.errors),
};

@@ -696,3 +684,3 @@ }

this._logger.info("CompleteTestRunShard:MarkedComplete", { testRunShardId });
logger.info("CompleteTestRunShard:MarkedComplete", { testRunShardId });

@@ -705,3 +693,3 @@ return {

} catch (e) {
this._logger.error("CompleteTestRunShard:Failed", {
logger.error("CompleteTestRunShard:Failed", {
errorMessage: getErrorMessage(e),

@@ -718,3 +706,3 @@ testRunShardId,

onTestBegin(testExecutionId?: string, metadataFilePath = getMetadataFilePath("REPLAY_TEST", 0)) {
this._logger.info("OnTestBegin:Started", { testExecutionId });
logger.info("OnTestBegin:Started", { testExecutionId });

@@ -729,3 +717,3 @@ this._errors = [];

this._logger.info("OnTestBegin:WillWriteMetadata", { metadataFilePath, metadata });
logger.info("OnTestBegin:WillWriteMetadata", { metadataFilePath, metadata });

@@ -736,3 +724,3 @@ try {

} catch (e) {
this._logger.error("OnTestBegin:InitReplayMetadataFailed", {
logger.error("OnTestBegin:InitReplayMetadataFailed", {
errorMessage: getErrorMessage(e),

@@ -756,3 +744,3 @@ });

}) {
this._logger.info("OnTestEnd:Started", { specFile });
logger.info("OnTestEnd:Started", { specFile });

@@ -762,3 +750,3 @@ // if we bailed building test metadata because of a crash or because no

if (tests.length === 0) {
this._logger.info("OnTestEnd:NoTestsFound", { specFile });
logger.info("OnTestEnd:NoTestsFound", { specFile });
return;

@@ -777,3 +765,3 @@ }

if (this._uploadedRecordings.has(recording.id)) {
this._logger.info("UploadRecording:AlreadyScheduled", {
logger.info("UploadRecording:AlreadyScheduled", {
recordingId: recording.id,

@@ -784,3 +772,3 @@ });

this._uploadedRecordings.add(recording.id);
this._logger.info("UploadRecording:Started", { recordingId: recording.id });
logger.info("UploadRecording:Started", { recordingId: recording.id });

@@ -798,3 +786,3 @@ try {

this._logger.info("UploadRecording:Succeeded", { recording: recording.id });
logger.info("UploadRecording:Succeeded", { recording: recording.id });

@@ -808,3 +796,3 @@ const recordings = listAllRecordings({ filter: r => r.id === recording.id, all: true });

} catch (e) {
this._logger.error("UploadRecording:Failed", {
logger.error("UploadRecording:Failed", {
errorMessage: getErrorMessage(e),

@@ -833,3 +821,3 @@ recordingId: recording.id,

this._logger.info("GetRecordingsForTest:FoundRecordings", {
logger.info("GetRecordingsForTest:FoundRecordings", {
recoridngsLength: recordings.length,

@@ -881,3 +869,3 @@ filter,

) {
this._logger.info("SetRecordingMetadata:Started", {
logger.info("SetRecordingMetadata:Started", {
recordingIds: recordings.map(r => r.id),

@@ -907,3 +895,3 @@ errorLength: this._errors.length,

} catch (e) {
this._logger.error("SetRecordingMetadata:GenerateSourceMetadataFailed", {
logger.error("SetRecordingMetadata:GenerateSourceMetadataFailed", {
errorMessage: getErrorMessage(e),

@@ -958,3 +946,3 @@ });

} else {
this._logger.info("EnqueuePostTestWork:WillSkipAddTests");
logger.info("EnqueuePostTestWork:WillSkipAddTests");
}

@@ -1008,3 +996,3 @@

} catch (e) {
this._logger.error("EnqueuePostTestWork:Failed");
logger.error("EnqueuePostTestWork:Failed");
return {

@@ -1179,121 +1167,130 @@ type: "post-test",

async onEnd(): Promise<PendingWork[]> {
try {
this._logger.info("OnEnd:Started");
logger.info("OnEnd:Started");
await this._cacheAuthIdsPromise?.catch(e => {
this._logger.error("OnEnd:AddingLoggerAuthFailed", {
errorMessage: getErrorMessage(e),
});
mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.ending`, {
numPendingWork: this._pendingWork.length,
});
await this._cacheAuthIdsPromise?.catch(e => {
logger.error("OnEnd:AddingLoggerAuthFailed", {
errorMessage: getErrorMessage(e),
});
});
const output: string[] = [];
let completedWork: PromiseSettledResult<PendingWork | undefined>[] = [];
const output: string[] = [];
let completedWork: PromiseSettledResult<PendingWork | undefined>[] = [];
if (this._pendingWork.length) {
log("šŸ•‘ Completing some outstanding work ...");
}
if (this._pendingWork.length) {
log("šŸ•‘ Completing some outstanding work ...");
}
while (this._pendingWork.length) {
const pendingWork = this._pendingWork;
this._logger.info("OnEnd:PendingWork", { pendingWorkLength: pendingWork.length });
this._pendingWork = [];
completedWork.push(...(await Promise.allSettled(pendingWork)));
}
while (this._pendingWork.length) {
const pendingWork = this._pendingWork;
logger.info("OnEnd:PendingWork", { pendingWorkLength: pendingWork.length });
this._pendingWork = [];
completedWork.push(...(await Promise.allSettled(pendingWork)));
}
if (this._apiKey) {
const postSettledWork = await Promise.allSettled([this._completeTestRunShard()]);
completedWork.push(...postSettledWork);
} else {
this._logger.info("OnEnd:WillSkipCompletingTestRun");
}
if (this._apiKey) {
const postSettledWork = await Promise.allSettled([this._completeTestRunShard()]);
completedWork.push(...postSettledWork);
} else {
logger.info("OnEnd:WillSkipCompletingTestRun");
}
const failures = completedWork.filter(r => r.status === "rejected");
const failures = completedWork.filter(r => r.status === "rejected");
if (failures.length > 0) {
output.push("Encountered unexpected errors while processing replays");
failures.forEach(f => output.push(` ${f.reason}`));
}
if (failures.length > 0) {
output.push("Encountered unexpected errors while processing replays");
failures.forEach(f => output.push(` ${f.reason}`));
}
const results = completedWork.map(r => r.status === "fulfilled" && r.value).filter(r => !!r);
const results = completedWork.map(r => r.status === "fulfilled" && r.value).filter(r => !!r);
const errors = {
"post-test": [] as Extract<PostTestPendingWork, { error: {} }>[],
"test-run": [] as Extract<TestRunPendingWork, { error: {} }>[],
"test-run-tests": [] as Extract<TestRunTestsPendingWork, { error: {} }>[],
upload: [] as Extract<UploadPendingWork, { error: {} }>[],
};
let uploads: RecordingEntry[] = [];
for (const r of results) {
if ("error" in r) {
errors[r.type].push(r as any);
} else {
if (r.type === "upload") {
uploads.push(r.recording);
}
const errors = {
"post-test": [] as Extract<PostTestPendingWork, { error: {} }>[],
"test-run": [] as Extract<TestRunPendingWork, { error: {} }>[],
"test-run-tests": [] as Extract<TestRunTestsPendingWork, { error: {} }>[],
upload: [] as Extract<UploadPendingWork, { error: {} }>[],
};
let uploads: RecordingEntry[] = [];
for (const r of results) {
if ("error" in r) {
errors[r.type].push(r as any);
} else {
if (r.type === "upload") {
uploads.push(r.recording);
}
}
}
if (errors["post-test"].length > 0) {
output.push(`\nāŒ We encountered some unexpected errors processing your recordings`);
output.push(...logPendingWorkErrors(errors["post-test"]));
}
if (errors["post-test"].length > 0) {
output.push(`\nāŒ We encountered some unexpected errors processing your recordings`);
output.push(...logPendingWorkErrors(errors["post-test"]));
}
if (errors["test-run-tests"].length > 0 || errors["test-run"].length > 0) {
output.push("\nāŒ We encountered some unexpected errors creating your tests on replay.io");
output.push(...logPendingWorkErrors(errors["test-run-tests"]));
output.push(...logPendingWorkErrors(errors["test-run"]));
}
if (errors["test-run-tests"].length > 0 || errors["test-run"].length > 0) {
output.push("\nāŒ We encountered some unexpected errors creating your tests on replay.io");
output.push(...logPendingWorkErrors(errors["test-run-tests"]));
output.push(...logPendingWorkErrors(errors["test-run"]));
}
if (errors["upload"].length > 0) {
output.push(`\nāŒ Failed to upload ${errors["upload"].length} recordings:\n`);
if (errors["upload"].length > 0) {
output.push(`\nāŒ Failed to upload ${errors["upload"].length} recordings:\n`);
errors["upload"].forEach(err => {
if ("recording" in err) {
const r = err.recording;
output.push(` ${(r.metadata.title as string | undefined) || "Unknown"}`);
output.push(` ${getErrorMessage(err.error)}\n`);
}
});
errors["upload"].forEach(err => {
if ("recording" in err) {
const r = err.recording;
output.push(` ${(r.metadata.title as string | undefined) || "Unknown"}`);
output.push(` ${getErrorMessage(err.error)}\n`);
}
});
}
let numCrashed = 0;
let numUploaded = 0;
if (uploads.length > 0) {
const recordingIds = uploads.map(u => u.recordingId).filter(isNonNullable);
for (const recordingId of recordingIds) {
removeRecording(recordingId);
}
if (uploads.length > 0) {
const recordingIds = uploads.map(u => u.recordingId).filter(isNonNullable);
for (const recordingId of recordingIds) {
removeRecording(recordingId);
}
const uploaded = uploads.filter(u => u.status === "uploaded");
const crashed = uploads.filter(u => u.status === "crashUploaded");
const uploaded = uploads.filter(u => u.status === "uploaded");
const crashed = uploads.filter(u => u.status === "crashUploaded");
numCrashed = crashed.length;
numUploaded = uploaded.length;
if (uploaded.length > 0) {
output.push(`\nšŸš€ Successfully uploaded ${uploads.length} recordings:\n`);
const sortedUploads = sortRecordingsByResult(uploads);
sortedUploads.forEach(r => {
output.push(
` ${getTestResultEmoji(r)} ${(r.metadata.title as string | undefined) || "Unknown"}`
);
output.push(
` ${process.env.REPLAY_VIEW_HOST || "https://app.replay.io"}/recording/${r.id}\n`
);
});
}
if (crashed.length > 0) {
if (uploaded.length > 0) {
output.push(`\nšŸš€ Successfully uploaded ${uploads.length} recordings:\n`);
const sortedUploads = sortRecordingsByResult(uploads);
sortedUploads.forEach(r => {
output.push(
`\nā—ļø ${crashed.length} crash reports were generated for tests that crashed while recording.\n`
` ${getTestResultEmoji(r)} ${(r.metadata.title as string | undefined) || "Unknown"}`
);
output.push(` The Replay team has been notified.`);
}
output.push(
` ${process.env.REPLAY_VIEW_HOST || "https://app.replay.io"}/recording/${r.id}\n`
);
});
}
log(output.join("\n"));
if (crashed.length > 0) {
output.push(
`\nā—ļø ${crashed.length} crash reports were generated for tests that crashed while recording.\n`
);
output.push(` The Replay team has been notified.`);
}
}
return results;
} finally {
await this._logger.close().catch(() => {});
}
mixpanelAPI.trackEvent(`${this._runner.name}.test-suite.results`, {
errors,
numCrashed,
numUploaded,
});
log(output.join("\n"));
return results;
}
}
export default ReplayReporter;
export type { Test, TestError, TestMetadataV1, TestMetadataV2, TestResult, UserActionEvent };

@@ -12,4 +12,7 @@ {

"path": "../replay"
},
{
"path": "../shared"
}
]
}

