nx - npm package version comparison

Comparing version 19.6.1 to 19.7.0-beta.0

package.json
{
"name": "nx",
"version": "19.6.1",
"version": "19.7.0-beta.0",
"private": false,

@@ -74,3 +74,3 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"ora": "5.3.0",
"@nrwl/tao": "19.6.1"
"@nrwl/tao": "19.7.0-beta.0"
},

@@ -90,12 +90,12 @@ "peerDependencies": {

"optionalDependencies": {
"@nx/nx-darwin-x64": "19.6.1",
"@nx/nx-darwin-arm64": "19.6.1",
"@nx/nx-linux-x64-gnu": "19.6.1",
"@nx/nx-linux-x64-musl": "19.6.1",
"@nx/nx-win32-x64-msvc": "19.6.1",
"@nx/nx-linux-arm64-gnu": "19.6.1",
"@nx/nx-linux-arm64-musl": "19.6.1",
"@nx/nx-linux-arm-gnueabihf": "19.6.1",
"@nx/nx-win32-arm64-msvc": "19.6.1",
"@nx/nx-freebsd-x64": "19.6.1"
"@nx/nx-darwin-x64": "19.7.0-beta.0",
"@nx/nx-darwin-arm64": "19.7.0-beta.0",
"@nx/nx-linux-x64-gnu": "19.7.0-beta.0",
"@nx/nx-linux-x64-musl": "19.7.0-beta.0",
"@nx/nx-win32-x64-msvc": "19.7.0-beta.0",
"@nx/nx-linux-arm64-gnu": "19.7.0-beta.0",
"@nx/nx-linux-arm64-musl": "19.7.0-beta.0",
"@nx/nx-linux-arm-gnueabihf": "19.7.0-beta.0",
"@nx/nx-win32-arm64-msvc": "19.7.0-beta.0",
"@nx/nx-freebsd-x64": "19.7.0-beta.0"
},

@@ -102,0 +102,0 @@ "nx-migrations": {

@@ -25,3 +25,3 @@ <p style="text-align: center;">

Nx is a build system with built-in tooling and advanced CI capabilities. It helps you maintain and scale monorepos, both locally and on CI.
Nx is a build system, optimized for monorepos, with plugins for popular frameworks and tools and advanced CI capabilities including caching and distribution.

@@ -28,0 +28,0 @@ ## Getting Started

@@ -21,7 +21,3 @@ "use strict";

function addHandler(options) {
if (options.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const isVerbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(isVerbose, async () => {
return (0, params_1.handleErrors)(options.verbose, async () => {
output_1.output.addNewline();

@@ -28,0 +24,0 @@ const [pkgName, version] = parsePackageSpecifier(options.packageSpecifier);

@@ -8,3 +8,3 @@ "use strict";

describe: 'Install a plugin and initialize it.',
builder: (yargs) => yargs
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs)
.parserConfiguration({

@@ -22,6 +22,2 @@ 'strip-dashed': true,

})
.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
})
.example('$0 add @nx/react', 'Install the latest version of the `@nx/react` package and run its `@nx/react:init` generator')

@@ -28,0 +24,0 @@ .example('$0 add non-core-nx-plugin', 'Install the latest version of the `non-core-nx-plugin` package and run its `non-core-nx-plugin:init` generator if available')
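
This hunk shows a pattern repeated throughout this release: the per-command .option('verbose', ...) registration is removed in favor of a shared withVerbose helper imported from ../yargs-utils/shared-options. The helper's implementation is not part of this diff; a minimal sketch of what it plausibly centralizes, inferred from the option definition being deleted above (hypothetical, not the actual nx source):

// Hypothetical sketch only; the real shared-options implementation is not shown in this diff.
import type { Argv } from 'yargs';

export function withVerbose<T>(yargs: Argv<T>) {
  return yargs.option('verbose', {
    type: 'boolean',
    description:
      'Prints additional information about the commands (e.g., stack traces)',
  });
}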

@@ -30,5 +30,2 @@ "use strict";

}, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
await (0, connect_to_nx_cloud_1.connectToNxCloudIfExplicitlyAsked)(nxArgs);

@@ -35,0 +32,0 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)({ exitOnError: true });

@@ -23,5 +23,2 @@ "use strict";

const { nxArgs, overrides } = (0, command_line_utils_1.splitArgsIntoNxArgsAndOverrides)(args, 'run-many', { printWarnings: args.graph !== 'stdout' }, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const scriptArgV = readScriptArgV(overrides);

@@ -28,0 +25,0 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)({ exitOnError: true });

@@ -5,2 +5,3 @@ "use strict";

const path_1 = require("../../utils/path");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsGenerateCommand = {

@@ -19,3 +20,3 @@ command: 'generate <generator> [_..]',

const generatorWillShowHelp = process.argv[3] && !process.argv[3].startsWith('-');
const res = yargs
const res = (0, shared_options_1.withVerbose)(yargs)
.positional('generator', {

@@ -37,6 +38,2 @@ describe: 'Name of the generator (e.g., @nx/js:library, library)',

})
.option('verbose', {
describe: 'Prints additional information about the commands (e.g., stack traces)',
type: 'boolean',
})
.option('quiet', {

@@ -43,0 +40,0 @@ describe: 'Hides logs from tree operations (e.g. `CREATE package.json`)',

@@ -209,7 +209,3 @@ "use strict";

async function generate(cwd, args) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const verbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(verbose, async () => {
return (0, params_1.handleErrors)(args.verbose, async () => {
const nxJsonConfiguration = (0, configuration_1.readNxJson)();

@@ -233,5 +229,5 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();

}
const combinedOpts = await (0, params_1.combineOptionsForGenerator)(opts.generatorOptions, opts.collectionName, normalizedGeneratorName, projectsConfigurations, nxJsonConfiguration, schema, opts.interactive, (0, calculate_default_project_name_1.calculateDefaultProjectName)(cwd, workspace_root_1.workspaceRoot, projectsConfigurations, nxJsonConfiguration), (0, path_1.relative)(workspace_root_1.workspaceRoot, cwd), verbose);
const combinedOpts = await (0, params_1.combineOptionsForGenerator)(opts.generatorOptions, opts.collectionName, normalizedGeneratorName, projectsConfigurations, nxJsonConfiguration, schema, opts.interactive, (0, calculate_default_project_name_1.calculateDefaultProjectName)(cwd, workspace_root_1.workspaceRoot, projectsConfigurations, nxJsonConfiguration), (0, path_1.relative)(workspace_root_1.workspaceRoot, cwd), args.verbose);
if ((0, generator_utils_1.getGeneratorInformation)(opts.collectionName, normalizedGeneratorName, workspace_root_1.workspaceRoot, projectsConfigurations.projects).isNxGenerator) {
const host = new tree_1.FsTree(workspace_root_1.workspaceRoot, verbose, `generating (${opts.collectionName}:${normalizedGeneratorName})`);
const host = new tree_1.FsTree(workspace_root_1.workspaceRoot, args.verbose, `generating (${opts.collectionName}:${normalizedGeneratorName})`);
const implementation = implementationFactory();

@@ -264,5 +260,5 @@ // @todo(v17): Remove this, isStandalonePreset property is defunct.

generatorOptions: combinedOpts,
}, projectsConfigurations.projects, verbose);
}, projectsConfigurations.projects, args.verbose);
}
});
}

@@ -33,3 +33,3 @@ "use strict";

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
return (await Promise.resolve().then(() => require('./import'))).importHandler(args);

@@ -36,0 +36,0 @@ });

@@ -64,3 +64,3 @@ "use strict";

});
await gitClient.commit('chore(repo): prepare for import');
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
needsSquash = true;

@@ -96,3 +96,3 @@ }

}
await gitClient.commit('chore(repo): prepare for import 2');
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
if (needsSquash) {

@@ -99,0 +99,0 @@ await gitClient.squashLastTwoCommits();

@@ -11,2 +11,3 @@ "use strict";

const workspace_root_1 = require("../../utils/workspace-root");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsMigrateCommand = {

@@ -31,3 +32,3 @@ command: 'migrate [packageAndVersion]',

const defaultCommitPrefix = 'chore: [nx migration] ';
return yargs
return (0, shared_options_1.withVerbose)(yargs)
.positional('packageAndVersion', {

@@ -151,3 +152,3 @@ describe: `The target package and version (e.g, @nx/workspace@16.0.0)`,

console.error(`Failed to install the ${version} version of the migration script. Using the current version.`);
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.error(e);

@@ -154,0 +155,0 @@ }

@@ -1043,5 +1043,2 @@ "use strict";

async function migrate(root, args, rawArgs) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
await client_1.daemonClient.stop();

@@ -1048,0 +1045,0 @@ return (0, params_1.handleErrors)(process.env.NX_VERBOSE_LOGGING === 'true', async () => {

@@ -47,5 +47,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -52,0 +49,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -12,3 +12,3 @@ "use strict";

describe: 'Orchestrate versioning and publishing of applications and libraries',
builder: (yargs) => yargs
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs)
.command(releaseCommand)

@@ -41,6 +41,2 @@ .command(versionCommand)

})
.option('verbose', {
type: 'boolean',
describe: 'Prints additional information about the commands (e.g., stack traces)',
})
// NOTE: The camel case format is required for the coerce() function to be called correctly. It still supports --print-config casing.

@@ -47,0 +43,0 @@ .option('printConfig', {

@@ -27,5 +27,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -32,0 +29,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -30,5 +30,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -35,0 +32,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -38,5 +38,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (_args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -118,5 +115,2 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

}
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (args.firstRelease) {

@@ -123,0 +117,0 @@ overrides.firstRelease = args.firstRelease;

@@ -34,5 +34,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const hasVersionGitConfig = Object.keys(userProvidedReleaseConfig.version?.git ?? {}).length > 0;

@@ -39,0 +36,0 @@ const hasChangelogGitConfig = Object.keys(userProvidedReleaseConfig.changelog?.git ?? {}).length > 0;

@@ -47,5 +47,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -52,0 +49,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -5,2 +5,3 @@ "use strict";

const documentation_1 = require("../yargs-utils/documentation");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsRepairCommand = {

@@ -18,7 +19,4 @@ command: 'repair',

`,
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(yargs, 'repair').option('verbose', {
type: 'boolean',
describe: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withVerbose)(yargs), 'repair'),
handler: async (args) => process.exit(await (await Promise.resolve().then(() => require('./repair'))).repair(args)),
};

@@ -9,7 +9,3 @@ "use strict";

async function repair(args, extraMigrations = []) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const verbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(verbose, async () => {
return (0, params_1.handleErrors)(args.verbose, async () => {
const nxMigrations = Object.entries(migrationsJson.generators).reduce((agg, [name, migration]) => {

@@ -29,3 +25,3 @@ const skip = migration['x-repair-skip'];

const migrations = [...nxMigrations, ...extraMigrations];
const migrationsThatMadeNoChanges = await (0, migrate_1.executeMigrations)(process.cwd(), migrations, verbose, false, '');
const migrationsThatMadeNoChanges = await (0, migrate_1.executeMigrations)(process.cwd(), migrations, args.verbose, false, '');
if (migrationsThatMadeNoChanges.length < migrations.length) {

@@ -32,0 +28,0 @@ output_1.output.success({

@@ -31,5 +31,2 @@ "use strict";

}, 'run-one', { printWarnings: args.graph !== 'stdout' }, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (nxArgs.help) {

@@ -36,0 +33,0 @@ await (await Promise.resolve().then(() => require('./run'))).printTargetRunHelp(opts, workspace_root_1.workspaceRoot);

@@ -68,3 +68,3 @@ "use strict";

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
const { showProjectsHandler } = await Promise.resolve().then(() => require('./projects'));

@@ -110,3 +110,3 @@ await showProjectsHandler(args);

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
const { showProjectHandler } = await Promise.resolve().then(() => require('./project'));

@@ -113,0 +113,0 @@ await showProjectHandler(args);

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.yargsSyncCheckCommand = exports.yargsSyncCommand = void 0;
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsSyncCommand = {
command: 'sync',
describe: false,
builder: (yargs) => yargs.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs),
handler: async (args) => {

@@ -18,6 +16,3 @@ process.exit(await Promise.resolve().then(() => require('./sync')).then((m) => m.syncHandler(args)));

describe: false,
builder: (yargs) => yargs.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs),
handler: async (args) => {

@@ -24,0 +19,0 @@ process.exit(await Promise.resolve().then(() => require('./sync')).then((m) => m.syncHandler({ ...args, check: true })));

@@ -11,7 +11,3 @@ "use strict";

function syncHandler(options) {
if (options.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const isVerbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(isVerbose, async () => {
return (0, params_1.handleErrors)(options.verbose, async () => {
const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();

@@ -18,0 +14,0 @@ const syncGenerators = await (0, sync_generators_1.collectAllRegisteredSyncGenerators)(projectGraph);

@@ -15,3 +15,3 @@ "use strict";

function withWatchOptions(yargs) {
return yargs
return (0, shared_options_1.withVerbose)(yargs)
.parserConfiguration({

@@ -18,0 +18,0 @@ 'strip-dashed': true,

@@ -117,5 +117,2 @@ "use strict";

const projectReplacementRegex = new RegExp(args.projectNameEnvName ?? DEFAULT_PROJECT_NAME_ENV, 'g');
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (!client_1.daemonClient.enabled()) {

@@ -122,0 +119,0 @@ output_1.output.error({

@@ -114,5 +114,5 @@ "use strict";

.middleware((args) => {
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
args.verbose ??= process.env.NX_VERBOSE_LOGGING === 'true';
// If NX_VERBOSE_LOGGING=false and --verbose is passed, we want to set it to true favoring the arg
process.env.NX_VERBOSE_LOGGING = args.verbose.toString();
});

@@ -119,0 +119,0 @@ }
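
Combined with the handler changes throughout this diff (handleErrors now receives args.verbose directly, and the scattered "if (args.verbose) process.env.NX_VERBOSE_LOGGING = 'true'" blocks are deleted), this middleware becomes the single place where the --verbose flag and NX_VERBOSE_LOGGING are reconciled. An illustrative sketch of the resulting command shape, assembled from the pieces shown in these hunks; the command name and handler body are placeholders, and the import paths are approximations of the requires shown in the diffs:

// Illustration only; names, body, and exact import paths are placeholders.
import { withVerbose } from '../yargs-utils/shared-options';
import { handleErrors } from '../../utils/params';

const yargsExampleCommand = {
  command: 'example',
  describe: false,
  builder: (yargs: any) => withVerbose(yargs),
  handler: async (args: { verbose?: boolean }) =>
    // args.verbose has already been normalized against NX_VERBOSE_LOGGING by the middleware above.
    handleErrors(args.verbose, async () => {
      // ...actual command implementation
    }),
};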

@@ -6,4 +6,3 @@ import { ChildProcess } from 'child_process';

import { ConfigurationSourceMaps } from '../../project-graph/utils/project-configuration-utils';
import { NxWorkspaceFiles } from '../../native';
import { TaskRun } from '../../utils/task-history';
import { NxWorkspaceFiles, TaskRun } from '../../native';
import type { SyncGeneratorChangesResult } from '../../utils/sync-generators';

@@ -55,6 +54,4 @@ export type UnregisterCallback = () => void;

hashGlob(globs: string[], exclude?: string[]): Promise<string>;
getTaskHistoryForHashes(hashes: string[]): Promise<{
[hash: string]: TaskRun[];
}>;
writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
getFlakyTasks(hashes: string[]): Promise<string[]>;
recordTaskRuns(taskRuns: TaskRun[]): Promise<void>;
getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;

@@ -61,0 +58,0 @@ flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;

@@ -28,2 +28,3 @@ "use strict";

const hash_glob_1 = require("../message-types/hash-glob");
const task_history_1 = require("../message-types/task-history");
const force_shutdown_1 = require("../message-types/force-shutdown");

@@ -239,5 +240,5 @@ const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");

}
getTaskHistoryForHashes(hashes) {
getFlakyTasks(hashes) {
const message = {
type: 'GET_TASK_HISTORY_FOR_HASHES',
type: task_history_1.GET_FLAKY_TASKS,
hashes,

@@ -247,5 +248,5 @@ };

}
writeTaskRunsToHistory(taskRuns) {
recordTaskRuns(taskRuns) {
const message = {
type: 'WRITE_TASK_RUNS_TO_HISTORY',
type: task_history_1.RECORD_TASK_RUNS,
taskRuns,

@@ -252,0 +253,0 @@ };

@@ -1,13 +0,13 @@

import { TaskRun } from '../../utils/task-history';
export declare const GET_TASK_HISTORY_FOR_HASHES: "GET_TASK_HISTORY_FOR_HASHES";
export type HandleGetTaskHistoryForHashesMessage = {
type: typeof GET_TASK_HISTORY_FOR_HASHES;
import type { TaskRun } from '../../native';
export declare const GET_FLAKY_TASKS: "GET_FLAKY_TASKS";
export type HandleGetFlakyTasks = {
type: typeof GET_FLAKY_TASKS;
hashes: string[];
};
export declare function isHandleGetTaskHistoryForHashesMessage(message: unknown): message is HandleGetTaskHistoryForHashesMessage;
export declare const WRITE_TASK_RUNS_TO_HISTORY: "WRITE_TASK_RUNS_TO_HISTORY";
export type HandleWriteTaskRunsToHistoryMessage = {
type: typeof WRITE_TASK_RUNS_TO_HISTORY;
export declare function isHandleGetFlakyTasksMessage(message: unknown): message is HandleGetFlakyTasks;
export declare const RECORD_TASK_RUNS: "RECORD_TASK_RUNS";
export type HandleRecordTaskRunsMessage = {
type: typeof RECORD_TASK_RUNS;
taskRuns: TaskRun[];
};
export declare function isHandleWriteTaskRunsToHistoryMessage(message: unknown): message is HandleWriteTaskRunsToHistoryMessage;
export declare function isHandleWriteTaskRunsToHistoryMessage(message: unknown): message is HandleRecordTaskRunsMessage;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WRITE_TASK_RUNS_TO_HISTORY = exports.GET_TASK_HISTORY_FOR_HASHES = void 0;
exports.isHandleGetTaskHistoryForHashesMessage = isHandleGetTaskHistoryForHashesMessage;
exports.RECORD_TASK_RUNS = exports.GET_FLAKY_TASKS = void 0;
exports.isHandleGetFlakyTasksMessage = isHandleGetFlakyTasksMessage;
exports.isHandleWriteTaskRunsToHistoryMessage = isHandleWriteTaskRunsToHistoryMessage;
exports.GET_TASK_HISTORY_FOR_HASHES = 'GET_TASK_HISTORY_FOR_HASHES';
function isHandleGetTaskHistoryForHashesMessage(message) {
exports.GET_FLAKY_TASKS = 'GET_FLAKY_TASKS';
function isHandleGetFlakyTasksMessage(message) {
return (typeof message === 'object' &&
message !== null &&
'type' in message &&
message['type'] === exports.GET_TASK_HISTORY_FOR_HASHES);
message['type'] === exports.GET_FLAKY_TASKS);
}
exports.WRITE_TASK_RUNS_TO_HISTORY = 'WRITE_TASK_RUNS_TO_HISTORY';
exports.RECORD_TASK_RUNS = 'RECORD_TASK_RUNS';
function isHandleWriteTaskRunsToHistoryMessage(message) {

@@ -18,3 +18,3 @@ return (typeof message === 'object' &&

'type' in message &&
message['type'] === exports.WRITE_TASK_RUNS_TO_HISTORY);
message['type'] === exports.RECORD_TASK_RUNS);
}

@@ -41,4 +41,3 @@ "use strict";

const task_history_1 = require("../message-types/task-history");
const handle_get_task_history_1 = require("./handle-get-task-history");
const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
const handle_task_history_1 = require("./handle-task-history");
const force_shutdown_1 = require("../message-types/force-shutdown");

@@ -144,7 +143,7 @@ const handle_force_shutdown_1 = require("./handle-force-shutdown");

}
else if ((0, task_history_1.isHandleGetTaskHistoryForHashesMessage)(payload)) {
await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () => (0, handle_get_task_history_1.handleGetTaskHistoryForHashes)(payload.hashes));
else if ((0, task_history_1.isHandleGetFlakyTasksMessage)(payload)) {
await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () => (0, handle_task_history_1.handleGetFlakyTasks)(payload.hashes));
}
else if ((0, task_history_1.isHandleWriteTaskRunsToHistoryMessage)(payload)) {
await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_write_task_runs_to_history_1.handleWriteTaskRunsToHistory)(payload.taskRuns));
await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_task_history_1.handleRecordTaskRuns)(payload.taskRuns));
}

@@ -151,0 +150,0 @@ else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {

@@ -9,4 +9,18 @@ "use strict";

const nx_json_1 = require("../config/nx-json");
const native_1 = require("../native");
const db_connection_1 = require("../utils/db-connection");
let taskDetails;
function getTaskDetails() {
// TODO: Remove when wasm supports sqlite
if (native_1.IS_WASM) {
return null;
}
if (!taskDetails) {
taskDetails = new native_1.TaskDetails((0, db_connection_1.getDbConnection)());
}
return taskDetails;
}
async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson) {
performance.mark('hashMultipleTasks:start');
const taskDetails = getTaskDetails();
const tasks = Object.values(taskGraph.tasks);

@@ -27,3 +41,3 @@ const tasksWithHashers = await Promise.all(tasks.map(async (task) => {

.map((t) => t.task);
const hashes = await hasher.hashTasks(tasksToHash, taskGraph);
const hashes = await hasher.hashTasks(tasksToHash, taskGraph, process.env);
for (let i = 0; i < tasksToHash.length; i++) {

@@ -33,2 +47,11 @@ tasksToHash[i].hash = hashes[i].value;

}
// TODO: Remove if when wasm supports sqlite
if (taskDetails) {
taskDetails.recordTaskDetails(tasksToHash.map((task) => ({
hash: task.hash,
project: task.target.project,
target: task.target.target,
configuration: task.target.configuration,
})));
}
performance.mark('hashMultipleTasks:end');

@@ -39,2 +62,3 @@ performance.measure('hashMultipleTasks', 'hashMultipleTasks:start', 'hashMultipleTasks:end');

performance.mark('hashSingleTask:start');
const taskDetails = getTaskDetails();
const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);

@@ -55,4 +79,15 @@ const projectsConfigurations = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);

task.hashDetails = details;
// TODO: Remove if when wasm supports sqlite
if (taskDetails) {
taskDetails.recordTaskDetails([
{
hash: task.hash,
project: task.target.project,
target: task.target.target,
configuration: task.target.configuration,
},
]);
}
performance.mark('hashSingleTask:end');
performance.measure('hashSingleTask', 'hashSingleTask:start', 'hashSingleTask:end');
}

@@ -21,3 +21,3 @@ "use strict";

];
if (process.env.NX_VERBOSE_LOGGING == 'true') {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
bodyLines.push('', 'Additional error information:', e.message);

@@ -24,0 +24,0 @@ }

@@ -29,2 +29,19 @@ /* auto-generated by NAPI-RS */

export declare class NxCache {
cacheDirectory: string
constructor(workspaceRoot: string, cachePath: string, dbConnection: ExternalObject<Connection>)
get(hash: string): CachedResult | null
put(hash: string, terminalOutput: string, outputs: Array<string>, code: number): void
applyRemoteCacheResults(hash: string, result: CachedResult): void
getTaskOutputsPath(hash: string): string
copyFilesFromCache(cachedResult: CachedResult, outputs: Array<string>): void
removeOldCacheRecords(): void
}
export declare class NxTaskHistory {
constructor(db: ExternalObject<Connection>)
recordTaskRuns(taskRuns: Array<TaskRun>): void
getFlakyTasks(hashes: Array<string>): Array<string>
}
export declare class RustPseudoTerminal {

@@ -40,2 +57,7 @@ constructor()

export declare class TaskDetails {
constructor(db: ExternalObject<Connection>)
recordTaskDetails(tasks: Array<HashedTask>): void
}
export declare class TaskHasher {

@@ -72,2 +94,10 @@ constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)

export interface CachedResult {
code: number
terminalOutput: string
outputsPath: string
}
export declare function connectToNxDb(cacheDir: string, nxVersion: string): ExternalObject<Connection>
export declare function copy(src: string, dest: string): void

@@ -90,6 +120,2 @@

/**
* Expands the given entries into a list of existing directories and files.
* This is used for copying outputs to and from the cache
*/
export declare function expandOutputs(directory: string, entries: Array<string>): Array<string>

@@ -138,2 +164,9 @@

export interface HashedTask {
hash: string
project: string
target: string
configuration?: string
}
export interface HasherOptions {

@@ -211,2 +244,10 @@ selectivelyHashTsConfig: boolean

export interface TaskRun {
hash: string
status: string
code: number
start: number
end: number
}
export interface TaskTarget {

@@ -213,0 +254,0 @@ project: string
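
The new native surface above (connectToNxDb, NxCache, NxTaskHistory, TaskDetails, and the HashedTask and TaskRun interfaces) is what the JavaScript changes in this diff wire together through getDbConnection(). A minimal sketch of that wiring, based only on the declarations above; the module path follows the requires shown elsewhere in this diff, and the concrete values are placeholders:

// Sketch assembled from the declarations above; hashes, paths, and timestamps are placeholders.
import { connectToNxDb, NxTaskHistory, TaskDetails } from '../native';
import type { TaskRun } from '../native';

const db = connectToNxDb('/tmp/nx-cache', '19.7.0-beta.0');

// Hash-time metadata, as recorded by hashTasksThatDoNotDependOnOutputsOfOtherTasks above.
const taskDetails = new TaskDetails(db);
taskDetails.recordTaskDetails([
  { hash: 'abc123', project: 'my-app', target: 'build', configuration: 'production' },
]);

// Run outcomes, as recorded by the task-history life cycle further below.
const taskHistory = new NxTaskHistory(db);
const runs: TaskRun[] = [
  { hash: 'abc123', status: 'success', code: 0, start: Date.now() - 5000, end: Date.now() },
];
taskHistory.recordTaskRuns(runs);
const flakyHashes: string[] = taskHistory.getFlakyTasks(['abc123']);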

@@ -367,6 +367,10 @@ // prettier-ignore

module.exports.ImportResult = nativeBinding.ImportResult
module.exports.NxCache = nativeBinding.NxCache
module.exports.NxTaskHistory = nativeBinding.NxTaskHistory
module.exports.RustPseudoTerminal = nativeBinding.RustPseudoTerminal
module.exports.TaskDetails = nativeBinding.TaskDetails
module.exports.TaskHasher = nativeBinding.TaskHasher
module.exports.Watcher = nativeBinding.Watcher
module.exports.WorkspaceContext = nativeBinding.WorkspaceContext
module.exports.connectToNxDb = nativeBinding.connectToNxDb
module.exports.copy = nativeBinding.copy

@@ -373,0 +377,0 @@ module.exports.EventType = nativeBinding.EventType

@@ -17,3 +17,3 @@ import {

const __sharedMemory = new WebAssembly.Memory({
initial: 16384,
initial: 1024,
maximum: 32768,

@@ -59,40 +59,53 @@ shared: true,

__napiInstance.exports['__napi_register__copy_3']?.()
__napiInstance.exports['__napi_register__hash_array_4']?.()
__napiInstance.exports['__napi_register__hash_file_5']?.()
__napiInstance.exports['__napi_register__ImportResult_struct_6']?.()
__napiInstance.exports['__napi_register__find_imports_7']?.()
__napiInstance.exports['__napi_register__transfer_project_graph_8']?.()
__napiInstance.exports['__napi_register__ExternalNode_struct_9']?.()
__napiInstance.exports['__napi_register__Target_struct_10']?.()
__napiInstance.exports['__napi_register__Project_struct_11']?.()
__napiInstance.exports['__napi_register__ProjectGraph_struct_12']?.()
__napiInstance.exports['__napi_register__HashPlanner_struct_13']?.()
__napiInstance.exports['__napi_register__HashPlanner_impl_17']?.()
__napiInstance.exports['__napi_register__HashDetails_struct_18']?.()
__napiInstance.exports['__napi_register__HasherOptions_struct_19']?.()
__napiInstance.exports['__napi_register__TaskHasher_struct_20']?.()
__napiInstance.exports['__napi_register__TaskHasher_impl_23']?.()
__napiInstance.exports['__napi_register__Task_struct_24']?.()
__napiInstance.exports['__napi_register__TaskTarget_struct_25']?.()
__napiInstance.exports['__napi_register__TaskGraph_struct_26']?.()
__napiInstance.exports['__napi_register__FileData_struct_27']?.()
__napiInstance.exports['__napi_register__InputsInput_struct_28']?.()
__napiInstance.exports['__napi_register__FileSetInput_struct_29']?.()
__napiInstance.exports['__napi_register__RuntimeInput_struct_30']?.()
__napiInstance.exports['__napi_register__EnvironmentInput_struct_31']?.()
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_32']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_33']?.()
__napiInstance.exports['__napi_register__NxJson_struct_34']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_35']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_44']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_45']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_46']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_47']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_48']?.()
__napiInstance.exports['__napi_register__FileMap_struct_49']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_50']?.()
__napiInstance.exports['__napi_register__IS_WASM_51']?.()
__napiInstance.exports['__napi_register__CachedResult_struct_4']?.()
__napiInstance.exports['__napi_register__NxCache_struct_5']?.()
__napiInstance.exports['__napi_register__NxCache_impl_13']?.()
__napiInstance.exports['__napi_register__hash_array_14']?.()
__napiInstance.exports['__napi_register__hash_file_15']?.()
__napiInstance.exports['__napi_register__IS_WASM_16']?.()
__napiInstance.exports['__napi_register__get_binary_target_17']?.()
__napiInstance.exports['__napi_register__ImportResult_struct_18']?.()
__napiInstance.exports['__napi_register__find_imports_19']?.()
__napiInstance.exports['__napi_register__transfer_project_graph_20']?.()
__napiInstance.exports['__napi_register__ExternalNode_struct_21']?.()
__napiInstance.exports['__napi_register__Target_struct_22']?.()
__napiInstance.exports['__napi_register__Project_struct_23']?.()
__napiInstance.exports['__napi_register__ProjectGraph_struct_24']?.()
__napiInstance.exports['__napi_register__HashedTask_struct_25']?.()
__napiInstance.exports['__napi_register__TaskDetails_struct_26']?.()
__napiInstance.exports['__napi_register__TaskDetails_impl_29']?.()
__napiInstance.exports['__napi_register__HashPlanner_struct_30']?.()
__napiInstance.exports['__napi_register__HashPlanner_impl_34']?.()
__napiInstance.exports['__napi_register__HashDetails_struct_35']?.()
__napiInstance.exports['__napi_register__HasherOptions_struct_36']?.()
__napiInstance.exports['__napi_register__TaskHasher_struct_37']?.()
__napiInstance.exports['__napi_register__TaskHasher_impl_40']?.()
__napiInstance.exports['__napi_register__TaskRun_struct_41']?.()
__napiInstance.exports['__napi_register__NxTaskHistory_struct_42']?.()
__napiInstance.exports['__napi_register__NxTaskHistory_impl_46']?.()
__napiInstance.exports['__napi_register__Task_struct_47']?.()
__napiInstance.exports['__napi_register__TaskTarget_struct_48']?.()
__napiInstance.exports['__napi_register__TaskGraph_struct_49']?.()
__napiInstance.exports['__napi_register__FileData_struct_50']?.()
__napiInstance.exports['__napi_register__InputsInput_struct_51']?.()
__napiInstance.exports['__napi_register__FileSetInput_struct_52']?.()
__napiInstance.exports['__napi_register__RuntimeInput_struct_53']?.()
__napiInstance.exports['__napi_register__EnvironmentInput_struct_54']?.()
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_55']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_56']?.()
__napiInstance.exports['__napi_register__NxJson_struct_57']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_58']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_67']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_68']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_69']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_70']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_71']?.()
__napiInstance.exports['__napi_register__FileMap_struct_72']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_73']?.()
}
export const HashPlanner = __napiModule.exports.HashPlanner
export const ImportResult = __napiModule.exports.ImportResult
export const NxCache = __napiModule.exports.NxCache
export const NxTaskHistory = __napiModule.exports.NxTaskHistory
export const TaskDetails = __napiModule.exports.TaskDetails
export const TaskHasher = __napiModule.exports.TaskHasher

@@ -103,2 +116,3 @@ export const WorkspaceContext = __napiModule.exports.WorkspaceContext

export const findImports = __napiModule.exports.findImports
export const getBinaryTarget = __napiModule.exports.getBinaryTarget
export const getFilesForOutputs = __napiModule.exports.getFilesForOutputs

@@ -105,0 +119,0 @@ export const hashArray = __napiModule.exports.hashArray

import type { CloudTaskRunnerOptions } from './nx-cloud-tasks-runner-shell';
import { TasksRunner } from '../tasks-runner/tasks-runner';
import { RemoteCacheV2 } from '../tasks-runner/default-tasks-runner';
export declare class NxCloudEnterpriseOutdatedError extends Error {

@@ -13,2 +14,3 @@ constructor(url: string);

nxCloudTasksRunner: TasksRunner<CloudTaskRunnerOptions>;
remoteCache: RemoteCacheV2;
}

@@ -15,0 +17,0 @@ export declare function verifyOrUpdateNxCloudClient(options: CloudTaskRunnerOptions): Promise<{

@@ -98,3 +98,3 @@ "use strict";

catch (e) {
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
devkit_exports_1.logger.warn(`Failed to access system features. GitHub integration assumed to be disabled.

@@ -101,0 +101,0 @@ ${e}`);

@@ -21,2 +21,3 @@ import { ProjectFileMap, ProjectGraph, ProjectGraphProjectNode } from '../../../config/project-graph';

helperDependencies?: string[];
skipPackageManager?: boolean;
}, fileMap?: ProjectFileMap): PackageJson;

@@ -23,0 +24,0 @@ export declare function findProjectsNpmDependencies(projectNode: ProjectGraphProjectNode, graph: ProjectGraph, target: string, rootPackageJson: PackageJson, options: {

@@ -111,3 +111,3 @@ "use strict";

packageJson.peerDependenciesMeta &&= (0, object_sort_1.sortObjectByKeys)(packageJson.peerDependenciesMeta);
if (rootPackageJson.packageManager) {
if (rootPackageJson.packageManager && !options.skipPackageManager) {
if (packageJson.packageManager &&

@@ -114,0 +114,0 @@ packageJson.packageManager !== rootPackageJson.packageManager) {

@@ -1,2 +0,2 @@

import { DefaultTasksRunnerOptions } from './default-tasks-runner';
import { DefaultTasksRunnerOptions, RemoteCache } from './default-tasks-runner';
import { Task } from '../config/task-graph';

@@ -13,2 +13,22 @@ export type CachedResult = {

};
export declare class DbCache {
private readonly options;
private cache;
private remoteCache;
private remoteCachePromise;
setup(): Promise<void>;
constructor(options: {
nxCloudRemoteCache: RemoteCache;
});
get(task: Task): Promise<CachedResult | null>;
put(task: Task, terminalOutput: string | null, outputs: string[], code: number): Promise<void>;
copyFilesFromCache(_: string, cachedResult: CachedResult, outputs: string[]): Promise<void>;
removeOldCacheRecords(): void;
temporaryOutputPath(task: Task): string;
private getRemoteCache;
private _getRemoteCache;
}
/**
* @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
*/
export declare class Cache {

@@ -36,3 +56,2 @@ private readonly options;

private createTerminalOutputsDir;
private tryAndRetry;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = void 0;
exports.Cache = exports.DbCache = void 0;
const workspace_root_1 = require("../utils/workspace-root");

@@ -8,5 +8,95 @@ const fs_extra_1 = require("fs-extra");

const perf_hooks_1 = require("perf_hooks");
const default_tasks_runner_1 = require("./default-tasks-runner");
const child_process_1 = require("child_process");
const cache_directory_1 = require("../utils/cache-directory");
const node_machine_id_1 = require("node-machine-id");
const native_1 = require("../native");
const db_connection_1 = require("../utils/db-connection");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const nx_json_1 = require("../config/nx-json");
const update_manager_1 = require("../nx-cloud/update-manager");
const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
class DbCache {
async setup() {
this.remoteCache = await this.getRemoteCache();
}
constructor(options) {
this.options = options;
this.cache = new native_1.NxCache(workspace_root_1.workspaceRoot, cache_directory_1.cacheDir, (0, db_connection_1.getDbConnection)());
}
async get(task) {
const res = this.cache.get(task.hash);
if (res) {
return {
...res,
remote: false,
};
}
await this.setup();
if (this.remoteCache) {
// didn't find it locally but we have a remote cache
// attempt remote cache
const res = await this.remoteCache.retrieve(task.hash, this.cache.cacheDirectory);
if (res) {
this.cache.applyRemoteCacheResults(task.hash, res);
return {
...res,
remote: true,
};
}
else {
return null;
}
}
else {
return null;
}
}
async put(task, terminalOutput, outputs, code) {
return tryAndRetry(async () => {
this.cache.put(task.hash, terminalOutput, outputs, code);
await this.setup();
if (this.remoteCache) {
await this.remoteCache.store(task.hash, this.cache.cacheDirectory, terminalOutput, code);
}
});
}
copyFilesFromCache(_, cachedResult, outputs) {
return tryAndRetry(async () => this.cache.copyFilesFromCache(cachedResult, outputs));
}
removeOldCacheRecords() {
return this.cache.removeOldCacheRecords();
}
temporaryOutputPath(task) {
return this.cache.getTaskOutputsPath(task.hash);
}
async getRemoteCache() {
if (this.remoteCachePromise) {
return this.remoteCachePromise;
}
this.remoteCachePromise = this._getRemoteCache();
return this.remoteCachePromise;
}
async _getRemoteCache() {
const nxJson = (0, nx_json_1.readNxJson)();
if ((0, nx_cloud_utils_1.isNxCloudUsed)(nxJson)) {
const options = (0, get_cloud_options_1.getCloudOptions)();
const { nxCloudClient } = await (0, update_manager_1.verifyOrUpdateNxCloudClient)(options);
if (nxCloudClient.remoteCache) {
return nxCloudClient.remoteCache;
}
else {
// old nx cloud instance
return default_tasks_runner_1.RemoteCacheV2.fromCacheV1(this.options.nxCloudRemoteCache);
}
}
else {
return null;
}
}
}
exports.DbCache = DbCache;
/**
* @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
*/
class Cache {

@@ -48,3 +138,3 @@ constructor(options) {

catch (e) {
if (process.env.NX_VERBOSE_LOGGING == 'true') {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.log(`Unable to get machineId. Error: ${e.message}`);

@@ -76,3 +166,6 @@ }

async put(task, terminalOutput, outputs, code) {
return this.tryAndRetry(async () => {
return tryAndRetry(async () => {
/**
* This is the directory with the cached artifacts
*/
const td = (0, path_1.join)(this.cachePath, task.hash);

@@ -111,3 +204,3 @@ const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);

async copyFilesFromCache(hash, cachedResult, outputs) {
return this.tryAndRetry(async () => {
return tryAndRetry(async () => {
const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);

@@ -220,25 +313,24 @@ await Promise.all(expandedOutputs.map(async (f) => {

}
tryAndRetry(fn) {
let attempts = 0;
const baseTimeout = 5;
// Generate a random number between 2 and 4 to raise to the power of attempts
const baseExponent = Math.random() * 2 + 2;
const _try = async () => {
try {
attempts++;
return await fn();
}
exports.Cache = Cache;
function tryAndRetry(fn) {
let attempts = 0;
// Generate a random number between 2 and 4 to raise to the power of attempts
const baseExponent = Math.random() * 2 + 2;
const _try = async () => {
try {
attempts++;
return await fn();
}
catch (e) {
// Max time is 5 * 4^3 = 20480ms
if (attempts === 6) {
// After enough attempts, throw the error
throw e;
}
catch (e) {
// Max time is 5 * 4^3 = 20480ms
if (attempts === 6) {
// After enough attempts, throw the error
throw e;
}
await new Promise((res) => setTimeout(res, baseExponent ** attempts));
return await _try();
}
};
return _try();
}
await new Promise((res) => setTimeout(res, baseExponent ** attempts));
return await _try();
}
};
return _try();
}
exports.Cache = Cache;
import { TasksRunner } from './tasks-runner';
import { LifeCycle } from './life-cycle';
import { CachedResult } from '../native';
export interface RemoteCache {

@@ -7,2 +8,7 @@ retrieve: (hash: string, cacheDirectory: string) => Promise<boolean>;

}
export declare abstract class RemoteCacheV2 {
static fromCacheV1(cache: RemoteCache): RemoteCacheV2;
abstract retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null>;
abstract store(hash: string, cacheDirectory: string, terminalOutput: string, code: number): Promise<boolean>;
}
export interface DefaultTasksRunnerOptions {

@@ -9,0 +15,0 @@ parallel?: number;
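
The abstract RemoteCacheV2 above is the new remote-cache contract: retrieve resolves to a CachedResult (or null on a miss) instead of the boolean of the v1 RemoteCache, and fromCacheV1 (implemented in the JavaScript below) bridges older caches onto it. A hedged sketch of a custom implementation written against the declared shape; the class name and in-memory storage are invented for illustration:

// Illustration only; written against the shape declared above, not taken from the nx source.
import { RemoteCacheV2 } from './default-tasks-runner';
import type { CachedResult } from '../native';

class InMemoryRemoteCache extends RemoteCacheV2 {
  private entries = new Map<string, CachedResult>();

  async retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null> {
    // Return the cached result for this hash, or null on a miss.
    return this.entries.get(hash) ?? null;
  }

  async store(hash: string, cacheDirectory: string, terminalOutput: string, code: number): Promise<boolean> {
    // A real implementation would also upload the files under cacheDirectory/hash.
    this.entries.set(hash, { code, terminalOutput, outputsPath: cacheDirectory });
    return true;
  }
}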

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.defaultTasksRunner = void 0;
exports.defaultTasksRunner = exports.RemoteCacheV2 = void 0;
const task_orchestrator_1 = require("./task-orchestrator");
const promises_1 = require("fs/promises");
const path_1 = require("path");
class RemoteCacheV2 {
static fromCacheV1(cache) {
return {
retrieve: async (hash, cacheDirectory) => {
const res = cache.retrieve(hash, cacheDirectory);
const [terminalOutput, code] = await Promise.all([
(0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'terminalOutputs'), 'utf-8'),
(0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'code'), 'utf-8').then((s) => +s),
]);
if (res) {
return {
outputsPath: cacheDirectory,
terminalOutput,
code,
};
}
else {
return null;
}
},
store: async (hash, cacheDirectory, __, code) => {
await (0, promises_1.writeFile)((0, path_1.join)(cacheDirectory, hash, 'code'), code.toString());
return cache.store(hash, cacheDirectory);
},
};
}
}
exports.RemoteCacheV2 = RemoteCacheV2;
const defaultTasksRunner = async (tasks, options, context) => {

@@ -6,0 +36,0 @@ if (options['parallel'] === 'false' ||

@@ -6,2 +6,3 @@ import { Task } from '../../config/task-graph';

private taskRuns;
private taskHistory;
startTasks(tasks: Task[]): void;

@@ -8,0 +9,0 @@ endTasks(taskResults: TaskResult[]): Promise<void>;

@@ -10,3 +10,4 @@ "use strict";

this.startTimings = {};
this.taskRuns = [];
this.taskRuns = new Map();
this.taskHistory = new task_history_1.TaskHistory();
}

@@ -19,25 +20,19 @@ startTasks(tasks) {

async endTasks(taskResults) {
const taskRuns = taskResults.map((taskResult) => ({
project: taskResult.task.target.project,
target: taskResult.task.target.target,
configuration: taskResult.task.target.configuration,
taskResults
.map((taskResult) => ({
hash: taskResult.task.hash,
code: taskResult.code.toString(),
target: taskResult.task.target,
code: taskResult.code,
status: taskResult.status,
start: (taskResult.task.startTime ?? this.startTimings[taskResult.task.id]).toString(),
end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
}));
this.taskRuns.push(...taskRuns);
start: taskResult.task.startTime ?? this.startTimings[taskResult.task.id],
end: taskResult.task.endTime ?? Date.now(),
}))
.forEach((taskRun) => {
this.taskRuns.set(taskRun.hash, taskRun);
});
}
async endCommand() {
await (0, task_history_1.writeTaskRunsToHistory)(this.taskRuns);
const history = await (0, task_history_1.getHistoryForHashes)(this.taskRuns.map((t) => t.hash));
const flakyTasks = [];
// check if any hash has different exit codes => flaky
for (let hash in history) {
if (history[hash].length > 1 &&
history[hash].some((run) => run.code !== history[hash][0].code)) {
flakyTasks.push((0, serialize_target_1.serializeTarget)(history[hash][0].project, history[hash][0].target, history[hash][0].configuration));
}
}
const entries = Array.from(this.taskRuns);
await this.taskHistory.recordTaskRuns(entries.map(([_, v]) => v));
const flakyTasks = await this.taskHistory.getFlakyTasks(entries.map(([hash]) => hash));
if (flakyTasks.length > 0) {

@@ -48,3 +43,6 @@ output_1.output.warn({

,
...flakyTasks.map((t) => ` ${t}`),
...flakyTasks.map((hash) => {
const taskRun = this.taskRuns.get(hash);
return ` ${(0, serialize_target_1.serializeTarget)(taskRun.target.project, taskRun.target.target, taskRun.target.configuration)}`;
}),
'',

@@ -51,0 +49,0 @@ `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,

@@ -30,2 +30,3 @@ "use strict";

const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
const task_history_life_cycle_old_1 = require("./life-cycles/task-history-life-cycle-old");
const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");

@@ -36,2 +37,3 @@ const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");

const chalk = require("chalk");
const native_1 = require("../native");
async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {

@@ -338,3 +340,3 @@ const { runnerOptions } = getRunner(nxArgs, nxJson);

if (!(0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())) {
lifeCycles.push(new task_history_life_cycle_1.TaskHistoryLifeCycle());
lifeCycles.push(!native_1.IS_WASM ? new task_history_life_cycle_1.TaskHistoryLifeCycle() : new task_history_life_cycle_old_1.LegacyTaskHistoryLifeCycle());
}

@@ -341,0 +343,0 @@ return lifeCycles;

@@ -18,2 +18,4 @@ "use strict";

const params_1 = require("../utils/params");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const nx_json_1 = require("../config/nx-json");
class TaskOrchestrator {

@@ -29,3 +31,10 @@ // endregion internal state

this.daemon = daemon;
this.cache = new cache_1.Cache(this.options);
this.cache = process.env.NX_DB_CACHE === 'true'
? new cache_1.DbCache({
// Remove this in Nx 21
nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
? this.options.remoteCache
: null,
})
: new cache_1.Cache(this.options);
this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);

@@ -32,0 +41,0 @@ this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);

@@ -7,1 +7,2 @@ /**

export declare const workspaceDataDirectory: string;
export declare function workspaceDataDirectoryForWorkspace(workspaceRoot: string): string;

@@ -5,2 +5,3 @@ "use strict";

exports.cacheDirectoryForWorkspace = cacheDirectoryForWorkspace;
exports.workspaceDataDirectoryForWorkspace = workspaceDataDirectoryForWorkspace;
const fs_1 = require("fs");

@@ -66,4 +67,7 @@ const path_1 = require("path");

}
exports.workspaceDataDirectory = absolutePath(workspace_root_1.workspaceRoot, process.env.NX_WORKSPACE_DATA_DIRECTORY ??
process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
defaultWorkspaceDataDirectory(workspace_root_1.workspaceRoot));
exports.workspaceDataDirectory = workspaceDataDirectoryForWorkspace(workspace_root_1.workspaceRoot);
function workspaceDataDirectoryForWorkspace(workspaceRoot) {
return absolutePath(workspaceRoot, process.env.NX_WORKSPACE_DATA_DIRECTORY ??
process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
defaultWorkspaceDataDirectory(workspaceRoot));
}

@@ -86,3 +86,3 @@ "use strict";

async move(path, destination) {
return this.execAsync(`git mv ${path} ${destination}`);
return this.execAsync(`git mv "${path}" "${destination}"`);
}

@@ -89,0 +89,0 @@ async push(ref, remoteName) {

@@ -39,3 +39,3 @@ "use strict";

verbose: (...s) => {
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.log(...s);

@@ -42,0 +42,0 @@ }

@@ -1,8 +0,6 @@

declare const taskRunKeys: readonly ["project", "target", "configuration", "hash", "code", "status", "start", "end"];
export type TaskRun = Record<(typeof taskRunKeys)[number], string>;
export declare function getHistoryForHashes(hashes: string[]): Promise<{
[hash: string]: TaskRun[];
}>;
export declare function writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
export declare const taskHistoryFile: string;
export {};
import { NxTaskHistory, TaskRun } from '../native';
export declare class TaskHistory {
taskHistory: NxTaskHistory;
getFlakyTasks(hashes: string[]): Promise<string[]>;
recordTaskRuns(taskRuns: TaskRun[]): Promise<void>;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.taskHistoryFile = void 0;
exports.getHistoryForHashes = getHistoryForHashes;
exports.writeTaskRunsToHistory = writeTaskRunsToHistory;
const fs_1 = require("fs");
const path_1 = require("path");
exports.TaskHistory = void 0;
const client_1 = require("../daemon/client/client");
const is_on_daemon_1 = require("../daemon/is-on-daemon");
const cache_directory_1 = require("./cache-directory");
const taskRunKeys = [
'project',
'target',
'configuration',
'hash',
'code',
'status',
'start',
'end',
];
let taskHistory = undefined;
let taskHashToIndicesMap = new Map();
async function getHistoryForHashes(hashes) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
if (taskHistory === undefined) {
loadTaskHistoryFromDisk();
const native_1 = require("../native");
const db_connection_1 = require("./db-connection");
class TaskHistory {
constructor() {
this.taskHistory = new native_1.NxTaskHistory((0, db_connection_1.getDbConnection)());
}
async getFlakyTasks(hashes) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
return this.taskHistory.getFlakyTasks(hashes);
}
const result = {};
for (let hash of hashes) {
const indices = taskHashToIndicesMap.get(hash);
if (!indices) {
result[hash] = [];
}
else {
result[hash] = indices.map((index) => taskHistory[index]);
}
}
return result;
return await client_1.daemonClient.getFlakyTasks(hashes);
}
return await client_1.daemonClient.getTaskHistoryForHashes(hashes);
}
async function writeTaskRunsToHistory(taskRuns) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
if (taskHistory === undefined) {
loadTaskHistoryFromDisk();
async recordTaskRuns(taskRuns) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
return this.taskHistory.recordTaskRuns(taskRuns);
}
const serializedLines = [];
for (let taskRun of taskRuns) {
const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
serializedLines.push(serializedLine);
recordTaskRunInMemory(taskRun);
}
if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
(0, fs_1.writeFileSync)(exports.taskHistoryFile, `${taskRunKeys.join(',')}\n`);
}
(0, fs_1.appendFileSync)(exports.taskHistoryFile, serializedLines.join('\n') + '\n');
return client_1.daemonClient.recordTaskRuns(taskRuns);
}
else {
await client_1.daemonClient.writeTaskRunsToHistory(taskRuns);
}
}
exports.taskHistoryFile = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'task-history.csv');
function loadTaskHistoryFromDisk() {
taskHashToIndicesMap.clear();
taskHistory = [];
if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
return;
}
const fileContent = (0, fs_1.readFileSync)(exports.taskHistoryFile, 'utf8');
if (!fileContent) {
return;
}
const lines = fileContent.split('\n');
// if there are no lines or just the header, return
if (lines.length <= 1) {
return;
}
const contentLines = lines.slice(1).filter((l) => l.trim() !== '');
// read the values from csv format where each header is a key and the value is the value
for (let line of contentLines) {
const values = line.trim().split(',');
const run = {};
taskRunKeys.forEach((header, index) => {
run[header] = values[index];
});
recordTaskRunInMemory(run);
}
}
function recordTaskRunInMemory(taskRun) {
const index = taskHistory.push(taskRun) - 1;
if (taskHashToIndicesMap.has(taskRun.hash)) {
taskHashToIndicesMap.get(taskRun.hash).push(index);
}
else {
taskHashToIndicesMap.set(taskRun.hash, [index]);
}
}
exports.TaskHistory = TaskHistory;

@@ -22,3 +22,3 @@ "use strict";

perf_hooks_1.performance.mark('workspace-context');
workspaceContext = new WorkspaceContext(workspaceRoot, (0, cache_directory_1.cacheDirectoryForWorkspace)(workspaceRoot));
workspaceContext = new WorkspaceContext(workspaceRoot, (0, cache_directory_1.workspaceDataDirectoryForWorkspace)(workspaceRoot));
perf_hooks_1.performance.mark('workspace-context:end');

@@ -25,0 +25,0 @@ perf_hooks_1.performance.measure('workspace context init', 'workspace-context', 'workspace-context:end');

Diffs of three additional files are not shown: one is too large to display and two are in formats the diff viewer does not support.
