nx - npm package version comparison

Comparing version 0.0.0-pr-27552-8322864 to 0.0.0-pr-27640-2822b72


.eslintrc.json

@@ -103,3 +103,14 @@ {

"prettier", // This is coming from @storybook/builder-manager since it uses the browser polyfill
"util" // This is coming from @storybook/builder-manager since it uses the browser polyfill
"util", // This is coming from @storybook/builder-manager since it uses the browser polyfill
// The native modules are optional and only one of them will ever be installed on a given machine
"@nx/nx-darwin-x64",
"@nx/nx-darwin-arm64",
"@nx/nx-linux-x64-gnu",
"@nx/nx-linux-x64-musl",
"@nx/nx-win32-x64-msvc",
"@nx/nx-linux-arm64-gnu",
"@nx/nx-linux-arm64-musl",
"@nx/nx-linux-arm-gnueabihf",
"@nx/nx-win32-arm64-msvc",
"@nx/nx-freebsd-x64"
]

@@ -106,0 +117,0 @@ }
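These packages mirror the optionalDependencies listed in package.json below: only the binary matching the current OS and architecture is installed, and Nx picks it up at runtime. A minimal sketch of such a platform lookup, illustrative only; the real loader also distinguishes glibc from musl and handles missing binaries:

// Illustrative platform-to-package mapping using the package names from the list above.
function nativePackageName(): string {
  const { platform, arch } = process;
  if (platform === 'darwin') return arch === 'arm64' ? '@nx/nx-darwin-arm64' : '@nx/nx-darwin-x64';
  if (platform === 'win32') return arch === 'arm64' ? '@nx/nx-win32-arm64-msvc' : '@nx/nx-win32-x64-msvc';
  if (platform === 'freebsd') return '@nx/nx-freebsd-x64';
  const abi = 'gnu'; // musl detection omitted in this sketch
  if (arch === 'arm64') return `@nx/nx-linux-arm64-${abi}`;
  if (arch === 'arm') return '@nx/nx-linux-arm-gnueabihf';
  return `@nx/nx-linux-x64-${abi}`;
}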


package.json
{
"name": "nx",
"version": "0.0.0-pr-27552-8322864",
"version": "0.0.0-pr-27640-2822b72",
"private": false,

@@ -74,3 +74,3 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"ora": "5.3.0",
"@nrwl/tao": "0.0.0-pr-27552-8322864"
"@nrwl/tao": "0.0.0-pr-27640-2822b72"
},

@@ -90,12 +90,12 @@ "peerDependencies": {

"optionalDependencies": {
"@nx/nx-darwin-x64": "0.0.0-pr-27552-8322864",
"@nx/nx-darwin-arm64": "0.0.0-pr-27552-8322864",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-27552-8322864",
"@nx/nx-linux-x64-musl": "0.0.0-pr-27552-8322864",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-27552-8322864",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-27552-8322864",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-27552-8322864",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-27552-8322864",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-27552-8322864",
"@nx/nx-freebsd-x64": "0.0.0-pr-27552-8322864"
"@nx/nx-darwin-x64": "0.0.0-pr-27640-2822b72",
"@nx/nx-darwin-arm64": "0.0.0-pr-27640-2822b72",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-27640-2822b72",
"@nx/nx-linux-x64-musl": "0.0.0-pr-27640-2822b72",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-27640-2822b72",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-27640-2822b72",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-27640-2822b72",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-27640-2822b72",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-27640-2822b72",
"@nx/nx-freebsd-x64": "0.0.0-pr-27640-2822b72"
},

@@ -102,0 +102,0 @@ "nx-migrations": {

@@ -21,7 +21,3 @@ "use strict";

function addHandler(options) {
if (options.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const isVerbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(isVerbose, async () => {
return (0, params_1.handleErrors)(options.verbose, async () => {
output_1.output.addNewline();

@@ -28,0 +24,0 @@ const [pkgName, version] = parsePackageSpecifier(options.packageSpecifier);

@@ -8,3 +8,3 @@ "use strict";

describe: 'Install a plugin and initialize it.',
builder: (yargs) => yargs
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs)
.parserConfiguration({

@@ -22,6 +22,2 @@ 'strip-dashed': true,

})
.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
})
.example('$0 add @nx/react', 'Install the latest version of the `@nx/react` package and run its `@nx/react:init` generator')

@@ -28,0 +24,0 @@ .example('$0 add non-core-nx-plugin', 'Install the latest version of the `non-core-nx-plugin` package and run its `non-core-nx-plugin:init` generator if available')

@@ -30,5 +30,2 @@ "use strict";

}, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
await (0, connect_to_nx_cloud_1.connectToNxCloudIfExplicitlyAsked)(nxArgs);

@@ -35,0 +32,0 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)({ exitOnError: true });

@@ -63,5 +63,7 @@ "use strict";

if ((0, nx_cloud_utils_1.isNxCloudUsed)(nxJson)) {
const token = process.env.NX_CLOUD_ACCESS_TOKEN || nxJson.nxCloudAccessToken;
const token = process.env.NX_CLOUD_ACCESS_TOKEN ||
nxJson.nxCloudAccessToken ||
nxJson.nxCloudId;
if (!token) {
throw new Error(`Unable to authenticate. Either define accessToken in nx.json or set the NX_CLOUD_ACCESS_TOKEN env variable.`);
throw new Error(`Unable to authenticate. If you are connecting to Nx Cloud locally, set Nx Cloud ID in nx.json. If you are connecting in a CI context, either define accessToken in nx.json or set the NX_CLOUD_ACCESS_TOKEN env variable.`);
}

@@ -72,6 +74,5 @@ const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token);

bodyLines: [
'If you have not done so already, connect your workspace to your Nx Cloud account:',
`- Connect with Nx Cloud at:
${connectCloudUrl}`,
'If you have not done so already, connect your workspace to your Nx Cloud account with the following URL:',
'',
`${connectCloudUrl}`,
],

@@ -78,0 +79,0 @@ });

@@ -23,5 +23,2 @@ "use strict";

const { nxArgs, overrides } = (0, command_line_utils_1.splitArgsIntoNxArgsAndOverrides)(args, 'run-many', { printWarnings: args.graph !== 'stdout' }, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const scriptArgV = readScriptArgV(overrides);

@@ -28,0 +25,0 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)({ exitOnError: true });

@@ -5,2 +5,3 @@ "use strict";

const path_1 = require("../../utils/path");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsGenerateCommand = {

@@ -19,3 +20,3 @@ command: 'generate <generator> [_..]',

const generatorWillShowHelp = process.argv[3] && !process.argv[3].startsWith('-');
const res = yargs
const res = (0, shared_options_1.withVerbose)(yargs)
.positional('generator', {

@@ -37,6 +38,2 @@ describe: 'Name of the generator (e.g., @nx/js:library, library)',

})
.option('verbose', {
describe: 'Prints additional information about the commands (e.g., stack traces)',
type: 'boolean',
})
.option('quiet', {

@@ -43,0 +40,0 @@ describe: 'Hides logs from tree operations (e.g. `CREATE package.json`)',

@@ -209,7 +209,3 @@ "use strict";

async function generate(cwd, args) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const verbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(verbose, async () => {
return (0, params_1.handleErrors)(args.verbose, async () => {
const nxJsonConfiguration = (0, configuration_1.readNxJson)();

@@ -233,5 +229,5 @@ const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();

}
const combinedOpts = await (0, params_1.combineOptionsForGenerator)(opts.generatorOptions, opts.collectionName, normalizedGeneratorName, projectsConfigurations, nxJsonConfiguration, schema, opts.interactive, (0, calculate_default_project_name_1.calculateDefaultProjectName)(cwd, workspace_root_1.workspaceRoot, projectsConfigurations, nxJsonConfiguration), (0, path_1.relative)(workspace_root_1.workspaceRoot, cwd), verbose);
const combinedOpts = await (0, params_1.combineOptionsForGenerator)(opts.generatorOptions, opts.collectionName, normalizedGeneratorName, projectsConfigurations, nxJsonConfiguration, schema, opts.interactive, (0, calculate_default_project_name_1.calculateDefaultProjectName)(cwd, workspace_root_1.workspaceRoot, projectsConfigurations, nxJsonConfiguration), (0, path_1.relative)(workspace_root_1.workspaceRoot, cwd), args.verbose);
if ((0, generator_utils_1.getGeneratorInformation)(opts.collectionName, normalizedGeneratorName, workspace_root_1.workspaceRoot, projectsConfigurations.projects).isNxGenerator) {
const host = new tree_1.FsTree(workspace_root_1.workspaceRoot, verbose, `generating (${opts.collectionName}:${normalizedGeneratorName})`);
const host = new tree_1.FsTree(workspace_root_1.workspaceRoot, args.verbose, `generating (${opts.collectionName}:${normalizedGeneratorName})`);
const implementation = implementationFactory();

@@ -264,5 +260,5 @@ // @todo(v17): Remove this, isStandalonePreset property is defunct.

generatorOptions: combinedOpts,
}, projectsConfigurations.projects, verbose);
}, projectsConfigurations.projects, args.verbose);
}
});
}

@@ -1,2 +0,2 @@

import { ProjectFileMap, ProjectGraphDependency, ProjectGraphProjectNode } from '../../config/project-graph';
import { ProjectFileMap, ProjectGraph, ProjectGraphDependency, ProjectGraphProjectNode } from '../../config/project-graph';
import { TaskGraph } from '../../config/task-graph';

@@ -51,1 +51,18 @@ export interface GraphError {

}, affectedProjects: string[]): Promise<void>;
/**
* The data type that `nx graph --file graph.json` or `nx build --graph graph.json` contains
*/
export interface GraphJson {
/**
* A graph of tasks populated with `nx build --graph`
*/
tasks?: TaskGraph;
/**
* The plans for hashing a task in the task graph
*/
taskPlans?: Record<string, string[]>;
/**
* The project graph
*/
graph: ProjectGraph;
}
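Since GraphJson is now re-exported from the package root (see the index.d.ts change further down), the output file can be typed directly. A small sketch of reading a graph file, assuming it was produced with `nx graph --file graph.json`:

import { readFileSync } from 'fs';
import type { GraphJson } from 'nx'; // re-exported from the package root per the index change below

const graphJson = JSON.parse(readFileSync('graph.json', 'utf-8')) as GraphJson;
console.log(`${Object.keys(graphJson.graph.nodes).length} projects in the graph`);
if (graphJson.tasks) {
  console.log(`${Object.keys(graphJson.tasks.tasks).length} tasks planned`);
}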

@@ -33,3 +33,3 @@ "use strict";

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
return (await Promise.resolve().then(() => require('./import'))).importHandler(args);

@@ -36,0 +36,0 @@ });

@@ -11,2 +11,3 @@ "use strict";

const workspace_root_1 = require("../../utils/workspace-root");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsMigrateCommand = {

@@ -31,3 +32,3 @@ command: 'migrate [packageAndVersion]',

const defaultCommitPrefix = 'chore: [nx migration] ';
return yargs
return (0, shared_options_1.withVerbose)(yargs)
.positional('packageAndVersion', {

@@ -151,3 +152,3 @@ describe: `The target package and version (e.g, @nx/workspace@16.0.0)`,

console.error(`Failed to install the ${version} version of the migration script. Using the current version.`);
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.error(e);

@@ -154,0 +155,0 @@ }

@@ -1043,5 +1043,2 @@ "use strict";

async function migrate(root, args, rawArgs) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
await client_1.daemonClient.stop();

@@ -1086,35 +1083,13 @@ return (0, params_1.handleErrors)(process.env.NX_VERBOSE_LOGGING === 'true', async () => {

}
// TODO (v17): This should just become something like:
// ```
// return !collection.generators[name] && collection.schematics[name]
// ```
// TODO (v21): Remove CLI determination of Angular Migration
function isAngularMigration(collection, collectionPath, name) {
const entry = collection.generators?.[name] || collection.schematics?.[name];
// In the future we will determine this based on the location of the entry in the collection.
// If the entry is under `schematics`, it will be assumed to be an angular cli migration.
// If the entry is under `generators`, it will be assumed to be an nx migration.
// For now, we will continue to obey the cli property, if it exists.
// If it doesn't exist, we will check if the implementation references @angular/devkit.
const shouldBeNx = !!collection.generators?.[name];
const shouldBeNg = !!collection.schematics?.[name];
let useAngularDevkitToRunMigration = false;
const { path: implementationPath } = getImplementationPath(collection, collectionPath, name);
const implStringContents = (0, fs_1.readFileSync)(implementationPath, 'utf-8');
// TODO (v17): Remove this check and the cli property access - it is only here for backwards compatibility.
if (['@angular/material', '@angular/cdk'].includes(collection.name) ||
[
"import('@angular-devkit",
'import("@angular-devkit',
"require('@angular-devkit",
'require("@angular-devkit',
"from '@angular-devkit",
'from "@angular-devkit',
].some((s) => implStringContents.includes(s))) {
useAngularDevkitToRunMigration = true;
}
if (useAngularDevkitToRunMigration && shouldBeNx) {
if (entry.cli && entry.cli !== 'nx' && collection.generators?.[name]) {
output_1.output.warn({
title: `The migration '${collection.name}:${name}' appears to be an Angular CLI migration, but is located in the 'generators' section of migrations.json.`,
bodyLines: [
'In Nx 17, migrations inside `generators` will be treated as Angular Devkit migrations.',
'In Nx 21, migrations inside `generators` will be treated as Nx Devkit migrations and therefore may not run correctly if they are using Angular Devkit.',
'If the migration should be run with Angular Devkit, please place the migration inside `schematics` instead.',
"Please open an issue on the plugin's repository if you believe this is an error.",

@@ -1124,14 +1099,5 @@ ],

}
if (!useAngularDevkitToRunMigration && entry.cli === 'nx' && shouldBeNg) {
output_1.output.warn({
title: `The migration '${collection.name}:${name}' appears to be an Nx migration, but is located in the 'schematics' section of migrations.json.`,
bodyLines: [
'In Nx 17, migrations inside `generators` will be treated as nx devkit migrations.',
"Please open an issue on the plugin's repository if you believe this is an error.",
],
});
}
// Currently, if the cli property exists we listen to it. If its nx, its not an ng cli migration.
// If the property is not set, we will fall back to our intuition.
return entry.cli ? entry.cli !== 'nx' : useAngularDevkitToRunMigration;
return entry.cli ? entry.cli !== 'nx' : !shouldBeNx && shouldBeNg;
}

@@ -1138,0 +1104,0 @@ const getNgCompatLayer = (() => {
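The effect of the simplified rule: an entry's cli property still wins when present; otherwise an entry under schematics is treated as an Angular Devkit migration and an entry under generators as an Nx migration, with no more inspection of the implementation file. A hypothetical migrations.json illustrating the two sections (shape only, not taken from any real plugin):

const migrationsJson = {
  generators: {
    // No `cli` property and listed under `generators` => run as an Nx Devkit migration.
    'update-nx-config': { version: '20.0.0', implementation: './migrations/update-nx-config' },
  },
  schematics: {
    // Listed under `schematics` (and not marked `cli: 'nx'`) => run with the Angular Devkit.
    'update-ng-config': { version: '20.0.0', factory: './migrations/update-ng-config' },
  },
};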

@@ -27,4 +27,6 @@ "use strict";

const command_object_21 = require("./add/command-object");
const command_object_22 = require("./login/command-object");
const command_object_23 = require("./logout/command-object");
const command_objects_1 = require("./deprecated/command-objects");
const command_object_22 = require("./sync/command-object");
const command_object_24 = require("./sync/command-object");
// Ensure that the output takes up the available width of the terminal.

@@ -74,7 +76,9 @@ yargs.wrap(yargs.terminalWidth());

.command(command_object_17.yargsShowCommand)
.command(command_object_22.yargsSyncCommand)
.command(command_object_22.yargsSyncCheckCommand)
.command(command_object_24.yargsSyncCommand)
.command(command_object_24.yargsSyncCheckCommand)
.command(command_object_2.yargsViewLogsCommand)
.command(command_object_18.yargsWatchCommand)
.command(command_object_15.yargsNxInfixCommand)
.command(command_object_22.yargsLoginCommand)
.command(command_object_23.yargsLogoutCommand)
.scriptName('nx')

@@ -81,0 +85,0 @@ .help()

@@ -47,5 +47,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -52,0 +49,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -12,3 +12,3 @@ "use strict";

describe: 'Orchestrate versioning and publishing of applications and libraries',
builder: (yargs) => yargs
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs)
.command(releaseCommand)

@@ -41,6 +41,2 @@ .command(versionCommand)

})
.option('verbose', {
type: 'boolean',
describe: 'Prints additional information about the commands (e.g., stack traces)',
})
// NOTE: The camel case format is required for the coerce() function to be called correctly. It still supports --print-config casing.

@@ -47,0 +43,0 @@ .option('printConfig', {

@@ -27,5 +27,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -32,0 +29,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -30,5 +30,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -35,0 +32,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -38,5 +38,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (_args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -118,5 +115,2 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

}
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (args.firstRelease) {

@@ -123,0 +117,0 @@ overrides.firstRelease = args.firstRelease;

@@ -34,5 +34,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const hasVersionGitConfig = Object.keys(userProvidedReleaseConfig.version?.git ?? {}).length > 0;

@@ -39,0 +36,0 @@ const hasChangelogGitConfig = Object.keys(userProvidedReleaseConfig.changelog?.git ?? {}).length > 0;

@@ -47,5 +47,2 @@ "use strict";

const userProvidedReleaseConfig = (0, deep_merge_json_1.deepMergeJson)(nxJson.release ?? {}, overrideReleaseConfig ?? {});
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
// Apply default configuration to any optional user configuration

@@ -52,0 +49,0 @@ const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);

@@ -5,2 +5,3 @@ "use strict";

const documentation_1 = require("../yargs-utils/documentation");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsRepairCommand = {

@@ -18,7 +19,4 @@ command: 'repair',

`,
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(yargs, 'repair').option('verbose', {
type: 'boolean',
describe: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withVerbose)(yargs), 'repair'),
handler: async (args) => process.exit(await (await Promise.resolve().then(() => require('./repair'))).repair(args)),
};

@@ -9,7 +9,3 @@ "use strict";

async function repair(args, extraMigrations = []) {
if (args['verbose']) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const verbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(verbose, async () => {
return (0, params_1.handleErrors)(args.verbose, async () => {
const nxMigrations = Object.entries(migrationsJson.generators).reduce((agg, [name, migration]) => {

@@ -29,3 +25,3 @@ const skip = migration['x-repair-skip'];

const migrations = [...nxMigrations, ...extraMigrations];
const migrationsThatMadeNoChanges = await (0, migrate_1.executeMigrations)(process.cwd(), migrations, verbose, false, '');
const migrationsThatMadeNoChanges = await (0, migrate_1.executeMigrations)(process.cwd(), migrations, args.verbose, false, '');
if (migrationsThatMadeNoChanges.length < migrations.length) {

@@ -32,0 +28,0 @@ output_1.output.success({

@@ -31,5 +31,2 @@ "use strict";

}, 'run-one', { printWarnings: args.graph !== 'stdout' }, nxJson);
if (nxArgs.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (nxArgs.help) {

@@ -36,0 +33,0 @@ await (await Promise.resolve().then(() => require('./run'))).printTargetRunHelp(opts, workspace_root_1.workspaceRoot);

@@ -68,3 +68,3 @@ "use strict";

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
const { showProjectsHandler } = await Promise.resolve().then(() => require('./projects'));

@@ -110,3 +110,3 @@ await showProjectsHandler(args);

handler: async (args) => {
const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const exitCode = await (0, params_1.handleErrors)(args.verbose, async () => {
const { showProjectHandler } = await Promise.resolve().then(() => require('./project'));

@@ -113,0 +113,0 @@ await showProjectHandler(args);

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.yargsSyncCheckCommand = exports.yargsSyncCommand = void 0;
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsSyncCommand = {
command: 'sync',
describe: false,
builder: (yargs) => yargs.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs),
handler: async (args) => {

@@ -18,6 +16,3 @@ process.exit(await Promise.resolve().then(() => require('./sync')).then((m) => m.syncHandler(args)));

describe: false,
builder: (yargs) => yargs.option('verbose', {
type: 'boolean',
description: 'Prints additional information about the commands (e.g., stack traces)',
}),
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs),
handler: async (args) => {

@@ -24,0 +19,0 @@ process.exit(await Promise.resolve().then(() => require('./sync')).then((m) => m.syncHandler({ ...args, check: true })));

@@ -11,7 +11,3 @@ "use strict";

function syncHandler(options) {
if (options.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
const isVerbose = process.env.NX_VERBOSE_LOGGING === 'true';
return (0, params_1.handleErrors)(isVerbose, async () => {
return (0, params_1.handleErrors)(options.verbose, async () => {
const projectGraph = await (0, project_graph_1.createProjectGraphAsync)();

@@ -18,0 +14,0 @@ const syncGenerators = await (0, sync_generators_1.collectAllRegisteredSyncGenerators)(projectGraph);

@@ -15,3 +15,3 @@ "use strict";

function withWatchOptions(yargs) {
return yargs
return (0, shared_options_1.withVerbose)(yargs)
.parserConfiguration({

@@ -18,0 +18,0 @@ 'strip-dashed': true,

@@ -117,5 +117,2 @@ "use strict";

const projectReplacementRegex = new RegExp(args.projectNameEnvName ?? DEFAULT_PROJECT_NAME_ENV, 'g');
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
if (!client_1.daemonClient.enabled()) {

@@ -122,0 +119,0 @@ output_1.output.error({

@@ -1,5 +0,6 @@

import { Argv } from 'yargs';
import { Argv, ParserConfigurationOptions } from 'yargs';
interface ExcludeOptions {
exclude: string[];
}
export declare const defaultYargsParserConfiguration: Partial<ParserConfigurationOptions>;
export declare function withExcludeOption(yargs: Argv): Argv<ExcludeOptions>;

@@ -6,0 +7,0 @@ export interface RunOptions {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.defaultYargsParserConfiguration = void 0;
exports.withExcludeOption = withExcludeOption;

@@ -15,2 +16,9 @@ exports.withRunOptions = withRunOptions;

exports.parseCSV = parseCSV;
exports.defaultYargsParserConfiguration = {
'strip-dashed': true,
'unknown-options-as-args': true,
'populate--': true,
'parse-numbers': false,
'parse-positional-numbers': false,
};
function withExcludeOption(yargs) {

@@ -115,5 +123,5 @@ return yargs.option('exclude', {

.middleware((args) => {
if (args.verbose) {
process.env.NX_VERBOSE_LOGGING = 'true';
}
args.verbose ??= process.env.NX_VERBOSE_LOGGING === 'true';
// If NX_VERBOSE_LOGGING=false and --verbose is passed, we want to set it to true favoring the arg
process.env.NX_VERBOSE_LOGGING = args.verbose.toString();
});
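This middleware is the single place where --verbose and NX_VERBOSE_LOGGING are reconciled, which is why the per-command `if (args.verbose) { process.env.NX_VERBOSE_LOGGING = 'true' }` blocks are removed throughout this diff and handlers now pass args.verbose straight to handleErrors. A sketch of how the shared withVerbose helper could fit together with it (illustrative only; the real implementation lives in src/command-line/yargs-utils/shared-options):

import type { Argv } from 'yargs';

// Sketch: register the --verbose flag once and sync it with NX_VERBOSE_LOGGING.
export function withVerbose<T>(yargs: Argv<T>) {
  return yargs
    .option('verbose', {
      type: 'boolean',
      describe: 'Prints additional information about the commands (e.g., stack traces)',
    })
    .middleware((args) => {
      // Favor an explicit --verbose flag; otherwise fall back to the env variable.
      args.verbose ??= process.env.NX_VERBOSE_LOGGING === 'true';
      process.env.NX_VERBOSE_LOGGING = args.verbose.toString();
    });
}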

@@ -133,7 +141,3 @@ }

return withExcludeOption(yargs)
.parserConfiguration({
'strip-dashed': true,
'unknown-options-as-args': true,
'populate--': true,
})
.parserConfiguration(exports.defaultYargsParserConfiguration)
.option('files', {

@@ -175,7 +179,3 @@ describe: 'Change the way Nx is calculating the affected command by providing directly changed files, list of files delimited by commas or spaces',

return withRunOptions(yargs)
.parserConfiguration({
'strip-dashed': true,
'unknown-options-as-args': true,
'populate--': true,
})
.parserConfiguration(exports.defaultYargsParserConfiguration)
.option('projects', {

@@ -232,7 +232,3 @@ type: 'string',

const res = withRunOptions(withOutputStyleOption(withConfiguration(yargs), allOutputStyles))
.parserConfiguration({
'strip-dashed': true,
'unknown-options-as-args': true,
'populate--': true,
})
.parserConfiguration(exports.defaultYargsParserConfiguration)
.option('project', {

@@ -239,0 +235,0 @@ describe: 'Target project',

@@ -1,1 +0,1 @@

"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[532],{79207:()=>{}},s=>{var e;e=79207,s(s.s=e)}]);
"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[532],{46134:()=>{}},s=>{var e;e=46134,s(s.s=e)}]);

@@ -6,4 +6,3 @@ import { ChildProcess } from 'child_process';

import { ConfigurationSourceMaps } from '../../project-graph/utils/project-configuration-utils';
import { NxWorkspaceFiles } from '../../native';
import { TaskRun } from '../../utils/task-history';
import { NxWorkspaceFiles, TaskRun } from '../../native';
import type { SyncGeneratorChangesResult } from '../../utils/sync-generators';

@@ -55,6 +54,4 @@ export type UnregisterCallback = () => void;

hashGlob(globs: string[], exclude?: string[]): Promise<string>;
getTaskHistoryForHashes(hashes: string[]): Promise<{
[hash: string]: TaskRun[];
}>;
writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
getFlakyTasks(hashes: string[]): Promise<string[]>;
recordTaskRuns(taskRuns: TaskRun[]): Promise<void>;
getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;

@@ -61,0 +58,0 @@ flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;

@@ -28,2 +28,3 @@ "use strict";

const hash_glob_1 = require("../message-types/hash-glob");
const task_history_1 = require("../message-types/task-history");
const force_shutdown_1 = require("../message-types/force-shutdown");

@@ -75,5 +76,7 @@ const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");

// CI=true,env=true => daemon
// docker=true,env=undefined => no daemon
// docker=true,env=false => no daemon
// docker=true,env=true => daemon
// WASM => no daemon because file watching does not work
if (((0, is_ci_1.isCI)() && env !== 'true') ||
isDocker() ||
if ((((0, is_ci_1.isCI)() || isDocker()) && env !== 'true') ||
(0, tmp_dir_1.isDaemonDisabled)() ||
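Restating the changed clause as a standalone predicate (sketch only; env is assumed to carry the NX_DAEMON setting read earlier in this file): the daemon now stays off both in CI and in Docker unless it is explicitly opted in.

// True when this clause alone disables the daemon:
function daemonDisabledByEnvironment(isCI: boolean, isDocker: boolean, env: string | undefined): boolean {
  return (isCI || isDocker) && env !== 'true';
}
// daemonDisabledByEnvironment(false, true, undefined) -> true   (Docker, no opt-in: no daemon)
// daemonDisabledByEnvironment(false, true, 'true')    -> false  (Docker, opted in: daemon allowed)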

@@ -240,5 +243,5 @@ nxJsonIsNotPresent() ||

}
getTaskHistoryForHashes(hashes) {
getFlakyTasks(hashes) {
const message = {
type: 'GET_TASK_HISTORY_FOR_HASHES',
type: task_history_1.GET_FLAKY_TASKS,
hashes,

@@ -248,5 +251,5 @@ };

}
writeTaskRunsToHistory(taskRuns) {
recordTaskRuns(taskRuns) {
const message = {
type: 'WRITE_TASK_RUNS_TO_HISTORY',
type: task_history_1.RECORD_TASK_RUNS,
taskRuns,

@@ -253,0 +256,0 @@ };

@@ -1,13 +0,13 @@

import { TaskRun } from '../../utils/task-history';
export declare const GET_TASK_HISTORY_FOR_HASHES: "GET_TASK_HISTORY_FOR_HASHES";
export type HandleGetTaskHistoryForHashesMessage = {
type: typeof GET_TASK_HISTORY_FOR_HASHES;
import type { TaskRun } from '../../native';
export declare const GET_FLAKY_TASKS: "GET_FLAKY_TASKS";
export type HandleGetFlakyTasks = {
type: typeof GET_FLAKY_TASKS;
hashes: string[];
};
export declare function isHandleGetTaskHistoryForHashesMessage(message: unknown): message is HandleGetTaskHistoryForHashesMessage;
export declare const WRITE_TASK_RUNS_TO_HISTORY: "WRITE_TASK_RUNS_TO_HISTORY";
export type HandleWriteTaskRunsToHistoryMessage = {
type: typeof WRITE_TASK_RUNS_TO_HISTORY;
export declare function isHandleGetFlakyTasksMessage(message: unknown): message is HandleGetFlakyTasks;
export declare const RECORD_TASK_RUNS: "RECORD_TASK_RUNS";
export type HandleRecordTaskRunsMessage = {
type: typeof RECORD_TASK_RUNS;
taskRuns: TaskRun[];
};
export declare function isHandleWriteTaskRunsToHistoryMessage(message: unknown): message is HandleWriteTaskRunsToHistoryMessage;
export declare function isHandleWriteTaskRunsToHistoryMessage(message: unknown): message is HandleRecordTaskRunsMessage;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.WRITE_TASK_RUNS_TO_HISTORY = exports.GET_TASK_HISTORY_FOR_HASHES = void 0;
exports.isHandleGetTaskHistoryForHashesMessage = isHandleGetTaskHistoryForHashesMessage;
exports.RECORD_TASK_RUNS = exports.GET_FLAKY_TASKS = void 0;
exports.isHandleGetFlakyTasksMessage = isHandleGetFlakyTasksMessage;
exports.isHandleWriteTaskRunsToHistoryMessage = isHandleWriteTaskRunsToHistoryMessage;
exports.GET_TASK_HISTORY_FOR_HASHES = 'GET_TASK_HISTORY_FOR_HASHES';
function isHandleGetTaskHistoryForHashesMessage(message) {
exports.GET_FLAKY_TASKS = 'GET_FLAKY_TASKS';
function isHandleGetFlakyTasksMessage(message) {
return (typeof message === 'object' &&
message !== null &&
'type' in message &&
message['type'] === exports.GET_TASK_HISTORY_FOR_HASHES);
message['type'] === exports.GET_FLAKY_TASKS);
}
exports.WRITE_TASK_RUNS_TO_HISTORY = 'WRITE_TASK_RUNS_TO_HISTORY';
exports.RECORD_TASK_RUNS = 'RECORD_TASK_RUNS';
function isHandleWriteTaskRunsToHistoryMessage(message) {

@@ -18,3 +18,3 @@ return (typeof message === 'object' &&

'type' in message &&
message['type'] === exports.WRITE_TASK_RUNS_TO_HISTORY);
message['type'] === exports.RECORD_TASK_RUNS);
}

@@ -41,4 +41,3 @@ "use strict";

const task_history_1 = require("../message-types/task-history");
const handle_get_task_history_1 = require("./handle-get-task-history");
const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
const handle_task_history_1 = require("./handle-task-history");
const force_shutdown_1 = require("../message-types/force-shutdown");

@@ -144,7 +143,7 @@ const handle_force_shutdown_1 = require("./handle-force-shutdown");

}
else if ((0, task_history_1.isHandleGetTaskHistoryForHashesMessage)(payload)) {
await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () => (0, handle_get_task_history_1.handleGetTaskHistoryForHashes)(payload.hashes));
else if ((0, task_history_1.isHandleGetFlakyTasksMessage)(payload)) {
await handleResult(socket, 'GET_TASK_HISTORY_FOR_HASHES', () => (0, handle_task_history_1.handleGetFlakyTasks)(payload.hashes));
}
else if ((0, task_history_1.isHandleWriteTaskRunsToHistoryMessage)(payload)) {
await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_write_task_runs_to_history_1.handleWriteTaskRunsToHistory)(payload.taskRuns));
await handleResult(socket, 'WRITE_TASK_RUNS_TO_HISTORY', () => (0, handle_task_history_1.handleRecordTaskRuns)(payload.taskRuns));
}

@@ -151,0 +150,0 @@ else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {

@@ -70,2 +70,3 @@ /**

export type { ProjectFileMap, FileMap, FileData, ProjectGraph, ProjectGraphDependency, ProjectGraphNode, ProjectGraphProjectNode, ProjectGraphExternalNode, ProjectGraphProcessorContext, } from './config/project-graph';
export type { GraphJson } from './command-line/graph/graph';
/**

@@ -72,0 +73,0 @@ * @category Project Graph

@@ -9,4 +9,18 @@ "use strict";

const nx_json_1 = require("../config/nx-json");
const native_1 = require("../native");
const db_connection_1 = require("../utils/db-connection");
let taskDetails;
function getTaskDetails() {
// TODO: Remove when wasm supports sqlite
if (native_1.IS_WASM) {
return null;
}
if (!taskDetails) {
taskDetails = new native_1.TaskDetails((0, db_connection_1.getDbConnection)());
}
return taskDetails;
}
async function hashTasksThatDoNotDependOnOutputsOfOtherTasks(hasher, projectGraph, taskGraph, nxJson) {
performance.mark('hashMultipleTasks:start');
const taskDetails = getTaskDetails();
const tasks = Object.values(taskGraph.tasks);

@@ -27,3 +41,3 @@ const tasksWithHashers = await Promise.all(tasks.map(async (task) => {

.map((t) => t.task);
const hashes = await hasher.hashTasks(tasksToHash, taskGraph);
const hashes = await hasher.hashTasks(tasksToHash, taskGraph, process.env);
for (let i = 0; i < tasksToHash.length; i++) {

@@ -33,2 +47,11 @@ tasksToHash[i].hash = hashes[i].value;

}
// TODO: Remove if when wasm supports sqlite
if (taskDetails) {
taskDetails.recordTaskDetails(tasksToHash.map((task) => ({
hash: task.hash,
project: task.target.project,
target: task.target.target,
configuration: task.target.configuration,
})));
}
performance.mark('hashMultipleTasks:end');

@@ -39,2 +62,3 @@ performance.measure('hashMultipleTasks', 'hashMultipleTasks:start', 'hashMultipleTasks:end');

performance.mark('hashSingleTask:start');
const taskDetails = getTaskDetails();
const customHasher = (0, utils_1.getCustomHasher)(task, projectGraph);

@@ -55,4 +79,15 @@ const projectsConfigurations = (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph);

task.hashDetails = details;
// TODO: Remove if when wasm supports sqlite
if (taskDetails) {
taskDetails.recordTaskDetails([
{
hash: task.hash,
project: task.target.project,
target: task.target.target,
configuration: task.target.configuration,
},
]);
}
performance.mark('hashSingleTask:end');
performance.measure('hashSingleTask', 'hashSingleTask:start', 'hashSingleTask:end');
}

@@ -21,3 +21,3 @@ "use strict";

];
if (process.env.NX_VERBOSE_LOGGING == 'true') {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
bodyLines.push('', 'Additional error information:', e.message);

@@ -24,0 +24,0 @@ }

@@ -29,2 +29,19 @@ /* auto-generated by NAPI-RS */

export declare class NxCache {
cacheDirectory: string
constructor(workspaceRoot: string, cachePath: string, dbConnection: ExternalObject<Connection>)
get(hash: string): CachedResult | null
put(hash: string, terminalOutput: string, outputs: Array<string>, code: number): void
applyRemoteCacheResults(hash: string, result: CachedResult): void
getTaskOutputsPath(hash: string): string
copyFilesFromCache(cachedResult: CachedResult, outputs: Array<string>): void
removeOldCacheRecords(): void
}
export declare class NxTaskHistory {
constructor(db: ExternalObject<Connection>)
recordTaskRuns(taskRuns: Array<TaskRun>): void
getFlakyTasks(hashes: Array<string>): Array<string>
}
export declare class RustPseudoTerminal {

@@ -40,2 +57,7 @@ constructor()

export declare class TaskDetails {
constructor(db: ExternalObject<Connection>)
recordTaskDetails(tasks: Array<HashedTask>): void
}
export declare class TaskHasher {

@@ -72,2 +94,10 @@ constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)

export interface CachedResult {
code: number
terminalOutput: string
outputsPath: string
}
export declare export function connectToNxDb(cacheDir: string, nxVersion: string): ExternalObject<Connection>
export declare export function copy(src: string, dest: string): void

@@ -90,6 +120,2 @@

/**
* Expands the given entries into a list of existing directories and files.
* This is used for copying outputs to and from the cache
*/
export declare export function expandOutputs(directory: string, entries: Array<string>): Array<string>

@@ -138,2 +164,9 @@

export interface HashedTask {
hash: string
project: string
target: string
configuration?: string
}
export interface HasherOptions {

@@ -211,2 +244,10 @@ selectivelyHashTsConfig: boolean

export interface TaskRun {
hash: string
status: string
code: number
start: number
end: number
}
export interface TaskTarget {

@@ -213,0 +254,0 @@ project: string

@@ -367,6 +367,10 @@ // prettier-ignore

module.exports.ImportResult = nativeBinding.ImportResult
module.exports.NxCache = nativeBinding.NxCache
module.exports.NxTaskHistory = nativeBinding.NxTaskHistory
module.exports.RustPseudoTerminal = nativeBinding.RustPseudoTerminal
module.exports.TaskDetails = nativeBinding.TaskDetails
module.exports.TaskHasher = nativeBinding.TaskHasher
module.exports.Watcher = nativeBinding.Watcher
module.exports.WorkspaceContext = nativeBinding.WorkspaceContext
module.exports.connectToNxDb = nativeBinding.connectToNxDb
module.exports.copy = nativeBinding.copy

@@ -373,0 +377,0 @@ module.exports.EventType = nativeBinding.EventType

@@ -17,3 +17,3 @@ import {

const __sharedMemory = new WebAssembly.Memory({
initial: 16384,
initial: 1024,
maximum: 32768,

@@ -59,40 +59,53 @@ shared: true,

__napiInstance.exports['__napi_register__copy_3']?.()
__napiInstance.exports['__napi_register__hash_array_4']?.()
__napiInstance.exports['__napi_register__hash_file_5']?.()
__napiInstance.exports['__napi_register__ImportResult_struct_6']?.()
__napiInstance.exports['__napi_register__find_imports_7']?.()
__napiInstance.exports['__napi_register__transfer_project_graph_8']?.()
__napiInstance.exports['__napi_register__ExternalNode_struct_9']?.()
__napiInstance.exports['__napi_register__Target_struct_10']?.()
__napiInstance.exports['__napi_register__Project_struct_11']?.()
__napiInstance.exports['__napi_register__ProjectGraph_struct_12']?.()
__napiInstance.exports['__napi_register__HashPlanner_struct_13']?.()
__napiInstance.exports['__napi_register__HashPlanner_impl_17']?.()
__napiInstance.exports['__napi_register__HashDetails_struct_18']?.()
__napiInstance.exports['__napi_register__HasherOptions_struct_19']?.()
__napiInstance.exports['__napi_register__TaskHasher_struct_20']?.()
__napiInstance.exports['__napi_register__TaskHasher_impl_23']?.()
__napiInstance.exports['__napi_register__Task_struct_24']?.()
__napiInstance.exports['__napi_register__TaskTarget_struct_25']?.()
__napiInstance.exports['__napi_register__TaskGraph_struct_26']?.()
__napiInstance.exports['__napi_register__FileData_struct_27']?.()
__napiInstance.exports['__napi_register__InputsInput_struct_28']?.()
__napiInstance.exports['__napi_register__FileSetInput_struct_29']?.()
__napiInstance.exports['__napi_register__RuntimeInput_struct_30']?.()
__napiInstance.exports['__napi_register__EnvironmentInput_struct_31']?.()
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_32']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_33']?.()
__napiInstance.exports['__napi_register__NxJson_struct_34']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_35']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_44']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_45']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_46']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_47']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_48']?.()
__napiInstance.exports['__napi_register__FileMap_struct_49']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_50']?.()
__napiInstance.exports['__napi_register__IS_WASM_51']?.()
__napiInstance.exports['__napi_register__CachedResult_struct_4']?.()
__napiInstance.exports['__napi_register__NxCache_struct_5']?.()
__napiInstance.exports['__napi_register__NxCache_impl_13']?.()
__napiInstance.exports['__napi_register__hash_array_14']?.()
__napiInstance.exports['__napi_register__hash_file_15']?.()
__napiInstance.exports['__napi_register__IS_WASM_16']?.()
__napiInstance.exports['__napi_register__get_binary_target_17']?.()
__napiInstance.exports['__napi_register__ImportResult_struct_18']?.()
__napiInstance.exports['__napi_register__find_imports_19']?.()
__napiInstance.exports['__napi_register__transfer_project_graph_20']?.()
__napiInstance.exports['__napi_register__ExternalNode_struct_21']?.()
__napiInstance.exports['__napi_register__Target_struct_22']?.()
__napiInstance.exports['__napi_register__Project_struct_23']?.()
__napiInstance.exports['__napi_register__ProjectGraph_struct_24']?.()
__napiInstance.exports['__napi_register__HashedTask_struct_25']?.()
__napiInstance.exports['__napi_register__TaskDetails_struct_26']?.()
__napiInstance.exports['__napi_register__TaskDetails_impl_29']?.()
__napiInstance.exports['__napi_register__HashPlanner_struct_30']?.()
__napiInstance.exports['__napi_register__HashPlanner_impl_34']?.()
__napiInstance.exports['__napi_register__HashDetails_struct_35']?.()
__napiInstance.exports['__napi_register__HasherOptions_struct_36']?.()
__napiInstance.exports['__napi_register__TaskHasher_struct_37']?.()
__napiInstance.exports['__napi_register__TaskHasher_impl_40']?.()
__napiInstance.exports['__napi_register__TaskRun_struct_41']?.()
__napiInstance.exports['__napi_register__NxTaskHistory_struct_42']?.()
__napiInstance.exports['__napi_register__NxTaskHistory_impl_46']?.()
__napiInstance.exports['__napi_register__Task_struct_47']?.()
__napiInstance.exports['__napi_register__TaskTarget_struct_48']?.()
__napiInstance.exports['__napi_register__TaskGraph_struct_49']?.()
__napiInstance.exports['__napi_register__FileData_struct_50']?.()
__napiInstance.exports['__napi_register__InputsInput_struct_51']?.()
__napiInstance.exports['__napi_register__FileSetInput_struct_52']?.()
__napiInstance.exports['__napi_register__RuntimeInput_struct_53']?.()
__napiInstance.exports['__napi_register__EnvironmentInput_struct_54']?.()
__napiInstance.exports['__napi_register__ExternalDependenciesInput_struct_55']?.()
__napiInstance.exports['__napi_register__DepsOutputsInput_struct_56']?.()
__napiInstance.exports['__napi_register__NxJson_struct_57']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_struct_58']?.()
__napiInstance.exports['__napi_register__WorkspaceContext_impl_67']?.()
__napiInstance.exports['__napi_register__WorkspaceErrors_68']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFiles_struct_69']?.()
__napiInstance.exports['__napi_register__NxWorkspaceFilesExternals_struct_70']?.()
__napiInstance.exports['__napi_register__UpdatedWorkspaceFiles_struct_71']?.()
__napiInstance.exports['__napi_register__FileMap_struct_72']?.()
__napiInstance.exports['__napi_register____test_only_transfer_file_map_73']?.()
}
export const HashPlanner = __napiModule.exports.HashPlanner
export const ImportResult = __napiModule.exports.ImportResult
export const NxCache = __napiModule.exports.NxCache
export const NxTaskHistory = __napiModule.exports.NxTaskHistory
export const TaskDetails = __napiModule.exports.TaskDetails
export const TaskHasher = __napiModule.exports.TaskHasher

@@ -103,2 +116,3 @@ export const WorkspaceContext = __napiModule.exports.WorkspaceContext

export const findImports = __napiModule.exports.findImports
export const getBinaryTarget = __napiModule.exports.getBinaryTarget
export const getFilesForOutputs = __napiModule.exports.getFilesForOutputs

@@ -105,0 +119,0 @@ export const hashArray = __napiModule.exports.hashArray

import type { CloudTaskRunnerOptions } from './nx-cloud-tasks-runner-shell';
import { TasksRunner } from '../tasks-runner/tasks-runner';
import { RemoteCacheV2 } from '../tasks-runner/default-tasks-runner';
export declare class NxCloudEnterpriseOutdatedError extends Error {

@@ -13,2 +14,3 @@ constructor(url: string);

nxCloudTasksRunner: TasksRunner<CloudTaskRunnerOptions>;
remoteCache: RemoteCacheV2;
}

@@ -15,0 +17,0 @@ export declare function verifyOrUpdateNxCloudClient(options: CloudTaskRunnerOptions): Promise<{

@@ -14,8 +14,3 @@ "use strict";

if (!accessToken && !nxCloudId) {
if (process.env.NX_ENABLE_LOGIN === 'true' && !nxCloudId) {
throw new Error(`Unable to authenticate. Please connect your workspace to Nx Cloud to define a valid Nx Cloud Id. If you are in a CI context, please set the NX_CLOUD_ACCESS_TOKEN environment variable or define an access token in your nx.json.`);
}
else {
throw new Error(`Unable to authenticate. Either define accessToken in nx.json or set the NX_CLOUD_ACCESS_TOKEN env variable. If you do not want to use Nx Cloud for this command, either set NX_NO_CLOUD=true, or pass the --no-cloud flag.`);
}
throw new Error(`Unable to authenticate. If you are connecting to Nx Cloud locally, set an Nx Cloud ID in your nx.json with "nx connect". If you are in a CI context, please set the NX_CLOUD_ACCESS_TOKEN environment variable or define an access token in your nx.json.`);
}

@@ -22,0 +17,0 @@ if (options.customProxyConfigPath) {

@@ -98,3 +98,3 @@ "use strict";

catch (e) {
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
devkit_exports_1.logger.warn(`Failed to access system features. GitHub integration assumed to be disabled.

@@ -101,0 +101,0 @@ ${e}`);

@@ -22,2 +22,3 @@ import { ProjectFileMap, ProjectGraph, ProjectGraphProjectNode } from '../../../config/project-graph';

skipPackageManager?: boolean;
skipOverrides?: boolean;
}, fileMap?: ProjectFileMap): PackageJson;

@@ -24,0 +25,0 @@ export declare function findProjectsNpmDependencies(projectNode: ProjectGraphProjectNode, graph: ProjectGraph, target: string, rootPackageJson: PackageJson, options: {

@@ -124,2 +124,26 @@ "use strict";

}
// region Overrides/Resolutions
// npm
if (rootPackageJson.overrides && !options.skipOverrides) {
packageJson.overrides = {
...rootPackageJson.overrides,
...packageJson.overrides,
};
}
// pnpm
if (rootPackageJson.pnpm?.overrides && !options.skipOverrides) {
packageJson.pnpm ??= {};
packageJson.pnpm.overrides = {
...rootPackageJson.pnpm.overrides,
...packageJson.pnpm.overrides,
};
}
// yarn
if (rootPackageJson.resolutions && !options.skipOverrides) {
packageJson.resolutions = {
...rootPackageJson.resolutions,
...packageJson.resolutions,
};
}
// endregion Overrides/Resolutions
return packageJson;

@@ -126,0 +150,0 @@ }
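To illustrate the merge order above with hypothetical values: entries already present on the pruned package.json win over the root-level ones, and the same pattern applies to pnpm.overrides and yarn resolutions.

// Hypothetical root and pruned package.json fragments:
const rootOverrides = { lodash: '4.17.21', 'is-odd': '0.1.2' };
const projectOverrides = { 'is-odd': '3.0.1' };
// Spread order in the code above puts the project entries last, so they win on conflicts:
const merged = { ...rootOverrides, ...projectOverrides };
// => { lodash: '4.17.21', 'is-odd': '3.0.1' }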

@@ -143,12 +143,17 @@ "use strict";

const version = (0, semver_1.clean)(externalPackageJson.version);
const npmProjectKey = `npm:${externalPackageJson.name}@${version}`;
let matchingExternalNode = this.npmProjects[npmProjectKey];
let matchingExternalNode = this.npmProjects[`npm:${externalPackageJson.name}@${version}`];
if (!matchingExternalNode) {
// check if it's a package alias, where the resolved package key is used as the version
const aliasNpmProjectKey = `npm:${packageName}@${npmProjectKey}`;
const aliasNpmProjectKey = `npm:${packageName}@npm:${externalPackageJson.name}@${version}`;
matchingExternalNode = this.npmProjects[aliasNpmProjectKey];
if (!matchingExternalNode) {
return null;
}
}
if (!matchingExternalNode) {
// Fallback to package name as key. This can happen if the version in project graph is not the same as in the resolved package.json.
// e.g. Version in project graph is a git remote, but the resolved version is semver.
matchingExternalNode =
this.npmProjects[`npm:${externalPackageJson.name}`];
}
if (!matchingExternalNode) {
return null;
}
this.npmResolutionCache.set(npmImportForProject, matchingExternalNode.name);

@@ -155,0 +160,0 @@ return matchingExternalNode.name;
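A worked example of the lookup order implemented above, assuming an external dependency aliased as my-lib that resolves to lib-a@1.2.3 (both names hypothetical):

// Keys tried against this.npmProjects, in order:
const lookupKeys = [
  'npm:lib-a@1.2.3',            // resolved name@version
  'npm:my-lib@npm:lib-a@1.2.3', // package alias: the resolved package key is used as the version
  'npm:lib-a',                  // new fallback: name only, when the graph version differs from the resolved one
];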

@@ -1,2 +0,2 @@

import { DefaultTasksRunnerOptions } from './default-tasks-runner';
import { DefaultTasksRunnerOptions, RemoteCache } from './default-tasks-runner';
import { Task } from '../config/task-graph';

@@ -13,2 +13,22 @@ export type CachedResult = {

};
export declare class DbCache {
private readonly options;
private cache;
private remoteCache;
private remoteCachePromise;
setup(): Promise<void>;
constructor(options: {
nxCloudRemoteCache: RemoteCache;
});
get(task: Task): Promise<CachedResult | null>;
put(task: Task, terminalOutput: string | null, outputs: string[], code: number): Promise<void>;
copyFilesFromCache(_: string, cachedResult: CachedResult, outputs: string[]): Promise<void>;
removeOldCacheRecords(): void;
temporaryOutputPath(task: Task): string;
private getRemoteCache;
private _getRemoteCache;
}
/**
* @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
*/
export declare class Cache {

@@ -36,3 +56,2 @@ private readonly options;

private createTerminalOutputsDir;
private tryAndRetry;
}
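A minimal usage sketch against the DbCache API declared above. The deep import path and the surrounding task-running code are assumptions; inside Nx the class is constructed and driven by the task orchestrator.

import { DbCache } from 'nx/src/tasks-runner/cache'; // path assumed from the diff context
import type { Task } from 'nx/src/config/task-graph';

async function runWithCache(
  cache: DbCache,
  task: Task,
  run: () => Promise<{ terminalOutput: string; outputs: string[]; code: number }>
) {
  await cache.setup(); // resolves the remote (Nx Cloud) cache if one is configured
  const hit = await cache.get(task); // local sqlite-backed cache first, then remote
  if (hit) return hit;
  const result = await run();
  await cache.put(task, result.terminalOutput, result.outputs, result.code);
  return result;
}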
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = void 0;
exports.Cache = exports.DbCache = void 0;
const workspace_root_1 = require("../utils/workspace-root");

@@ -8,5 +8,95 @@ const fs_extra_1 = require("fs-extra");

const perf_hooks_1 = require("perf_hooks");
const default_tasks_runner_1 = require("./default-tasks-runner");
const child_process_1 = require("child_process");
const cache_directory_1 = require("../utils/cache-directory");
const node_machine_id_1 = require("node-machine-id");
const native_1 = require("../native");
const db_connection_1 = require("../utils/db-connection");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const nx_json_1 = require("../config/nx-json");
const update_manager_1 = require("../nx-cloud/update-manager");
const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
class DbCache {
async setup() {
this.remoteCache = await this.getRemoteCache();
}
constructor(options) {
this.options = options;
this.cache = new native_1.NxCache(workspace_root_1.workspaceRoot, cache_directory_1.cacheDir, (0, db_connection_1.getDbConnection)());
}
async get(task) {
const res = this.cache.get(task.hash);
if (res) {
return {
...res,
remote: false,
};
}
await this.setup();
if (this.remoteCache) {
// didn't find it locally but we have a remote cache
// attempt remote cache
const res = await this.remoteCache.retrieve(task.hash, this.cache.cacheDirectory);
if (res) {
this.cache.applyRemoteCacheResults(task.hash, res);
return {
...res,
remote: true,
};
}
else {
return null;
}
}
else {
return null;
}
}
async put(task, terminalOutput, outputs, code) {
return tryAndRetry(async () => {
this.cache.put(task.hash, terminalOutput, outputs, code);
await this.setup();
if (this.remoteCache) {
await this.remoteCache.store(task.hash, this.cache.cacheDirectory, terminalOutput, code);
}
});
}
copyFilesFromCache(_, cachedResult, outputs) {
return tryAndRetry(async () => this.cache.copyFilesFromCache(cachedResult, outputs));
}
removeOldCacheRecords() {
return this.cache.removeOldCacheRecords();
}
temporaryOutputPath(task) {
return this.cache.getTaskOutputsPath(task.hash);
}
async getRemoteCache() {
if (this.remoteCachePromise) {
return this.remoteCachePromise;
}
this.remoteCachePromise = this._getRemoteCache();
return this.remoteCachePromise;
}
async _getRemoteCache() {
const nxJson = (0, nx_json_1.readNxJson)();
if ((0, nx_cloud_utils_1.isNxCloudUsed)(nxJson)) {
const options = (0, get_cloud_options_1.getCloudOptions)();
const { nxCloudClient } = await (0, update_manager_1.verifyOrUpdateNxCloudClient)(options);
if (nxCloudClient.remoteCache) {
return nxCloudClient.remoteCache;
}
else {
// old nx cloud instance
return await default_tasks_runner_1.RemoteCacheV2.fromCacheV1(this.options.nxCloudRemoteCache);
}
}
else {
return null;
}
}
}
exports.DbCache = DbCache;
/**
* @deprecated Use the {@link DbCache} class instead. This will be removed in Nx 21.
*/
class Cache {

@@ -48,3 +138,3 @@ constructor(options) {

catch (e) {
if (process.env.NX_VERBOSE_LOGGING == 'true') {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.log(`Unable to get machineId. Error: ${e.message}`);

@@ -76,3 +166,6 @@ }

async put(task, terminalOutput, outputs, code) {
return this.tryAndRetry(async () => {
return tryAndRetry(async () => {
/**
* This is the directory with the cached artifacts
*/
const td = (0, path_1.join)(this.cachePath, task.hash);

@@ -111,3 +204,3 @@ const tdCommit = (0, path_1.join)(this.cachePath, `${task.hash}.commit`);

async copyFilesFromCache(hash, cachedResult, outputs) {
return this.tryAndRetry(async () => {
return tryAndRetry(async () => {
const expandedOutputs = await this.expandOutputsInCache(outputs, cachedResult);

@@ -220,25 +313,24 @@ await Promise.all(expandedOutputs.map(async (f) => {

}
tryAndRetry(fn) {
let attempts = 0;
const baseTimeout = 5;
// Generate a random number between 2 and 4 to raise to the power of attempts
const baseExponent = Math.random() * 2 + 2;
const _try = async () => {
try {
attempts++;
return await fn();
}
exports.Cache = Cache;
function tryAndRetry(fn) {
let attempts = 0;
// Generate a random number between 2 and 4 to raise to the power of attempts
const baseExponent = Math.random() * 2 + 2;
const _try = async () => {
try {
attempts++;
return await fn();
}
catch (e) {
// Max time is 5 * 4^3 = 20480ms
if (attempts === 6) {
// After enough attempts, throw the error
throw e;
}
catch (e) {
// Max time is 5 * 4^3 = 20480ms
if (attempts === 6) {
// After enough attempts, throw the error
throw e;
}
await new Promise((res) => setTimeout(res, baseExponent ** attempts));
return await _try();
}
};
return _try();
}
await new Promise((res) => setTimeout(res, baseExponent ** attempts));
return await _try();
}
};
return _try();
}
exports.Cache = Cache;
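For reference, the shared retry helper above waits baseExponent ** attempts milliseconds between tries and gives up on the sixth attempt, so even with baseExponent near its upper bound of 4 the total added delay stays modest:

// Worst-case waits before the sixth (final) attempt throws, with baseExponent close to 4:
const waits = [1, 2, 3, 4, 5].map((attempt) => 4 ** attempt); // [4, 16, 64, 256, 1024] ms
const totalMs = waits.reduce((sum, ms) => sum + ms, 0);       // 1364 ms in total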
import { TasksRunner } from './tasks-runner';
import { LifeCycle } from './life-cycle';
import { CachedResult } from '../native';
export interface RemoteCache {

@@ -7,2 +8,7 @@ retrieve: (hash: string, cacheDirectory: string) => Promise<boolean>;

}
export declare abstract class RemoteCacheV2 {
static fromCacheV1(cache: RemoteCache): Promise<RemoteCacheV2>;
abstract retrieve(hash: string, cacheDirectory: string): Promise<CachedResult | null>;
abstract store(hash: string, cacheDirectory: string, terminalOutput: string, code: number): Promise<boolean>;
}
export interface DefaultTasksRunnerOptions {

@@ -9,0 +15,0 @@ parallel?: number;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.defaultTasksRunner = void 0;
exports.defaultTasksRunner = exports.RemoteCacheV2 = void 0;
const task_orchestrator_1 = require("./task-orchestrator");
const cache_directory_1 = require("../utils/cache-directory");
const promises_1 = require("fs/promises");
const path_1 = require("path");
class RemoteCacheV2 {
static async fromCacheV1(cache) {
await (0, promises_1.mkdir)((0, path_1.join)(cache_directory_1.cacheDir, 'terminalOutputs'), { recursive: true });
return {
retrieve: async (hash, cacheDirectory) => {
const res = await cache.retrieve(hash, cacheDirectory);
if (res) {
const [terminalOutput, oldTerminalOutput, code] = await Promise.all([
(0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'terminalOutputs'), 'utf-8').catch(() => null),
(0, promises_1.readFile)((0, path_1.join)(cache_directory_1.cacheDir, 'terminalOutputs', hash), 'utf-8').catch(() => null),
(0, promises_1.readFile)((0, path_1.join)(cacheDirectory, hash, 'code'), 'utf-8').then((s) => +s),
]);
return {
outputsPath: cacheDirectory,
terminalOutput: terminalOutput ?? oldTerminalOutput,
code,
};
}
else {
return null;
}
},
store: async (hash, cacheDirectory, __, code) => {
await (0, promises_1.writeFile)((0, path_1.join)(cacheDirectory, hash, 'code'), code.toString());
return cache.store(hash, cacheDirectory);
},
};
}
}
exports.RemoteCacheV2 = RemoteCacheV2;
const defaultTasksRunner = async (tasks, options, context) => {

@@ -6,0 +39,0 @@ if (options['parallel'] === 'false' ||

import { NxArgs } from '../utils/command-line-utils';
import { Task, TaskGraph } from '../config/task-graph';
import { TaskResult } from './life-cycle';
export declare function initTasksRunner(nxArgs: NxArgs): Promise<{

@@ -10,3 +11,4 @@ invoke: (opts: {

taskGraph: TaskGraph;
taskResults: Record<string, TaskResult>;
}>;
}>;

@@ -55,2 +55,3 @@ "use strict";

taskGraph,
taskResults: lifeCycle.getTaskResults(),
};

@@ -57,0 +58,0 @@ },

import { TaskStatus } from '../tasks-runner';
import type { LifeCycle } from '../life-cycle';
import type { LifeCycle, TaskResult } from '../life-cycle';
import { Task } from '../../config/task-graph';

@@ -8,11 +8,9 @@ export declare class InvokeRunnerTerminalOutputLifeCycle implements LifeCycle {

cachedTasks: Task[];
private taskResults;
constructor(tasks: Task[]);
startCommand(): void;
endCommand(): void;
endTasks(taskResults: {
task: Task;
status: TaskStatus;
code: number;
}[]): void;
endTasks(taskResults: TaskResult[]): void;
printTaskTerminalOutput(task: Task, cacheStatus: TaskStatus, terminalOutput: string): void;
getTaskResults(): Record<string, TaskResult>;
}

@@ -11,2 +11,3 @@ "use strict";

this.cachedTasks = [];
this.taskResults = {};
}

@@ -49,2 +50,3 @@ startCommand() {

for (let t of taskResults) {
this.taskResults[t.task.id] = t;
if (t.status === 'failure') {

@@ -68,3 +70,6 @@ this.failedTasks.push(t.task);

}
getTaskResults() {
return this.taskResults;
}
}
exports.InvokeRunnerTerminalOutputLifeCycle = InvokeRunnerTerminalOutputLifeCycle;
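
A hedged sketch of reading per-task results from the new getTaskResults() accessor once a runner has driven this life cycle; the import paths and the tasks parameter are invented placeholders.

import { InvokeRunnerTerminalOutputLifeCycle } from 'nx/src/tasks-runner/life-cycles/invoke-runner-terminal-output-life-cycle'; // path assumed
import { Task } from 'nx/src/config/task-graph'; // path assumed

function reportResults(tasks: Task[]) {
  const lifeCycle = new InvokeRunnerTerminalOutputLifeCycle(tasks);
  // ...a runner executes the tasks and calls lifeCycle.endTasks(results)...
  for (const [taskId, result] of Object.entries(lifeCycle.getTaskResults())) {
    console.log(`${taskId}: ${result.status} (exit code ${result.code})`);
  }
}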

@@ -6,2 +6,3 @@ import { Task } from '../../config/task-graph';

private taskRuns;
private taskHistory;
startTasks(tasks: Task[]): void;

@@ -8,0 +9,0 @@ endTasks(taskResults: TaskResult[]): Promise<void>;

@@ -10,3 +10,4 @@ "use strict";

this.startTimings = {};
this.taskRuns = [];
this.taskRuns = new Map();
this.taskHistory = new task_history_1.TaskHistory();
}

@@ -19,25 +20,19 @@ startTasks(tasks) {

async endTasks(taskResults) {
const taskRuns = taskResults.map((taskResult) => ({
project: taskResult.task.target.project,
target: taskResult.task.target.target,
configuration: taskResult.task.target.configuration,
taskResults
.map((taskResult) => ({
hash: taskResult.task.hash,
code: taskResult.code.toString(),
target: taskResult.task.target,
code: taskResult.code,
status: taskResult.status,
start: (taskResult.task.startTime ?? this.startTimings[taskResult.task.id]).toString(),
end: (taskResult.task.endTime ?? new Date().getTime()).toString(),
}));
this.taskRuns.push(...taskRuns);
start: taskResult.task.startTime ?? this.startTimings[taskResult.task.id],
end: taskResult.task.endTime ?? Date.now(),
}))
.forEach((taskRun) => {
this.taskRuns.set(taskRun.hash, taskRun);
});
}
async endCommand() {
await (0, task_history_1.writeTaskRunsToHistory)(this.taskRuns);
const history = await (0, task_history_1.getHistoryForHashes)(this.taskRuns.map((t) => t.hash));
const flakyTasks = [];
// check if any hash has different exit codes => flaky
for (let hash in history) {
if (history[hash].length > 1 &&
history[hash].some((run) => run.code !== history[hash][0].code)) {
flakyTasks.push((0, serialize_target_1.serializeTarget)(history[hash][0].project, history[hash][0].target, history[hash][0].configuration));
}
}
const entries = Array.from(this.taskRuns);
await this.taskHistory.recordTaskRuns(entries.map(([_, v]) => v));
const flakyTasks = await this.taskHistory.getFlakyTasks(entries.map(([hash]) => hash));
if (flakyTasks.length > 0) {

@@ -48,3 +43,6 @@ output_1.output.warn({

,
...flakyTasks.map((t) => ` ${t}`),
...flakyTasks.map((hash) => {
const taskRun = this.taskRuns.get(hash);
return ` ${(0, serialize_target_1.serializeTarget)(taskRun.target.project, taskRun.target.target, taskRun.target.configuration)}`;
}),
'',

@@ -51,0 +49,0 @@ `Flaky tasks can disrupt your CI pipeline. Automatically retry them with Nx Cloud. Learn more at https://nx.dev/ci/features/flaky-tasks`,
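
The inline check removed above spells out the flakiness criterion that TaskHistory.getFlakyTasks now encapsulates: a task hash counts as flaky when its recorded runs disagree on exit code. A minimal restatement of that criterion, with an invented record shape:

// Hypothetical record shape; only `code` matters for the check.
type RecordedRun = { hash: string; code: number };

// Flaky when there is more than one run and at least one exit code differs.
function isFlaky(runs: RecordedRun[]): boolean {
  return runs.length > 1 && runs.some((run) => run.code !== runs[0].code);
}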

@@ -30,2 +30,3 @@ "use strict";

const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
const task_history_life_cycle_old_1 = require("./life-cycles/task-history-life-cycle-old");
const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");

@@ -36,2 +37,3 @@ const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");

const chalk = require("chalk");
const native_1 = require("../native");
async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {

@@ -338,3 +340,3 @@ const { runnerOptions } = getRunner(nxArgs, nxJson);

if (!(0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())) {
lifeCycles.push(new task_history_life_cycle_1.TaskHistoryLifeCycle());
lifeCycles.push(!native_1.IS_WASM ? new task_history_life_cycle_1.TaskHistoryLifeCycle() : new task_history_life_cycle_old_1.LegacyTaskHistoryLifeCycle());
}

@@ -445,3 +447,3 @@ return lifeCycles;

process.env.NX_CLOUD_ACCESS_TOKEN ||
// Nx Cloud Id specified in nxJson
// Nx Cloud ID specified in nxJson
nxJson.nxCloudId;

@@ -448,0 +450,0 @@ return isCloudRunner ? 'nx-cloud' : require.resolve('./default-tasks-runner');

@@ -18,2 +18,4 @@ "use strict";

const params_1 = require("../utils/params");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const nx_json_1 = require("../config/nx-json");
class TaskOrchestrator {

@@ -29,3 +31,10 @@ // endregion internal state

this.daemon = daemon;
this.cache = new cache_1.Cache(this.options);
this.cache = process.env.NX_DB_CACHE === 'true'
? new cache_1.DbCache({
// Remove this in Nx 21
nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
? this.options.remoteCache
: null,
})
: new cache_1.Cache(this.options);
this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);

@@ -32,0 +41,0 @@ this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);

@@ -7,1 +7,2 @@ /**

export declare const workspaceDataDirectory: string;
export declare function workspaceDataDirectoryForWorkspace(workspaceRoot: string): string;

@@ -5,2 +5,3 @@ "use strict";

exports.cacheDirectoryForWorkspace = cacheDirectoryForWorkspace;
exports.workspaceDataDirectoryForWorkspace = workspaceDataDirectoryForWorkspace;
const fs_1 = require("fs");

@@ -66,4 +67,7 @@ const path_1 = require("path");

}
exports.workspaceDataDirectory = absolutePath(workspace_root_1.workspaceRoot, process.env.NX_WORKSPACE_DATA_DIRECTORY ??
process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
defaultWorkspaceDataDirectory(workspace_root_1.workspaceRoot));
exports.workspaceDataDirectory = workspaceDataDirectoryForWorkspace(workspace_root_1.workspaceRoot);
function workspaceDataDirectoryForWorkspace(workspaceRoot) {
return absolutePath(workspaceRoot, process.env.NX_WORKSPACE_DATA_DIRECTORY ??
process.env.NX_PROJECT_GRAPH_CACHE_DIRECTORY ??
defaultWorkspaceDataDirectory(workspaceRoot));
}
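
A small usage sketch for the per-workspace helper exported above; the deep import path is an assumption.

import { workspaceDataDirectoryForWorkspace } from 'nx/src/utils/cache-directory'; // path assumed

// Resolves NX_WORKSPACE_DATA_DIRECTORY (or the legacy NX_PROJECT_GRAPH_CACHE_DIRECTORY),
// falling back to the default data directory, relative to the given workspace root.
const dataDir = workspaceDataDirectoryForWorkspace('/path/to/workspace');
console.log(dataDir);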

@@ -39,3 +39,3 @@ "use strict";

verbose: (...s) => {
if (process.env.NX_VERBOSE_LOGGING) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.log(...s);

@@ -42,0 +42,0 @@ }

@@ -45,2 +45,5 @@ import { ProjectConfiguration, ProjectMetadata, TargetConfiguration } from '../config/workspace-json-project-json';

resolutions?: Record<string, string>;
pnpm?: {
overrides?: PackageOverride;
};
overrides?: PackageOverride;

@@ -47,0 +50,0 @@ bin?: Record<string, string> | string;

@@ -1,8 +0,6 @@

declare const taskRunKeys: readonly ["project", "target", "configuration", "hash", "code", "status", "start", "end"];
export type TaskRun = Record<(typeof taskRunKeys)[number], string>;
export declare function getHistoryForHashes(hashes: string[]): Promise<{
[hash: string]: TaskRun[];
}>;
export declare function writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
export declare const taskHistoryFile: string;
export {};
import { NxTaskHistory, TaskRun } from '../native';
export declare class TaskHistory {
taskHistory: NxTaskHistory;
getFlakyTasks(hashes: string[]): Promise<string[]>;
recordTaskRuns(taskRuns: TaskRun[]): Promise<void>;
}
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.taskHistoryFile = void 0;
exports.getHistoryForHashes = getHistoryForHashes;
exports.writeTaskRunsToHistory = writeTaskRunsToHistory;
const fs_1 = require("fs");
const path_1 = require("path");
exports.TaskHistory = void 0;
const client_1 = require("../daemon/client/client");
const is_on_daemon_1 = require("../daemon/is-on-daemon");
const cache_directory_1 = require("./cache-directory");
const taskRunKeys = [
'project',
'target',
'configuration',
'hash',
'code',
'status',
'start',
'end',
];
let taskHistory = undefined;
let taskHashToIndicesMap = new Map();
async function getHistoryForHashes(hashes) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
if (taskHistory === undefined) {
loadTaskHistoryFromDisk();
const native_1 = require("../native");
const db_connection_1 = require("./db-connection");
class TaskHistory {
constructor() {
this.taskHistory = new native_1.NxTaskHistory((0, db_connection_1.getDbConnection)());
}
async getFlakyTasks(hashes) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
return this.taskHistory.getFlakyTasks(hashes);
}
const result = {};
for (let hash of hashes) {
const indices = taskHashToIndicesMap.get(hash);
if (!indices) {
result[hash] = [];
}
else {
result[hash] = indices.map((index) => taskHistory[index]);
}
}
return result;
return await client_1.daemonClient.getFlakyTasks(hashes);
}
return await client_1.daemonClient.getTaskHistoryForHashes(hashes);
}
async function writeTaskRunsToHistory(taskRuns) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
if (taskHistory === undefined) {
loadTaskHistoryFromDisk();
async recordTaskRuns(taskRuns) {
if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {
return this.taskHistory.recordTaskRuns(taskRuns);
}
const serializedLines = [];
for (let taskRun of taskRuns) {
const serializedLine = taskRunKeys.map((key) => taskRun[key]).join(',');
serializedLines.push(serializedLine);
recordTaskRunInMemory(taskRun);
}
if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
(0, fs_1.writeFileSync)(exports.taskHistoryFile, `${taskRunKeys.join(',')}\n`);
}
(0, fs_1.appendFileSync)(exports.taskHistoryFile, serializedLines.join('\n') + '\n');
return client_1.daemonClient.recordTaskRuns(taskRuns);
}
else {
await client_1.daemonClient.writeTaskRunsToHistory(taskRuns);
}
}
exports.taskHistoryFile = (0, path_1.join)(cache_directory_1.workspaceDataDirectory, 'task-history.csv');
function loadTaskHistoryFromDisk() {
taskHashToIndicesMap.clear();
taskHistory = [];
if (!(0, fs_1.existsSync)(exports.taskHistoryFile)) {
return;
}
const fileContent = (0, fs_1.readFileSync)(exports.taskHistoryFile, 'utf8');
if (!fileContent) {
return;
}
const lines = fileContent.split('\n');
// if there are no lines or just the header, return
if (lines.length <= 1) {
return;
}
const contentLines = lines.slice(1).filter((l) => l.trim() !== '');
// read the values from csv format where each header is a key and the value is the value
for (let line of contentLines) {
const values = line.trim().split(',');
const run = {};
taskRunKeys.forEach((header, index) => {
run[header] = values[index];
});
recordTaskRunInMemory(run);
}
}
function recordTaskRunInMemory(taskRun) {
const index = taskHistory.push(taskRun) - 1;
if (taskHashToIndicesMap.has(taskRun.hash)) {
taskHashToIndicesMap.get(taskRun.hash).push(index);
}
else {
taskHashToIndicesMap.set(taskRun.hash, [index]);
}
}
exports.TaskHistory = TaskHistory;
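
A hedged usage sketch for the new TaskHistory wrapper; the TaskRun fields mirror what the task-history life cycle above records (hash, target, code, status, start, end), and the concrete values and import path are invented.

import { TaskHistory } from 'nx/src/utils/task-history'; // path assumed

async function checkFlakiness() {
  const history = new TaskHistory();
  await history.recordTaskRuns([
    {
      hash: 'abc123',
      target: { project: 'my-app', target: 'build' },
      code: 1,
      status: 'failure',
      start: Date.now() - 5_000,
      end: Date.now(),
    },
  ]);
  // Returns the hashes whose recorded runs disagree on exit code.
  const flaky = await history.getFlakyTasks(['abc123']);
  console.log(flaky);
}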

@@ -22,3 +22,3 @@ "use strict";

perf_hooks_1.performance.mark('workspace-context');
workspaceContext = new WorkspaceContext(workspaceRoot, (0, cache_directory_1.cacheDirectoryForWorkspace)(workspaceRoot));
workspaceContext = new WorkspaceContext(workspaceRoot, (0, cache_directory_1.workspaceDataDirectoryForWorkspace)(workspaceRoot));
perf_hooks_1.performance.mark('workspace-context:end');

@@ -25,0 +25,0 @@ perf_hooks_1.performance.measure('workspace context init', 'workspace-context', 'workspace-context:end');
