New Case Study:See how Anthropic automated 95% of dependency reviews with Socket.Learn More
Socket
Sign inDemoInstall
Socket

nx

Package Overview
Dependencies
Maintainers
8
Versions
1669
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

nx - npm Package Compare versions

Comparing version 0.0.0-pr-3-d8b1402 to 0.0.0-pr-30082-3da42ee

src/command-line/init/implementation/add-nx-to-turborepo.d.ts

23

package.json
{
"name": "nx",
"version": "0.0.0-pr-3-d8b1402",
"version": "0.0.0-pr-30082-3da42ee",
"private": false,

@@ -66,3 +66,2 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"tmp": "~0.2.1",
"tree-kill": "^1.2.2",
"tsconfig-paths": "^4.1.2",

@@ -87,12 +86,12 @@ "tslib": "^2.3.0",

"optionalDependencies": {
"@nx/nx-darwin-arm64": "0.0.0-pr-3-d8b1402",
"@nx/nx-darwin-x64": "0.0.0-pr-3-d8b1402",
"@nx/nx-freebsd-x64": "0.0.0-pr-3-d8b1402",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-3-d8b1402",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-3-d8b1402",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-3-d8b1402",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-3-d8b1402",
"@nx/nx-linux-x64-musl": "0.0.0-pr-3-d8b1402",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-3-d8b1402",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-3-d8b1402"
"@nx/nx-darwin-arm64": "0.0.0-pr-30082-3da42ee",
"@nx/nx-darwin-x64": "0.0.0-pr-30082-3da42ee",
"@nx/nx-freebsd-x64": "0.0.0-pr-30082-3da42ee",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-30082-3da42ee",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-30082-3da42ee",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-30082-3da42ee",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-30082-3da42ee",
"@nx/nx-linux-x64-musl": "0.0.0-pr-30082-3da42ee",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-30082-3da42ee",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-30082-3da42ee"
},

@@ -99,0 +98,0 @@ "nx-migrations": {

@@ -24,2 +24,6 @@ {

},
"extends": {
"type": "string",
"description": "Specifies the base config to extend."
},
"defaultBase": {

@@ -173,4 +177,19 @@ "type": "string",

"releaseTagPattern": {
"type": "string"
"type": "string",
"description": "Optionally override the git/release tag pattern to use for this group."
},
"releaseTagPatternCheckAllBranchesWhen": {
"oneOf": [
{
"type": "boolean"
},
{
"type": "array",
"items": {
"type": "string"
}
}
],
"description": "By default, we will try and resolve the latest match for the releaseTagPattern from the current branch, falling back to all branches if no match is found on the current branch. Setting this to true will cause us to ALWAYS check all branches for the latest match. Setting it to false will cause us to ONLY check the current branch for the latest match. Setting it to an array of strings will cause us to check all branches WHEN the current branch is one of the strings in the array. Glob patterns are supported."
},
"versionPlans": {

@@ -248,3 +267,18 @@ "oneOf": [

"releaseTagPattern": {
"type": "string"
"type": "string",
"description": "Optionally override the git/release tag pattern to use. This field is the source of truth for changelog generation and release tagging, as well as for conventional commits parsing. It supports interpolating the version as {version} and (if releasing independently or forcing project level version control system releases) the project name as {projectName} within the string. The default releaseTagPattern for fixed/unified releases is: \"v{version}\". The default releaseTagPattern for independent releases at the project level is: \"{projectName}@{version}\""
},
"releaseTagPatternCheckAllBranchesWhen": {
"oneOf": [
{
"type": "boolean"
},
{
"type": "array",
"items": {
"type": "string"
}
}
],
"description": "By default, we will try and resolve the latest match for the releaseTagPattern from the current branch, falling back to all branches if no match is found on the current branch. Setting this to true will cause us to ALWAYS check all branches for the latest match. Setting it to false will cause us to ONLY check the current branch for the latest match. Setting it to an array of strings will cause us to check all branches WHEN the current branch is one of the strings in the array. Glob patterns are supported."
}

@@ -251,0 +285,0 @@ }

export declare const allowedProjectExtensions: readonly ["tags", "implicitDependencies", "configFilePath", "$schema", "generators", "namedInputs", "name", "files", "root", "sourceRoot", "projectType", "release", "includedScripts", "metadata"];
export declare const allowedWorkspaceExtensions: readonly ["implicitDependencies", "affected", "defaultBase", "tasksRunnerOptions", "workspaceLayout", "plugins", "targetDefaults", "files", "generators", "namedInputs", "extends", "cli", "pluginsConfig", "defaultProject", "installation", "release", "nxCloudAccessToken", "nxCloudId", "nxCloudUrl", "nxCloudEncryptionKey", "parallel", "cacheDirectory", "useDaemonProcess", "useInferencePlugins", "neverConnectToCloud", "sync", "useLegacyCache"];
export declare const allowedWorkspaceExtensions: readonly ["$schema", "implicitDependencies", "affected", "defaultBase", "tasksRunnerOptions", "workspaceLayout", "plugins", "targetDefaults", "files", "generators", "namedInputs", "extends", "cli", "pluginsConfig", "defaultProject", "installation", "release", "nxCloudAccessToken", "nxCloudId", "nxCloudUrl", "nxCloudEncryptionKey", "parallel", "cacheDirectory", "useDaemonProcess", "useInferencePlugins", "neverConnectToCloud", "sync", "useLegacyCache"];

@@ -40,2 +40,3 @@ "use strict";

exports.allowedWorkspaceExtensions = [
'$schema',
'implicitDependencies',

@@ -42,0 +43,0 @@ 'affected',

@@ -8,3 +8,2 @@ "use strict";

const ora = require("ora");
const yargsParser = require("yargs-parser");
const nx_json_1 = require("../../config/nx-json");

@@ -83,18 +82,9 @@ const child_process_2 = require("../../utils/child-process");

async function initializePlugin(pkgName, options, nxJson) {
const parsedCommandArgs = yargsParser(options.__overrides_unparsed__, {
configuration: {
'parse-numbers': false,
'parse-positional-numbers': false,
'dot-notation': false,
'camel-case-expansion': false,
},
});
if (exports.coreNxPluginVersions.has(pkgName)) {
parsedCommandArgs.keepExistingVersions = true;
if (options.updatePackageScripts ||
let updatePackageScripts = false;
if (exports.coreNxPluginVersions.has(pkgName) &&
(options.updatePackageScripts ||
(options.updatePackageScripts === undefined &&
nxJson.useInferencePlugins !== false &&
process.env.NX_ADD_PLUGINS !== 'false')) {
parsedCommandArgs.updatePackageScripts = true;
}
process.env.NX_ADD_PLUGINS !== 'false'))) {
updatePackageScripts = true;
}

@@ -104,3 +94,3 @@ const spinner = ora(`Initializing ${pkgName}...`);

try {
await (0, configure_plugins_1.installPlugin)(pkgName, workspace_root_1.workspaceRoot, options.verbose, parsedCommandArgs);
await (0, configure_plugins_1.installPlugin)(pkgName, workspace_root_1.workspaceRoot, updatePackageScripts, options.verbose);
}

@@ -107,0 +97,0 @@ catch (e) {

@@ -632,3 +632,2 @@ "use strict";

dependencies: {},
continuousDependencies: {},
roots: [],

@@ -649,3 +648,2 @@ };

dependencies: {},
continuousDependencies: {},
roots: [],

@@ -652,0 +650,0 @@ };

@@ -10,10 +10,17 @@ "use strict";

handler: async (args) => {
const useV2 = await isInitV2();
if (useV2) {
await require('./init-v2').initHandler(args);
try {
const useV2 = await isInitV2();
if (useV2) {
await require('./init-v2').initHandler(args);
}
else {
await require('./init-v1').initHandler(args);
}
process.exit(0);
}
else {
await require('./init-v1').initHandler(args);
catch {
// Ensure the cursor is always restored just in case the user has bailed during interactive prompts
process.stdout.write('\x1b[?25h');
process.exit(1);
}
process.exit(0);
},

@@ -43,2 +50,7 @@ };

default: false,
})
.option('force', {
describe: 'Force the migration to continue and ignore custom webpack setup or uncommitted changes. Only for CRA projects.',
type: 'boolean',
default: false,
});

@@ -45,0 +57,0 @@ }

@@ -7,9 +7,8 @@ import { PackageManagerCommands } from '../../utils/package-manager';

* @param repoRoot repo root
* @param verbose verbose
* @param options options passed to init generator
* @param pmc package manager commands
* @param updatePackageScripts whether to update package scripts
* @param verbose whether to run in verbose mode
* @returns void
*/
export declare function installPlugin(plugin: string, repoRoot: string, verbose: boolean, options: {
[k: string]: any;
}): Promise<void>;
export declare function installPlugin(plugin: string, repoRoot?: string, updatePackageScripts?: boolean, verbose?: boolean, pmc?: PackageManagerCommands): Promise<void>;
/**

@@ -20,3 +19,3 @@ * Install plugins

*/
export declare function installPlugins(plugins: string[], updatePackageScripts: boolean, repoRoot?: string, verbose?: boolean): Promise<{
export declare function installPlugins(plugins: string[], updatePackageScripts: boolean, pmc: PackageManagerCommands, repoRoot?: string, verbose?: boolean): Promise<{
succeededPlugins: string[];

@@ -23,0 +22,0 @@ failedPlugins: {

@@ -10,6 +10,5 @@ "use strict";

const chalk_1 = require("chalk");
const child_process_1 = require("child_process");
const package_manager_1 = require("../../utils/package-manager");
const output_1 = require("../../utils/output");
const tree_1 = require("../../generators/tree");
const generator_utils_1 = require("../generate/generator-utils");
const workspace_root_1 = require("../../utils/workspace-root");

@@ -31,8 +30,8 @@ const utils_1 = require("./implementation/utils");

* @param repoRoot repo root
* @param verbose verbose
* @param options options passed to init generator
* @param pmc package manager commands
* @param updatePackageScripts whether to update package scripts
* @param verbose whether to run in verbose mode
* @returns void
*/
async function installPlugin(plugin, repoRoot = workspace_root_1.workspaceRoot, verbose = false, options) {
const host = new tree_1.FsTree(repoRoot, verbose, `install ${plugin}`);
async function installPlugin(plugin, repoRoot = workspace_root_1.workspaceRoot, updatePackageScripts = false, verbose = false, pmc = (0, package_manager_1.getPackageManagerCommand)()) {
const capabilities = await (0, plugins_1.getPluginCapabilities)(repoRoot, plugin, {});

@@ -50,9 +49,7 @@ const generators = capabilities?.generators;

}
const { implementationFactory } = (0, generator_utils_1.getGeneratorInformation)(plugin, initGenerator, repoRoot, {});
const implementation = implementationFactory();
const task = await implementation(host, options);
(0, tree_1.flushChanges)(repoRoot, host.listChanges());
if (task) {
await task();
}
(0, child_process_1.execSync)(`${pmc.exec} nx g ${plugin}:init --keepExistingVersions ${updatePackageScripts ? '--updatePackageScripts' : ''} ${verbose ? '--verbose' : ''}`, {
stdio: [0, 1, 2],
cwd: repoRoot,
windowsHide: false,
});
}

@@ -64,3 +61,3 @@ /**

*/
async function installPlugins(plugins, updatePackageScripts, repoRoot = workspace_root_1.workspaceRoot, verbose = false) {
async function installPlugins(plugins, updatePackageScripts, pmc, repoRoot = workspace_root_1.workspaceRoot, verbose = false) {
if (plugins.length === 0) {

@@ -78,9 +75,3 @@ return {

spinner.start('Installing plugin ' + plugin);
await installPlugin(plugin, repoRoot, verbose, {
keepExistingVersions: true,
updatePackageScripts,
addPlugin: true,
skipFormat: false,
skipPackageJson: false,
});
await installPlugin(plugin, repoRoot, updatePackageScripts, verbose, pmc);
succeededPlugins.push(plugin);

@@ -111,3 +102,3 @@ spinner.succeed('Installed plugin ' + plugin);

output_1.output.log({ title: '🔨 Configuring plugins' });
let { succeededPlugins, failedPlugins } = await installPlugins(plugins, updatePackageScripts, repoRoot, verbose);
let { succeededPlugins, failedPlugins } = await installPlugins(plugins, updatePackageScripts, pmc, repoRoot, verbose);
if (succeededPlugins.length > 0) {

@@ -114,0 +105,0 @@ output_1.output.success({

@@ -12,8 +12,10 @@ "use strict";

...packageJson.scripts,
start: 'nx exec -- vite',
serve: 'nx exec -- vite',
build: `nx exec -- vite build`,
test: 'nx exec -- vitest',
// These should be replaced by the vite init generator later.
start: 'vite',
test: 'vitest',
dev: 'vite',
build: 'vite build',
eject: undefined,
};
(0, fileutils_1.writeJsonFile)(packageJsonPath, packageJson, { spaces: 2 });
}
import { InitArgs } from '../../init-v1';
type Options = InitArgs;
export declare function addNxToCraRepo(options: Options): Promise<void>;
export declare function addNxToCraRepo(_options: Options): Promise<void>;
export {};

@@ -5,4 +5,4 @@ "use strict";

const child_process_1 = require("child_process");
const node_fs_1 = require("node:fs");
const path_1 = require("path");
const fs_1 = require("fs");
const fileutils_1 = require("../../../../utils/fileutils");

@@ -12,33 +12,37 @@ const output_1 = require("../../../../utils/output");

const check_for_custom_webpack_setup_1 = require("./check-for-custom-webpack-setup");
const check_for_uncommitted_changes_1 = require("./check-for-uncommitted-changes");
const clean_up_files_1 = require("./clean-up-files");
const read_name_from_package_json_1 = require("./read-name-from-package-json");
const rename_js_to_jsx_1 = require("./rename-js-to-jsx");
const tsconfig_setup_1 = require("./tsconfig-setup");
const write_craco_config_1 = require("./write-craco-config");
const write_vite_config_1 = require("./write-vite-config");
const write_vite_index_html_1 = require("./write-vite-index-html");
const connect_to_nx_cloud_1 = require("../../../connect/connect-to-nx-cloud");
async function addNxToCraRepo(options) {
if (!options.force) {
(0, check_for_uncommitted_changes_1.checkForUncommittedChanges)();
async function addNxToCraRepo(_options) {
if (!_options.force) {
(0, check_for_custom_webpack_setup_1.checkForCustomWebpackSetup)();
}
output_1.output.log({ title: '🐳 Nx initialization' });
const normalizedOptions = await normalizeOptions(options);
await reorgnizeWorkspaceStructure(normalizedOptions);
const options = await normalizeOptions(_options);
await addBundler(options);
(0, fs_1.appendFileSync)(`.gitignore`, '\nnode_modules');
(0, fs_1.appendFileSync)(`.gitignore`, '\ndist');
installDependencies(options);
// Vite expects index.html to be in the root as the main entry point.
const indexPath = options.isStandalone
? 'index.html'
: (0, path_1.join)('apps', options.reactAppName, 'index.html');
const oldIndexPath = options.isStandalone
? (0, path_1.join)('public', 'index.html')
: (0, path_1.join)('apps', options.reactAppName, 'public', 'index.html');
output_1.output.note({
title: `A new ${indexPath} has been created. Compare it to the previous ${oldIndexPath} file and make any changes needed, then delete the previous file.`,
});
}
function installDependencies(options) {
const dependencies = [
'@rollup/plugin-replace',
'@testing-library/jest-dom',
'@vitejs/plugin-react',
'eslint-config-react-app',
'web-vitals',
'jest-watch-typeahead',
'vite',
'vitest',
];
if (options.isVite) {
dependencies.push('vite', 'vitest', '@vitejs/plugin-react');
}
else {
dependencies.push('@craco/craco', 'cross-env', 'react-scripts', 'tsconfig-paths-webpack-plugin');
}
(0, child_process_1.execSync)(`${options.pmc.addDev} ${dependencies.join(' ')}`, {

@@ -54,20 +58,5 @@ stdio: [0, 1, 2],

const reactAppName = (0, read_name_from_package_json_1.readNameFromPackageJson)();
const packageJson = (0, fileutils_1.readJsonFile)((0, path_1.join)(process.cwd(), 'package.json'));
const deps = {
...packageJson.dependencies,
...packageJson.devDependencies,
};
const isCRA5 = /^[^~]?5/.test(deps['react-scripts']);
const npmVersion = (0, child_process_1.execSync)('npm -v', {
windowsHide: false,
}).toString();
// Should remove this check 04/2023 once Node 14 & npm 6 reach EOL
const npxYesFlagNeeded = !npmVersion.startsWith('6'); // npm 7 added -y flag to npx
const isVite = options.vite;
const isStandalone = !options.integrated;
const nxCloud = options.nxCloud ??
(options.interactive ? await (0, connect_to_nx_cloud_1.connectExistingRepoToNxCloudPrompt)() : false);
return {
...options,
nxCloud,
packageManager,

@@ -77,158 +66,11 @@ pmc,

reactAppName,
isCRA5,
npxYesFlagNeeded,
isVite,
isStandalone,
};
}
/**
* - Create a temp workspace
* - Move all files to temp workspace
* - Add bundler to temp workspace
* - Move files back to root
* - Clean up unused files
*/
async function reorgnizeWorkspaceStructure(options) {
createTempWorkspace(options);
moveFilesToTempWorkspace(options);
await addBundler(options);
output_1.output.log({ title: '🧶 Updating .gitignore file' });
(0, child_process_1.execSync)(`echo "node_modules" >> .gitignore`, {
stdio: [0, 1, 2],
windowsHide: false,
});
(0, child_process_1.execSync)(`echo "dist" >> .gitignore`, {
stdio: [0, 1, 2],
windowsHide: false,
});
process.chdir('..');
copyFromTempWorkspaceToRoot();
cleanUpUnusedFilesAndAddConfigFiles(options);
output_1.output.log({ title: '🙂 Please be patient, one final step remaining!' });
output_1.output.log({ title: '📦 Installing dependencies' });
installDependencies(options);
if (options.isVite) {
const indexPath = options.isStandalone
? 'index.html'
: (0, path_1.join)('apps', options.reactAppName, 'index.html');
const oldIndexPath = options.isStandalone
? (0, path_1.join)('public', 'index.html')
: (0, path_1.join)('apps', options.reactAppName, 'public', 'index.html');
output_1.output.note({
title: `A new ${indexPath} has been created. Compare it to the previous ${oldIndexPath} file and make any changes needed, then delete the previous file.`,
});
}
}
function createTempWorkspace(options) {
(0, node_fs_1.rmSync)('temp-workspace', { recursive: true, force: true });
(0, child_process_1.execSync)(`npx ${options.npxYesFlagNeeded ? '-y' : ''} create-nx-workspace@latest temp-workspace --appName=${options.reactAppName} --preset=react-monorepo --style=css --bundler=${options.isVite ? 'vite' : 'webpack'} --packageManager=${options.packageManager} ${options.nxCloud ? '--nxCloud=yes' : '--nxCloud=skip'} ${options.addE2e ? '--e2eTestRunner=playwright' : '--e2eTestRunner=none'}`, { stdio: [0, 1, 2], windowsHide: false });
output_1.output.log({ title: '👋 Welcome to Nx!' });
output_1.output.log({ title: '🧹 Clearing unused files' });
(0, node_fs_1.cpSync)((0, path_1.join)('temp-workspace', 'apps', options.reactAppName, 'project.json'), 'project.json', { recursive: true });
(0, node_fs_1.rmSync)((0, path_1.join)('temp-workspace', 'apps', options.reactAppName), {
recursive: true,
force: true,
});
(0, node_fs_1.rmSync)('node_modules', { recursive: true, force: true });
}
function copyPackageJsonDepsFromTempWorkspace() {
const repoRoot = process.cwd();
let rootPackageJson = (0, fileutils_1.readJsonFile)((0, path_1.join)(repoRoot, 'package.json'));
const tempWorkspacePackageJson = (0, fileutils_1.readJsonFile)((0, path_1.join)(repoRoot, 'temp-workspace', 'package.json'));
rootPackageJson = overridePackageDeps('dependencies', rootPackageJson, tempWorkspacePackageJson);
rootPackageJson = overridePackageDeps('devDependencies', rootPackageJson, tempWorkspacePackageJson);
rootPackageJson.scripts = {}; // remove existing scripts
(0, fileutils_1.writeJsonFile)((0, path_1.join)(repoRoot, 'package.json'), rootPackageJson);
(0, fileutils_1.writeJsonFile)((0, path_1.join)(repoRoot, 'temp-workspace', 'package.json'), rootPackageJson);
}
function overridePackageDeps(depConfigName, base, override) {
if (!base[depConfigName]) {
base[depConfigName] = override[depConfigName];
return base;
}
const deps = override[depConfigName];
Object.keys(deps).forEach((dep) => {
if (base.dependencies?.[dep]) {
delete base.dependencies[dep];
}
if (base.devDependencies?.[dep]) {
delete base.devDependencies[dep];
}
base[depConfigName][dep] = deps[dep];
});
return base;
}
function moveSync(src, dest) {
const destParentDir = (0, path_1.dirname)(dest);
(0, node_fs_1.mkdirSync)(destParentDir, { recursive: true });
(0, node_fs_1.rmSync)(dest, { recursive: true, force: true });
return (0, node_fs_1.renameSync)(src, dest);
}
function moveFilesToTempWorkspace(options) {
output_1.output.log({ title: '🚚 Moving your React app in your new Nx workspace' });
copyPackageJsonDepsFromTempWorkspace();
const requiredCraFiles = [
'project.json',
'package.json',
'src',
'public',
options.appIsJs ? null : 'tsconfig.json',
options.packageManager === 'yarn' ? 'yarn.lock' : null,
options.packageManager === 'pnpm' ? 'pnpm-lock.yaml' : null,
options.packageManager === 'npm' ? 'package-lock.json' : null,
options.packageManager === 'bun' ? 'bun.lockb' : null,
];
const optionalCraFiles = ['README.md'];
const filesToMove = [...requiredCraFiles, ...optionalCraFiles].filter(Boolean);
filesToMove.forEach((f) => {
try {
moveSync(f, options.isStandalone
? (0, path_1.join)('temp-workspace', f)
: (0, path_1.join)('temp-workspace', 'apps', options.reactAppName, f));
}
catch (error) {
if (requiredCraFiles.includes(f)) {
throw error;
}
}
});
process.chdir('temp-workspace');
}
async function addBundler(options) {
if (options.isVite) {
output_1.output.log({ title: '🧑‍🔧 Setting up Vite' });
const { addViteCommandsToPackageScripts } = await Promise.resolve().then(() => require('./add-vite-commands-to-package-scripts'));
addViteCommandsToPackageScripts(options.reactAppName, options.isStandalone);
(0, write_vite_config_1.writeViteConfig)(options.reactAppName, options.isStandalone, options.appIsJs);
(0, write_vite_index_html_1.writeViteIndexHtml)(options.reactAppName, options.isStandalone, options.appIsJs);
await (0, rename_js_to_jsx_1.renameJsToJsx)(options.reactAppName, options.isStandalone);
}
else {
output_1.output.log({ title: '🧑‍🔧 Setting up craco + Webpack' });
const { addCracoCommandsToPackageScripts } = await Promise.resolve().then(() => require('./add-craco-commands-to-package-scripts'));
addCracoCommandsToPackageScripts(options.reactAppName, options.isStandalone);
(0, write_craco_config_1.writeCracoConfig)(options.reactAppName, options.isCRA5, options.isStandalone);
output_1.output.log({
title: '🛬 Skip CRA preflight check since Nx manages the monorepo',
});
(0, child_process_1.execSync)(`echo "SKIP_PREFLIGHT_CHECK=true" > .env`, {
stdio: [0, 1, 2],
windowsHide: false,
});
}
const { addViteCommandsToPackageScripts } = await Promise.resolve().then(() => require('./add-vite-commands-to-package-scripts'));
addViteCommandsToPackageScripts(options.reactAppName, options.isStandalone);
(0, write_vite_config_1.writeViteConfig)(options.reactAppName, options.isStandalone, options.appIsJs);
(0, write_vite_index_html_1.writeViteIndexHtml)(options.reactAppName, options.isStandalone, options.appIsJs);
await (0, rename_js_to_jsx_1.renameJsToJsx)(options.reactAppName, options.isStandalone);
}
function copyFromTempWorkspaceToRoot() {
output_1.output.log({ title: '🚚 Folder restructuring.' });
(0, node_fs_1.readdirSync)('temp-workspace').forEach((f) => {
moveSync((0, path_1.join)('temp-workspace', f), f);
});
}
function cleanUpUnusedFilesAndAddConfigFiles(options) {
output_1.output.log({ title: '🧹 Cleaning up.' });
(0, clean_up_files_1.cleanUpFiles)(options.reactAppName, options.isStandalone);
output_1.output.log({ title: "📃 Extend the app's tsconfig.json from the base" });
(0, tsconfig_setup_1.setupTsConfig)(options.reactAppName, options.isStandalone);
if (options.isStandalone) {
(0, node_fs_1.rmSync)('apps', { recursive: true, force: true });
}
}

@@ -18,3 +18,14 @@ "use strict";

import react from '@vitejs/plugin-react'
import { default as replace } from '@rollup/plugin-replace';
// Match CRA's environment variables.
// TODO: Replace these with VITE_ prefixed environment variables, and using import.meta.env.VITE_* instead of process.env.REACT_APP_*.
const craEnvVarRegex = /^REACT_APP/i;
const craEnvVars = Object.keys(process.env)
.filter((key) => craEnvVarRegex.test(key))
.reduce((env, key) => {
env[\`process.env.\${key}\`] = JSON.stringify(process.env[key]);
return env;
}, {});
// https://vitejs.dev/config/

@@ -35,5 +46,5 @@ export default defineConfig({

},
plugins: [react()],
plugins: [react(), replace(craEnvVars)],
});
`);
}

@@ -0,1 +1,2 @@

import { NxJsonConfiguration } from '../../../config/nx-json';
import { PackageJson } from '../../../utils/package-json';

@@ -6,6 +7,7 @@ import { PackageManagerCommands } from '../../../utils/package-manager';

}): void;
export declare function createNxJsonFromTurboJson(turboJson: Record<string, any>): NxJsonConfiguration;
export declare function addDepsToPackageJson(repoRoot: string, additionalPackages?: string[]): void;
export declare function updateGitIgnore(root: string): void;
export declare function runInstall(repoRoot: string, pmc?: PackageManagerCommands): void;
export declare function initCloud(installationSource: 'nx-init' | 'nx-init-angular' | 'nx-init-cra' | 'nx-init-monorepo' | 'nx-init-nest' | 'nx-init-npm-repo'): Promise<void>;
export declare function initCloud(installationSource: 'nx-init' | 'nx-init-angular' | 'nx-init-cra' | 'nx-init-monorepo' | 'nx-init-nest' | 'nx-init-npm-repo' | 'nx-init-turborepo'): Promise<void>;
export declare function addVsCodeRecommendedExtensions(repoRoot: string, extensions: string[]): void;

@@ -18,1 +20,2 @@ export declare function markRootPackageJsonAsNxProjectLegacy(repoRoot: string, cacheableScripts: string[], pmc: PackageManagerCommands): void;

export declare function isMonorepo(packageJson: PackageJson): boolean;
export declare function isCRA(packageJson: PackageJson): boolean;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createNxJsonFile = createNxJsonFile;
exports.createNxJsonFromTurboJson = createNxJsonFromTurboJson;
exports.addDepsToPackageJson = addDepsToPackageJson;

@@ -13,2 +14,3 @@ exports.updateGitIgnore = updateGitIgnore;

exports.isMonorepo = isMonorepo;
exports.isCRA = isCRA;
const child_process_1 = require("child_process");

@@ -25,3 +27,3 @@ const path_1 = require("path");

const connect_to_nx_cloud_2 = require("../../connect/connect-to-nx-cloud");
const default_base_1 = require("../../../utils/default-base");
const deduce_default_base_1 = require("./deduce-default-base");
function createNxJsonFile(repoRoot, topologicalTargets, cacheableOperations, scriptOutputs) {

@@ -58,52 +60,100 @@ const nxJsonPath = (0, path_2.joinPathFragments)(repoRoot, 'nx.json');

}
nxJson.defaultBase ??= deduceDefaultBase();
const defaultBase = (0, deduce_default_base_1.deduceDefaultBase)();
// Do not add defaultBase if it is inferred to be the Nx default value of main
if (defaultBase !== 'main') {
nxJson.defaultBase ??= defaultBase;
}
(0, fileutils_1.writeJsonFile)(nxJsonPath, nxJson);
}
function deduceDefaultBase() {
try {
(0, child_process_1.execSync)(`git rev-parse --verify main`, {
stdio: ['ignore', 'ignore', 'ignore'],
windowsHide: false,
});
return 'main';
function createNxJsonFromTurboJson(turboJson) {
const nxJson = {
$schema: './node_modules/nx/schemas/nx-schema.json',
};
// Handle global dependencies
if (turboJson.globalDependencies?.length > 0) {
nxJson.namedInputs = {
sharedGlobals: turboJson.globalDependencies.map((dep) => `{workspaceRoot}/${dep}`),
default: ['{projectRoot}/**/*', 'sharedGlobals'],
};
}
catch {
try {
(0, child_process_1.execSync)(`git rev-parse --verify dev`, {
stdio: ['ignore', 'ignore', 'ignore'],
windowsHide: false,
});
return 'dev';
// Handle global env vars
if (turboJson.globalEnv?.length > 0) {
nxJson.namedInputs = nxJson.namedInputs || {};
nxJson.namedInputs.sharedGlobals = nxJson.namedInputs.sharedGlobals || [];
nxJson.namedInputs.sharedGlobals.push(...turboJson.globalEnv.map((env) => ({ env })));
nxJson.namedInputs.default = nxJson.namedInputs.default || [];
if (!nxJson.namedInputs.default.includes('{projectRoot}/**/*')) {
nxJson.namedInputs.default.push('{projectRoot}/**/*');
}
catch {
try {
(0, child_process_1.execSync)(`git rev-parse --verify develop`, {
stdio: ['ignore', 'ignore', 'ignore'],
windowsHide: false,
if (!nxJson.namedInputs.default.includes('sharedGlobals')) {
nxJson.namedInputs.default.push('sharedGlobals');
}
}
// Handle task configurations
if (turboJson.tasks) {
nxJson.targetDefaults = {};
for (const [taskName, taskConfig] of Object.entries(turboJson.tasks)) {
// Skip project-specific tasks (containing #)
if (taskName.includes('#'))
continue;
const config = taskConfig;
nxJson.targetDefaults[taskName] = {};
// Handle dependsOn
if (config.dependsOn?.length > 0) {
nxJson.targetDefaults[taskName].dependsOn = config.dependsOn;
}
// Handle inputs
if (config.inputs?.length > 0) {
nxJson.targetDefaults[taskName].inputs = config.inputs
.map((input) => {
if (input === '$TURBO_DEFAULT$') {
return '{projectRoot}/**/*';
}
// Don't add projectRoot if it's already there or if it's an env var
if (input.startsWith('{projectRoot}/') ||
input.startsWith('{env.') ||
input.startsWith('$'))
return input;
return `{projectRoot}/${input}`;
})
.map((input) => {
// Don't add projectRoot if it's already there or if it's an env var
if (input.startsWith('{projectRoot}/') ||
input.startsWith('{env.') ||
input.startsWith('$'))
return input;
return `{projectRoot}/${input}`;
});
return 'develop';
}
catch {
try {
(0, child_process_1.execSync)(`git rev-parse --verify next`, {
stdio: ['ignore', 'ignore', 'ignore'],
windowsHide: false,
});
return 'next';
}
catch {
try {
(0, child_process_1.execSync)(`git rev-parse --verify master`, {
stdio: ['ignore', 'ignore', 'ignore'],
windowsHide: false,
});
return 'master';
// Handle outputs
if (config.outputs?.length > 0) {
nxJson.targetDefaults[taskName].outputs = config.outputs.map((output) => {
// Don't add projectRoot if it's already there
if (output.startsWith('{projectRoot}/'))
return output;
// Handle negated patterns by adding projectRoot after the !
if (output.startsWith('!')) {
return `!{projectRoot}/${output.slice(1)}`;
}
catch {
return (0, default_base_1.deduceDefaultBase)();
}
}
return `{projectRoot}/${output}`;
});
}
// Handle cache setting - true by default in Turbo
nxJson.targetDefaults[taskName].cache = config.cache !== false;
}
}
/**
* The fact that cacheDir was in use suggests the user had a reason for deviating from the default.
* We can't know what that reason was, nor if it would still be applicable in Nx, but we can at least
* improve discoverability of the relevant Nx option by explicitly including it with its default value.
*/
if (turboJson.cacheDir) {
nxJson.cacheDirectory = '.nx/cache';
}
const defaultBase = (0, deduce_default_base_1.deduceDefaultBase)();
// Do not add defaultBase if it is inferred to be the Nx default value of main
if (defaultBase !== 'main') {
nxJson.defaultBase ??= defaultBase;
}
return nxJson;
}

@@ -223,1 +273,14 @@ function addDepsToPackageJson(repoRoot, additionalPackages) {

}
function isCRA(packageJson) {
const combinedDependencies = {
...packageJson.dependencies,
...packageJson.devDependencies,
};
return (
// Required dependencies for CRA projects
combinedDependencies['react'] &&
combinedDependencies['react-dom'] &&
combinedDependencies['react-scripts'] &&
(0, fileutils_1.directoryExists)('src') &&
(0, fileutils_1.directoryExists)('public'));
}

@@ -8,2 +8,3 @@ import { NxJsonConfiguration } from '../../config/nx-json';

verbose?: boolean;
force?: boolean;
}

@@ -10,0 +11,0 @@ export declare function initHandler(options: InitArgs): Promise<void>;

@@ -6,19 +6,21 @@ "use strict";

const fs_1 = require("fs");
const enquirer_1 = require("enquirer");
const semver_1 = require("semver");
const nx_json_1 = require("../../config/nx-json");
const child_process_1 = require("../../utils/child-process");
const fileutils_1 = require("../../utils/fileutils");
const get_package_name_from_import_path_1 = require("../../utils/get-package-name-from-import-path");
const output_1 = require("../../utils/output");
const package_manager_1 = require("../../utils/package-manager");
const add_nx_scripts_1 = require("./implementation/dot-nx/add-nx-scripts");
const child_process_1 = require("../../utils/child-process");
const fileutils_1 = require("../../utils/fileutils");
const versions_1 = require("../../utils/versions");
const utils_1 = require("./implementation/utils");
const enquirer_1 = require("enquirer");
const angular_1 = require("./implementation/angular");
const workspace_context_1 = require("../../utils/workspace-context");
const connect_to_nx_cloud_1 = require("../connect/connect-to-nx-cloud");
const configure_plugins_1 = require("./configure-plugins");
const add_nx_to_monorepo_1 = require("./implementation/add-nx-to-monorepo");
const add_nx_to_npm_repo_1 = require("./implementation/add-nx-to-npm-repo");
const add_nx_to_monorepo_1 = require("./implementation/add-nx-to-monorepo");
const nx_json_1 = require("../../config/nx-json");
const get_package_name_from_import_path_1 = require("../../utils/get-package-name-from-import-path");
const configure_plugins_1 = require("./configure-plugins");
const add_nx_to_turborepo_1 = require("./implementation/add-nx-to-turborepo");
const angular_1 = require("./implementation/angular");
const add_nx_scripts_1 = require("./implementation/dot-nx/add-nx-scripts");
const utils_1 = require("./implementation/utils");
const react_1 = require("./implementation/react");
async function initHandler(options) {

@@ -61,3 +63,37 @@ process.env.NX_RUNNING_NX_INIT = 'true';

const packageJson = (0, fileutils_1.readJsonFile)('package.json');
if ((0, utils_1.isMonorepo)(packageJson)) {
const _isTurborepo = (0, fs_1.existsSync)('turbo.json');
const _isMonorepo = (0, utils_1.isMonorepo)(packageJson);
const _isCRA = (0, utils_1.isCRA)(packageJson);
const learnMoreLink = _isTurborepo
? 'https://nx.dev/recipes/adopting-nx/from-turborepo'
: _isMonorepo
? 'https://nx.dev/getting-started/tutorials/npm-workspaces-tutorial'
: 'https://nx.dev/recipes/adopting-nx/adding-to-existing-project';
/**
* Turborepo users must have set up individual scripts already, and we keep the transition as minimal as possible.
* We log a message during the conversion process in addNxToTurborepo about how they can learn more about the power
* of Nx plugins and how it would allow them to infer all the relevant scripts automatically, including all cache
* inputs and outputs.
*/
if (_isTurborepo) {
await (0, add_nx_to_turborepo_1.addNxToTurborepo)({
interactive: options.interactive,
});
(0, utils_1.printFinalMessage)({
learnMoreLink,
});
return;
}
const pmc = (0, package_manager_1.getPackageManagerCommand)();
if (_isCRA) {
await (0, react_1.addNxToCraRepo)({
addE2e: false,
force: false,
vite: true,
integrated: false,
interactive: options.interactive,
nxCloud: false,
});
}
else if (_isMonorepo) {
await (0, add_nx_to_monorepo_1.addNxToMonorepo)({

@@ -74,9 +110,5 @@ interactive: options.interactive,

}
const learnMoreLink = (0, utils_1.isMonorepo)(packageJson)
? 'https://nx.dev/getting-started/tutorials/npm-workspaces-tutorial'
: 'https://nx.dev/recipes/adopting-nx/adding-to-existing-project';
const useNxCloud = options.nxCloud ??
(options.interactive ? await (0, connect_to_nx_cloud_1.connectExistingRepoToNxCloudPrompt)() : false);
const repoRoot = process.cwd();
const pmc = (0, package_manager_1.getPackageManagerCommand)();
(0, utils_1.createNxJsonFile)(repoRoot, [], [], {});

@@ -86,3 +118,13 @@ (0, utils_1.updateGitIgnore)(repoRoot);

output_1.output.log({ title: '🧐 Checking dependencies' });
const { plugins, updatePackageScripts } = await detectPlugins(nxJson, options.interactive);
let plugins;
let updatePackageScripts;
if (_isCRA) {
plugins = ['@nx/vite'];
updatePackageScripts = true;
}
else {
const { plugins: _plugins, updatePackageScripts: _updatePackageScripts } = await detectPlugins(nxJson, options.interactive);
plugins = _plugins;
updatePackageScripts = _updatePackageScripts;
}
output_1.output.log({ title: '📦 Installing Nx' });

@@ -89,0 +131,0 @@ (0, configure_plugins_1.runPackageManagerInstallPlugins)(repoRoot, pmc, plugins);

@@ -194,4 +194,3 @@ "use strict";

let workspaceChangelogFromRef = args.from ||
(await (0, git_1.getLatestGitTagForPattern)(nxReleaseConfig.releaseTagPattern))
?.tag;
(await (0, git_1.getLatestGitTagForPattern)(nxReleaseConfig.releaseTagPattern, {}, nxReleaseConfig.releaseTagPatternCheckAllBranchesWhen))?.tag;
if (!workspaceChangelogFromRef) {

@@ -339,3 +338,3 @@ if (useAutomaticFromRef) {

releaseGroupName: releaseGroup.name,
}))?.tag;
}, releaseGroup.releaseTagPatternCheckAllBranchesWhen))?.tag;
if (!fromRef && useAutomaticFromRef) {

@@ -447,4 +446,3 @@ const firstCommit = await (0, git_1.getFirstGitCommit)();

let fromRef = args.from ||
(await (0, git_1.getLatestGitTagForPattern)(releaseGroup.releaseTagPattern))
?.tag;
(await (0, git_1.getLatestGitTagForPattern)(releaseGroup.releaseTagPattern, {}, releaseGroup.releaseTagPatternCheckAllBranchesWhen))?.tag;
if (!fromRef) {

@@ -451,0 +449,0 @@ if (useAutomaticFromRef) {

@@ -61,2 +61,3 @@ import { CommandModule } from 'yargs';

yes?: boolean;
preid?: VersionOptions['preid'];
skipPublish?: boolean;

@@ -63,0 +64,0 @@ };

@@ -79,2 +79,7 @@ "use strict";

})
.option('preid', {
type: 'string',
describe: 'The optional prerelease identifier to apply to the version. This will only be applied in the case that the specifier argument has been set to `prerelease` OR when conventional commits are enabled, in which case it will modify the resolved specifier from conventional commits to be its prerelease equivalent. E.g. minor -> preminor.',
default: '',
})
.option('yes', {

@@ -81,0 +86,0 @@ type: 'boolean',

@@ -180,2 +180,3 @@ "use strict";

: defaultFixedReleaseTagPattern),
releaseTagPatternCheckAllBranchesWhen: userConfig.releaseTagPatternCheckAllBranchesWhen ?? undefined,
conventionalCommits: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG,

@@ -211,2 +212,3 @@ versionPlans: (userConfig.versionPlans ||

: WORKSPACE_DEFAULTS.releaseTagPattern,
releaseTagPatternCheckAllBranchesWhen: userConfig.releaseTagPatternCheckAllBranchesWhen ?? undefined,
versionPlans: false,

@@ -370,2 +372,5 @@ };

: userConfig.releaseTagPattern || defaultFixedReleaseTagPattern),
releaseTagPatternCheckAllBranchesWhen: releaseGroup.releaseTagPatternCheckAllBranchesWhen ??
userConfig.releaseTagPatternCheckAllBranchesWhen ??
undefined,
versionPlans: releaseGroup.versionPlans ?? rootVersionPlansConfig,

@@ -419,2 +424,3 @@ };

releaseTagPattern: WORKSPACE_DEFAULTS.releaseTagPattern,
releaseTagPatternCheckAllBranchesWhen: WORKSPACE_DEFAULTS.releaseTagPatternCheckAllBranchesWhen,
git: rootGitConfig,

@@ -421,0 +427,0 @@ version: rootVersionConfig,

@@ -25,3 +25,3 @@ export interface GitCommitAuthor {

}
export declare function getLatestGitTagForPattern(releaseTagPattern: string, additionalInterpolationData?: {}): Promise<{
export declare function getLatestGitTagForPattern(releaseTagPattern: string, additionalInterpolationData?: {}, checkAllBranchesWhen?: boolean | string[]): Promise<{
tag: string;

@@ -28,0 +28,0 @@ extractedVersion: string;

@@ -19,2 +19,3 @@ "use strict";

const node_path_1 = require("node:path");
const minimatch_1 = require("minimatch");
const utils_1 = require("../../../tasks-runner/utils");

@@ -28,10 +29,52 @@ const workspace_root_1 = require("../../../utils/workspace-root");

const SEMVER_REGEX = /^(0|[1-9]\d*)\.(0|[1-9]\d*)\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$/g;
async function getLatestGitTagForPattern(releaseTagPattern, additionalInterpolationData = {}) {
async function getLatestGitTagForPattern(releaseTagPattern, additionalInterpolationData = {}, checkAllBranchesWhen) {
/**
* By default, we will try and resolve the latest match for the releaseTagPattern from the current branch,
* falling back to all branches if no match is found on the current branch.
*
* - If checkAllBranchesWhen is true it will cause us to ALWAYS check all branches for the latest match.
* - If checkAllBranchesWhen is explicitly set to false it will cause us to ONLY check the current branch for the latest match.
* - If checkAllBranchesWhen is an array of strings it will cause us to check all branches WHEN the current branch is one of the strings in the array.
*/
let alwaysCheckAllBranches = false;
if (typeof checkAllBranchesWhen !== 'undefined') {
if (typeof checkAllBranchesWhen === 'boolean') {
alwaysCheckAllBranches = checkAllBranchesWhen;
}
else if (Array.isArray(checkAllBranchesWhen)) {
/**
* Get the current git branch and determine whether to check all branches based on the checkAllBranchesWhen parameter
*/
const currentBranch = await (0, exec_command_1.execCommand)('git', [
'rev-parse',
'--abbrev-ref',
'HEAD',
]).then((r) => r.trim());
// Check exact match first
alwaysCheckAllBranches = checkAllBranchesWhen.includes(currentBranch);
// Check if any glob pattern matches next
if (!alwaysCheckAllBranches) {
alwaysCheckAllBranches = checkAllBranchesWhen.some((pattern) => {
const r = minimatch_1.minimatch.makeRe(pattern, { dot: true });
if (!r) {
return false;
}
return r.test(currentBranch);
});
}
}
}
const defaultGitArgs = [
// Apply git config to take version suffixes into account when sorting, e.g. 1.0.0 is newer than 1.0.0-beta.1
'-c',
'versionsort.suffix=-',
'tag',
'--sort',
'-v:refname',
];
try {
let tags;
tags = await (0, exec_command_1.execCommand)('git', [
'tag',
'--sort',
'-v:refname',
'--merged',
...defaultGitArgs,
...(alwaysCheckAllBranches ? [] : ['--merged']),
]).then((r) => r

@@ -42,5 +85,10 @@ .trim()

.filter(Boolean));
if (!tags.length) {
if (
// Do not run this fallback if the user explicitly set checkAllBranchesWhen to false
checkAllBranchesWhen !== false &&
!tags.length &&
// There is no point in running this fallback if we already checked against all branches
!alwaysCheckAllBranches) {
// try again, but include all tags on the repo instead of just --merged ones
tags = await (0, exec_command_1.execCommand)('git', ['tag', '--sort', '-v:refname']).then((r) => r
tags = await (0, exec_command_1.execCommand)('git', defaultGitArgs).then((r) => r
.trim()

@@ -47,0 +95,0 @@ .split('\n')

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getCommandProjects = getCommandProjects;
const utils_1 = require("../tasks-runner/utils");
const create_command_graph_1 = require("./create-command-graph");

@@ -15,20 +16,4 @@ function getCommandProjects(projectGraph, projects, nxArgs) {

sortedProjects.push(...roots);
const newGraph = removeIdsFromGraph(commandGraph, roots, commandGraph.dependencies);
const newGraph = (0, utils_1.removeIdsFromGraph)(commandGraph, roots, commandGraph.dependencies);
return getSortedProjects(newGraph, sortedProjects);
}
/**
 * Produces a copy of `mapWithIds` and of the graph's dependency lists with
 * every id in `ids` removed (both as a node and as a dependency edge), and
 * recomputes the roots (nodes left with zero dependencies).
 *
 * @param graph object with a `dependencies` record: id -> array of dep ids
 * @param ids ids to strip out of the graph
 * @param mapWithIds record of id -> node payload to filter
 * @returns { mapWithIds, dependencies, roots } for the reduced graph
 */
function removeIdsFromGraph(graph, ids, mapWithIds) {
    const excluded = new Set(ids);
    const keptMap = {};
    const keptDeps = {};
    for (const [nodeId, value] of Object.entries(mapWithIds)) {
        if (excluded.has(nodeId)) {
            continue;
        }
        keptMap[nodeId] = value;
        // Drop edges that point at removed nodes as well.
        keptDeps[nodeId] = graph.dependencies[nodeId].filter((dep) => !excluded.has(dep));
    }
    return {
        mapWithIds: keptMap,
        dependencies: keptDeps,
        roots: Object.keys(keptDeps).filter((id) => keptDeps[id].length === 0),
    };
}
import type { ChangelogRenderOptions } from '../../release/changelog-renderer';
import { PackageManager } from '../utils/package-manager';
import { InputDefinition, TargetConfiguration, TargetDependencyConfig } from './workspace-json-project-json';
import type { PackageManager } from '../utils/package-manager';
import type { InputDefinition, TargetConfiguration, TargetDependencyConfig } from './workspace-json-project-json';
export type ImplicitDependencyEntry<T = '*' | string[]> = {

@@ -219,2 +219,11 @@ [key: string]: T | ImplicitJsonSubsetDependency<T>;

/**
* By default, we will try and resolve the latest match for the releaseTagPattern from the current branch,
* falling back to all branches if no match is found on the current branch.
*
* - Setting this to true will cause us to ALWAYS check all branches for the latest match.
* - Setting it to false will cause us to ONLY check the current branch for the latest match.
* - Setting it to an array of strings will cause us to check all branches WHEN the current branch matches one of the strings in the array. Glob patterns are supported.
*/
releaseTagPatternCheckAllBranchesWhen?: boolean | string[];
/**
* Enables using version plans as a specifier source for versioning and

@@ -286,2 +295,11 @@ * to determine changes for changelog generation.

/**
* By default, we will try and resolve the latest match for the releaseTagPattern from the current branch,
* falling back to all branches if no match is found on the current branch.
*
* - Setting this to true will cause us to ALWAYS check all branches for the latest match.
* - Setting it to false will cause us to ONLY check the current branch for the latest match.
* - Setting it to an array of strings will cause us to check all branches WHEN the current branch matches one of the strings in the array. Glob patterns are supported.
*/
releaseTagPatternCheckAllBranchesWhen?: boolean | string[];
/**
* Enable and configure automatic git operations as part of the release

@@ -326,2 +344,3 @@ */

export interface NxJsonConfiguration<T = '*' | string[]> {
$schema?: string;
/**

@@ -328,0 +347,0 @@ * Optional (additional) Nx.json configuration file which becomes a base for this one

@@ -87,6 +87,2 @@ /**

parallelism: boolean;
/**
* This denotes if the task runs continuously
*/
continuous?: boolean;
}

@@ -109,3 +105,2 @@ /**

dependencies: Record<string, string[]>;
continuousDependencies: Record<string, string[]>;
}

@@ -242,6 +242,2 @@ import type { PackageJson } from '../utils/package-json';

/**
* Whether this target runs continuously
*/
continuous?: boolean;
/**
* List of generators to run before the target to ensure the workspace

@@ -248,0 +244,0 @@ * is up to date.

@@ -1,1 +0,1 @@

(()=>{"use strict";var e,r={},t={};function o(e){var n=t[e];if(void 0!==n)return n.exports;var i=t[e]={id:e,loaded:!1,exports:{}};return r[e].call(i.exports,i,i.exports,o),i.loaded=!0,i.exports}o.m=r,e=[],o.O=(r,t,n,i)=>{if(!t){var l=1/0;for(f=0;f<e.length;f++){for(var[t,n,i]=e[f],a=!0,u=0;u<t.length;u++)(!1&i||l>=i)&&Object.keys(o.O).every((e=>o.O[e](t[u])))?t.splice(u--,1):(a=!1,i<l&&(l=i));if(a){e.splice(f--,1);var d=n();void 0!==d&&(r=d)}}return r}i=i||0;for(var f=e.length;f>0&&e[f-1][2]>i;f--)e[f]=e[f-1];e[f]=[t,n,i]},o.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return o.d(r,{a:r}),r},o.d=(e,r)=>{for(var t in r)o.o(r,t)&&!o.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},o.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),o.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),o.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.nmd=e=>(e.paths=[],e.children||(e.children=[]),e),(()=>{var e={666:0};o.O.j=r=>0===e[r];var r=(r,t)=>{var n,i,[l,a,u]=t,d=0;if(l.some((r=>0!==e[r]))){for(n in a)o.o(a,n)&&(o.m[n]=a[n]);if(u)var f=u(o)}for(r&&r(t);d<l.length;d++)i=l[d],o.o(e,i)&&e[i]&&e[i][0](),e[i]=0;return o.O(f)},t=self.webpackChunk=self.webpackChunk||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})()})();
(()=>{"use strict";var e,r={},t={};function o(e){var n=t[e];if(void 0!==n)return n.exports;var i=t[e]={exports:{}};return r[e](i,i.exports,o),i.exports}o.m=r,e=[],o.O=(r,t,n,i)=>{if(!t){var a=1/0;for(s=0;s<e.length;s++){for(var[t,n,i]=e[s],l=!0,u=0;u<t.length;u++)(!1&i||a>=i)&&Object.keys(o.O).every((e=>o.O[e](t[u])))?t.splice(u--,1):(l=!1,i<a&&(a=i));if(l){e.splice(s--,1);var f=n();void 0!==f&&(r=f)}}return r}i=i||0;for(var s=e.length;s>0&&e[s-1][2]>i;s--)e[s]=e[s-1];e[s]=[t,n,i]},o.n=e=>{var r=e&&e.__esModule?()=>e.default:()=>e;return o.d(r,{a:r}),r},o.d=(e,r)=>{for(var t in r)o.o(r,t)&&!o.o(e,t)&&Object.defineProperty(e,t,{enumerable:!0,get:r[t]})},o.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),o.o=(e,r)=>Object.prototype.hasOwnProperty.call(e,r),o.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},(()=>{var e={666:0};o.O.j=r=>0===e[r];var r=(r,t)=>{var n,i,[a,l,u]=t,f=0;if(a.some((r=>0!==e[r]))){for(n in l)o.o(l,n)&&(o.m[n]=l[n]);if(u)var s=u(o)}for(r&&r(t);f<a.length;f++)i=a[f],o.o(e,i)&&e[i]&&e[i][0](),e[i]=0;return o.O(s)},t=self.webpackChunk=self.webpackChunk||[];t.forEach(r.bind(null,0)),t.push=r.bind(null,t.push.bind(t))})()})();

@@ -1,1 +0,1 @@

"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[532],{6395:()=>{}},s=>{var e;e=6395,s(s.s=e)}]);
"use strict";(self.webpackChunk=self.webpackChunk||[]).push([[532],{5751:()=>{}},s=>{var e;e=5751,s(s.s=e)}]);

@@ -50,2 +50,3 @@ import { ChildProcess } from 'child_process';

glob(globs: string[], exclude?: string[]): Promise<string[]>;
multiGlob(globs: string[], exclude?: string[]): Promise<string[][]>;
getWorkspaceContextFileData(): Promise<FileData[]>;

@@ -55,2 +56,3 @@ getWorkspaceFiles(projectRootMap: Record<string, string>): Promise<NxWorkspaceFiles>;

hashGlob(globs: string[], exclude?: string[]): Promise<string>;
hashMultiGlob(globGroups: string[][]): Promise<string[]>;
getFlakyTasks(hashes: string[]): Promise<string[]>;

@@ -77,2 +79,1 @@ getEstimatedTaskTimings(targets: TaskTarget[]): Promise<Record<string, number>>;

export declare const daemonClient: DaemonClient;
export declare function isDaemonEnabled(): boolean;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.daemonClient = exports.DaemonClient = void 0;
exports.isDaemonEnabled = isDaemonEnabled;
const workspace_root_1 = require("../../utils/workspace-root");

@@ -15,4 +14,2 @@ const child_process_1 = require("child_process");

const tmp_dir_1 = require("../tmp-dir");
const is_ci_1 = require("../../utils/is-ci");
const nx_json_1 = require("../../config/nx-json");
const configuration_1 = require("../../config/configuration");

@@ -23,3 +20,2 @@ const promised_based_queue_1 = require("../../utils/promised-based-queue");

const error_types_1 = require("../../project-graph/error-types");
const native_1 = require("../../native");
const get_nx_workspace_files_1 = require("../message-types/get-nx-workspace-files");

@@ -37,2 +33,3 @@ const get_context_file_data_1 = require("../message-types/get-context-file-data");

const run_tasks_execution_hooks_1 = require("../message-types/run-tasks-execution-hooks");
const enabled_1 = require("./enabled");
const DAEMON_ENV_SETTINGS = {

@@ -64,39 +61,3 @@ NX_PROJECT_GLOB_CACHE: 'false',

enabled() {
if (this._enabled === undefined) {
const useDaemonProcessOption = this.nxJson?.useDaemonProcess;
const env = process.env.NX_DAEMON;
// env takes precedence
// option=true,env=false => no daemon
// option=false,env=undefined => no daemon
// option=false,env=false => no daemon
// option=undefined,env=undefined => daemon
// option=true,env=true => daemon
// option=false,env=true => daemon
// CI=true,env=undefined => no daemon
// CI=true,env=false => no daemon
// CI=true,env=true => daemon
// docker=true,env=undefined => no daemon
// docker=true,env=false => no daemon
// docker=true,env=true => daemon
// WASM => no daemon because file watching does not work
if ((((0, is_ci_1.isCI)() || isDocker()) && env !== 'true') ||
(0, tmp_dir_1.isDaemonDisabled)() ||
nxJsonIsNotPresent() ||
(useDaemonProcessOption === undefined && env === 'false') ||
(useDaemonProcessOption === true && env === 'false') ||
(useDaemonProcessOption === false && env === undefined) ||
(useDaemonProcessOption === false && env === 'false')) {
this._enabled = false;
}
else if (native_1.IS_WASM) {
output_1.output.warn({
title: 'The Nx Daemon is unsupported in WebAssembly environments. Some things may be slower than or not function as expected.',
});
this._enabled = false;
}
else {
this._enabled = true;
}
}
return this._enabled;
return (0, enabled_1.isDaemonEnabled)(this.nxJson);
}

@@ -222,2 +183,10 @@ reset() {

}
multiGlob(globs, exclude) {
const message = {
type: 'MULTI_GLOB',
globs,
exclude,
};
return this.sendToDaemonViaQueue(message);
}
getWorkspaceContextFileData() {

@@ -251,2 +220,9 @@ const message = {

}
hashMultiGlob(globGroups) {
const message = {
type: hash_glob_1.HASH_MULTI_GLOB,
globGroups: globGroups,
};
return this.sendToDaemonViaQueue(message);
}
getFlakyTasks(hashes) {

@@ -495,21 +471,2 @@ const message = {

exports.daemonClient = new DaemonClient();
/**
 * Module-level convenience wrapper: reports whether the Nx daemon will be
 * used, by delegating to the shared `daemonClient` singleton's `enabled()`.
 */
function isDaemonEnabled() {
    return exports.daemonClient.enabled();
}
/**
 * Best-effort check for running inside a Docker container: first look for the
 * /.dockerenv marker file, then fall back to scanning /proc/self/cgroup for a
 * "docker" entry. Returns false when neither probe succeeds.
 */
function isDocker() {
    try {
        (0, node_fs_1.statSync)('/.dockerenv');
        return true;
    }
    catch {
        // No /.dockerenv marker; fall through to the cgroup probe below.
    }
    try {
        return (0, node_fs_1.readFileSync)('/proc/self/cgroup', 'utf8')?.includes('docker');
    }
    catch {
        return false;
    }
}
/**
 * True when the workspace root has no nx.json (the daemon cannot run then).
 */
function nxJsonIsNotPresent() {
    const present = (0, nx_json_1.hasNxJson)(workspace_root_1.workspaceRoot);
    return !present;
}
function daemonProcessException(message) {

@@ -516,0 +473,0 @@ try {

@@ -8,1 +8,8 @@ export declare const GLOB: "GLOB";

export declare function isHandleGlobMessage(message: unknown): message is HandleGlobMessage;
export declare const MULTI_GLOB: "MULTI_GLOB";
export type HandleMultiGlobMessage = {
type: typeof MULTI_GLOB;
globs: string[];
exclude?: string[];
};
export declare function isHandleMultiGlobMessage(message: unknown): message is HandleMultiGlobMessage;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GLOB = void 0;
exports.MULTI_GLOB = exports.GLOB = void 0;
exports.isHandleGlobMessage = isHandleGlobMessage;
exports.isHandleMultiGlobMessage = isHandleMultiGlobMessage;
exports.GLOB = 'GLOB';

@@ -12,1 +13,8 @@ function isHandleGlobMessage(message) {

}
exports.MULTI_GLOB = 'MULTI_GLOB';
/**
 * Type guard: narrows an arbitrary daemon payload to a multi-glob request by
 * checking for the MULTI_GLOB discriminator.
 */
function isHandleMultiGlobMessage(message) {
    if (typeof message !== 'object' || message === null) {
        return false;
    }
    return 'type' in message && message['type'] === exports.MULTI_GLOB;
}

@@ -8,1 +8,7 @@ export declare const HASH_GLOB: "HASH_GLOB";

export declare function isHandleHashGlobMessage(message: unknown): message is HandleHashGlobMessage;
export declare const HASH_MULTI_GLOB: "HASH_MULTI_GLOB";
export type HandleHashMultiGlobMessage = {
type: typeof HASH_MULTI_GLOB;
globGroups: string[][];
};
export declare function isHandleHashMultiGlobMessage(message: unknown): message is HandleHashMultiGlobMessage;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.HASH_GLOB = void 0;
exports.HASH_MULTI_GLOB = exports.HASH_GLOB = void 0;
exports.isHandleHashGlobMessage = isHandleHashGlobMessage;
exports.isHandleHashMultiGlobMessage = isHandleHashMultiGlobMessage;
exports.HASH_GLOB = 'HASH_GLOB';

@@ -12,1 +13,8 @@ function isHandleHashGlobMessage(message) {

}
exports.HASH_MULTI_GLOB = 'HASH_MULTI_GLOB';
/**
 * Type guard: narrows an arbitrary daemon payload to a multi-glob hash
 * request by checking for the HASH_MULTI_GLOB discriminator.
 */
function isHandleHashMultiGlobMessage(message) {
    if (typeof message !== 'object' || message === null) {
        return false;
    }
    return 'type' in message && message['type'] === exports.HASH_MULTI_GLOB;
}
import { HandlerResult } from './server';
export declare function handleGlob(globs: string[], exclude?: string[]): Promise<HandlerResult>;
export declare function handleMultiGlob(globs: string[], exclude?: string[]): Promise<HandlerResult>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.handleGlob = handleGlob;
exports.handleMultiGlob = handleMultiGlob;
const workspace_root_1 = require("../../utils/workspace-root");

@@ -13,1 +14,8 @@ const workspace_context_1 = require("../../utils/workspace-context");

}
/**
 * Daemon-side handler for MULTI_GLOB messages: resolves each glob against the
 * workspace root and serializes the per-glob result arrays for the socket
 * reply.
 */
async function handleMultiGlob(globs, exclude) {
    const matches = await (0, workspace_context_1.multiGlobWithWorkspaceContext)(workspace_root_1.workspaceRoot, globs, exclude);
    const response = JSON.stringify(matches);
    return { response, description: 'handleMultiGlob' };
}
import { HandlerResult } from './server';
export declare function handleHashGlob(globs: string[], exclude?: string[]): Promise<HandlerResult>;
export declare function handleHashMultiGlob(globs: string[][]): Promise<HandlerResult>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.handleHashGlob = handleHashGlob;
exports.handleHashMultiGlob = handleHashMultiGlob;
const workspace_root_1 = require("../../utils/workspace-root");

@@ -13,1 +14,8 @@ const workspace_context_1 = require("../../utils/workspace-context");

}
/**
 * Daemon-side handler for HASH_MULTI_GLOB messages: computes one hash per
 * glob group against the workspace root and serializes the result for the
 * socket reply.
 */
async function handleHashMultiGlob(globs) {
    const hashes = await (0, workspace_context_1.hashMultiGlobWithWorkspaceContext)(workspace_root_1.workspaceRoot, globs);
    const response = JSON.stringify(hashes);
    return { response, description: 'handleHashMultiGlob' };
}

@@ -132,2 +132,5 @@ "use strict";

}
else if ((0, glob_1.isHandleMultiGlobMessage)(payload)) {
await handleResult(socket, glob_1.MULTI_GLOB, () => (0, handle_glob_1.handleMultiGlob)(payload.globs, payload.exclude));
}
else if ((0, get_nx_workspace_files_1.isHandleNxWorkspaceFilesMessage)(payload)) {

@@ -145,2 +148,5 @@ await handleResult(socket, get_nx_workspace_files_1.GET_NX_WORKSPACE_FILES, () => (0, handle_nx_workspace_files_1.handleNxWorkspaceFiles)(payload.projectRootMap));

}
else if ((0, hash_glob_1.isHandleHashMultiGlobMessage)(payload)) {
await handleResult(socket, hash_glob_1.HASH_GLOB, () => (0, handle_hash_glob_1.handleHashMultiGlob)(payload.globGroups));
}
else if ((0, task_history_1.isHandleGetFlakyTasksMessage)(payload)) {

@@ -147,0 +153,0 @@ await handleResult(socket, task_history_1.GET_FLAKY_TASKS, () => (0, handle_task_history_1.handleGetFlakyTasks)(payload.hashes));

@@ -139,2 +139,2 @@ /**

export { createProjectFileMapUsingProjectGraph } from './project-graph/file-map-utils';
export { isDaemonEnabled } from './daemon/client/client';
export { isDaemonEnabled } from './daemon/client/enabled';

@@ -139,3 +139,3 @@ "use strict";

Object.defineProperty(exports, "createProjectFileMapUsingProjectGraph", { enumerable: true, get: function () { return file_map_utils_1.createProjectFileMapUsingProjectGraph; } });
var client_1 = require("./daemon/client/client");
Object.defineProperty(exports, "isDaemonEnabled", { enumerable: true, get: function () { return client_1.isDaemonEnabled; } });
var enabled_1 = require("./daemon/client/enabled");
Object.defineProperty(exports, "isDaemonEnabled", { enumerable: true, get: function () { return enabled_1.isDaemonEnabled; } });

@@ -20,3 +20,3 @@ /**

export { hashObject } from './hasher/file-hasher';
export { hashWithWorkspaceContext } from './utils/workspace-context';
export { hashWithWorkspaceContext, hashMultiGlobWithWorkspaceContext, } from './utils/workspace-context';
export { createProjectRootMappingsFromProjectConfigurations, findProjectForPath, } from './project-graph/utils/find-project-for-path';

@@ -23,0 +23,0 @@ export { retrieveProjectConfigurations } from './project-graph/utils/retrieve-workspace-files';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isCI = exports.interpolate = exports.registerTsProject = exports.LoadedNxPlugin = exports.retrieveProjectConfigurations = exports.findProjectForPath = exports.createProjectRootMappingsFromProjectConfigurations = exports.hashWithWorkspaceContext = exports.hashObject = exports.splitByColons = exports.readModulePackageJson = exports.stripIndent = exports.sortObjectByKeys = exports.combineOptionsForExecutor = exports.splitTarget = exports.findMatchingConfigFiles = exports.readProjectConfigurationsFromRootMap = exports.mergeTargetConfigurations = exports.retrieveProjectConfigurationsWithAngularProjects = exports.calculateDefaultProjectName = exports.readNxJsonFromDisk = exports.getExecutorInformation = exports.createTempNpmDirectory = void 0;
exports.isCI = exports.interpolate = exports.registerTsProject = exports.LoadedNxPlugin = exports.retrieveProjectConfigurations = exports.findProjectForPath = exports.createProjectRootMappingsFromProjectConfigurations = exports.hashMultiGlobWithWorkspaceContext = exports.hashWithWorkspaceContext = exports.hashObject = exports.splitByColons = exports.readModulePackageJson = exports.stripIndent = exports.sortObjectByKeys = exports.combineOptionsForExecutor = exports.splitTarget = exports.findMatchingConfigFiles = exports.readProjectConfigurationsFromRootMap = exports.mergeTargetConfigurations = exports.retrieveProjectConfigurationsWithAngularProjects = exports.calculateDefaultProjectName = exports.readNxJsonFromDisk = exports.getExecutorInformation = exports.createTempNpmDirectory = void 0;
const tslib_1 = require("tslib");

@@ -41,2 +41,3 @@ /**

Object.defineProperty(exports, "hashWithWorkspaceContext", { enumerable: true, get: function () { return workspace_context_1.hashWithWorkspaceContext; } });
Object.defineProperty(exports, "hashMultiGlobWithWorkspaceContext", { enumerable: true, get: function () { return workspace_context_1.hashMultiGlobWithWorkspaceContext; } });
var find_project_for_path_1 = require("./project-graph/utils/find-project-for-path");

@@ -43,0 +44,0 @@ Object.defineProperty(exports, "createProjectRootMappingsFromProjectConfigurations", { enumerable: true, get: function () { return find_project_for_path_1.createProjectRootMappingsFromProjectConfigurations; } });

import { ExecutorContext } from '../../config/misc-interfaces';
import { ParallelRunningTasks, SeriallyRunningTasks } from './running-tasks';
export declare const LARGE_BUFFER: number;

@@ -7,18 +6,17 @@ export type Json = {

};
export interface RunCommandsCommandOptions {
command: string;
forwardAllArgs?: boolean;
/**
* description was added to allow users to document their commands inline,
* it is not intended to be used as part of the execution of the command.
*/
description?: string;
prefix?: string;
prefixColor?: string;
color?: string;
bgColor?: string;
}
export interface RunCommandsOptions extends Json {
command?: string | string[];
commands?: Array<RunCommandsCommandOptions | string>;
commands?: ({
command: string;
forwardAllArgs?: boolean;
/**
* description was added to allow users to document their commands inline,
* it is not intended to be used as part of the execution of the command.
*/
description?: string;
prefix?: string;
prefixColor?: string;
color?: string;
bgColor?: string;
} | string)[];
color?: boolean;

@@ -61,4 +59,2 @@ parallel?: boolean;

}>;
export declare function runCommands(options: RunCommandsOptions, context: ExecutorContext): Promise<ParallelRunningTasks | SeriallyRunningTasks>;
export declare function normalizeOptions(options: RunCommandsOptions): NormalizedRunCommandsOptions;
export declare function interpolateArgsIntoCommand(command: string, opts: Pick<NormalizedRunCommandsOptions, 'args' | 'parsedArgs' | '__unparsed__' | 'unknownOptions' | 'unparsedCommandArgs'>, forwardAllArgs: boolean): string;

@@ -5,10 +5,21 @@ "use strict";

exports.default = default_1;
exports.runCommands = runCommands;
exports.normalizeOptions = normalizeOptions;
exports.interpolateArgsIntoCommand = interpolateArgsIntoCommand;
const child_process_1 = require("child_process");
const path = require("path");
const yargsParser = require("yargs-parser");
const npm_run_path_1 = require("npm-run-path");
const chalk = require("chalk");
const pseudo_terminal_1 = require("../../tasks-runner/pseudo-terminal");
const exit_codes_1 = require("../../utils/exit-codes");
const running_tasks_1 = require("./running-tasks");
const task_env_1 = require("../../tasks-runner/task-env");
exports.LARGE_BUFFER = 1024 * 1000000;
let pseudoTerminal;
const childProcesses = new Set();
/**
 * (Re)loads a dotenv file into `env`, expanding variable references.
 * Unloads any values previously sourced from the same file first, so edits
 * to the file take effect on reload. Throws if the file fails to parse/load.
 */
function loadEnvVarsFile(path, env = {}) {
    (0, task_env_1.unloadDotEnvFile)(path, env);
    const { error } = (0, task_env_1.loadAndExpandDotEnvFile)(path, env);
    if (error) {
        throw error;
    }
}
const propKeys = [

@@ -34,10 +45,3 @@ 'command',

/**
 * Executor entry point: runs the configured commands and translates the
 * final exit code into the executor's `{ success }` result contract.
 */
async function default_1(options, context) {
    const runningTask = await runCommands(options, context);
    const taskResults = await runningTask.getResults();
    // Exit code 0 is the only success signal; everything else is a failure.
    return { ...taskResults, success: taskResults.code === 0 };
}
async function runCommands(options, context) {
registerProcessListener();
const normalized = normalizeOptions(options);

@@ -51,11 +55,7 @@ if (normalized.readyWhenStatus.length && !normalized.parallel) {

}
const pseudoTerminal = !options.parallel && pseudo_terminal_1.PseudoTerminal.isSupported()
? (0, pseudo_terminal_1.getPseudoTerminal)()
: null;
try {
const runningTask = options.parallel
? new running_tasks_1.ParallelRunningTasks(normalized, context)
: new running_tasks_1.SeriallyRunningTasks(normalized, context, pseudoTerminal);
registerProcessListener(runningTask, pseudoTerminal);
return runningTask;
const result = options.parallel
? await runInParallel(normalized, context)
: await runSerially(normalized, context);
return result;
}

@@ -69,2 +69,48 @@ catch (e) {

}
/**
 * Runs every configured command concurrently.
 *
 * Completion semantics:
 * - With `readyWhenStatus` matchers: resolves as soon as the FIRST command
 *   settles (Promise.race); the remaining commands keep running.
 * - Without matchers: waits for all commands, failing when any exited
 *   non-zero and emitting one warning line per failed command.
 *
 * Returns `{ success, terminalOutput }` where `terminalOutput` is the
 * combined output of the awaited command(s).
 */
async function runInParallel(options, context) {
    // Start all commands immediately; pair each result with its command string
    // so failure warnings can name the offending command.
    const procs = options.commands.map((c) => createProcess(null, c, options.readyWhenStatus, options.color, calculateCwd(options.cwd, context), options.env ?? {}, true, options.usePty, options.streamOutput, options.tty, options.envFile).then((result) => ({
        result,
        command: c.command,
    })));
    let terminalOutput = '';
    if (options.readyWhenStatus.length) {
        // "Ready" mode: the first command to settle decides the outcome.
        const r = await Promise.race(procs);
        terminalOutput += r.result.terminalOutput;
        if (!r.result.success) {
            const output = `Warning: command "${r.command}" exited with non-zero status code`;
            terminalOutput += output;
            if (options.streamOutput) {
                process.stderr.write(output);
            }
            return { success: false, terminalOutput };
        }
        else {
            return { success: true, terminalOutput };
        }
    }
    else {
        // Normal mode: wait for every command, then report all failures at once.
        const r = await Promise.all(procs);
        terminalOutput += r.map((f) => f.result.terminalOutput).join('');
        const failed = r.filter((v) => !v.result.success);
        if (failed.length > 0) {
            const output = failed
                .map((f) => `Warning: command "${f.command}" exited with non-zero status code`)
                .join('\r\n');
            terminalOutput += output;
            if (options.streamOutput) {
                process.stderr.write(output);
            }
            return {
                success: false,
                terminalOutput,
            };
        }
        else {
            return {
                success: true,
                terminalOutput,
            };
        }
    }
}
function normalizeOptions(options) {

@@ -117,2 +163,163 @@ if (options.readyWhen && typeof options.readyWhen === 'string') {

}
/**
 * Runs the configured commands one after another, stopping at the first
 * failure.
 *
 * Returns `{ success, terminalOutput }` where `terminalOutput` is the
 * concatenated output of every command that ran, including the warning line
 * emitted when a command fails.
 */
async function runSerially(options, context) {
    // Lazily create the shared pseudo-terminal once per process, when supported.
    pseudoTerminal ??= pseudo_terminal_1.PseudoTerminal.isSupported() ? (0, pseudo_terminal_1.getPseudoTerminal)() : null;
    let terminalOutput = '';
    for (const c of options.commands) {
        // Serial runs never use readyWhenStatus matchers and prefer
        // `processEnv` over `env` when both are provided.
        const result = await createProcess(pseudoTerminal, c, [], options.color, calculateCwd(options.cwd, context), options.processEnv ?? options.env ?? {}, false, options.usePty, options.streamOutput, options.tty, options.envFile);
        terminalOutput += result.terminalOutput;
        if (!result.success) {
            const output = `Warning: command "${c.command}" exited with non-zero status code`;
            // Bug fix: append the warning to the aggregated output that is
            // actually returned. Previously it was appended to
            // `result.terminalOutput`, which had already been copied into the
            // aggregate, so the warning was silently dropped from the return
            // value (runInParallel appends to the aggregate as well).
            terminalOutput += output;
            if (options.streamOutput) {
                process.stderr.write(output);
            }
            return { success: false, terminalOutput };
        }
    }
    return { success: true, terminalOutput };
}
/**
 * Launches a single command, preferring the native pseudo-terminal when it is
 * usable and falling back to `child_process.exec` (nodeProcess) otherwise.
 *
 * The PTY path is only taken when ALL of these hold:
 * - a pseudoTerminal instance was provided and NX_NATIVE_COMMAND_RUNNER !== 'false'
 * - no output prefix is configured and no readyWhenStatus matchers are in play
 * - the command is not part of a parallel run, and `usePty` is enabled
 *
 * Resolves with `{ success, terminalOutput }`.
 */
async function createProcess(pseudoTerminal, commandConfig, readyWhenStatus = [], color, cwd, env, isParallel, usePty = true, streamOutput = true, tty, envFile) {
    env = processEnv(color, cwd, env, envFile);
    // The rust runCommand is always a tty, so it will not look nice in parallel and if we need prefixes
    // currently does not work properly in windows
    if (pseudoTerminal &&
        process.env.NX_NATIVE_COMMAND_RUNNER !== 'false' &&
        !commandConfig.prefix &&
        readyWhenStatus.length === 0 &&
        !isParallel &&
        usePty) {
        // Echo the command being run (dimmed "> cmd") ahead of its output.
        let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
        if (streamOutput) {
            process.stdout.write(terminalOutput);
        }
        const cp = pseudoTerminal.runCommand(commandConfig.command, {
            cwd,
            jsEnv: env,
            quiet: !streamOutput,
            tty,
        });
        // Track the child so the process-wide exit/signal handlers can kill it.
        childProcesses.add(cp);
        return new Promise((res) => {
            cp.onOutput((output) => {
                terminalOutput += output;
            });
            cp.onExit((code) => {
                // Codes >= 128 conventionally mean the child died from a
                // signal; propagate by exiting this process with the same code.
                if (code >= 128) {
                    process.exit(code);
                }
                else {
                    res({ success: code === 0, terminalOutput });
                }
            });
        });
    }
    return nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput);
}
/**
 * Runs a single command via `child_process.exec` (the non-PTY fallback path).
 * Captures stdout/stderr into a combined transcript, optionally streaming it
 * through the parent process, and resolves either when the command exits or
 * as soon as all `readyWhenStatus` matchers have been satisfied.
 */
function nodeProcess(commandConfig, cwd, env, readyWhenStatus, streamOutput = true) {
    let terminalOutput = chalk.dim('> ') + commandConfig.command + '\r\n\r\n';
    if (streamOutput) {
        process.stdout.write(terminalOutput);
    }
    return new Promise((res) => {
        const childProcess = (0, child_process_1.exec)(commandConfig.command, {
            maxBuffer: exports.LARGE_BUFFER,
            env,
            cwd,
            windowsHide: false,
        });
        // Track the child so process-wide exit/signal handlers can kill it.
        childProcesses.add(childProcess);
        // Shared handler for stdout/stderr chunks: decorate, record, stream,
        // and resolve early once every readyWhen matcher has fired.
        const handleChunk = (target, chunk) => {
            const decorated = addColorAndPrefix(chunk, commandConfig);
            terminalOutput += decorated;
            if (streamOutput) {
                target.write(decorated);
            }
            if (readyWhenStatus.length && isReady(readyWhenStatus, chunk.toString())) {
                res({ success: true, terminalOutput });
            }
        };
        childProcess.stdout.on('data', (data) => handleChunk(process.stdout, data));
        childProcess.stderr.on('data', (err) => handleChunk(process.stderr, err));
        childProcess.on('error', (err) => {
            const decorated = addColorAndPrefix(err.toString(), commandConfig);
            terminalOutput += decorated;
            if (streamOutput) {
                process.stderr.write(decorated);
            }
            res({ success: false, terminalOutput });
        });
        childProcess.on('exit', (code) => {
            childProcesses.delete(childProcess);
            // In readyWhen mode, only report exit if all matchers were seen;
            // otherwise the early resolution above already handled it.
            if (!readyWhenStatus.length || isReady(readyWhenStatus)) {
                res({ success: code === 0, terminalOutput });
            }
        });
    });
}
/**
 * Decorates raw command output: prefixes every non-blank line with a bold
 * (optionally colored) prefix, then applies the configured foreground and
 * background colors to the whole chunk.
 */
function addColorAndPrefix(out, config) {
    if (config.prefix) {
        const decoratedLines = out.split('\n').map((line) => {
            if (line.trim().length === 0) {
                // Leave blank lines unprefixed.
                return line;
            }
            let prefixText = config.prefix;
            if (config.prefixColor && chalk[config.prefixColor]) {
                prefixText = chalk[config.prefixColor](prefixText);
            }
            return `${chalk.bold(prefixText)} ${line}`;
        });
        out = decoratedLines.join('\n');
    }
    if (config.color && chalk[config.color]) {
        out = chalk[config.color](out);
    }
    if (config.bgColor && chalk[config.bgColor]) {
        out = chalk[config.bgColor](out);
    }
    return out;
}
/**
 * Resolves the working directory for a command: defaults to the workspace
 * root when unset, passes absolute paths through unchanged, and resolves
 * relative paths against the workspace root.
 */
function calculateCwd(cwd, context) {
    if (!cwd) {
        return context.root;
    }
    return path.isAbsolute(cwd) ? cwd : path.join(context.root, cwd);
}
/**
 * Builds the environment for a spawned command. Layering (later wins):
 * - process.env merged with the npm-run-path env for `cwd` (adds local
 *   node_modules binaries to the PATH)
 * - variables from `envFile`, loaded into that local env when
 *   NX_LOAD_DOT_ENV_FILES !== 'false' (override behavior depends on
 *   loadEnvVarsFile / dotenv semantics — confirm there)
 * - the `env` option from the executor options (highest precedence)
 *
 * PATH/Path are then restored from the local env so executor-provided values
 * cannot break resolution of local node_modules binaries, and FORCE_COLOR is
 * set when `color` is requested.
 */
function processEnv(color, cwd, envOptionFromExecutor, envFile) {
    let localEnv = (0, npm_run_path_1.env)({ cwd: cwd ?? process.cwd() });
    localEnv = {
        ...process.env,
        ...localEnv,
    };
    if (process.env.NX_LOAD_DOT_ENV_FILES !== 'false' && envFile) {
        loadEnvVarsFile(envFile, localEnv);
    }
    let res = {
        ...localEnv,
        ...envOptionFromExecutor,
    };
    // need to override PATH to make sure we are using the local node_modules
    if (localEnv.PATH)
        res.PATH = localEnv.PATH; // UNIX-like
    if (localEnv.Path)
        res.Path = localEnv.Path; // Windows
    if (color) {
        res.FORCE_COLOR = `${color}`;
    }
    return res;
}
function interpolateArgsIntoCommand(command, opts, forwardAllArgs) {

@@ -205,3 +412,3 @@ if (command.indexOf('{args.') > -1) {

let registered = false;
function registerProcessListener(runningTask, pseudoTerminal) {
function registerProcessListener() {
if (registered) {

@@ -217,10 +424,22 @@ return;

}
runningTask.send(message);
childProcesses.forEach((p) => {
if ('connected' in p && p.connected) {
p.send(message);
}
});
});
// Terminate any task processes on exit
process.on('exit', () => {
runningTask.kill();
childProcesses.forEach((p) => {
if ('connected' in p ? p.connected : p.isAlive) {
p.kill();
}
});
});
process.on('SIGINT', () => {
runningTask.kill('SIGTERM');
childProcesses.forEach((p) => {
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});
// we exit here because we don't need to write anything to cache.

@@ -230,3 +449,7 @@ process.exit((0, exit_codes_1.signalToCode)('SIGINT'));

process.on('SIGTERM', () => {
runningTask.kill('SIGTERM');
childProcesses.forEach((p) => {
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});
// no exit here because we expect child processes to terminate which

@@ -236,3 +459,7 @@ // will store results to the cache and will terminate this process

process.on('SIGHUP', () => {
runningTask.kill('SIGTERM');
childProcesses.forEach((p) => {
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});
// no exit here because we expect child processes to terminate which

@@ -259,1 +486,12 @@ // will store results to the cache and will terminate this process

}
/**
 * Records which `readyWhenStatus` matchers are satisfied by `data` — marking
 * at most the first matching entry's `found` flag per call (mutates the
 * entries) — and reports whether every matcher has been seen so far.
 */
function isReady(readyWhenStatus = [], data) {
    if (data) {
        const text = data.toString();
        const firstMatch = readyWhenStatus.find((status) => text.includes(status.stringToMatch));
        if (firstMatch) {
            firstMatch.found = true;
        }
    }
    return readyWhenStatus.every((status) => status.found);
}

@@ -10,11 +10,2 @@ /* auto-generated by NAPI-RS */

}
export declare class AppLifeCycle {
constructor(projectNames: Array<string>, tasks: Array<Task>, nxArgs: object, overrides: object)
scheduleTask(task: Task): void
startTasks(tasks: Array<Task>, metadata: object): void
printTaskTerminalOutput(task: Task, status: string, output: string): void
endTasks(taskResults: Array<TaskResult>, metadata: TaskMetadata): void
__runCommandsForTask(task: Task, options: NormalizedRunCommandsOptions): Promise<RunningTask>
}
export declare class ChildProcess {

@@ -24,3 +15,2 @@ kill(): void

onOutput(callback: (message: string) => void): void
cleanup(): void
}

@@ -69,12 +59,2 @@

export declare class RunningTask {
/**
* Get results always needs the up to date pty instance, so we can't embed this in the __runCommandsForTask
* method, and instead need to look up the pty instance in the TasksList component.
*/
getResults(): Promise<{ code: number; terminalOutput: string }>
onExit(callback: (arg0: number, arg1: string) => any): void
kill(signal?: number | undefined | null): Promise<void>
}
export declare class RustPseudoTerminal {

@@ -119,2 +99,10 @@ constructor()

glob(globs: Array<string>, exclude?: Array<string> | undefined | null): Array<string>
/**
* Performs multiple glob pattern matches against workspace files in parallel
* @returns An array of arrays, where each inner array contains the file paths
* that matched the corresponding glob pattern in the input. The outer array maintains the same order
* as the input globs.
*/
multiGlob(globs: Array<string>, exclude?: Array<string> | undefined | null): Array<Array<string>>
hashFilesMatchingGlobs(globGroups: Array<Array<string>>): Array<string>
hashFilesMatchingGlob(globs: Array<string>, exclude?: Array<string> | undefined | null): string

@@ -139,4 +127,2 @@ incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>

export declare export function createExternalAppLifecycle(projectNames: Array<string>, tasks: Array<Task>, nxArgs: object, overrides: object): ExternalObject<AppLifeCycle>
export interface DepsOutputsInput {

@@ -169,4 +155,2 @@ dependentTasksOutputFiles: string

export declare export function extractLifeCycleRef(appLifecycle: ExternalObject<AppLifeCycle>): AppLifeCycle
export interface FileData {

@@ -198,3 +182,3 @@ file: string

export declare export function hashArray(input: Array<string>): string
export declare export function hashArray(input: Array<string | undefined | null>): string

@@ -219,4 +203,2 @@ export interface HashDetails {

export declare export function initTerminal(appLifecycle: ExternalObject<AppLifeCycle>, doneCallback: () => any): void
export interface InputsInput {

@@ -230,28 +212,2 @@ input: string

export interface NormalizedCommandOptions {
command: string
forwardAllArgs?: boolean
}
export interface NormalizedRunCommandsOptions {
commands: Array<NormalizedCommandOptions>
unknownOptions?: Record<string, any>
parsedArgs: Record<string, any>
unparsedCommandArgs?: Record<string, any>
args?: string
readyWhenStatus: Array<ReadyWhenStatus>
command?: string | string[]
color?: boolean
parallel?: boolean
readyWhen?: Array<string>
cwd?: string
env?: Record<string, string>
forwardAllArgs?: boolean
envFile?: string
__unparsed__: Array<string>
usePty?: boolean
streamOutput?: boolean
tty?: boolean
}
/** Stripped version of the NxJson interface for use in rust */

@@ -287,11 +243,4 @@ export interface NxJson {

export interface ReadyWhenStatus {
stringToMatch: string
found: boolean
}
export declare export function remove(src: string): void
export declare export function restoreTerminal(): void
export interface RuntimeInput {

@@ -317,16 +266,2 @@ runtime: string

export interface Task {
id: string
target: TaskTarget
overrides: any
outputs: Array<string>
projectRoot?: string
hash?: string
startTime?: number
endTime?: number
cache?: boolean
parallelism: boolean
continuous?: boolean
}
export interface TaskGraph {

@@ -338,22 +273,2 @@ roots: Array<string>

export interface TaskMetadata {
groupId: number
}
export interface TaskOutput {
code: number
terminalOutput: string
}
export interface TaskOverrides {
}
export interface TaskResult {
task: Task
status: string
code: number
terminalOutput?: string
}
export interface TaskRun {

@@ -373,8 +288,2 @@ hash: string

export interface TaskTarget {
project: string
target: string
configuration?: string
}
export declare export function testOnlyTransferFileMap(projectFiles: Record<string, Array<FileData>>, nonProjectFiles: Array<FileData>): NxWorkspaceFilesExternals

@@ -381,0 +290,0 @@

@@ -364,3 +364,2 @@ // prettier-ignore

module.exports.AppLifeCycle = nativeBinding.AppLifeCycle
module.exports.ChildProcess = nativeBinding.ChildProcess

@@ -372,3 +371,2 @@ module.exports.FileLock = nativeBinding.FileLock

module.exports.NxTaskHistory = nativeBinding.NxTaskHistory
module.exports.RunningTask = nativeBinding.RunningTask
module.exports.RustPseudoTerminal = nativeBinding.RustPseudoTerminal

@@ -382,6 +380,4 @@ module.exports.TaskDetails = nativeBinding.TaskDetails

module.exports.copy = nativeBinding.copy
module.exports.createExternalAppLifecycle = nativeBinding.createExternalAppLifecycle
module.exports.EventType = nativeBinding.EventType
module.exports.expandOutputs = nativeBinding.expandOutputs
module.exports.extractLifeCycleRef = nativeBinding.extractLifeCycleRef
module.exports.findImports = nativeBinding.findImports

@@ -393,6 +389,4 @@ module.exports.getBinaryTarget = nativeBinding.getBinaryTarget

module.exports.hashFile = nativeBinding.hashFile
module.exports.initTerminal = nativeBinding.initTerminal
module.exports.IS_WASM = nativeBinding.IS_WASM
module.exports.remove = nativeBinding.remove
module.exports.restoreTerminal = nativeBinding.restoreTerminal
module.exports.testOnlyTransferFileMap = nativeBinding.testOnlyTransferFileMap

@@ -399,0 +393,0 @@ module.exports.transferProjectGraph = nativeBinding.transferProjectGraph

@@ -26,7 +26,7 @@ "use strict";

const readResult = tsModule.readConfigFile(tsConfigPath, tsModule.sys.readFile);
// we don't need to scan the files, we only care about options
// We only care about options, so we don't need to scan source files, and thus
// `readDirectory` is stubbed for performance.
const host = {
...tsModule.sys,
readDirectory: () => [],
readFile: () => '',
fileExists: tsModule.sys.fileExists,
};

@@ -33,0 +33,0 @@ return tsModule.parseJsonConfigFileContent(readResult.config, host, (0, path_1.dirname)(tsConfigPath)).options;

@@ -23,4 +23,4 @@ "use strict";

}
const plugins = await (0, get_plugins_1.getPlugins)();
return (0, globs_1.combineGlobPatterns)((0, retrieve_workspace_files_1.configurationGlobs)(plugins));
const plugins = (await (0, get_plugins_1.getPlugins)()).filter((p) => !!p.createNodes);
return (0, globs_1.combineGlobPatterns)((0, retrieve_workspace_files_1.getGlobPatternsOfPlugins)(plugins));
})();

@@ -27,0 +27,0 @@ const touchedProjects = new Set();

@@ -219,3 +219,6 @@ "use strict";

const errorBodyLines = [
`${error.errors.length > 1 ? `${error.errors.length} errors` : 'An error'} occurred while processing files for the ${pluginName} plugin.`,
`${error.errors.length > 1 ? `${error.errors.length} errors` : 'An error'} occurred while processing files for the ${pluginName} plugin${error.pluginIndex
? ` (Defined at nx.json#plugins[${error.pluginIndex}])`
: ''}`,
`.`,
];

@@ -222,0 +225,0 @@ const errorStackLines = [];

@@ -111,3 +111,3 @@ "use strict";

const ret = [
await Promise.all(plugins.map(async (plugin) => {
await Promise.all(plugins.map(async (plugin, index) => {
const pluginPath = typeof plugin === 'string' ? plugin : plugin.plugin;

@@ -118,2 +118,3 @@ performance.mark(`Load Nx Plugin: ${pluginPath} - start`);

const res = await loadedPluginPromise;
res.index = index;
performance.mark(`Load Nx Plugin: ${pluginPath} - end`);

@@ -120,0 +121,0 @@ performance.measure(`Load Nx Plugin: ${pluginPath}`, `Load Nx Plugin: ${pluginPath} - start`, `Load Nx Plugin: ${pluginPath} - end`);

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const node_perf_hooks_1 = require("node:perf_hooks");
node_perf_hooks_1.performance.mark(`plugin worker ${process.pid} code loading -- start`);
const messaging_1 = require("./messaging");

@@ -11,2 +13,4 @@ const serializable_error_1 = require("../../../utils/serializable-error");

}
node_perf_hooks_1.performance.mark(`plugin worker ${process.pid} code loading -- end`);
node_perf_hooks_1.performance.measure(`plugin worker ${process.pid} code loading`, `plugin worker ${process.pid} code loading -- start`, `plugin worker ${process.pid} code loading -- end`);
global.NX_GRAPH_CREATION = true;

@@ -13,0 +17,0 @@ global.NX_PLUGIN_WORKER = true;

import type { ProjectGraph } from '../../config/project-graph';
import type { PluginConfiguration } from '../../config/nx-json';
import { type PluginConfiguration } from '../../config/nx-json';
import type { RawProjectGraphDependency } from '../project-graph-builder';
import type { CreateDependenciesContext, CreateMetadataContext, CreateNodesContextV2, CreateNodesResult, NxPluginV2, PostTasksExecutionContext, PreTasksExecutionContext, ProjectsMetadata } from './public-api';
export declare class LoadedNxPlugin {
index?: number;
readonly name: string;

@@ -7,0 +8,0 @@ readonly createNodes?: [

@@ -7,3 +7,3 @@ "use strict";

const enabled_1 = require("./isolation/enabled");
const client_1 = require("../../daemon/client/client");
const enabled_2 = require("../../daemon/client/enabled");
class LoadedNxPlugin {

@@ -62,3 +62,4 @@ constructor(plugin, pluginDefinition) {

let originalEnv = process.env;
if ((0, enabled_1.isIsolationEnabled)() || (0, client_1.isDaemonEnabled)()) {
if ((0, enabled_1.isIsolationEnabled)() ||
(0, enabled_2.isDaemonEnabled)(context.nxJsonConfiguration)) {
process.env = new Proxy(originalEnv, {

@@ -65,0 +66,0 @@ set: (target, key, value) => {

@@ -8,4 +8,5 @@ "use strict";

const client_1 = require("../../daemon/client/client");
const enabled_1 = require("../../daemon/client/enabled");
async function runPreTasksExecution(pluginContext) {
if ((0, is_on_daemon_1.isOnDaemon)() || !(0, client_1.isDaemonEnabled)()) {
if ((0, is_on_daemon_1.isOnDaemon)() || !(0, enabled_1.isDaemonEnabled)(pluginContext.nxJsonConfiguration)) {
performance.mark(`preTasksExecution:start`);

@@ -25,3 +26,3 @@ const plugins = await (0, get_plugins_1.getPlugins)(pluginContext.workspaceRoot);

}));
if (!(0, client_1.isDaemonEnabled)()) {
if (!(0, enabled_1.isDaemonEnabled)(pluginContext.nxJsonConfiguration)) {
applyProcessEnvs(envs);

@@ -46,3 +47,3 @@ }

async function runPostTasksExecution(context) {
if ((0, is_on_daemon_1.isOnDaemon)() || !(0, client_1.isDaemonEnabled)()) {
if ((0, is_on_daemon_1.isOnDaemon)() || !(0, enabled_1.isDaemonEnabled)(context.nxJsonConfiguration)) {
performance.mark(`postTasksExecution:start`);

@@ -49,0 +50,0 @@ const plugins = await (0, get_plugins_1.getPlugins)();

@@ -1,2 +0,2 @@

import { CreateNodesContextV2, CreateNodesFunction, CreateNodesResult } from './public-api';
export declare function createNodesFromFiles<T = unknown>(createNodes: CreateNodesFunction<T>, configFiles: readonly string[], options: T, context: CreateNodesContextV2): Promise<[file: string, value: CreateNodesResult][]>;
import { CreateNodesContext, CreateNodesContextV2, CreateNodesResult } from './public-api';
export declare function createNodesFromFiles<T = unknown>(createNodes: (projectConfigurationFile: string, options: T | undefined, context: CreateNodesContext, idx: number) => CreateNodesResult | Promise<CreateNodesResult>, configFiles: readonly string[], options: T, context: CreateNodesContextV2): Promise<[file: string, value: CreateNodesResult][]>;

@@ -8,3 +8,3 @@ "use strict";

const errors = [];
await Promise.all(configFiles.map(async (file) => {
await Promise.all(configFiles.map(async (file, idx) => {
try {

@@ -14,3 +14,3 @@ const value = await createNodes(file, options, {

configFiles,
});
}, idx);
if (value) {

@@ -17,0 +17,0 @@ results.push([file, value]);

@@ -283,3 +283,3 @@ "use strict";

finally {
lock.unlock();
lock?.unlock();
}

@@ -286,0 +286,0 @@ }

@@ -38,3 +38,3 @@ import { NxJsonConfiguration, TargetDefaults } from '../../config/nx-json';

*/
export declare function createProjectConfigurations(root: string, nxJson: NxJsonConfiguration, projectFiles: string[], // making this parameter allows devkit to pick up newly created projects
export declare function createProjectConfigurationsWithPlugins(root: string, nxJson: NxJsonConfiguration, projectFiles: string[][], // making this parameter allows devkit to pick up newly created projects
plugins: LoadedNxPlugin[]): Promise<ConfigurationResult>;

@@ -41,0 +41,0 @@ export declare function findMatchingConfigFiles(projectFiles: string[], pattern: string, include: string[], exclude: string[]): string[];

@@ -5,3 +5,3 @@ "use strict";

exports.mergeMetadata = mergeMetadata;
exports.createProjectConfigurations = createProjectConfigurations;
exports.createProjectConfigurationsWithPlugins = createProjectConfigurationsWithPlugins;
exports.findMatchingConfigFiles = findMatchingConfigFiles;

@@ -226,3 +226,3 @@ exports.readProjectConfigurationsFromRootMap = readProjectConfigurationsFromRootMap;

*/
async function createProjectConfigurations(root = workspace_root_1.workspaceRoot, nxJson, projectFiles, // making this parameter allows devkit to pick up newly created projects
async function createProjectConfigurationsWithPlugins(root = workspace_root_1.workspaceRoot, nxJson, projectFiles, // making this parameter allows devkit to pick up newly created projects
plugins) {

@@ -253,3 +253,3 @@ perf_hooks_1.performance.mark('build-project-configs:start');

// We iterate over plugins first - this ensures that plugins specified first take precedence.
for (const [index, { createNodes: createNodesTuple, include, exclude, name: pluginName },] of plugins.entries()) {
for (const [index, { index: pluginIndex, createNodes: createNodesTuple, include, exclude, name: pluginName, },] of plugins.entries()) {
const [pattern, createNodes] = createNodesTuple ?? [];

@@ -259,3 +259,3 @@ if (!pattern) {

}
const matchingConfigFiles = findMatchingConfigFiles(projectFiles, pattern, include, exclude);
const matchingConfigFiles = findMatchingConfigFiles(projectFiles[index], pattern, include, exclude);
inProgressPlugins.add(pluginName);

@@ -272,4 +272,6 @@ let r = createNodes(matchingConfigFiles, {

new error_types_1.AggregateCreateNodesError([[null, e]], []);
if (pluginIndex) {
error.pluginIndex = pluginIndex;
}
(0, error_types_1.formatAggregateCreateNodesError)(error, pluginName);
error.pluginIndex = index;
// This represents a single plugin erroring out with a hard error.

@@ -297,3 +299,3 @@ errors.push(error);

sourceMaps: configurationSourceMaps,
matchingProjectFiles: projectFiles,
matchingProjectFiles: projectFiles.flat(),
};

@@ -307,3 +309,3 @@ }

sourceMaps: configurationSourceMaps,
matchingProjectFiles: projectFiles,
matchingProjectFiles: projectFiles.flat(),
});

@@ -319,3 +321,3 @@ }

for (const result of results.flat()) {
const [pluginName, file, nodes, index] = result;
const [pluginName, file, nodes, pluginIndex] = result;
const { projects: projectNodes, externalNodes: pluginExternalNodes } = nodes;

@@ -340,3 +342,3 @@ const sourceInfo = [file, pluginName];

error,
pluginIndex: index,
pluginIndex,
}));

@@ -343,0 +345,0 @@ }

@@ -26,2 +26,2 @@ import { ProjectConfiguration } from '../../config/workspace-json-project-json';

export declare function retrieveProjectConfigurationsWithoutPluginInference(root: string): Promise<Record<string, ProjectConfiguration>>;
export declare function configurationGlobs(plugins: Array<LoadedNxPlugin>): string[];
export declare function getGlobPatternsOfPlugins(plugins: Array<LoadedNxPlugin>): string[];

@@ -8,3 +8,3 @@ "use strict";

exports.retrieveProjectConfigurationsWithoutPluginInference = retrieveProjectConfigurationsWithoutPluginInference;
exports.configurationGlobs = configurationGlobs;
exports.getGlobPatternsOfPlugins = getGlobPatternsOfPlugins;
const perf_hooks_1 = require("perf_hooks");

@@ -45,5 +45,6 @@ const angular_json_1 = require("../../adapter/angular-json");

async function retrieveProjectConfigurations(plugins, workspaceRoot, nxJson) {
const globPatterns = configurationGlobs(plugins);
const workspaceFiles = await (0, workspace_context_1.globWithWorkspaceContext)(workspaceRoot, globPatterns);
return (0, project_configuration_utils_1.createProjectConfigurations)(workspaceRoot, nxJson, workspaceFiles, plugins);
const pluginsWithCreateNodes = plugins.filter((p) => !!p.createNodes);
const globPatterns = getGlobPatternsOfPlugins(pluginsWithCreateNodes);
const pluginConfigFiles = await (0, workspace_context_1.multiGlobWithWorkspaceContext)(workspaceRoot, globPatterns);
return (0, project_configuration_utils_1.createProjectConfigurationsWithPlugins)(workspaceRoot, nxJson, pluginConfigFiles, pluginsWithCreateNodes);
}

@@ -61,5 +62,6 @@ async function retrieveProjectConfigurationsWithAngularProjects(workspaceRoot, nxJson) {

}
function retrieveProjectConfigurationPaths(root, plugins) {
const projectGlobPatterns = configurationGlobs(plugins);
return (0, workspace_context_1.globWithWorkspaceContext)(root, projectGlobPatterns);
async function retrieveProjectConfigurationPaths(root, plugins) {
const projectGlobPatterns = getGlobPatternsOfPlugins(plugins);
const pluginConfigFiles = await (0, workspace_context_1.multiGlobWithWorkspaceContext)(root, projectGlobPatterns);
return pluginConfigFiles.flat();
}

@@ -71,3 +73,3 @@ const projectsWithoutPluginCache = new Map();

const plugins = await (0, get_plugins_1.getOnlyDefaultPlugins)(); // only load default plugins
const projectGlobPatterns = await retrieveProjectConfigurationPaths(root, plugins);
const projectGlobPatterns = getGlobPatternsOfPlugins(plugins);
const cacheKey = root + ',' + projectGlobPatterns.join(',');

@@ -77,15 +79,9 @@ if (projectsWithoutPluginCache.has(cacheKey)) {

}
const projectFiles = (await (0, workspace_context_1.globWithWorkspaceContext)(root, projectGlobPatterns)) ?? [];
const { projects } = await (0, project_configuration_utils_1.createProjectConfigurations)(root, nxJson, projectFiles, plugins);
const projectFiles = (await (0, workspace_context_1.multiGlobWithWorkspaceContext)(root, projectGlobPatterns)) ?? [];
const { projects } = await (0, project_configuration_utils_1.createProjectConfigurationsWithPlugins)(root, nxJson, projectFiles, plugins);
projectsWithoutPluginCache.set(cacheKey, projects);
return projects;
}
function configurationGlobs(plugins) {
const globPatterns = [];
for (const plugin of plugins) {
if ('createNodes' in plugin && plugin.createNodes) {
globPatterns.push(plugin.createNodes[0]);
}
}
return globPatterns;
/**
 * Collects each plugin's config-file glob pattern (the first element of its
 * `createNodes` tuple), preserving plugin order.
 */
function getGlobPatternsOfPlugins(plugins) {
    const patterns = [];
    for (const plugin of plugins) {
        patterns.push(plugin.createNodes[0]);
    }
    return patterns;
}

@@ -14,5 +14,2 @@ import { ProjectGraph, ProjectGraphProjectNode } from '../config/project-graph';

};
readonly continuousDependencies: {
[k: string]: string[];
};
private readonly allTargetNames;

@@ -19,0 +16,0 @@ constructor(extraTargetDependencies: TargetDependencies, projectGraph: ProjectGraph);

@@ -20,3 +20,2 @@ "use strict";

this.dependencies = {};
this.continuousDependencies = {};
const allTargetNames = new Set();

@@ -41,3 +40,2 @@ for (const projectName in projectGraph.nodes) {

this.dependencies[task.id] = [];
this.continuousDependencies[task.id] = [];
}

@@ -57,3 +55,2 @@ }

delete this.dependencies[t];
delete this.continuousDependencies[t];
}

@@ -64,5 +61,2 @@ }

}
for (let d of Object.keys(this.continuousDependencies)) {
this.continuousDependencies[d] = this.continuousDependencies[d].filter((dd) => !!initialTasks[dd]);
}
}

@@ -77,12 +71,3 @@ filterDummyTasks(this.dependencies);

}
filterDummyTasks(this.continuousDependencies);
for (const taskId of Object.keys(this.continuousDependencies)) {
if (this.continuousDependencies[taskId].length > 0) {
this.continuousDependencies[taskId] = [
...new Set(this.continuousDependencies[taskId].filter((d) => d !== taskId)).values(),
];
}
}
return Object.keys(this.tasks).filter((d) => this.dependencies[d].length === 0 &&
this.continuousDependencies[d].length === 0);
return Object.keys(this.dependencies).filter((d) => this.dependencies[d].length === 0);
}

@@ -129,2 +114,5 @@ processTask(task, projectUsedToDeriveDependencies, configuration, overrides) {

const selfTaskId = this.getId(selfProject.name, dependencyConfig.target, resolvedConfiguration);
if (task.id !== selfTaskId) {
this.dependencies[task.id].push(selfTaskId);
}
if (!this.tasks[selfTaskId]) {

@@ -134,13 +122,4 @@ const newTask = this.createTask(selfTaskId, selfProject, dependencyConfig.target, resolvedConfiguration, taskOverrides);

this.dependencies[selfTaskId] = [];
this.continuousDependencies[selfTaskId] = [];
this.processTask(newTask, newTask.target.project, configuration, overrides);
}
if (task.id !== selfTaskId) {
if (this.tasks[selfTaskId].continuous) {
this.continuousDependencies[task.id].push(selfTaskId);
}
else {
this.dependencies[task.id].push(selfTaskId);
}
}
}

@@ -160,10 +139,4 @@ }

const depTargetId = this.getId(depProject.name, dependencyConfig.target, resolvedConfiguration);
const depTargetConfiguration = this.projectGraph.nodes[depProject.name].data.targets[dependencyConfig.target];
if (task.id !== depTargetId) {
if (depTargetConfiguration.continuous) {
this.continuousDependencies[task.id].push(depTargetId);
}
else {
this.dependencies[task.id].push(depTargetId);
}
this.dependencies[task.id].push(depTargetId);
}

@@ -174,3 +147,2 @@ if (!this.tasks[depTargetId]) {

this.dependencies[depTargetId] = [];
this.continuousDependencies[depTargetId] = [];
this.processTask(newTask, newTask.target.project, configuration, overrides);

@@ -187,3 +159,2 @@ }

this.dependencies[dummyId] ??= [];
this.continuousDependencies[dummyId] ??= [];
const noopTask = this.createDummyTask(dummyId, task);

@@ -221,3 +192,2 @@ this.processTask(noopTask, depProject.name, configuration, overrides);

parallelism: project.data.targets[target].parallelism ?? true,
continuous: project.data.targets[target].continuous ?? false,
};

@@ -248,3 +218,2 @@ }

dependencies: p.dependencies,
continuousDependencies: p.continuousDependencies,
};

@@ -251,0 +220,0 @@ }

import { DefaultTasksRunnerOptions } from './default-tasks-runner';
import { Batch } from './tasks-schedule';
import { BatchResults } from './batch/batch-messages';
import { Task, TaskGraph } from '../config/task-graph';
import { PseudoTtyProcess } from './pseudo-terminal';
import { BatchProcess } from './running-tasks/batch-process';
import { RunningTask } from './running-tasks/running-task';
export declare class ForkedProcessTaskRunner {

@@ -15,3 +13,3 @@ private readonly options;

init(): Promise<void>;
forkProcessForBatch({ executorName, taskGraph: batchTaskGraph }: Batch, fullTaskGraph: TaskGraph, env: NodeJS.ProcessEnv): Promise<BatchProcess>;
forkProcessForBatch({ executorName, taskGraph: batchTaskGraph }: Batch, fullTaskGraph: TaskGraph, env: NodeJS.ProcessEnv): Promise<BatchResults>;
forkProcessLegacy(task: Task, { temporaryOutputPath, streamOutput, pipeOutput, taskGraph, env, }: {

@@ -23,3 +21,6 @@ temporaryOutputPath: string;

env: NodeJS.ProcessEnv;
}): Promise<RunningTask>;
}): Promise<{
code: number;
terminalOutput: string;
}>;
forkProcess(task: Task, { temporaryOutputPath, streamOutput, taskGraph, env, disablePseudoTerminal, }: {

@@ -32,8 +33,13 @@ temporaryOutputPath: string;

disablePseudoTerminal: boolean;
}): Promise<RunningTask | PseudoTtyProcess>;
}): Promise<{
code: number;
terminalOutput: string;
}>;
private forkProcessWithPseudoTerminal;
private forkProcessPipeOutputCapture;
private forkProcessWithPrefixAndNotTTY;
private forkProcessDirectOutputCapture;
private readTerminalOutput;
private writeTerminalOutput;
private setupProcessEventListeners;
}

@@ -6,2 +6,3 @@ "use strict";

const child_process_1 = require("child_process");
const chalk = require("chalk");
const output_1 = require("../utils/output");

@@ -12,6 +13,5 @@ const utils_1 = require("./utils");

const strip_indents_1 = require("../utils/strip-indents");
const stream_1 = require("stream");
const pseudo_terminal_1 = require("./pseudo-terminal");
const exit_codes_1 = require("../utils/exit-codes");
const node_child_process_1 = require("./running-tasks/node-child-process");
const batch_process_1 = require("./running-tasks/batch-process");
const forkScript = (0, path_1.join)(__dirname, './fork.js');

@@ -36,32 +36,66 @@ const workerPath = (0, path_1.join)(__dirname, './batch/run-batch.js');

// TODO: vsavkin delegate terminal output printing
async forkProcessForBatch({ executorName, taskGraph: batchTaskGraph }, fullTaskGraph, env) {
const count = Object.keys(batchTaskGraph.tasks).length;
if (count > 1) {
output_1.output.logSingleLine(`Running ${output_1.output.bold(count)} ${output_1.output.bold('tasks')} with ${output_1.output.bold(executorName)}`);
}
else {
const args = (0, utils_1.getPrintableCommandArgsForTask)(Object.values(batchTaskGraph.tasks)[0]);
output_1.output.logCommand(args.join(' '));
}
const p = (0, child_process_1.fork)(workerPath, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
env,
forkProcessForBatch({ executorName, taskGraph: batchTaskGraph }, fullTaskGraph, env) {
return new Promise((res, rej) => {
try {
const count = Object.keys(batchTaskGraph.tasks).length;
if (count > 1) {
output_1.output.logSingleLine(`Running ${output_1.output.bold(count)} ${output_1.output.bold('tasks')} with ${output_1.output.bold(executorName)}`);
}
else {
const args = (0, utils_1.getPrintableCommandArgsForTask)(Object.values(batchTaskGraph.tasks)[0]);
output_1.output.logCommand(args.join(' '));
}
const p = (0, child_process_1.fork)(workerPath, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
env,
});
this.processes.add(p);
p.once('exit', (code, signal) => {
this.processes.delete(p);
if (code === null)
code = (0, exit_codes_1.signalToCode)(signal);
if (code !== 0) {
const results = {};
for (const rootTaskId of batchTaskGraph.roots) {
results[rootTaskId] = {
success: false,
terminalOutput: '',
};
}
rej(new Error(`"${executorName}" exited unexpectedly with code: ${code}`));
}
});
p.on('message', (message) => {
switch (message.type) {
case batch_messages_1.BatchMessageType.CompleteBatchExecution: {
res(message.results);
break;
}
case batch_messages_1.BatchMessageType.RunTasks: {
break;
}
default: {
// Re-emit any non-batch messages from the task process
if (process.send) {
process.send(message);
}
}
}
});
// Start the tasks
p.send({
type: batch_messages_1.BatchMessageType.RunTasks,
executorName,
batchTaskGraph,
fullTaskGraph,
});
}
catch (e) {
rej(e);
}
});
const cp = new batch_process_1.BatchProcess(p, executorName);
this.processes.add(cp);
cp.onExit(() => {
this.processes.delete(cp);
});
// Start the tasks
cp.send({
type: batch_messages_1.BatchMessageType.RunTasks,
executorName,
batchTaskGraph,
fullTaskGraph,
});
return cp;
}
async forkProcessLegacy(task, { temporaryOutputPath, streamOutput, pipeOutput, taskGraph, env, }) {
return pipeOutput
? this.forkProcessWithPrefixAndNotTTY(task, {
? await this.forkProcessPipeOutputCapture(task, {
temporaryOutputPath,

@@ -72,3 +106,3 @@ streamOutput,

})
: this.forkProcessDirectOutputCapture(task, {
: await this.forkProcessDirectOutputCapture(task, {
temporaryOutputPath,

@@ -127,78 +161,133 @@ streamOutput,

});
p.onExit((code) => {
if (code > 128) {
process.exit(code);
}
this.processes.delete(p);
this.writeTerminalOutput(temporaryOutputPath, terminalOutput);
return new Promise((res) => {
p.onExit((code) => {
// If the exit code is greater than 128, it's a special exit code for a signal
if (code >= 128) {
process.exit(code);
}
this.writeTerminalOutput(temporaryOutputPath, terminalOutput);
res({
code,
terminalOutput,
});
});
});
return p;
}
    // Forks the task with piped (non-TTY) stdout/stderr. Currently a thin
    // alias for forkProcessWithPrefixAndNotTTY, which performs the actual
    // fork, per-project prefixing, and terminal-output capture.
    forkProcessPipeOutputCapture(task, { streamOutput, temporaryOutputPath, taskGraph, env, }) {
        return this.forkProcessWithPrefixAndNotTTY(task, {
            streamOutput,
            temporaryOutputPath,
            taskGraph,
            env,
        });
    }
forkProcessWithPrefixAndNotTTY(task, { streamOutput, temporaryOutputPath, taskGraph, env, }) {
try {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
if (streamOutput) {
output_1.output.logCommand(args.join(' '));
}
const p = (0, child_process_1.fork)(this.cliPath, {
stdio: ['inherit', 'pipe', 'pipe', 'ipc'],
env,
});
// Send message to run the executor
p.send({
targetDescription: task.target,
overrides: task.overrides,
taskGraph,
isVerbose: this.verbose,
});
const cp = new node_child_process_1.NodeChildProcessWithNonDirectOutput(p, {
streamOutput,
prefix: task.target.project,
});
this.processes.add(cp);
cp.onExit((code, terminalOutput) => {
this.processes.delete(cp);
if (!streamOutput) {
this.options.lifeCycle.printTaskTerminalOutput(task, code === 0 ? 'success' : 'failure', terminalOutput);
return new Promise((res, rej) => {
try {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
if (streamOutput) {
output_1.output.logCommand(args.join(' '));
}
this.writeTerminalOutput(temporaryOutputPath, terminalOutput);
});
return cp;
}
catch (e) {
console.error(e);
throw e;
}
}
forkProcessDirectOutputCapture(task, { streamOutput, temporaryOutputPath, taskGraph, env, }) {
try {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
if (streamOutput) {
output_1.output.logCommand(args.join(' '));
}
const p = (0, child_process_1.fork)(this.cliPath, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
env,
});
const cp = new node_child_process_1.NodeChildProcessWithDirectOutput(p, temporaryOutputPath);
this.processes.add(cp);
// Send message to run the executor
p.send({
targetDescription: task.target,
overrides: task.overrides,
taskGraph,
isVerbose: this.verbose,
});
cp.onExit((code, signal) => {
this.processes.delete(cp);
// we didn't print any output as we were running the command
// print all the collected output
try {
const terminalOutput = cp.getTerminalOutput();
const p = (0, child_process_1.fork)(this.cliPath, {
stdio: ['inherit', 'pipe', 'pipe', 'ipc'],
env,
});
this.processes.add(p);
// Re-emit any messages from the task process
p.on('message', (message) => {
if (process.send) {
process.send(message);
}
});
// Send message to run the executor
p.send({
targetDescription: task.target,
overrides: task.overrides,
taskGraph,
isVerbose: this.verbose,
});
if (streamOutput) {
if (process.env.NX_PREFIX_OUTPUT === 'true') {
const color = getColor(task.target.project);
const prefixText = `${task.target.project}:`;
p.stdout
.pipe(logClearLineToPrefixTransformer(color.bold(prefixText) + ' '))
.pipe(addPrefixTransformer(color.bold(prefixText)))
.pipe(process.stdout);
p.stderr
.pipe(logClearLineToPrefixTransformer(color(prefixText) + ' '))
.pipe(addPrefixTransformer(color(prefixText)))
.pipe(process.stderr);
}
else {
p.stdout.pipe(addPrefixTransformer()).pipe(process.stdout);
p.stderr.pipe(addPrefixTransformer()).pipe(process.stderr);
}
}
let outWithErr = [];
p.stdout.on('data', (chunk) => {
outWithErr.push(chunk.toString());
});
p.stderr.on('data', (chunk) => {
outWithErr.push(chunk.toString());
});
p.on('exit', (code, signal) => {
this.processes.delete(p);
if (code === null)
code = (0, exit_codes_1.signalToCode)(signal);
// we didn't print any output as we were running the command
// print all the collected output|
const terminalOutput = outWithErr.join('');
if (!streamOutput) {
this.options.lifeCycle.printTaskTerminalOutput(task, code === 0 ? 'success' : 'failure', terminalOutput);
}
this.writeTerminalOutput(temporaryOutputPath, terminalOutput);
res({ code, terminalOutput });
});
}
catch (e) {
console.error(e);
rej(e);
}
});
}
forkProcessDirectOutputCapture(task, { streamOutput, temporaryOutputPath, taskGraph, env, }) {
return new Promise((res, rej) => {
try {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
if (streamOutput) {
output_1.output.logCommand(args.join(' '));
}
catch (e) {
console.log((0, strip_indents_1.stripIndents) `
const p = (0, child_process_1.fork)(this.cliPath, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
env,
});
this.processes.add(p);
// Re-emit any messages from the task process
p.on('message', (message) => {
if (process.send) {
process.send(message);
}
});
// Send message to run the executor
p.send({
targetDescription: task.target,
overrides: task.overrides,
taskGraph,
isVerbose: this.verbose,
});
p.on('exit', (code, signal) => {
if (code === null)
code = (0, exit_codes_1.signalToCode)(signal);
// we didn't print any output as we were running the command
// print all the collected output
let terminalOutput = '';
try {
terminalOutput = this.readTerminalOutput(temporaryOutputPath);
if (!streamOutput) {
this.options.lifeCycle.printTaskTerminalOutput(task, code === 0 ? 'success' : 'failure', terminalOutput);
}
}
catch (e) {
console.log((0, strip_indents_1.stripIndents) `
Unable to print terminal output for Task "${task.id}".

@@ -210,11 +299,18 @@ Task failed with Exit Code ${code} and Signal "${signal}".

`);
}
});
return cp;
}
catch (e) {
console.error(e);
throw e;
}
}
res({
code,
terminalOutput,
});
});
}
catch (e) {
console.error(e);
rej(e);
}
});
}
    // Reads back a task's captured terminal output from its temporary output
    // file. Throws if the file does not exist; callers guard this with
    // try/catch (see forkProcessDirectOutputCapture).
    readTerminalOutput(outputPath) {
        return (0, fs_1.readFileSync)(outputPath).toString();
    }
writeTerminalOutput(outputPath, content) {

@@ -231,2 +327,3 @@ (0, fs_1.writeFileSync)(outputPath, content);

process.on('message', (message) => {
// this.publisher.publish(message.toString());
if (this.pseudoTerminal) {

@@ -236,3 +333,3 @@ this.pseudoTerminal.sendMessageToChildren(message);

this.processes.forEach((p) => {
if ('send' in p) {
if ('connected' in p && p.connected) {
p.send(message);

@@ -245,3 +342,5 @@ }

this.processes.forEach((p) => {
p.kill();
if ('connected' in p ? p.connected : p.isAlive) {
p.kill();
}
});

@@ -251,3 +350,5 @@ });

this.processes.forEach((p) => {
p.kill('SIGTERM');
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});

@@ -259,3 +360,5 @@ // we exit here because we don't need to write anything to cache.

this.processes.forEach((p) => {
p.kill('SIGTERM');
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});

@@ -267,3 +370,5 @@ // no exit here because we expect child processes to terminate which

this.processes.forEach((p) => {
p.kill('SIGTERM');
if ('connected' in p ? p.connected : p.isAlive) {
p.kill('SIGTERM');
}
});

@@ -276,1 +381,49 @@ // no exit here because we expect child processes to terminate which

exports.ForkedProcessTaskRunner = ForkedProcessTaskRunner;
// Fixed palette of chalk colors used to tint per-project output prefixes.
// getColor() maps a project name onto this list by index, so the order and
// length of this array determine which color a given project receives.
const colors = [
    chalk.green,
    chalk.greenBright,
    chalk.red,
    chalk.redBright,
    chalk.cyan,
    chalk.cyanBright,
    chalk.yellow,
    chalk.yellowBright,
    chalk.magenta,
    chalk.magentaBright,
];
function getColor(projectName) {
let code = 0;
for (let i = 0; i < projectName.length; ++i) {
code += projectName.charCodeAt(i);
}
const colorIndex = code % colors.length;
return colors[colorIndex];
}
/**
* Prevents terminal escape sequence from clearing line prefix.
*/
function logClearLineToPrefixTransformer(prefix) {
let prevChunk = null;
return new stream_1.Transform({
transform(chunk, _encoding, callback) {
if (prevChunk && prevChunk.toString() === '\x1b[2K') {
chunk = chunk.toString().replace(/\x1b\[1G/g, (m) => m + prefix);
}
this.push(chunk);
prevChunk = chunk;
callback();
},
});
}
function addPrefixTransformer(prefix) {
const newLineSeparator = process.platform.startsWith('win') ? '\r\n' : '\n';
return new stream_1.Transform({
transform(chunk, _encoding, callback) {
const list = chunk.toString().split(/\r\n|[\n\v\f\r\x85\u2028\u2029]/g);
list
.filter(Boolean)
.forEach((m) => this.push(prefix ? prefix + ' ' + m + newLineSeparator : m + newLineSeparator));
callback();
},
});
}

@@ -39,6 +39,2 @@ "use strict";

}, {}),
continuousDependencies: opts.tasks.reduce((acc, task) => {
acc[task.id] = [];
return acc;
}, {}),
};

@@ -45,0 +41,0 @@ const taskResults = await (0, run_command_1.invokeTasksRunner)({

@@ -0,5 +1,3 @@

import { TaskStatus } from './tasks-runner';
import { Task } from '../config/task-graph';
import { RunCommandsOptions } from '../executors/run-commands/run-commands.impl';
import { RunningTask } from './running-tasks/running-task';
import { TaskStatus } from './tasks-runner';
/**

@@ -21,10 +19,2 @@ * The result of a completed {@link Task}

}
interface RustRunningTask extends RunningTask {
getResults(): Promise<{
code: number;
terminalOutput: string;
}>;
onExit(cb: (code: number, terminalOutput: string) => void): void;
kill(signal?: NodeJS.Signals | number): Promise<void> | void;
}
export interface LifeCycle {

@@ -49,3 +39,2 @@ startCommand?(): void | Promise<void>;

printTaskTerminalOutput?(task: Task, status: TaskStatus, output: string): void;
__runCommandsForTask?(task: Task, options: RunCommandsOptions): Promise<RustRunningTask>;
}

@@ -63,4 +52,2 @@ export declare class CompositeLifeCycle implements LifeCycle {

printTaskTerminalOutput(task: Task, status: TaskStatus, output: string): void;
__runCommandsForTask(task: Task, options: RunCommandsOptions): Promise<RustRunningTask>;
}
export {};

@@ -70,12 +70,3 @@ "use strict";

}
    // Delegates a run-commands task to the single registered life cycle that
    // implements __runCommandsForTask, returning its running-task handle.
    // Throws if no registered life cycle implements it.
    async __runCommandsForTask(task, options) {
        // This is clunky...
        // We have to assume there is only one life cycle with __runCommandsForTask
        const lifeCycleWithRunCommandsForTask = this.lifeCycles.find((l) => l.__runCommandsForTask);
        if (lifeCycleWithRunCommandsForTask) {
            return await lifeCycleWithRunCommandsForTask.__runCommandsForTask(task, options);
        }
        throw new Error('No life cycle with __runCommandsForTask found');
    }
}
exports.CompositeLifeCycle = CompositeLifeCycle;

@@ -41,6 +41,2 @@ "use strict";

}
// Do not directly print output when using the TUI
if (process.env.NX_TUI === 'true') {
return;
}
if (flakyTasks.length > 0) {

@@ -47,0 +43,0 @@ output_1.output.warn({

@@ -39,6 +39,2 @@ "use strict";

const flakyTasks = await this.taskHistory.getFlakyTasks(entries.map(([hash]) => hash));
// Do not directly print output when using the TUI
if (process.env.NX_TUI === 'true') {
return;
}
if (flakyTasks.length > 0) {

@@ -45,0 +41,0 @@ output_1.output.warn({

@@ -34,10 +34,4 @@ import { ChildProcess, RustPseudoTerminal } from '../native';

isAlive: boolean;
private exitCallbacks;
private outputCallbacks;
private terminalOutput;
exitCallbacks: any[];
constructor(childProcess: ChildProcess);
getResults(): Promise<{
code: number;
terminalOutput: string;
}>;
onExit(callback: (code: number) => void): void;

@@ -44,0 +38,0 @@ onOutput(callback: (message: string) => void): void;

@@ -81,22 +81,8 @@ "use strict";

this.exitCallbacks = [];
this.outputCallbacks = [];
this.terminalOutput = '';
childProcess.onOutput((output) => {
this.terminalOutput += output;
this.outputCallbacks.forEach((cb) => cb(output));
});
childProcess.onExit((message) => {
this.isAlive = false;
const code = messageToCode(message);
childProcess.cleanup();
this.exitCallbacks.forEach((cb) => cb(code));
const exitCode = messageToCode(message);
this.exitCallbacks.forEach((cb) => cb(exitCode));
});
}
async getResults() {
return new Promise((res) => {
this.onExit((code) => {
res({ code, terminalOutput: this.terminalOutput });
});
});
}
onExit(callback) {

@@ -106,14 +92,14 @@ this.exitCallbacks.push(callback);

onOutput(callback) {
this.outputCallbacks.push(callback);
this.childProcess.onOutput(callback);
}
kill() {
if (this.isAlive) {
try {
this.childProcess.kill();
}
catch {
// when the child process completes before we explicitly call kill, this will throw
// do nothing
}
finally {
try {
this.childProcess.kill();
}
catch {
// when the child process completes before we explicitly call kill, this will throw
// do nothing
}
finally {
if (this.isAlive == true) {
this.isAlive = false;

@@ -120,0 +106,0 @@ }

@@ -42,17 +42,2 @@ "use strict";

async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {
if (process.env.NX_TUI === 'true') {
const { createExternalAppLifecycle, extractLifeCycleRef, initTerminal, restoreTerminal, } = require('../native');
const externalLifeCycle = createExternalAppLifecycle(projectNames, tasks, nxArgs, overrides);
const renderIsDone = new Promise((resolve) => {
initTerminal(externalLifeCycle, () => {
resolve();
});
}).then(() => {
restoreTerminal();
});
return {
lifeCycle: extractLifeCycleRef(externalLifeCycle),
renderIsDone,
};
}
const { runnerOptions } = getRunner(nxArgs, nxJson);

@@ -59,0 +44,0 @@ const isRunOne = initiatingProject != null;

@@ -8,3 +8,2 @@ import { TaskHasher } from '../hasher/task-hasher';

import { NxJsonConfiguration } from '../config/nx-json';
import { NxArgs } from '../utils/command-line-utils';
export declare class TaskOrchestrator {

@@ -32,5 +31,3 @@ private readonly hasher;

private bailed;
private runningContinuousTasks;
private cleaningUp;
constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, options: NxArgs & DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
constructor(hasher: TaskHasher, initiatingProject: string | undefined, projectGraph: ProjectGraph, taskGraph: TaskGraph, nxJson: NxJsonConfiguration, options: DefaultTasksRunnerOptions, bail: boolean, daemon: DaemonClient, outputStyle: string);
run(): Promise<{

@@ -48,5 +45,3 @@ [id: string]: TaskStatus;

private applyFromCacheOrRunTask;
private runTask;
private runTaskInForkedProcess;
private startContinuousTask;
private preRunSteps;

@@ -61,3 +56,2 @@ private postRunSteps;

private recordOutputsHash;
private cleanup;
}

@@ -18,3 +18,2 @@ "use strict";

const params_1 = require("../utils/params");
const noop_child_process_1 = require("./running-tasks/noop-child-process");
class TaskOrchestrator {

@@ -45,4 +44,2 @@ // endregion internal state

this.bailed = false;
this.runningContinuousTasks = new Map();
this.cleaningUp = false;
}

@@ -59,9 +56,7 @@ async run() {

perf_hooks_1.performance.mark('task-execution:start');
const threadCount = this.options.parallel +
Object.values(this.taskGraph.tasks).filter((t) => t.continuous).length;
const threads = [];
process.stdout.setMaxListeners(threadCount + events_1.defaultMaxListeners);
process.stderr.setMaxListeners(threadCount + events_1.defaultMaxListeners);
process.stdout.setMaxListeners(this.options.parallel + events_1.defaultMaxListeners);
process.stderr.setMaxListeners(this.options.parallel + events_1.defaultMaxListeners);
// initial seeding of the queue
for (let i = 0; i < threadCount; ++i) {
for (let i = 0; i < this.options.parallel; ++i) {
threads.push(this.executeNextBatchOfTasksUsingTaskSchedule());

@@ -73,3 +68,2 @@ }

this.cache.removeOldCacheRecords();
await this.cleanup();
return this.completedTasks;

@@ -95,8 +89,3 @@ }

const groupId = this.closeGroup();
if (task.continuous) {
await this.startContinuousTask(task, groupId);
}
else {
await this.applyFromCacheOrRunTask(doNotSkipCache, task, groupId);
}
await this.applyFromCacheOrRunTask(doNotSkipCache, task, groupId);
this.openGroup(groupId);

@@ -196,4 +185,3 @@ return this.executeNextBatchOfTasksUsingTaskSchedule();

try {
const batchProcess = await this.forkedProcessTaskRunner.forkProcessForBatch(batch, this.taskGraph, env);
const results = await batchProcess.getResults();
const results = await this.forkedProcessTaskRunner.forkProcessForBatch(batch, this.taskGraph, env);
const batchResultEntries = Object.entries(results);

@@ -238,87 +226,76 @@ return batchResultEntries.map(([taskId, result]) => ({

if (results.length === 0) {
const childProcess = await this.runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput);
const { code, terminalOutput } = await childProcess.getResults();
results.push({
task,
status: code === 0 ? 'success' : 'failure',
terminalOutput,
});
}
await this.postRunSteps([task], results, doNotSkipCache, { groupId });
}
async runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput) {
const shouldPrefix = streamOutput && process.env.NX_PREFIX_OUTPUT === 'true';
const targetConfiguration = (0, utils_1.getTargetConfigurationForTask)(task, this.projectGraph);
if (process.env.NX_RUN_COMMANDS_DIRECTLY !== 'false' &&
targetConfiguration.executor === 'nx:run-commands' &&
!shouldPrefix) {
try {
const { schema } = (0, utils_1.getExecutorForTask)(task, this.projectGraph);
const isRunOne = this.initiatingProject != null;
const combinedOptions = (0, params_1.combineOptionsForExecutor)(task.overrides, task.target.configuration ?? targetConfiguration.defaultConfiguration, targetConfiguration, schema, task.target.project, (0, path_1.relative)(task.projectRoot ?? workspace_root_1.workspaceRoot, process.cwd()), process.env.NX_VERBOSE_LOGGING === 'true');
if (combinedOptions.env) {
env = {
...env,
...combinedOptions.env,
};
}
if (streamOutput) {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
output_1.output.logCommand(args.join(' '));
}
let runCommandsOptions = {
...combinedOptions,
env,
usePty: isRunOne &&
!this.tasksSchedule.hasTasks() &&
this.runningContinuousTasks.size === 0,
streamOutput,
};
const useTui = process.env.NX_TUI === 'true';
if (useTui) {
// Preprocess options on the JS side before sending to Rust
runCommandsOptions = (0, run_commands_impl_1.normalizeOptions)(runCommandsOptions);
}
if (useTui &&
typeof this.options.lifeCycle.__runCommandsForTask !== 'function') {
throw new Error('Incorrect lifeCycle applied for NX_TUI');
}
const runningTask =
// Run the command directly in Rust if the task has a single command
useTui && runCommandsOptions.commands?.length === 1
? await this.options.lifeCycle.__runCommandsForTask(task, runCommandsOptions)
: // Currently always run in JS if there are multiple commands defined for a single task
await (0, run_commands_impl_1.runCommands)(runCommandsOptions, {
root: workspace_root_1.workspaceRoot, // only root is needed in runCommands
});
runningTask.onExit((code, terminalOutput) => {
const shouldPrefix = streamOutput && process.env.NX_PREFIX_OUTPUT === 'true';
const targetConfiguration = (0, utils_1.getTargetConfigurationForTask)(task, this.projectGraph);
if (process.env.NX_RUN_COMMANDS_DIRECTLY !== 'false' &&
targetConfiguration.executor === 'nx:run-commands' &&
!shouldPrefix) {
try {
const { schema } = (0, utils_1.getExecutorForTask)(task, this.projectGraph);
const isRunOne = this.initiatingProject != null;
const combinedOptions = (0, params_1.combineOptionsForExecutor)(task.overrides, task.target.configuration ??
targetConfiguration.defaultConfiguration, targetConfiguration, schema, task.target.project, (0, path_1.relative)(task.projectRoot ?? workspace_root_1.workspaceRoot, process.cwd()), process.env.NX_VERBOSE_LOGGING === 'true');
if (combinedOptions.env) {
env = {
...env,
...combinedOptions.env,
};
}
if (streamOutput) {
const args = (0, utils_1.getPrintableCommandArgsForTask)(task);
output_1.output.logCommand(args.join(' '));
}
const { success, terminalOutput } = await (0, run_commands_impl_1.default)({
...combinedOptions,
env,
usePty: isRunOne && !this.tasksSchedule.hasTasks(),
streamOutput,
}, {
root: workspace_root_1.workspaceRoot, // only root is needed in runCommandsImpl
});
const status = success ? 'success' : 'failure';
if (!streamOutput) {
this.options.lifeCycle.printTaskTerminalOutput(task, code === 0 ? 'success' : 'failure', terminalOutput);
(0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
this.options.lifeCycle.printTaskTerminalOutput(task, status, terminalOutput);
}
});
return runningTask;
}
catch (e) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.error(e);
(0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
results.push({
task,
status,
terminalOutput,
});
}
else {
console.error(e.message);
catch (e) {
if (process.env.NX_VERBOSE_LOGGING === 'true') {
console.error(e);
}
else {
console.error(e.message);
}
const terminalOutput = e.stack ?? e.message ?? '';
(0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
results.push({
task,
status: 'failure',
terminalOutput,
});
}
const terminalOutput = e.stack ?? e.message ?? '';
(0, fs_1.writeFileSync)(temporaryOutputPath, terminalOutput);
}
else if (targetConfiguration.executor === 'nx:noop') {
(0, fs_1.writeFileSync)(temporaryOutputPath, '');
results.push({
task,
status: 'success',
terminalOutput: '',
});
}
else {
// cache prep
const { code, terminalOutput } = await this.runTaskInForkedProcess(task, env, pipeOutput, temporaryOutputPath, streamOutput);
results.push({
task,
status: code === 0 ? 'success' : 'failure',
terminalOutput,
});
}
}
else if (targetConfiguration.executor === 'nx:noop') {
(0, fs_1.writeFileSync)(temporaryOutputPath, '');
return new noop_child_process_1.NoopChildProcess({
code: 0,
terminalOutput: '',
});
}
else {
// cache prep
return await this.runTaskInForkedProcess(task, env, pipeOutput, temporaryOutputPath, streamOutput);
}
await this.postRunSteps([task], results, doNotSkipCache, { groupId });
}

@@ -328,6 +305,6 @@ async runTaskInForkedProcess(task, env, pipeOutput, temporaryOutputPath, streamOutput) {

const usePtyFork = process.env.NX_NATIVE_COMMAND_RUNNER !== 'false';
// Disable the pseudo terminal if this is a run-many or when running a continuous task as part of a run-one
const disablePseudoTerminal = !this.initiatingProject || task.continuous;
// Disable the pseudo terminal if this is a run-many
const disablePseudoTerminal = !this.initiatingProject;
// execution
const childProcess = usePtyFork
const { code, terminalOutput } = usePtyFork
? await this.forkedProcessTaskRunner.forkProcess(task, {

@@ -348,3 +325,6 @@ temporaryOutputPath,

});
return childProcess;
return {
code,
terminalOutput,
};
}

@@ -355,45 +335,7 @@ catch (e) {

}
return new noop_child_process_1.NoopChildProcess({
return {
code: 1,
terminalOutput: undefined,
});
};
}
}
async startContinuousTask(task, groupId) {
const taskSpecificEnv = await this.processedTasks.get(task.id);
await this.preRunSteps([task], { groupId });
const pipeOutput = await this.pipeOutputCapture(task);
// obtain metadata
const temporaryOutputPath = this.cache.temporaryOutputPath(task);
const streamOutput = this.outputStyle === 'static'
? false
: (0, utils_1.shouldStreamOutput)(task, this.initiatingProject);
let env = pipeOutput
? (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, process.env.FORCE_COLOR === undefined
? 'true'
: process.env.FORCE_COLOR, this.options.skipNxCache, this.options.captureStderr, null, null)
: (0, task_env_1.getEnvVariablesForTask)(task, taskSpecificEnv, undefined, this.options.skipNxCache, this.options.captureStderr, temporaryOutputPath, streamOutput);
const childProcess = await this.runTask(task, streamOutput, env, temporaryOutputPath, pipeOutput);
this.runningContinuousTasks.set(task.id, childProcess);
childProcess.onExit((code) => {
if (!this.cleaningUp) {
console.error(`Task "${task.id}" is continuous but exited with code ${code}`);
this.cleanup().then(() => {
process.exit(1);
});
}
});
if (this.initiatingProject === task.target.project &&
this.options.targets.length === 1 &&
this.options.targets[0] === task.target.target) {
await childProcess.getResults();
}
else {
await this.tasksSchedule.scheduleNextTasks();
// release blocked threads
this.waitingForTasks.forEach((f) => f(null));
this.waitingForTasks.length = 0;
}
return childProcess;
}
// endregion Single Task

@@ -529,15 +471,3 @@ // region Lifecycle

}
// endregion utils
async cleanup() {
this.cleaningUp = true;
await Promise.all(Array.from(this.runningContinuousTasks).map(async ([taskId, t]) => {
try {
return t.kill();
}
catch (e) {
console.error(`Unable to terminate ${taskId}\nError:`, e);
}
}));
}
}
exports.TaskOrchestrator = TaskOrchestrator;

@@ -150,3 +150,2 @@ "use strict";

dependencies: {},
continuousDependencies: {},
roots: [],

@@ -157,4 +156,2 @@ });

this.notScheduledTaskGraph.dependencies[task.id];
batch.continuousDependencies[task.id] =
this.notScheduledTaskGraph.continuousDependencies[task.id];
if (isRoot) {

@@ -176,5 +173,4 @@ batch.roots.push(task.id);

const hasDependenciesCompleted = this.taskGraph.dependencies[taskId].every((id) => this.completedTasks.has(id));
const hasContinuousDependenciesStarted = this.taskGraph.continuousDependencies[taskId].every((id) => this.runningTasks.has(id));
// if dependencies have not completed, cannot schedule
if (!hasDependenciesCompleted || !hasContinuousDependenciesStarted) {
if (!hasDependenciesCompleted) {
return false;

@@ -181,0 +177,0 @@ }

@@ -40,2 +40,10 @@ import { Task, TaskGraph } from '../config/task-graph';

export declare function removeTasksFromTaskGraph(graph: TaskGraph, ids: string[]): TaskGraph;
export declare function removeIdsFromGraph<T>(graph: {
roots: string[];
dependencies: Record<string, string[]>;
}, ids: string[], mapWithIds: Record<string, T>): {
mapWithIds: Record<string, T>;
roots: string[];
dependencies: Record<string, string[]>;
};
export declare function calculateReverseDeps(taskGraph: TaskGraph): Record<string, string[]>;

@@ -42,0 +50,0 @@ export declare function getCliPath(): string;

@@ -20,2 +20,3 @@ "use strict";

exports.removeTasksFromTaskGraph = removeTasksFromTaskGraph;
exports.removeIdsFromGraph = removeIdsFromGraph;
exports.calculateReverseDeps = calculateReverseDeps;

@@ -305,6 +306,5 @@ exports.getCliPath = getCliPath;

function removeTasksFromTaskGraph(graph, ids) {
const newGraph = removeIdsFromTaskGraph(graph, ids, graph.tasks);
const newGraph = removeIdsFromGraph(graph, ids, graph.tasks);
return {
dependencies: newGraph.dependencies,
continuousDependencies: newGraph.continuousDependencies,
roots: newGraph.roots,

@@ -314,6 +314,5 @@ tasks: newGraph.mapWithIds,

}
function removeIdsFromTaskGraph(graph, ids, mapWithIds) {
function removeIdsFromGraph(graph, ids, mapWithIds) {
const filteredMapWithIds = {};
const dependencies = {};
const continuousDependencies = {};
const removedSet = new Set(ids);

@@ -324,3 +323,2 @@ for (let id of Object.keys(mapWithIds)) {

dependencies[id] = graph.dependencies[id].filter((depId) => !removedSet.has(depId));
continuousDependencies[id] = graph.continuousDependencies[id].filter((depId) => !removedSet.has(depId));
}

@@ -331,4 +329,3 @@ }

dependencies: dependencies,
continuousDependencies,
roots: Object.keys(filteredMapWithIds).filter((k) => dependencies[k].length === 0 && continuousDependencies[k].length === 0),
roots: Object.keys(dependencies).filter((k) => dependencies[k].length === 0),
};

@@ -346,7 +343,2 @@ }

});
Object.keys(taskGraph.continuousDependencies).forEach((taskId) => {
taskGraph.continuousDependencies[taskId].forEach((d) => {
reverseTaskDeps[d].push(taskId);
});
});
return reverseTaskDeps;

@@ -377,5 +369,2 @@ }

function shouldStreamOutput(task, initiatingProject) {
// For now, disable streaming output on the JS side when running the TUI
if (process.env.NX_TUI === 'true')
return false;
if (process.env.NX_STREAM_OUTPUT === 'true')

@@ -382,0 +371,0 @@ return true;

@@ -13,3 +13,5 @@ import type { NxWorkspaceFilesExternals } from '../native';

/** Resolves files in the workspace matching any of `globs`, minus `exclude` patterns. */
export declare function globWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string[]>;
/** Like `globWithWorkspaceContext`, but returns one result array per glob group. */
export declare function multiGlobWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string[][]>;
/** Produces a single hash over the files matching `globs` (minus `exclude`). */
export declare function hashWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string>;
/** Produces one hash per glob group — presumably parallel to `globGroups` order; TODO confirm against native impl. */
export declare function hashMultiGlobWithWorkspaceContext(workspaceRoot: string, globGroups: string[][]): Promise<string[]>;
/** Pushes created/updated/deleted file sets into the cached workspace context. */
export declare function updateContextWithChangedFiles(workspaceRoot: string, createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;

@@ -16,0 +18,0 @@ export declare function updateFilesInContext(workspaceRoot: string, updatedFiles: string[], deletedFiles: string[]): Record<string, string>;

@@ -7,3 +7,5 @@ "use strict";

// CommonJS export wiring for the workspace-context helpers defined below.
exports.globWithWorkspaceContext = globWithWorkspaceContext;
exports.multiGlobWithWorkspaceContext = multiGlobWithWorkspaceContext;
exports.hashWithWorkspaceContext = hashWithWorkspaceContext;
exports.hashMultiGlobWithWorkspaceContext = hashMultiGlobWithWorkspaceContext;
exports.updateContextWithChangedFiles = updateContextWithChangedFiles;

@@ -54,2 +56,9 @@ exports.updateFilesInContext = updateFilesInContext;

}
/**
 * Runs a multi-glob over the workspace, returning one file list per glob group.
 *
 * When executing inside the daemon process — or when the daemon is disabled —
 * the glob is evaluated locally against the in-process workspace context;
 * otherwise the request is delegated to the daemon client.
 *
 * @param workspaceRoot absolute path of the workspace
 * @param globs glob patterns to match
 * @param exclude optional patterns to filter out of the results
 */
async function multiGlobWithWorkspaceContext(workspaceRoot, globs, exclude) {
    const canUseDaemon = !(0, is_on_daemon_1.isOnDaemon)() && client_1.daemonClient.enabled();
    if (canUseDaemon) {
        return client_1.daemonClient.multiGlob(globs, exclude);
    }
    // Local path: make sure the native workspace context exists before querying it.
    ensureContextAvailable(workspaceRoot);
    return workspaceContext.multiGlob(globs, exclude);
}
async function hashWithWorkspaceContext(workspaceRoot, globs, exclude) {

@@ -62,2 +71,9 @@ if ((0, is_on_daemon_1.isOnDaemon)() || !client_1.daemonClient.enabled()) {

}
/**
 * Hashes the files matching each glob group, returning one hash per group.
 *
 * Mirrors the dispatch used by the other workspace-context helpers: evaluate
 * locally when running inside the daemon (or when the daemon is disabled),
 * otherwise forward the request to the daemon client.
 *
 * @param workspaceRoot absolute path of the workspace
 * @param globGroups groups of glob patterns; each group yields one hash
 */
async function hashMultiGlobWithWorkspaceContext(workspaceRoot, globGroups) {
    const canUseDaemon = !(0, is_on_daemon_1.isOnDaemon)() && client_1.daemonClient.enabled();
    if (canUseDaemon) {
        return client_1.daemonClient.hashMultiGlob(globGroups);
    }
    // Local path: the native context must be initialized before hashing.
    ensureContextAvailable(workspaceRoot);
    return workspaceContext.hashFilesMatchingGlobs(globGroups);
}
async function updateContextWithChangedFiles(workspaceRoot, createdFiles, updatedFiles, deletedFiles) {

@@ -64,0 +80,0 @@ if (!client_1.daemonClient.enabled()) {

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc