Socket
Socket
Sign inDemoInstall

nx

Package Overview
Dependencies
Maintainers
8
Versions
1357
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

nx - npm Package Compare versions

Comparing version 19.6.0-canary.20240809-d3747e0 to 19.6.0-canary.20240813-c72ba9b

src/command-line/import/command-object.d.ts

8

bin/post-install.js

@@ -56,1 +56,9 @@ "use strict";

}
process.on('uncaughtException', (e) => {
logger_1.logger.verbose(e);
process.exit(0);
});
process.on('unhandledRejection', (e) => {
logger_1.logger.verbose(e);
process.exit(0);
});

24

package.json
{
"name": "nx",
"version": "19.6.0-canary.20240809-d3747e0",
"version": "19.6.0-canary.20240813-c72ba9b",
"private": false,

@@ -74,3 +74,3 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"ora": "5.3.0",
"@nrwl/tao": "19.6.0-canary.20240809-d3747e0"
"@nrwl/tao": "19.6.0-canary.20240813-c72ba9b"
},

@@ -90,12 +90,12 @@ "peerDependencies": {

"optionalDependencies": {
"@nx/nx-darwin-x64": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-darwin-arm64": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-linux-x64-gnu": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-linux-x64-musl": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-win32-x64-msvc": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-linux-arm64-gnu": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-linux-arm64-musl": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-linux-arm-gnueabihf": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-win32-arm64-msvc": "19.6.0-canary.20240809-d3747e0",
"@nx/nx-freebsd-x64": "19.6.0-canary.20240809-d3747e0"
"@nx/nx-darwin-x64": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-darwin-arm64": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-linux-x64-gnu": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-linux-x64-musl": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-win32-x64-msvc": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-linux-arm64-gnu": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-linux-arm64-musl": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-linux-arm-gnueabihf": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-win32-arm64-msvc": "19.6.0-canary.20240813-c72ba9b",
"@nx/nx-freebsd-x64": "19.6.0-canary.20240813-c72ba9b"
},

@@ -102,0 +102,0 @@ "nx-migrations": {

@@ -189,4 +189,11 @@ {

"versionPlans": {
"type": "boolean",
"description": "Enables using version plans as a specifier source for versioning and to determine changes for changelog generation."
"oneOf": [
{
"$ref": "#/definitions/NxReleaseVersionPlansConfiguration"
},
{
"type": "boolean",
"description": "Enables using version plans as a specifier source for versioning and to determine changes for changelog generation."
}
]
}

@@ -243,4 +250,11 @@ },

"versionPlans": {
"type": "boolean",
"description": "Enables using version plans as a specifier source for versioning and to determine changes for changelog generation."
"oneOf": [
{
"$ref": "#/definitions/NxReleaseVersionPlansConfiguration"
},
{
"type": "boolean",
"description": "Enables using version plans as a specifier source for versioning and to determine changes for changelog generation."
}
]
},

@@ -251,2 +265,27 @@ "releaseTagPattern": {

}
},
"sync": {
"type": "object",
"description": "Configuration for the `nx sync` command",
"properties": {
"globalGenerators": {
"type": "array",
"items": {
"type": "string"
},
"description": "List of workspace-wide sync generators to be run (not attached to targets)"
},
"generatorOptions": {
"type": "object",
"description": "Options for the sync generators.",
"additionalProperties": {
"type": "object"
}
},
"applyChanges": {
"type": "boolean",
"description": "Whether to automatically apply sync generator changes when running tasks. If not set, the user will be prompted. If set to `true`, the user will not be prompted and the changes will be applied. If set to `false`, the user will not be prompted and the changes will not be applied."
}
},
"additionalProperties": false
}

@@ -679,2 +718,14 @@ },

},
"NxReleaseVersionPlansConfiguration": {
"type": "object",
"properties": {
"ignorePatternsForPlanCheck": {
"type": "array",
"items": {
"type": "string"
},
"description": "Changes to files matching any of these optional patterns will be excluded from the affected project logic within the `nx release plan:check` command. This is useful for ignoring files that are not relevant to the versioning process, such as documentation or configuration files."
}
}
},
"ChangelogRenderOptions": {

@@ -681,0 +732,0 @@ "type": "object",

@@ -156,2 +156,9 @@ {

"additionalProperties": true
},
"syncGenerators": {
"type": "array",
"items": {
"type": "string"
},
"description": "List of generators to run before the target to ensure the workspace is up to date"
}

@@ -158,0 +165,0 @@ }

export declare const allowedProjectExtensions: readonly ["tags", "implicitDependencies", "configFilePath", "$schema", "generators", "namedInputs", "name", "files", "root", "sourceRoot", "projectType", "release", "includedScripts", "metadata"];
export declare const allowedWorkspaceExtensions: readonly ["implicitDependencies", "affected", "defaultBase", "tasksRunnerOptions", "workspaceLayout", "plugins", "targetDefaults", "files", "generators", "namedInputs", "extends", "cli", "pluginsConfig", "defaultProject", "installation", "release", "nxCloudAccessToken", "nxCloudId", "nxCloudUrl", "nxCloudEncryptionKey", "parallel", "cacheDirectory", "useDaemonProcess", "useInferencePlugins", "neverConnectToCloud"];
export declare const allowedWorkspaceExtensions: readonly ["implicitDependencies", "affected", "defaultBase", "tasksRunnerOptions", "workspaceLayout", "plugins", "targetDefaults", "files", "generators", "namedInputs", "extends", "cli", "pluginsConfig", "defaultProject", "installation", "release", "nxCloudAccessToken", "nxCloudId", "nxCloudUrl", "nxCloudEncryptionKey", "parallel", "cacheDirectory", "useDaemonProcess", "useInferencePlugins", "neverConnectToCloud", "sync"];

@@ -65,2 +65,3 @@ "use strict";

'neverConnectToCloud',
'sync',
];

@@ -67,0 +68,0 @@ if (!patched) {

@@ -0,1 +1,3 @@

import { PackageManagerCommands } from '../../utils/package-manager';
import { NxJsonConfiguration } from '../../config/nx-json';
export interface InitArgs {

@@ -7,2 +9,7 @@ interactive: boolean;

}
export declare function installPlugins(repoRoot: string, plugins: string[], pmc: PackageManagerCommands, updatePackageScripts: boolean): void;
export declare function initHandler(options: InitArgs): Promise<void>;
export declare function detectPlugins(nxJson: NxJsonConfiguration, interactive: boolean): Promise<{
plugins: string[];
updatePackageScripts: boolean;
}>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.installPlugins = installPlugins;
exports.initHandler = initHandler;
exports.detectPlugins = detectPlugins;
const fs_1 = require("fs");

@@ -20,2 +22,18 @@ const semver_1 = require("semver");

const add_nx_to_monorepo_1 = require("./implementation/add-nx-to-monorepo");
const nx_json_1 = require("../../config/nx-json");
const get_package_name_from_import_path_1 = require("../../utils/get-package-name-from-import-path");
function installPlugins(repoRoot, plugins, pmc, updatePackageScripts) {
if (plugins.length === 0) {
return;
}
(0, utils_1.addDepsToPackageJson)(repoRoot, plugins);
(0, utils_1.runInstall)(repoRoot, pmc);
output_1.output.log({ title: '🔨 Configuring plugins' });
for (const plugin of plugins) {
(0, child_process_2.execSync)(`${pmc.exec} nx g ${plugin}:init --keepExistingVersions ${updatePackageScripts ? '--updatePackageScripts' : ''} --no-interactive`, {
stdio: [0, 1, 2],
cwd: repoRoot,
});
}
}
async function initHandler(options) {

@@ -35,5 +53,6 @@ process.env.NX_RUNNING_NX_INIT = 'true';

(0, add_nx_scripts_1.generateDotNxSetup)(version);
const { plugins } = await detectPlugins();
const nxJson = (0, nx_json_1.readNxJson)(process.cwd());
const { plugins } = await detectPlugins(nxJson, options.interactive);
plugins.forEach((plugin) => {
(0, child_process_2.execSync)(`./nx add ${plugin}`, {
(0, child_process_1.runNxSync)(`add ${plugin}`, {
stdio: 'inherit',

@@ -57,4 +76,2 @@ });

}
output_1.output.log({ title: '🧐 Checking dependencies' });
const { plugins, updatePackageScripts } = await detectPlugins();
const packageJson = (0, fileutils_1.readJsonFile)('package.json');

@@ -82,14 +99,7 @@ if ((0, utils_1.isMonorepo)(packageJson)) {

(0, utils_1.updateGitIgnore)(repoRoot);
(0, utils_1.addDepsToPackageJson)(repoRoot, plugins);
const nxJson = (0, nx_json_1.readNxJson)(repoRoot);
output_1.output.log({ title: '🧐 Checking dependencies' });
const { plugins, updatePackageScripts } = await detectPlugins(nxJson, options.interactive);
output_1.output.log({ title: '📦 Installing Nx' });
(0, utils_1.runInstall)(repoRoot, pmc);
if (plugins.length > 0) {
output_1.output.log({ title: '🔨 Configuring plugins' });
for (const plugin of plugins) {
(0, child_process_2.execSync)(`${pmc.exec} nx g ${plugin}:init --keepExistingVersions ${updatePackageScripts ? '--updatePackageScripts' : ''} --no-interactive`, {
stdio: [0, 1, 2],
cwd: repoRoot,
});
}
}
installPlugins(repoRoot, plugins, pmc, updatePackageScripts);
if (useNxCloud) {

@@ -124,4 +134,8 @@ output_1.output.log({ title: '🛠️ Setting up Nx Cloud' });

};
async function detectPlugins() {
async function detectPlugins(nxJson, interactive) {
let files = ['package.json'].concat(await (0, workspace_context_1.globWithWorkspaceContext)(process.cwd(), ['**/*/package.json']));
const currentPlugins = new Set((nxJson.plugins ?? []).map((p) => {
const plugin = typeof p === 'string' ? p : p.plugin;
return (0, get_package_name_from_import_path_1.getPackageNameFromImportPath)(plugin);
}));
const detectedPlugins = new Set();

@@ -152,2 +166,8 @@ for (const file of files) {

}
// Remove existing plugins
for (const plugin of detectedPlugins) {
if (currentPlugins.has(plugin)) {
detectedPlugins.delete(plugin);
}
}
const plugins = Array.from(detectedPlugins);

@@ -160,2 +180,15 @@ if (plugins.length === 0) {

}
if (!interactive) {
output_1.output.log({
title: `Recommended Plugins:`,
bodyLines: [
`Adding these Nx plugins to integrate with the tools used in your workspace:`,
...plugins.map((p) => `- ${p}`),
],
});
return {
plugins,
updatePackageScripts: true,
};
}
output_1.output.log({

@@ -162,0 +195,0 @@ title: `Recommended Plugins:`,

@@ -13,16 +13,18 @@ "use strict";

const command_object_7 = require("./generate/command-object");
const command_object_8 = require("./init/command-object");
const command_object_9 = require("./list/command-object");
const command_object_10 = require("./migrate/command-object");
const command_object_11 = require("./new/command-object");
const command_object_12 = require("./repair/command-object");
const command_object_13 = require("./report/command-object");
const command_object_14 = require("./run/command-object");
const command_object_15 = require("./run-many/command-object");
const command_object_16 = require("./show/command-object");
const command_object_17 = require("./watch/command-object");
const command_object_18 = require("./reset/command-object");
const command_object_19 = require("./release/command-object");
const command_object_20 = require("./add/command-object");
const command_object_8 = require("./import/command-object");
const command_object_9 = require("./init/command-object");
const command_object_10 = require("./list/command-object");
const command_object_11 = require("./migrate/command-object");
const command_object_12 = require("./new/command-object");
const command_object_13 = require("./repair/command-object");
const command_object_14 = require("./report/command-object");
const command_object_15 = require("./run/command-object");
const command_object_16 = require("./run-many/command-object");
const command_object_17 = require("./show/command-object");
const command_object_18 = require("./watch/command-object");
const command_object_19 = require("./reset/command-object");
const command_object_20 = require("./release/command-object");
const command_object_21 = require("./add/command-object");
const command_objects_1 = require("./deprecated/command-objects");
const command_object_22 = require("./sync/command-object");
// Ensure that the output takes up the available width of the terminal.

@@ -44,3 +46,3 @@ yargs.wrap(yargs.terminalWidth());

.demandCommand(1, '')
.command(command_object_20.yargsAddCommand)
.command(command_object_21.yargsAddCommand)
.command(command_object_1.yargsAffectedBuildCommand)

@@ -59,18 +61,21 @@ .command(command_object_1.yargsAffectedCommand)

.command(command_object_7.yargsGenerateCommand)
.command(command_object_8.yargsInitCommand)
.command(command_object_10.yargsInternalMigrateCommand)
.command(command_object_9.yargsListCommand)
.command(command_object_10.yargsMigrateCommand)
.command(command_object_11.yargsNewCommand)
.command(command_object_8.yargsImportCommand)
.command(command_object_9.yargsInitCommand)
.command(command_object_11.yargsInternalMigrateCommand)
.command(command_object_10.yargsListCommand)
.command(command_object_11.yargsMigrateCommand)
.command(command_object_12.yargsNewCommand)
.command(command_objects_1.yargsPrintAffectedCommand)
.command(command_object_19.yargsReleaseCommand)
.command(command_object_12.yargsRepairCommand)
.command(command_object_13.yargsReportCommand)
.command(command_object_18.yargsResetCommand)
.command(command_object_14.yargsRunCommand)
.command(command_object_15.yargsRunManyCommand)
.command(command_object_16.yargsShowCommand)
.command(command_object_20.yargsReleaseCommand)
.command(command_object_13.yargsRepairCommand)
.command(command_object_14.yargsReportCommand)
.command(command_object_19.yargsResetCommand)
.command(command_object_15.yargsRunCommand)
.command(command_object_16.yargsRunManyCommand)
.command(command_object_17.yargsShowCommand)
.command(command_object_22.yargsSyncCommand)
.command(command_object_22.yargsSyncCheckCommand)
.command(command_object_2.yargsViewLogsCommand)
.command(command_object_17.yargsWatchCommand)
.command(command_object_14.yargsNxInfixCommand)
.command(command_object_18.yargsWatchCommand)
.command(command_object_15.yargsNxInfixCommand)
.scriptName('nx')

@@ -77,0 +82,0 @@ .help()

@@ -87,3 +87,3 @@ "use strict";

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
if (args.deleteVersionPlans === undefined) {

@@ -144,3 +144,3 @@ // default to deleting version plans in this command instead of after versioning

// If there are multiple release groups, we'll just skip the workspace changelog anyway.
const versionPlansEnabledForWorkspaceChangelog = releaseGroups[0].versionPlans;
const versionPlansEnabledForWorkspaceChangelog = releaseGroups[0].resolvedVersionPlans;
if (versionPlansEnabledForWorkspaceChangelog) {

@@ -150,3 +150,3 @@ if (releaseGroups.length === 1) {

if (releaseGroup.projectsRelationship === 'fixed') {
const versionPlans = releaseGroup.versionPlans;
const versionPlans = releaseGroup.resolvedVersionPlans;
workspaceChangelogChanges = versionPlans

@@ -302,4 +302,4 @@ .flatMap((vp) => {

let commits;
if (releaseGroup.versionPlans) {
changes = releaseGroup.versionPlans
if (releaseGroup.resolvedVersionPlans) {
changes = releaseGroup.resolvedVersionPlans
.map((vp) => {

@@ -403,4 +403,4 @@ const bumpForProject = vp.projectVersionBumps[project.name];

let commits = [];
if (releaseGroup.versionPlans) {
changes = releaseGroup.versionPlans
if (releaseGroup.resolvedVersionPlans) {
changes = releaseGroup.resolvedVersionPlans
.flatMap((vp) => {

@@ -593,4 +593,4 @@ const releaseType = versionPlanSemverReleaseTypeToChangelogType(vp.groupVersionBump);

releaseGroups.forEach((group) => {
if (group.versionPlans) {
group.versionPlans.forEach((plan) => {
if (group.resolvedVersionPlans) {
group.resolvedVersionPlans.forEach((plan) => {
(0, fs_extra_1.removeSync)(plan.absolutePath);

@@ -597,0 +597,0 @@ planFiles.add(plan.relativePath);

import { CommandModule } from 'yargs';
import { OutputStyle, RunManyOptions } from '../yargs-utils/shared-options';
import { VersionData } from './utils/shared';
export interface NxReleaseArgs {
export interface BaseNxReleaseArgs {
verbose?: boolean;
printConfig?: boolean | 'debug';
}
export interface NxReleaseArgs extends BaseNxReleaseArgs {
groups?: string[];
projects?: string[];
dryRun?: boolean;
verbose?: boolean;
printConfig?: boolean | 'debug';
}

@@ -46,2 +48,9 @@ interface GitCommitAndTagOptions {

};
export type PlanCheckOptions = BaseNxReleaseArgs & {
base?: string;
head?: string;
files?: string;
uncommitted?: boolean;
untracked?: boolean;
};
export type ReleaseOptions = NxReleaseArgs & FirstReleaseArgs & {

@@ -48,0 +57,0 @@ yes?: boolean;

@@ -6,5 +6,5 @@ "use strict";

const nx_json_1 = require("../../config/nx-json");
const command_line_utils_1 = require("../../utils/command-line-utils");
const logger_1 = require("../../utils/logger");
const shared_options_1 = require("../yargs-utils/shared-options");
const command_line_utils_1 = require("../../utils/command-line-utils");
exports.yargsReleaseCommand = {

@@ -19,2 +19,3 @@ command: 'release',

.command(planCommand)
.command(planCheckCommand)
.demandCommand()

@@ -211,2 +212,3 @@ // Error on typos/mistyped CLI args, there is no reason to support arbitrary unknown args for these commands

aliases: ['pl'],
// TODO: Remove this when docs are added
// Create a plan to pick a new version and generate a changelog entry.

@@ -243,2 +245,15 @@ // Hidden for now until the feature is more stable

};
const planCheckCommand = {
command: 'plan:check',
// TODO: Remove this when docs are added
// Create a plan to pick a new version and generate a changelog entry.
// Hidden for now until the feature is more stable
describe: false,
builder: (yargs) => (0, shared_options_1.withAffectedOptions)(yargs),
handler: async (args) => {
const release = await Promise.resolve().then(() => require('./plan-check'));
const result = await release.releasePlanCheckCLIHandler(args);
process.exit(result);
},
};
function coerceParallelOption(args) {

@@ -245,0 +260,0 @@ return {

@@ -147,3 +147,4 @@ "use strict";

conventionalCommits: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG,
versionPlans: false,
versionPlans: (userConfig.versionPlans ||
false),
};

@@ -204,3 +205,4 @@ const groupProjectsRelationship = userConfig.projectsRelationship || WORKSPACE_DEFAULTS.projectsRelationship;

], normalizeTrueToEmptyObject(userConfig.changelog));
const rootVersionPlansConfig = userConfig.versionPlans ?? WORKSPACE_DEFAULTS.versionPlans;
const rootVersionPlansConfig = (userConfig.versionPlans ??
WORKSPACE_DEFAULTS.versionPlans);
const rootConventionalCommitsConfig = deepMergeDefaults([WORKSPACE_DEFAULTS.conventionalCommits], fillUnspecifiedConventionalCommitsProperties(normalizeConventionalCommitsConfig(userConfig.conventionalCommits)));

@@ -207,0 +209,0 @@ // these options are not supported at the group level, only the root/command level

import { ProjectGraph } from '../../../config/project-graph';
import { NxReleaseConfig } from './config';
import { GroupVersionPlan, ProjectsVersionPlan } from './version-plans';
export type ReleaseGroupWithName = Omit<NxReleaseConfig['groups'][string], 'versionPlans'> & {
export type ReleaseGroupWithName = NxReleaseConfig['groups'][string] & {
name: string;
versionPlans: (ProjectsVersionPlan | GroupVersionPlan)[] | false;
resolvedVersionPlans: (ProjectsVersionPlan | GroupVersionPlan)[] | false;
};

@@ -8,0 +8,0 @@ export declare function filterReleaseGroups(projectGraph: ProjectGraph, nxReleaseConfig: NxReleaseConfig, projectsFilter?: string[], groupsFilter?: string[]): {

@@ -12,3 +12,3 @@ "use strict";

name,
versionPlans: group.versionPlans ? [] : false,
resolvedVersionPlans: group.versionPlans ? [] : false,
};

@@ -15,0 +15,0 @@ });

@@ -28,3 +28,3 @@ import { ReleaseType } from 'semver';

export declare function readRawVersionPlans(): Promise<RawVersionPlan[]>;
export declare function setVersionPlansOnGroups(rawVersionPlans: RawVersionPlan[], releaseGroups: ReleaseGroupWithName[], allProjectNamesInWorkspace: string[]): ReleaseGroupWithName[];
export declare function setResolvedVersionPlansOnGroups(rawVersionPlans: RawVersionPlan[], releaseGroups: ReleaseGroupWithName[], allProjectNamesInWorkspace: string[]): ReleaseGroupWithName[];
export declare function getVersionPlansAbsolutePath(): string;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.readRawVersionPlans = readRawVersionPlans;
exports.setVersionPlansOnGroups = setVersionPlansOnGroups;
exports.setResolvedVersionPlansOnGroups = setResolvedVersionPlansOnGroups;
exports.getVersionPlansAbsolutePath = getVersionPlansAbsolutePath;

@@ -38,3 +38,3 @@ const fs_1 = require("fs");

}
function setVersionPlansOnGroups(rawVersionPlans, releaseGroups, allProjectNamesInWorkspace) {
function setResolvedVersionPlansOnGroups(rawVersionPlans, releaseGroups, allProjectNamesInWorkspace) {
const groupsByName = releaseGroups.reduce((acc, group) => acc.set(group.name, group), new Map());

@@ -49,3 +49,3 @@ const isDefaultGroup = isDefault(releaseGroups);

const group = groupsByName.get(key);
if (!group.versionPlans) {
if (!group.resolvedVersionPlans) {
if (isDefaultGroup) {

@@ -74,3 +74,3 @@ throw new Error(`Found a version bump in '${rawVersionPlan.fileName}' but version plans are not enabled.`);

}
const existingPlan = (group.versionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
const existingPlan = (group.resolvedVersionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
if (existingPlan) {

@@ -87,3 +87,3 @@ if (existingPlan.groupVersionBump !== value) {

else {
group.versionPlans.push({
group.resolvedVersionPlans.push({
absolutePath: rawVersionPlan.absolutePath,

@@ -111,3 +111,3 @@ relativePath: rawVersionPlan.relativePath,

}
if (!groupForProject.versionPlans) {
if (!groupForProject.resolvedVersionPlans) {
if (isDefaultGroup) {

@@ -124,3 +124,3 @@ throw new Error(`Found a version bump for project '${key}' in '${rawVersionPlan.fileName}' but version plans are not enabled.`);

if (groupForProject.projectsRelationship === 'independent') {
const existingPlan = (groupForProject.versionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
const existingPlan = (groupForProject.resolvedVersionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
if (existingPlan) {

@@ -130,3 +130,3 @@ existingPlan.projectVersionBumps[key] = value;

else {
groupForProject.versionPlans.push({
groupForProject.resolvedVersionPlans.push({
absolutePath: rawVersionPlan.absolutePath,

@@ -144,3 +144,3 @@ relativePath: rawVersionPlan.relativePath,

else {
const existingPlan = (groupForProject.versionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
const existingPlan = (groupForProject.resolvedVersionPlans.find((plan) => plan.fileName === rawVersionPlan.fileName));
// This can occur if the same fixed release group has multiple entries for different projects within

@@ -162,3 +162,3 @@ // the same version plan file. This will be the case when users are using the default release group.

else {
groupForProject.versionPlans.push({
groupForProject.resolvedVersionPlans.push({
absolutePath: rawVersionPlan.absolutePath,

@@ -181,4 +181,4 @@ relativePath: rawVersionPlan.relativePath,

releaseGroups.forEach((group) => {
if (group.versionPlans) {
group.versionPlans.sort((a, b) => b.createdOnMs - a.createdOnMs);
if (group.resolvedVersionPlans) {
group.resolvedVersionPlans.sort((a, b) => b.createdOnMs - a.createdOnMs);
}

@@ -185,0 +185,0 @@ });

@@ -34,3 +34,3 @@ "use strict";

// Apply default configuration to any optional user configuration
const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), nxJson.release);
const { error: configError, nxReleaseConfig } = await (0, config_1.createNxReleaseConfig)(projectGraph, await (0, file_map_utils_1.createProjectFileMapUsingProjectGraph)(projectGraph), userProvidedReleaseConfig);
if (configError) {

@@ -37,0 +37,0 @@ return await (0, config_1.handleNxReleaseConfigError)(configError);

@@ -91,6 +91,6 @@ "use strict";

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
const planFiles = new Set();
releaseGroups.forEach((group) => {
if (group.versionPlans) {
if (group.resolvedVersionPlans) {
if (group.name === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP) {

@@ -102,3 +102,3 @@ output_1.output.logSingleLine(`Removing version plan files`);

}
group.versionPlans.forEach((plan) => {
group.resolvedVersionPlans.forEach((plan) => {
if (!args.dryRun) {

@@ -105,0 +105,0 @@ (0, fs_extra_1.removeSync)(plan.absolutePath);

@@ -87,3 +87,3 @@ "use strict";

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
if (args.deleteVersionPlans === undefined) {

@@ -90,0 +90,0 @@ // default to not delete version plans after versioning as they may be needed for changelog generation

@@ -32,3 +32,3 @@ import { Argv } from 'yargs';

}>;
export declare function withVerbose(yargs: Argv): Argv<{
export declare function withVerbose<T>(yargs: Argv<T>): Argv<T & {
verbose: boolean;

@@ -35,0 +35,0 @@ }>;

@@ -148,2 +148,9 @@ import type { ChangelogRenderOptions } from '../../release/changelog-renderer';

}
export interface NxReleaseVersionPlansConfiguration {
/**
* Changes to files matching any of these optional patterns will be excluded from the affected project logic within the `nx release plan:check`
* command. This is useful for ignoring files that are not relevant to the versioning process, such as documentation or configuration files.
*/
ignorePatternsForPlanCheck?: string[];
}
export interface NxReleaseConfiguration {

@@ -197,3 +204,3 @@ /**

*/
versionPlans?: boolean;
versionPlans?: NxReleaseVersionPlansConfiguration | boolean;
}>;

@@ -269,4 +276,23 @@ /**

*/
versionPlans?: boolean;
versionPlans?: NxReleaseVersionPlansConfiguration | boolean;
}
export interface NxSyncConfiguration {
/**
* List of workspace-wide sync generators to be run (not attached to targets).
*/
globalGenerators?: string[];
/**
* Options for the sync generators.
*/
generatorOptions?: {
[generatorName: string]: Record<string, unknown>;
};
/**
* Whether to automatically apply sync generator changes when running tasks.
* If not set, the user will be prompted.
* If set to `true`, the user will not be prompted and the changes will be applied.
* If set to `false`, the user will not be prompted and the changes will not be applied.
*/
applyChanges?: boolean;
}
/**

@@ -419,2 +445,6 @@ * Nx.json configuration

neverConnectToCloud?: boolean;
/**
* Configuration for the `nx sync` command.
*/
sync?: NxSyncConfiguration;
}

@@ -421,0 +451,0 @@ export type PluginConfiguration = string | ExpandedPluginConfiguration;

@@ -220,2 +220,7 @@ import type { NxJsonConfiguration, NxReleaseVersionConfiguration } from './nx-json';

parallelism?: boolean;
/**
* List of generators to run before the target to ensure the workspace
* is up to date.
*/
syncGenerators?: string[];
}

@@ -8,3 +8,4 @@ export interface DaemonProcessJson {

export declare function writeDaemonJsonProcessCache(daemonJson: DaemonProcessJson): Promise<void>;
export declare function waitForDaemonToExitAndCleanupProcessJson(): Promise<void>;
export declare function safelyCleanUpExistingProcess(): Promise<void>;
export declare function getDaemonProcessIdSync(): number | null;

@@ -7,2 +7,3 @@ "use strict";

exports.writeDaemonJsonProcessCache = writeDaemonJsonProcessCache;
exports.waitForDaemonToExitAndCleanupProcessJson = waitForDaemonToExitAndCleanupProcessJson;
exports.safelyCleanUpExistingProcess = safelyCleanUpExistingProcess;

@@ -31,2 +32,25 @@ exports.getDaemonProcessIdSync = getDaemonProcessIdSync;

}
async function waitForDaemonToExitAndCleanupProcessJson() {
const daemonProcessJson = await readDaemonProcessJsonCache();
if (daemonProcessJson && daemonProcessJson.processId) {
await new Promise((resolve, reject) => {
let count = 0;
const interval = setInterval(() => {
try {
// sending a signal 0 to a process checks if the process is running instead of actually killing it
process.kill(daemonProcessJson.processId, 0);
}
catch (e) {
clearInterval(interval);
resolve();
}
if ((count += 1) > 200) {
clearInterval(interval);
reject(`Daemon process ${daemonProcessJson.processId} didn't exit after 2 seconds.`);
}
}, 10);
});
deleteDaemonJsonProcessCache();
}
}
async function safelyCleanUpExistingProcess() {

@@ -38,23 +62,6 @@ const daemonProcessJson = await readDaemonProcessJsonCache();

// we wait for the process to actually shut down before returning
await new Promise((resolve, reject) => {
let count = 0;
const interval = setInterval(() => {
try {
// sending a signal 0 to a process checks if the process is running instead of actually killing it
process.kill(daemonProcessJson.processId, 0);
}
catch (e) {
clearInterval(interval);
resolve();
}
if ((count += 1) > 200) {
clearInterval(interval);
reject(`Daemon process ${daemonProcessJson.processId} didn't exit after 2 seconds.`);
}
}, 10);
});
await waitForDaemonToExitAndCleanupProcessJson();
}
catch { }
}
deleteDaemonJsonProcessCache();
}

@@ -61,0 +68,0 @@ // Must be sync for the help output use case

@@ -8,2 +8,3 @@ import { ChildProcess } from 'child_process';

import { TaskRun } from '../../utils/task-history';
import type { SyncGeneratorChangesResult } from '../../utils/sync-generators';
export type UnregisterCallback = () => void;

@@ -58,2 +59,6 @@ export type ChangedFile = {

writeTaskRunsToHistory(taskRuns: TaskRun[]): Promise<void>;
getSyncGeneratorChanges(generators: string[]): Promise<SyncGeneratorChangesResult[]>;
flushSyncGeneratorChangesToDisk(generators: string[]): Promise<void>;
getRegisteredSyncGenerators(): Promise<string[]>;
updateWorkspaceContext(createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;
isServerAvailable(): Promise<boolean>;

@@ -60,0 +65,0 @@ private sendToDaemonViaQueue;

@@ -28,2 +28,7 @@ "use strict";

const hash_glob_1 = require("../message-types/hash-glob");
const force_shutdown_1 = require("../message-types/force-shutdown");
const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");
const get_registered_sync_generators_1 = require("../message-types/get-registered-sync-generators");
const update_workspace_context_1 = require("../message-types/update-workspace-context");
const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-sync-generator-changes-to-disk");
const DAEMON_ENV_SETTINGS = {

@@ -248,2 +253,31 @@ NX_PROJECT_GLOB_CACHE: 'false',

}
getSyncGeneratorChanges(generators) {
const message = {
type: get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES,
generators,
};
return this.sendToDaemonViaQueue(message);
}
flushSyncGeneratorChangesToDisk(generators) {
const message = {
type: flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK,
generators,
};
return this.sendToDaemonViaQueue(message);
}
getRegisteredSyncGenerators() {
const message = {
type: get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS,
};
return this.sendToDaemonViaQueue(message);
}
updateWorkspaceContext(createdFiles, updatedFiles, deletedFiles) {
const message = {
type: update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT,
createdFiles,
updatedFiles,
deletedFiles,
};
return this.sendToDaemonViaQueue(message);
}
async isServerAvailable() {

@@ -327,2 +361,6 @@ return new Promise((resolve) => {

}
// An open promise isn't enough to keep the event loop
// alive, so we set a timeout here and clear it when we hear
// back
const keepAlive = setTimeout(() => { }, 10 * 60 * 1000);
return new Promise((resolve, reject) => {

@@ -334,2 +372,4 @@ perf_hooks_1.performance.mark('sendMessageToDaemon-start');

this.socketMessenger.sendMessage(message);
}).finally(() => {
clearTimeout(keepAlive);
});

@@ -405,3 +445,4 @@ }

try {
await (0, cache_1.safelyCleanUpExistingProcess)();
await this.sendMessageToDaemon({ type: force_shutdown_1.FORCE_SHUTDOWN });
await (0, cache_1.waitForDaemonToExitAndCleanupProcessJson)();
}

@@ -408,0 +449,0 @@ catch (err) {

@@ -5,2 +5,3 @@ "use strict";

const shutdown_utils_1 = require("./shutdown-utils");
const server_1 = require("./server");
async function handleRequestShutdown(server, numberOfConnections) {

@@ -19,2 +20,3 @@ // 1 connection is the client asking to shut down

reason: 'Request to shutdown',
sockets: server_1.openSockets,
});

@@ -21,0 +23,0 @@ }, 0);

@@ -23,2 +23,3 @@ import { FileData, FileMap, ProjectGraph } from '../../config/project-graph';

export declare function addUpdatedAndDeletedFiles(createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): void;
export declare function registerProjectGraphRecomputationListener(listener: (projectGraph: ProjectGraph) => void): void;
export {};

@@ -6,2 +6,3 @@ "use strict";

exports.addUpdatedAndDeletedFiles = addUpdatedAndDeletedFiles;
exports.registerProjectGraphRecomputationListener = registerProjectGraphRecomputationListener;
const perf_hooks_1 = require("perf_hooks");

@@ -24,2 +25,3 @@ const nx_json_1 = require("../../config/nx-json");

const collectedDeletedFiles = new Set();
const projectGraphRecomputationListeners = new Set();
let storedWorkspaceConfigHash;

@@ -31,4 +33,6 @@ let waitPeriod = 100;

try {
let wasScheduled = false;
// recomputing it now on demand. we can ignore the scheduled timeout
if (scheduledTimeoutId) {
wasScheduled = true;
clearTimeout(scheduledTimeoutId);

@@ -51,3 +55,7 @@ scheduledTimeoutId = undefined;

}
return await cachedSerializedProjectGraphPromise;
const result = await cachedSerializedProjectGraphPromise;
if (wasScheduled) {
notifyProjectGraphRecomputationListeners(result.projectGraph);
}
return result;
}

@@ -90,9 +98,13 @@ catch (e) {

processFilesAndCreateAndSerializeProjectGraph(await (0, plugins_1.getPlugins)());
await cachedSerializedProjectGraphPromise;
const { projectGraph } = await cachedSerializedProjectGraphPromise;
if (createdFiles.length > 0) {
(0, file_watcher_sockets_1.notifyFileWatcherSockets)(createdFiles, null, null);
}
notifyProjectGraphRecomputationListeners(projectGraph);
}, waitPeriod);
}
}
// Register a callback invoked after each project graph recomputation.
// Backed by a Set, so registering the same function twice is a no-op.
function registerProjectGraphRecomputationListener(listener) {
    projectGraphRecomputationListeners.add(listener);
}
function computeWorkspaceConfigHash(projectsConfigurations) {

@@ -289,1 +301,6 @@ const projectConfigurationStrings = Object.entries(projectsConfigurations)

}
// Invoke every registered recomputation listener with the freshly
// computed project graph.
function notifyProjectGraphRecomputationListeners(projectGraph) {
    projectGraphRecomputationListeners.forEach((listener) => listener(projectGraph));
}

@@ -7,3 +7,4 @@ import { Server, Socket } from 'net';

};
export declare const openSockets: Set<Socket>;
export declare function handleResult(socket: Socket, type: string, hrFn: () => Promise<HandlerResult>): Promise<void>;
export declare function startServer(): Promise<Server>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.openSockets = void 0;
exports.handleResult = handleResult;

@@ -42,2 +43,13 @@ exports.startServer = startServer;

const handle_write_task_runs_to_history_1 = require("./handle-write-task-runs-to-history");
const force_shutdown_1 = require("../message-types/force-shutdown");
const handle_force_shutdown_1 = require("./handle-force-shutdown");
const get_sync_generator_changes_1 = require("../message-types/get-sync-generator-changes");
const handle_get_sync_generator_changes_1 = require("./handle-get-sync-generator-changes");
const sync_generators_1 = require("./sync-generators");
const get_registered_sync_generators_1 = require("../message-types/get-registered-sync-generators");
const handle_get_registered_sync_generators_1 = require("./handle-get-registered-sync-generators");
const update_workspace_context_1 = require("../message-types/update-workspace-context");
const handle_update_workspace_context_1 = require("./handle-update-workspace-context");
const flush_sync_generator_changes_to_disk_1 = require("../message-types/flush-sync-generator-changes-to-disk");
const handle_flush_sync_generator_changes_to_disk_1 = require("./handle-flush-sync-generator-changes-to-disk");
let performanceObserver;

@@ -48,4 +60,6 @@ let workspaceWatcherError;

let numberOfOpenConnections = 0;
exports.openSockets = new Set();
const server = (0, net_1.createServer)(async (socket) => {
numberOfOpenConnections += 1;
exports.openSockets.add(socket);
logger_1.serverLogger.log(`Established a connection. Number of open connections: ${numberOfOpenConnections}`);

@@ -69,2 +83,3 @@ (0, shutdown_utils_1.resetInactivityTimeout)(handleInactivityTimeout);

numberOfOpenConnections -= 1;
exports.openSockets.delete(socket);
logger_1.serverLogger.log(`Closed a connection. Number of open connections: ${numberOfOpenConnections}`);

@@ -137,2 +152,17 @@ (0, file_watcher_sockets_1.removeRegisteredFileWatcherSocket)(socket);

}
else if ((0, force_shutdown_1.isHandleForceShutdownMessage)(payload)) {
await handleResult(socket, 'FORCE_SHUTDOWN', () => (0, handle_force_shutdown_1.handleForceShutdown)(server));
}
else if ((0, get_sync_generator_changes_1.isHandleGetSyncGeneratorChangesMessage)(payload)) {
await handleResult(socket, get_sync_generator_changes_1.GET_SYNC_GENERATOR_CHANGES, () => (0, handle_get_sync_generator_changes_1.handleGetSyncGeneratorChanges)(payload.generators));
}
else if ((0, flush_sync_generator_changes_to_disk_1.isHandleFlushSyncGeneratorChangesToDiskMessage)(payload)) {
await handleResult(socket, flush_sync_generator_changes_to_disk_1.FLUSH_SYNC_GENERATOR_CHANGES_TO_DISK, () => (0, handle_flush_sync_generator_changes_to_disk_1.handleFlushSyncGeneratorChangesToDisk)(payload.generators));
}
else if ((0, get_registered_sync_generators_1.isHandleGetRegisteredSyncGeneratorsMessage)(payload)) {
await handleResult(socket, get_registered_sync_generators_1.GET_REGISTERED_SYNC_GENERATORS, () => (0, handle_get_registered_sync_generators_1.handleGetRegisteredSyncGenerators)());
}
else if ((0, update_workspace_context_1.isHandleUpdateWorkspaceContextMessage)(payload)) {
await handleResult(socket, update_workspace_context_1.UPDATE_WORKSPACE_CONTEXT, () => (0, handle_update_workspace_context_1.handleUpdateWorkspaceContext)(payload.createdFiles, payload.updatedFiles, payload.deletedFiles));
}
else {

@@ -164,2 +194,3 @@ await (0, shutdown_utils_1.respondWithErrorAndExit)(socket, `Invalid payload from the client`, new Error(`Unsupported payload sent to daemon server: ${unparsedPayload}`));

reason: `${shutdown_utils_1.SERVER_INACTIVITY_TIMEOUT_MS}ms of inactivity`,
sockets: exports.openSockets,
});

@@ -173,2 +204,3 @@ }

reason: 'received process SIGINT',
sockets: exports.openSockets,
}))

@@ -178,2 +210,3 @@ .on('SIGTERM', () => (0, shutdown_utils_1.handleServerProcessTermination)({

reason: 'received process SIGTERM',
sockets: exports.openSockets,
}))

@@ -183,2 +216,3 @@ .on('SIGHUP', () => (0, shutdown_utils_1.handleServerProcessTermination)({

reason: 'received process SIGHUP',
sockets: exports.openSockets,
}));

@@ -246,2 +280,3 @@ }

reason: outdatedReason,
sockets: exports.openSockets,
});

@@ -337,2 +372,6 @@ return;

}
// listen for project graph recomputation events to collect and schedule sync generators
(0, project_graph_incremental_recomputation_1.registerProjectGraphRecomputationListener)(sync_generators_1.collectAndScheduleSyncGenerators);
// trigger an initial project graph recomputation
(0, project_graph_incremental_recomputation_1.addUpdatedAndDeletedFiles)([], [], []);
return resolve(server);

@@ -339,0 +378,0 @@ }

@@ -11,4 +11,5 @@ import type { Server, Socket } from 'net';

reason: string;
sockets: Iterable<Socket>;
}
export declare function handleServerProcessTermination({ server, reason, }: HandleServerProcessTerminationParams): Promise<void>;
export declare function handleServerProcessTermination({ server, reason, sockets, }: HandleServerProcessTerminationParams): Promise<void>;
export declare function resetInactivityTimeout(cb: () => void): void;

@@ -15,0 +16,0 @@ export declare function respondToClient(socket: Socket, response: string, description: string): Promise<unknown>;

@@ -33,7 +33,12 @@ "use strict";

}
async function handleServerProcessTermination({ server, reason, }) {
async function handleServerProcessTermination({ server, reason, sockets, }) {
try {
server.close();
(0, cache_1.deleteDaemonJsonProcessCache)();
(0, plugins_1.cleanupPlugins)();
await new Promise((res) => {
server.close(() => {
res(null);
});
for (const socket of sockets) {
socket.destroy();
}
});
if (watcherInstance) {

@@ -47,2 +52,4 @@ await watcherInstance.stop();

}
(0, cache_1.deleteDaemonJsonProcessCache)();
(0, plugins_1.cleanupPlugins)();
logger_1.serverLogger.log(`Server stopped because: "${reason}"`);

@@ -49,0 +56,0 @@ }

@@ -13,2 +13,3 @@ "use strict";

const cache_1 = require("../cache");
const server_1 = require("./server");
const ALWAYS_IGNORE = [

@@ -32,2 +33,3 @@ ...(0, ignore_1.getAlwaysIgnore)(workspace_root_1.workspaceRoot),

reason: 'this process is no longer the current daemon (native)',
sockets: server_1.openSockets,
});

@@ -40,2 +42,3 @@ }

reason: 'Stopping the daemon the set of ignored files changed (native)',
sockets: server_1.openSockets,
});

@@ -42,0 +45,0 @@ }

@@ -18,9 +18,12 @@ "use strict";

*/
const getFullOsSocketPath = () => exports.isWindows
? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)())
: (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
const getFullOsSocketPath = () => {
    // Resolve and validate the daemon socket path first; the Windows
    // named-pipe prefix is only prepended after validation.
    const socketPath = (0, path_1.resolve)((0, tmp_dir_1.getDaemonSocketDir)());
    assertValidSocketPath(socketPath);
    if (exports.isWindows) {
        return '\\\\.\\pipe\\nx\\' + socketPath;
    }
    return socketPath;
};
exports.getFullOsSocketPath = getFullOsSocketPath;
const getForkedProcessOsSocketPath = (id) => {
let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(), 'fp' + id + '.sock'));
return exports.isWindows ? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)(path) : (0, path_1.resolve)(path);
assertValidSocketPath(path);
return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
};

@@ -30,5 +33,15 @@ exports.getForkedProcessOsSocketPath = getForkedProcessOsSocketPath;

let path = (0, path_1.resolve)((0, path_1.join)((0, tmp_dir_1.getSocketDir)(true), 'plugin' + id + '.sock'));
return exports.isWindows ? '\\\\.\\pipe\\nx\\' + (0, path_1.resolve)(path) : (0, path_1.resolve)(path);
assertValidSocketPath(path);
return exports.isWindows ? '\\\\.\\pipe\\nx\\' + path : path;
};
exports.getPluginOsSocketPath = getPluginOsSocketPath;
// Throw when a socket path is too long for the OS to open. Unix domain
// socket paths are limited (roughly 104-108 bytes on common platforms);
// 95 leaves headroom for caller-appended suffixes.
function assertValidSocketPath(path) {
    const MAX_SOCKET_PATH_LENGTH = 95;
    if (path.length > MAX_SOCKET_PATH_LENGTH) {
        const exampleDir = exports.isWindows ? '%TMP%/nx-tmp' : '/tmp/nx-tmp';
        throw new Error(`Attempted to open socket that exceeds the maximum socket length.\n\nSet NX_SOCKET_DIR to a shorter path (e.g. ${exampleDir}) to avoid this issue.`);
    }
}
function killSocketOrPath() {

@@ -35,0 +48,0 @@ try {

@@ -56,3 +56,4 @@ "use strict";

try {
const dir = process.env.NX_DAEMON_SOCKET_DIR ??
const dir = process.env.NX_SOCKET_DIR ??
process.env.NX_DAEMON_SOCKET_DIR ??
(alreadyUnique ? tmp_1.tmpdir : socketDirName());

@@ -59,0 +60,0 @@ (0, fs_extra_1.ensureDirSync)(dir);

@@ -97,3 +97,3 @@ "use strict";

}
function addNxCloudIdToNxJson(tree, nxCloudId, directory = tree.root) {
function addNxCloudIdToNxJson(tree, nxCloudId, directory = '') {
const nxJsonPath = (0, path_1.join)(directory, 'nx.json');

@@ -100,0 +100,0 @@ if (tree.exists(nxJsonPath)) {

@@ -16,3 +16,2 @@ "use strict";

const native_1 = require("../../native");
const os_1 = require("os");
class LoadedNxPlugin {

@@ -76,8 +75,20 @@ constructor(plugin, pluginDefinition) {

exports.nxPluginCache = new Map();
// Decide whether plugins run in isolated worker processes.
// NX_ISOLATE_PLUGINS wins when set explicitly; otherwise isolation is on
// by default, except on WASM builds where it is not supported.
function isIsolationEnabled() {
    const flag = process.env.NX_ISOLATE_PLUGINS;
    if (flag === 'true') {
        return true;
    }
    return flag !== 'false' && !native_1.IS_WASM;
}
async function loadNxPlugins(plugins, root = workspace_root_1.workspaceRoot) {
performance.mark('loadNxPlugins:start');
const loadingMethod = process.env.NX_ISOLATE_PLUGINS === 'true' ||
(!native_1.IS_WASM &&
(0, os_1.platform)() !== 'win32' &&
process.env.NX_ISOLATE_PLUGINS !== 'false')
const loadingMethod = isIsolationEnabled()
? isolation_1.loadNxPluginInIsolation

@@ -84,0 +95,0 @@ : loader_1.loadNxPlugin;

@@ -109,3 +109,7 @@ import { ProjectGraph, ProjectGraphProcessorContext } from '../../../config/project-graph';

}
export type PluginWorkerMessage = PluginWorkerLoadMessage | PluginWorkerCreateNodesMessage | PluginCreateDependenciesMessage | PluginWorkerProcessProjectGraphMessage | PluginCreateMetadataMessage;
export interface PluginWorkerShutdownMessage {
type: 'shutdown';
payload: {};
}
export type PluginWorkerMessage = PluginWorkerLoadMessage | PluginWorkerShutdownMessage | PluginWorkerCreateNodesMessage | PluginCreateDependenciesMessage | PluginWorkerProcessProjectGraphMessage | PluginCreateMetadataMessage;
export type PluginWorkerResult = PluginWorkerLoadResult | PluginWorkerCreateNodesResult | PluginCreateDependenciesResult | PluginWorkerProcessProjectGraphResult | PluginCreateMetadataResult;

@@ -112,0 +116,0 @@ export declare function isPluginWorkerMessage(message: Serializable): message is PluginWorkerMessage;

@@ -17,2 +17,3 @@ "use strict";

'createMetadata',
'shutdown',
].includes(message.type));

@@ -19,0 +20,0 @@ }

@@ -32,4 +32,4 @@ "use strict";

worker.off('exit', exitHandler);
shutdownPluginWorker(socket);
socket.destroy();
shutdownPluginWorker(worker);
nxPluginWorkerCache.delete(cacheKey);

@@ -59,7 +59,4 @@ };

}
function shutdownPluginWorker(worker) {
// Clears the plugin cache so no refs to the workers are held
internal_api_1.nxPluginCache.clear();
// logger.verbose(`[plugin-pool] starting worker shutdown`);
worker.kill('SIGINT');
function shutdownPluginWorker(socket) {
(0, messaging_1.sendMessageOverSocket)(socket, { type: 'shutdown', payload: {} });
}

@@ -205,2 +202,3 @@ /**

const exitHandler = () => {
internal_api_1.nxPluginCache.clear();
for (const fn of cleanupFunctions) {

@@ -207,0 +205,0 @@ fn();

@@ -52,2 +52,17 @@ "use strict";

},
shutdown: async () => {
// Stops accepting new connections, but existing connections are
// not closed immediately.
server.close(() => {
try {
(0, fs_1.unlinkSync)(socketPath);
}
catch (e) { }
process.exit(0);
});
// Closes existing connection.
socket.end();
// Destroys the socket once it's fully closed.
socket.destroySoon();
},
createNodes: async ({ configFiles, context, tx }) => {

@@ -54,0 +69,0 @@ try {

@@ -449,3 +449,3 @@ "use strict";

const targetConfig = project.targets[targetName];
const targetDefaults = readTargetDefaultsForTarget(targetName, nxJsonConfiguration.targetDefaults, targetConfig.executor);
const targetDefaults = deepClone(readTargetDefaultsForTarget(targetName, nxJsonConfiguration.targetDefaults, targetConfig.executor));
// We only apply defaults if they exist

@@ -503,5 +503,8 @@ if (targetDefaults && isCompatibleTarget(targetConfig, targetDefaults)) {

}
// Deep copy via a JSON round-trip. Suitable for plain JSON-serializable
// data; note it drops functions and `undefined` values by design.
function deepClone(obj) {
    const serialized = JSON.stringify(obj);
    return JSON.parse(serialized);
}
function mergeTargetDefaultWithTargetDefinition(targetName, project, targetDefault, sourceMap) {
const targetDefinition = project.targets[targetName] ?? {};
const result = JSON.parse(JSON.stringify(targetDefinition));
const result = deepClone(targetDefinition);
for (const key in targetDefault) {

@@ -508,0 +511,0 @@ switch (key) {

@@ -8,3 +8,3 @@ import { NxJsonConfiguration } from '../config/nx-json';

import { TasksRunner } from './tasks-runner';
export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], projectGraph: ProjectGraph, { nxJson }: {
export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], currentProjectGraph: ProjectGraph, { nxJson }: {
nxJson: NxJsonConfiguration;

@@ -11,0 +11,0 @@ }, nxArgs: NxArgs, overrides: any, initiatingProject: string | null, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, extraOptions: {

@@ -7,2 +7,4 @@ "use strict";

exports.getRunnerOptions = getRunnerOptions;
const enquirer_1 = require("enquirer");
const ora = require("ora");
const path_1 = require("path");

@@ -13,2 +15,3 @@ const nx_json_1 = require("../config/nx-json");

const hash_task_1 = require("../hasher/hash-task");
const project_graph_1 = require("../project-graph/project-graph");
const fileutils_1 = require("../utils/fileutils");

@@ -19,2 +22,3 @@ const is_ci_1 = require("../utils/is-ci");

const params_1 = require("../utils/params");
const sync_generators_1 = require("../utils/sync-generators");
const workspace_root_1 = require("../utils/workspace-root");

@@ -33,2 +37,3 @@ const create_task_graph_1 = require("./create-task-graph");

const utils_1 = require("./utils");
const chalk = require("chalk");
async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {

@@ -97,6 +102,6 @@ const { runnerOptions } = getRunner(nxArgs, nxJson);

}
async function runCommand(projectsToRun, projectGraph, { nxJson }, nxArgs, overrides, initiatingProject, extraTargetDependencies, extraOptions) {
async function runCommand(projectsToRun, currentProjectGraph, { nxJson }, nxArgs, overrides, initiatingProject, extraTargetDependencies, extraOptions) {
const status = await (0, params_1.handleErrors)(process.env.NX_VERBOSE_LOGGING === 'true', async () => {
const projectNames = projectsToRun.map((t) => t.name);
const taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
const { projectGraph, taskGraph } = await ensureWorkspaceIsInSyncAndGetGraphs(currentProjectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions);
const tasks = Object.values(taskGraph.tasks);

@@ -119,2 +124,115 @@ const { lifeCycle, renderIsDone } = await getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides);

}
/**
 * Builds the task graph, and — when sync generators are enabled — runs the
 * sync generators registered on the tasks' targets to check whether the
 * workspace is up to date. Depending on CI/TTY state and the `sync.applyChanges`
 * setting in nx.json, it may error, warn, or apply the changes and rebuild
 * both graphs.
 *
 * Returns `{ projectGraph, taskGraph }`; the graphs are recreated only when
 * sync changes were flushed to disk.
 */
async function ensureWorkspaceIsInSyncAndGetGraphs(projectGraph, nxJson, projectNames, nxArgs, overrides, extraTargetDependencies, extraOptions) {
    let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
    // Feature-flagged: skip all sync-generator work unless explicitly enabled.
    if (process.env.NX_ENABLE_SYNC_GENERATORS !== 'true') {
        return { projectGraph, taskGraph };
    }
    // collect unique syncGenerators from the tasks
    const uniqueSyncGenerators = new Set();
    for (const { target } of Object.values(taskGraph.tasks)) {
        const { syncGenerators } = projectGraph.nodes[target.project].data.targets[target.target];
        if (!syncGenerators) {
            continue;
        }
        for (const generator of syncGenerators) {
            uniqueSyncGenerators.add(generator);
        }
    }
    if (!uniqueSyncGenerators.size) {
        // There are no sync generators registered in the tasks to run
        return { projectGraph, taskGraph };
    }
    const syncGenerators = Array.from(uniqueSyncGenerators);
    const results = await (0, sync_generators_1.getSyncGeneratorChanges)(syncGenerators);
    if (!results.length) {
        // There are no changes to sync, workspace is up to date
        return { projectGraph, taskGraph };
    }
    const outOfSyncTitle = 'The workspace is out of sync';
    const resultBodyLines = (0, sync_generators_1.syncGeneratorResultsToMessageLines)(results);
    const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
    const willErrorOnCiMessage = 'Please note that this will be an error on CI.';
    if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {
        // If the user is running in CI or is running in a non-TTY environment we
        // throw an error to stop the execution of the tasks.
        throw new Error(`${outOfSyncTitle}\n${resultBodyLines.join('\n')}\n${fixMessage}`);
    }
    if (nxJson.sync?.applyChanges === false) {
        // If the user has set `sync.applyChanges` to `false` in their `nx.json`
        // we don't prompt the them and just log a warning informing them that
        // the workspace is out of sync and they have it set to not apply changes
        // automatically.
        output_1.output.warn({
            title: outOfSyncTitle,
            bodyLines: [
                ...resultBodyLines,
                'Your workspace is set to not apply changes automatically (`sync.applyChanges` is set to `false` in your `nx.json`).',
                willErrorOnCiMessage,
                fixMessage,
            ],
        });
        return { projectGraph, taskGraph };
    }
    output_1.output.warn({
        title: outOfSyncTitle,
        bodyLines: [
            ...resultBodyLines,
            nxJson.sync?.applyChanges === true
                ? 'Proceeding to sync the changes automatically (`sync.applyChanges` is set to `true` in your `nx.json`).'
                : willErrorOnCiMessage,
        ],
    });
    // Apply without prompting when opted in via nx.json; otherwise ask.
    const applyChanges = nxJson.sync?.applyChanges === true ||
        (await promptForApplyingSyncGeneratorChanges());
    if (applyChanges) {
        const spinner = ora('Syncing the workspace...');
        spinner.start();
        // Flush sync generator changes to disk
        await (0, sync_generators_1.flushSyncGeneratorChanges)(results);
        // Re-create project graph and task graph
        projectGraph = await (0, project_graph_1.createProjectGraphAsync)();
        taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
        if (nxJson.sync?.applyChanges === true) {
            spinner.succeed(`The workspace was synced successfully!

Please make sure to commit the changes to your repository or this will error on CI.`);
        }
        else {
            // The user was prompted and we already logged a message about erroring on CI
            // so here we just tell them to commit the changes.
            spinner.succeed(`The workspace was synced successfully!

Please make sure to commit the changes to your repository.`);
        }
    }
    else {
        output_1.output.warn({
            title: 'Syncing the workspace was skipped',
            bodyLines: [
                'This could lead to unexpected results or errors when running tasks.',
                fixMessage,
            ],
        });
    }
    return { projectGraph, taskGraph };
}
/**
 * Interactive confirmation shown when the workspace is out of sync and
 * `sync.applyChanges` is not configured in nx.json.
 *
 * Resolves to `true` when the user chooses to apply the sync generator
 * changes before running the tasks.
 */
async function promptForApplyingSyncGeneratorChanges() {
    const promptConfig = {
        name: 'applyChanges',
        type: 'select',
        // Fixed typo: "worskpace" -> "workspace" in the user-facing message.
        message: 'Would you like to sync the changes to get your workspace up to date?',
        choices: [
            {
                name: 'yes',
                message: 'Yes, sync the changes and run the tasks',
            },
            {
                name: 'no',
                message: 'No, run the tasks without syncing the changes',
            },
        ],
        footer: () => chalk.dim('\nYou can skip this prompt by setting the `sync.applyChanges` option in your `nx.json`.'),
    };
    return await (0, enquirer_1.prompt)([promptConfig]).then(({ applyChanges }) => applyChanges === 'yes');
}
function setEnvVarsBasedOnArgs(nxArgs, loadDotEnvFiles) {

@@ -121,0 +239,0 @@ if (nxArgs.outputStyle == 'stream' ||

@@ -35,2 +35,3 @@ import type { Arguments } from 'yargs';

export declare function createOverrides(__overrides_unparsed__?: string[]): Record<string, any>;
export declare function getBaseRef(nxJson: NxJsonConfiguration): string;
export declare function splitArgsIntoNxArgsAndOverrides(args: {

@@ -37,0 +38,0 @@ [k: string]: any;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createOverrides = createOverrides;
exports.getBaseRef = getBaseRef;
exports.splitArgsIntoNxArgsAndOverrides = splitArgsIntoNxArgsAndOverrides;

@@ -27,2 +28,5 @@ exports.readParallelFromArgsAndEnv = readParallelFromArgsAndEnv;

}
function getBaseRef(nxJson) {
return nxJson.defaultBase ?? nxJson.affected?.defaultBase ?? 'main';
}
function splitArgsIntoNxArgsAndOverrides(args, mode, options = { printWarnings: true }, nxJson) {

@@ -74,3 +78,3 @@ // this is to lerna case when this function is invoked imperatively

}
// Allow setting base and head via environment variables (lower priority then direct command arguments)
// Allow setting base and head via environment variables (lower priority than direct command arguments)
if (!nxArgs.base && process.env.NX_BASE) {

@@ -93,4 +97,3 @@ nxArgs.base = process.env.NX_BASE;

if (!nxArgs.base) {
nxArgs.base =
nxJson.defaultBase ?? nxJson.affected?.defaultBase ?? 'main';
nxArgs.base = getBaseRef(nxJson);
// No user-provided arguments to set the affected criteria, so inform the user of the defaults being used

@@ -97,0 +100,0 @@ if (options.printWarnings &&

@@ -0,1 +1,36 @@

import { ExecSyncOptions } from 'child_process';
export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
originName: string;
}): Promise<GitRepository>;
export declare class GitRepository {
private directory;
root: string;
constructor(directory: string);
getGitRootPath(cwd: string): string;
addFetchRemote(remoteName: string, branch: string): Promise<string>;
private execAsync;
showStat(): Promise<string>;
listBranches(): Promise<string[]>;
getGitFiles(path: string): Promise<string[]>;
reset(ref: string): Promise<string>;
squashLastTwoCommits(): Promise<string>;
mergeUnrelatedHistories(ref: string, message: string): Promise<string>;
fetch(remote: string, ref?: string): Promise<string>;
checkout(branch: string, opts: {
new: boolean;
base: string;
}): Promise<string>;
move(path: string, destination: string): Promise<string>;
push(ref: string, remoteName: string): Promise<string>;
commit(message: string): Promise<string>;
amendCommit(): Promise<string>;
deleteGitRemote(name: string): Promise<string>;
deleteBranch(branch: string): Promise<string>;
addGitRemote(name: string, url: string): Promise<string>;
}
/**
* This is used by the squash editor script to update the rebase file.
*/
export declare function updateRebaseFile(contents: string): string;
export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
export declare function getGithubSlugOrNull(): string | null;

@@ -2,0 +37,0 @@ export declare function extractUserAndRepoFromGitHubUrl(gitRemotes: string): string | null;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.GitRepository = void 0;
exports.cloneFromUpstream = cloneFromUpstream;
exports.updateRebaseFile = updateRebaseFile;
exports.fetchGitRemote = fetchGitRemote;
exports.getGithubSlugOrNull = getGithubSlugOrNull;

@@ -9,2 +13,109 @@ exports.extractUserAndRepoFromGitHubUrl = extractUserAndRepoFromGitHubUrl;

const devkit_exports_1 = require("../devkit-exports");
const path_1 = require("path");
const SQUASH_EDITOR = (0, path_1.join)(__dirname, 'squash.js');
// Promise wrapper around child_process.exec: resolves with stdout on
// success, rejects with the original error on failure (stderr discarded).
function execAsync(command, execOptions) {
    return new Promise((resolve, reject) => {
        (0, child_process_1.exec)(command, execOptions, (err, stdout) => {
            if (err) {
                reject(err);
            }
            else {
                resolve(stdout);
            }
        });
    });
}
// Shallow-clone (depth 1) `url` into `destination`, naming the remote
// `originName` (defaults to "origin"), then wrap it in a GitRepository.
async function cloneFromUpstream(url, destination, { originName } = { originName: 'origin' }) {
    const cloneCommand = `git clone ${url} ${destination} --depth 1 --origin ${originName}`;
    await execAsync(cloneCommand, {
        cwd: (0, path_1.dirname)(destination),
    });
    return new GitRepository(destination);
}
/**
 * Thin wrapper around git CLI commands executed in a repository's root.
 * The root is resolved once at construction time via `git rev-parse`.
 *
 * NOTE(review): arguments (refs, paths, commit messages) are interpolated
 * directly into shell commands without escaping — callers are expected to
 * pass trusted values; verify against call sites.
 */
class GitRepository {
    constructor(directory) {
        this.directory = directory;
        // Resolve the repo root synchronously so later commands can run there.
        this.root = this.getGitRootPath(this.directory);
    }
    // Synchronous: returns the toplevel git directory containing `cwd`.
    getGitRootPath(cwd) {
        return (0, child_process_1.execSync)('git rev-parse --show-toplevel', {
            cwd,
        })
            .toString()
            .trim();
    }
    // Adds a fetch refspec so `branch` on `remoteName` can be fetched.
    addFetchRemote(remoteName, branch) {
        return this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
    }
    // Runs a command asynchronously with the repo root as cwd.
    execAsync(command) {
        return execAsync(command, {
            cwd: this.root,
        });
    }
    async showStat() {
        return await this.execAsync(`git show --stat`);
    }
    // Lists remote branch names, stripping the SHA column and the
    // "refs/heads/" prefix from each `git ls-remote` output line.
    async listBranches() {
        return (await this.execAsync(`git ls-remote --heads --quiet`))
            .trim()
            .split('\n')
            .map((s) => s
            .trim()
            .substring(s.indexOf('\t') + 1)
            .replace('refs/heads/', ''));
    }
    // Tracked files under `path`, one per line, empty lines dropped.
    async getGitFiles(path) {
        return (await this.execAsync(`git ls-files ${path}`))
            .trim()
            .split('\n')
            .map((s) => s.trim())
            .filter(Boolean);
    }
    async reset(ref) {
        return this.execAsync(`git reset ${ref} --hard`);
    }
    // Uses the bundled squash editor script (SQUASH_EDITOR) as the rebase
    // editor to turn the last commit into a fixup of its parent.
    async squashLastTwoCommits() {
        return this.execAsync(`git -c core.editor="node ${SQUASH_EDITOR}" rebase --interactive --no-autosquash HEAD~2`);
    }
    // Merge favoring our side (-X ours) and permitting unrelated histories.
    async mergeUnrelatedHistories(ref, message) {
        return this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
    }
    async fetch(remote, ref) {
        return this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
    }
    // Checkout `branch`; with opts.new, create it (optionally from opts.base).
    async checkout(branch, opts) {
        return this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
    }
    async move(path, destination) {
        return this.execAsync(`git mv ${path} ${destination}`);
    }
    // Force-push and set upstream for `ref`.
    async push(ref, remoteName) {
        return this.execAsync(`git push -u -f ${remoteName} ${ref}`);
    }
    // Commits all tracked changes (-a) with the given message.
    async commit(message) {
        return this.execAsync(`git commit -am "${message}"`);
    }
    async amendCommit() {
        return this.execAsync(`git commit --amend -a --no-edit`);
    }
    deleteGitRemote(name) {
        return this.execAsync(`git remote rm ${name}`);
    }
    deleteBranch(branch) {
        return this.execAsync(`git branch -D ${branch}`);
    }
    addGitRemote(name, url) {
        return this.execAsync(`git remote add ${name} ${url}`);
    }
}
exports.GitRepository = GitRepository;
/**
* This is used by the squash editor script to update the rebase file.
*/
// Rewrites a git interactive-rebase todo file so the final commit's action
// becomes "fixup" (squashed into the previous commit). The todo file lists
// one commit per line, terminated by a blank line before the comment block.
function updateRebaseFile(contents) {
    const lines = contents.split('\n');
    const blankLineIndex = lines.findIndex((line) => line === '');
    const lastCommitIndex = blankLineIndex - 1;
    lines[lastCommitIndex] = lines[lastCommitIndex].replace('pick', 'fixup');
    return lines.join('\n');
}
// Synchronously fetch a single branch (depth 1) from the named remote,
// returning execSync's raw output.
function fetchGitRemote(name, branch, execOptions) {
    const fetchCommand = `git fetch ${name} ${branch} --depth 1`;
    return (0, child_process_1.execSync)(fetchCommand, execOptions);
}
function getGithubSlugOrNull() {

@@ -11,0 +122,0 @@ try {

@@ -52,3 +52,3 @@ "use strict";

}
// yarn and pnpm both use the same 'workspaces' property in package.json
// yarn and npm both use the same 'workspaces' property in package.json
const packageJson = (0, file_utils_1.readPackageJson)();

@@ -55,0 +55,0 @@ return !!packageJson?.workspaces;

@@ -30,3 +30,3 @@ "use strict";

}
bodyLines.push(`${chalk.bold(p.name)} (${capabilities.join()})`);
bodyLines.push(`${chalk.bold(p.name)} ${capabilities.length >= 1 ? `(${capabilities.join()})` : ''}`);
}

@@ -33,0 +33,0 @@ output_1.output.log({

@@ -14,2 +14,3 @@ import type { NxWorkspaceFilesExternals } from '../native';

export declare function hashWithWorkspaceContext(workspaceRoot: string, globs: string[], exclude?: string[]): Promise<string>;
export declare function updateContextWithChangedFiles(createdFiles: string[], updatedFiles: string[], deletedFiles: string[]): Promise<void>;
export declare function updateFilesInContext(updatedFiles: string[], deletedFiles: string[]): Record<string, string>;

@@ -16,0 +17,0 @@ export declare function getAllFileDataInContext(workspaceRoot: string): Promise<import("../native").FileData[]>;

@@ -8,2 +8,3 @@ "use strict";

exports.hashWithWorkspaceContext = hashWithWorkspaceContext;
exports.updateContextWithChangedFiles = updateContextWithChangedFiles;
exports.updateFilesInContext = updateFilesInContext;

@@ -60,2 +61,17 @@ exports.getAllFileDataInContext = getAllFileDataInContext;

}
/**
 * Routes file-change notifications to the right workspace-context owner:
 * - daemon disabled: update the local in-process context directly;
 * - running on the daemon: feed the incremental graph recomputation;
 * - daemon enabled but running elsewhere: forward to the daemon client.
 */
async function updateContextWithChangedFiles(createdFiles, updatedFiles, deletedFiles) {
    if (!client_1.daemonClient.enabled()) {
        updateFilesInContext([...createdFiles, ...updatedFiles], deletedFiles);
    }
    else if ((0, is_on_daemon_1.isOnDaemon)()) {
        // make sure to only import this when running on the daemon
        const { addUpdatedAndDeletedFiles } = await Promise.resolve().then(() => require('../daemon/server/project-graph-incremental-recomputation'));
        // update files for the incremental graph recomputation on the daemon
        addUpdatedAndDeletedFiles(createdFiles, updatedFiles, deletedFiles);
    }
    else {
        // daemon is enabled but we are not running on it, ask the daemon to update the context
        await client_1.daemonClient.updateWorkspaceContext(createdFiles, updatedFiles, deletedFiles);
    }
}
function updateFilesInContext(updatedFiles, deletedFiles) {

@@ -62,0 +78,0 @@ return workspaceContext?.incrementalUpdate(updatedFiles, deletedFiles);

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc