Socket
Socket
Sign inDemoInstall

nx

Package Overview
Dependencies
Maintainers
8
Versions
1371
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

nx - npm Package Compare versions

Comparing version 19.7.0-beta.3 to 19.7.0-beta.4

src/command-line/release/utils/get-touched-projects-for-group.d.ts

24

package.json
{
"name": "nx",
"version": "19.7.0-beta.3",
"version": "19.7.0-beta.4",
"private": false,

@@ -74,3 +74,3 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"ora": "5.3.0",
"@nrwl/tao": "19.7.0-beta.3"
"@nrwl/tao": "19.7.0-beta.4"
},

@@ -90,12 +90,12 @@ "peerDependencies": {

"optionalDependencies": {
"@nx/nx-darwin-x64": "19.7.0-beta.3",
"@nx/nx-darwin-arm64": "19.7.0-beta.3",
"@nx/nx-linux-x64-gnu": "19.7.0-beta.3",
"@nx/nx-linux-x64-musl": "19.7.0-beta.3",
"@nx/nx-win32-x64-msvc": "19.7.0-beta.3",
"@nx/nx-linux-arm64-gnu": "19.7.0-beta.3",
"@nx/nx-linux-arm64-musl": "19.7.0-beta.3",
"@nx/nx-linux-arm-gnueabihf": "19.7.0-beta.3",
"@nx/nx-win32-arm64-msvc": "19.7.0-beta.3",
"@nx/nx-freebsd-x64": "19.7.0-beta.3"
"@nx/nx-darwin-x64": "19.7.0-beta.4",
"@nx/nx-darwin-arm64": "19.7.0-beta.4",
"@nx/nx-linux-x64-gnu": "19.7.0-beta.4",
"@nx/nx-linux-x64-musl": "19.7.0-beta.4",
"@nx/nx-win32-x64-msvc": "19.7.0-beta.4",
"@nx/nx-linux-arm64-gnu": "19.7.0-beta.4",
"@nx/nx-linux-arm64-musl": "19.7.0-beta.4",
"@nx/nx-linux-arm-gnueabihf": "19.7.0-beta.4",
"@nx/nx-win32-arm64-msvc": "19.7.0-beta.4",
"@nx/nx-freebsd-x64": "19.7.0-beta.4"
},

@@ -102,0 +102,0 @@ "nx-migrations": {

@@ -44,3 +44,3 @@ import { ChangelogChange } from '../../src/command-line/release/changelog';

repoSlug?: RepoSlug;
conventionalCommitsConfig: NxReleaseConfig['conventionalCommits'];
conventionalCommitsConfig: NxReleaseConfig['conventionalCommits'] | null;
}) => Promise<string> | string;

@@ -47,0 +47,0 @@ /**

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const semver_1 = require("semver");
const conventional_commits_1 = require("../../src/command-line/release/config/conventional-commits");
const github_1 = require("../../src/command-line/release/utils/github");

@@ -13,5 +14,3 @@ // axios types and values don't seem to match

const defaultChangelogRenderer = async ({ projectGraph, changes, releaseVersion, project, entryWhenNoChanges, changelogRenderOptions, dependencyBumps, repoSlug, conventionalCommitsConfig, }) => {
const changeTypes = conventionalCommitsConfig.types;
const markdownLines = [];
const breakingChanges = [];
// If the current range of changes contains both a commit and its revert, we strip them both from the final list. Changes from version plans are unaffected, as they have no hashes.

@@ -30,6 +29,34 @@ for (const change of changes) {

let relevantChanges = changes;
const breakingChanges = [];
// For now to keep the interface of the changelog renderer non-breaking for v19 releases we have a somewhat indirect check for whether or not we are generating a changelog for version plans
const isVersionPlans = !conventionalCommitsConfig;
// Only applicable for version plans
const additionalChangesForAuthorsSection = [];
// Provide a default configuration for version plans to allow most of the subsequent logic to work in the same way it would for conventional commits
// NOTE: The one exception is breaking/major changes, where we do not follow the same structure and instead only show the changes once
if (isVersionPlans) {
conventionalCommitsConfig = {
types: {
feat: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG.types.feat,
fix: conventional_commits_1.DEFAULT_CONVENTIONAL_COMMITS_CONFIG.types.fix,
},
};
// Trim down "relevant changes" to only include non-breaking ones so that we can render them differently under version plans,
// but keep track of the changes for the purposes of the authors section
// TODO(v20): Clean this abstraction up as part of the larger overall refactor of changelog rendering
for (let i = 0; i < relevantChanges.length; i++) {
if (relevantChanges[i].isBreaking) {
const change = relevantChanges[i];
additionalChangesForAuthorsSection.push(change);
const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
breakingChanges.push(line);
relevantChanges.splice(i, 1);
}
}
}
const changeTypes = conventionalCommitsConfig.types;
// workspace root level changelog
if (project === null) {
// No changes for the workspace
if (relevantChanges.length === 0) {
if (relevantChanges.length === 0 && breakingChanges.length === 0) {
if (dependencyBumps?.length) {

@@ -67,3 +94,3 @@ applyAdditionalDependencyBumps({

for (const change of changes) {
const line = formatChange(change, changelogRenderOptions, repoSlug);
const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
markdownLines.push(line);

@@ -85,3 +112,3 @@ if (change.isBreaking) {

// Generating for a named project, but that project has no relevant changes in the current set of commits, exit early
if (relevantChanges.length === 0) {
if (relevantChanges.length === 0 && breakingChanges.length === 0) {
if (dependencyBumps?.length) {

@@ -112,3 +139,3 @@ applyAdditionalDependencyBumps({

for (const change of changesInChronologicalOrder) {
const line = formatChange(change, changelogRenderOptions, repoSlug);
const line = formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug);
markdownLines.push(line + '\n');

@@ -125,3 +152,3 @@ if (change.isBreaking) {

if (breakingChanges.length > 0) {
markdownLines.push('', '#### ⚠️ Breaking Changes', '', ...breakingChanges);
markdownLines.push('', '### ⚠️ Breaking Changes', '', ...breakingChanges);
}

@@ -138,3 +165,6 @@ if (dependencyBumps?.length) {

const _authors = new Map();
for (const change of relevantChanges) {
for (const change of [
...relevantChanges,
...additionalChangesForAuthorsSection,
]) {
if (!change.author) {

@@ -225,3 +255,3 @@ continue;

}
function formatChange(change, changelogRenderOptions, repoSlug) {
function formatChange(change, changelogRenderOptions, isVersionPlans, repoSlug) {
let description = change.description;

@@ -239,5 +269,10 @@ let extraLines = [];

}
/**
* In version plans changelogs:
* - don't repeat the breaking change icon
* - don't render the scope
*/
let changeLine = '- ' +
(change.isBreaking ? '⚠️ ' : '') +
(change.scope ? `**${change.scope.trim()}:** ` : '') +
(!isVersionPlans && change.isBreaking ? '⚠️ ' : '') +
(!isVersionPlans && change.scope ? `**${change.scope.trim()}:** ` : '') +
description;

@@ -244,0 +279,0 @@ if (repoSlug && changelogRenderOptions.commitReferences) {

@@ -509,2 +509,7 @@ {

},
"parallelism": {
"type": "boolean",
"default": true,
"description": "Whether this target can be run in parallel with other tasks"
},
"inputs": {

@@ -511,0 +516,0 @@ "$ref": "#/definitions/inputs"

@@ -15,7 +15,7 @@ "use strict";

type: 'string',
description: 'The package name and optional version (e.g. `@nx/react` or `@nx/react@latest`) to install and initialize. If the version is not specified it will install the same version as the `nx` package for Nx core plugins or the latest version for other packages',
description: 'The package name and optional version (e.g. `@nx/react` or `@nx/react@latest`) to install and initialize. If the version is not specified it will install the same version as the `nx` package for Nx core plugins or the latest version for other packages.',
})
.option('updatePackageScripts', {
type: 'boolean',
description: 'Update `package.json` scripts with inferred targets. Defaults to `true` when the package is a core Nx plugin',
description: 'Update `package.json` scripts with inferred targets. Defaults to `true` when the package is a core Nx plugin.',
})

@@ -22,0 +22,0 @@ .example('$0 add @nx/react', 'Install the latest version of the `@nx/react` package and run its `@nx/react:init` generator')

@@ -9,3 +9,3 @@ "use strict";

command: 'affected',
describe: 'Run target for affected projects',
describe: 'Run target for affected projects.',
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withAffectedOptions)((0, shared_options_1.withRunOptions)((0, shared_options_1.withOutputStyleOption)((0, shared_options_1.withTargetAndConfigurationOption)((0, shared_options_1.withBatch)(yargs)))))

@@ -12,0 +12,0 @@ .option('all', {

@@ -6,9 +6,10 @@ "use strict";

const versions_1 = require("../../utils/versions");
const shared_options_1 = require("../yargs-utils/shared-options");
exports.yargsConnectCommand = {
command: 'connect',
aliases: ['connect-to-nx-cloud'],
describe: `Connect workspace to Nx Cloud`,
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(yargs, 'connect-to-nx-cloud'),
handler: async () => {
await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand();
describe: `Connect workspace to Nx Cloud.`,
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withConnectOptions(yargs), 'connect-to-nx-cloud'),
handler: async (args) => {
await (await Promise.resolve().then(() => require('./connect-to-nx-cloud'))).connectToNxCloudCommand(args);
await (await Promise.resolve().then(() => require('../../utils/ab-testing'))).recordStat({

@@ -22,2 +23,8 @@ command: 'connect',

};
function withConnectOptions(yargs) {
return (0, shared_options_1.withVerbose)(yargs).option('generateToken', {
type: 'boolean',
description: 'Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud.',
});
}
exports.yargsViewLogsCommand = {

@@ -24,0 +31,0 @@ command: 'view-logs',

@@ -8,4 +8,6 @@ import { ConnectToNxCloudOptions } from '../../nx-cloud/generators/connect-to-nx-cloud/connect-to-nx-cloud';

export declare function connectWorkspaceToCloud(options: ConnectToNxCloudOptions, directory?: string): Promise<string>;
export declare function connectToNxCloudCommand(command?: string): Promise<boolean>;
export declare function connectToNxCloudCommand(options: {
generateToken?: boolean;
}, command?: string): Promise<boolean>;
export declare function connectExistingRepoToNxCloudPrompt(command?: string, key?: MessageKey): Promise<boolean>;
export declare function connectToNxCloudWithPrompt(command: string): Promise<void>;

@@ -57,3 +57,3 @@ "use strict";

}
async function connectToNxCloudCommand(command) {
async function connectToNxCloudCommand(options, command) {
const nxJson = (0, configuration_1.readNxJson)();

@@ -70,3 +70,3 @@ const installationSource = process.env.NX_CONSOLE

}
const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token);
const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)(installationSource, token, options?.generateToken !== true);
output_1.output.log({

@@ -83,5 +83,6 @@ title: '✔ This workspace already has Nx Cloud set up',

const token = await connectWorkspaceToCloud({
generateToken: options?.generateToken,
installationSource: command ?? installationSource,
});
const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token);
const connectCloudUrl = await (0, url_shorten_1.createNxCloudOnboardingURL)('nx-connect', token, options?.generateToken !== true);
try {

@@ -121,3 +122,5 @@ const cloudConnectSpinner = ora(`Opening Nx Cloud ${connectCloudUrl} in your browser to connect your workspace.`).start();

const setNxCloud = await nxCloudPrompt('setupNxCloud');
const useCloud = setNxCloud === 'yes' ? await connectToNxCloudCommand(command) : false;
const useCloud = setNxCloud === 'yes'
? await connectToNxCloudCommand({ generateToken: false }, command)
: false;
await (0, ab_testing_1.recordStat)({

@@ -124,0 +127,0 @@ command,

@@ -7,3 +7,3 @@ "use strict";

command: 'daemon',
describe: 'Prints information about the Nx Daemon process or starts a daemon process',
describe: 'Prints information about the Nx Daemon process or starts a daemon process.',
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withDaemonOptions(yargs), 'daemon'),

@@ -10,0 +10,0 @@ handler: async (args) => (await Promise.resolve().then(() => require('./daemon'))).daemonHandler(args),

@@ -34,3 +34,3 @@ "use strict";

type: 'string',
describe: 'Select the subset of the returned json document (e.g., --select=projects)',
describe: 'Select the subset of the returned json document (e.g., --select=projects).',
})

@@ -40,3 +40,3 @@ .option('type', {

choices: ['app', 'lib'],
describe: 'Select the type of projects to be returned (e.g., --type=app)',
describe: 'Select the type of projects to be returned (e.g., --type=app).',
}),

@@ -43,0 +43,0 @@ handler: async (args) => {

@@ -7,3 +7,3 @@ "use strict";

command: 'exec',
describe: 'Executes any command as if it was a target on the project',
describe: 'Executes any command as if it was a target on the project.',
builder: (yargs) => (0, shared_options_1.withRunManyOptions)(yargs),

@@ -10,0 +10,0 @@ handler: async (args) => {

@@ -8,3 +8,3 @@ "use strict";

command: 'format:check',
describe: 'Check for un-formatted files',
describe: 'Check for un-formatted files.',
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withFormatOptions(yargs), 'format:check'),

@@ -18,3 +18,3 @@ handler: async (args) => {

command: 'format:write',
describe: 'Overwrite un-formatted files',
describe: 'Overwrite un-formatted files.',
aliases: ['format'],

@@ -37,3 +37,3 @@ builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withFormatOptions(yargs), 'format:write'),

.option('projects', {
describe: 'Projects to format (comma/space delimited)',
describe: 'Projects to format (comma/space delimited).',
type: 'string',

@@ -43,3 +43,3 @@ coerce: shared_options_1.parseCSV,

.option('all', {
describe: 'Format all projects',
describe: 'Format all projects.',
type: 'boolean',

@@ -46,0 +46,0 @@ })

@@ -21,3 +21,3 @@ "use strict";

.positional('generator', {
describe: 'Name of the generator (e.g., @nx/js:library, library)',
describe: 'Name of the generator (e.g., @nx/js:library, library).',
type: 'string',

@@ -27,3 +27,3 @@ required: true,

.option('dryRun', {
describe: 'Preview the changes without updating files',
describe: 'Preview the changes without updating files.',
alias: 'd',

@@ -34,3 +34,3 @@ type: 'boolean',

.option('interactive', {
describe: 'When false disables interactive input prompts for options',
describe: 'When false disables interactive input prompts for options.',
type: 'boolean',

@@ -40,3 +40,3 @@ default: true,

.option('quiet', {
describe: 'Hides logs from tree operations (e.g. `CREATE package.json`)',
describe: 'Hides logs from tree operations (e.g. `CREATE package.json`).',
type: 'boolean',

@@ -43,0 +43,0 @@ conflicts: ['verbose'],

@@ -9,3 +9,3 @@ "use strict";

command: 'graph',
describe: 'Graph dependencies within workspace',
describe: 'Graph dependencies within workspace.',
aliases: ['dep-graph'],

@@ -15,3 +15,3 @@ builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withVerbose)((0, shared_options_1.withAffectedOptions)(withGraphOptions(yargs))), 'dep-graph')

type: 'boolean',
description: 'Highlight affected projects',
description: 'Highlight affected projects.',
})

@@ -36,3 +36,3 @@ .implies('untracked', 'affected')

.option('view', {
describe: 'Choose whether to view the projects or task graph',
describe: 'Choose whether to view the projects or task graph.',
type: 'string',

@@ -43,3 +43,3 @@ default: 'projects',

.option('targets', {
describe: 'The target to show tasks for in the task graph',
describe: 'The target to show tasks for in the task graph.',
type: 'string',

@@ -58,3 +58,3 @@ coerce: shared_options_1.parseCSV,

.option('groupByFolder', {
describe: 'Group projects by folder in the project graph',
describe: 'Group projects by folder in the project graph.',
type: 'boolean',

@@ -71,3 +71,3 @@ })

.option('watch', {
describe: 'Watch for changes to project graph and update in-browser',
describe: 'Watch for changes to project graph and update in-browser.',
type: 'boolean',

@@ -74,0 +74,0 @@ default: true,

@@ -13,19 +13,23 @@ "use strict";

type: 'string',
description: 'The remote URL of the source to import',
description: 'The remote URL of the source to import.',
})
.positional('destination', {
type: 'string',
description: 'The directory in the current workspace to import into',
description: 'The directory in the current workspace to import into.',
})
.option('source', {
type: 'string',
description: 'The directory in the source repository to import from',
description: 'The directory in the source repository to import from.',
})
.option('ref', {
type: 'string',
description: 'The branch from the source repository to import',
description: 'The branch from the source repository to import.',
})
.option('depth', {
type: 'number',
description: 'The depth to clone the source repository (limit this for faster git clone).',
})
.option('interactive', {
type: 'boolean',
description: 'Interactive mode',
description: 'Interactive mode.',
default: true,

@@ -32,0 +36,0 @@ })), 'import'),

@@ -18,2 +18,6 @@ export interface ImportOptions {

destination: string;
/**
* The depth to clone the source repository (limit this for faster clone times)
*/
depth: number;
verbose: boolean;

@@ -20,0 +24,0 @@ interactive: boolean;

@@ -5,2 +5,6 @@ "use strict";

const path_1 = require("path");
const minimatch_1 = require("minimatch");
const node_fs_1 = require("node:fs");
const chalk = require("chalk");
const js_yaml_1 = require("@zkochan/js-yaml");
const git_utils_1 = require("../../utils/git-utils");

@@ -22,2 +26,3 @@ const promises_1 = require("node:fs/promises");

const needs_install_1 = require("./utils/needs-install");
const file_utils_1 = require("../../project-graph/file-utils");
const importRemoteName = '__tmp_nx_import__';

@@ -59,3 +64,3 @@ async function importHandler(options) {

const sourceRepoPath = (0, path_1.join)(tempImportDirectory, 'repo');
const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath}`).start();
const spinner = createSpinner(`Cloning ${sourceRemoteUrl} into a temporary directory: ${sourceRepoPath} (Use --depth to limit commit history and speed up clone times)`).start();
try {

@@ -70,2 +75,3 @@ await (0, promises_1.rm)(tempImportDirectory, { recursive: true });

originName: importRemoteName,
depth: options.depth,
});

@@ -79,2 +85,4 @@ }

spinner.succeed(`Cloned into ${sourceRepoPath}`);
// Detecting the package manager before preparing the source repo for import.
const sourcePackageManager = (0, package_manager_1.detectPackageManager)(sourceGitClient.root);
if (!ref) {

@@ -112,2 +120,3 @@ const branchChoices = await sourceGitClient.listBranches();

required: true,
initial: source ? source : undefined,
},

@@ -118,2 +127,14 @@ ])).destination;

const absDestination = (0, path_1.join)(process.cwd(), destination);
const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
await assertDestinationEmpty(destinationGitClient, absDestination);
const tempImportBranch = getTempImportBranch(ref);
await sourceGitClient.addFetchRemote(importRemoteName, ref);
await sourceGitClient.fetch(importRemoteName, ref);
spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
await sourceGitClient.checkout(tempImportBranch, {
new: true,
base: `${importRemoteName}/${ref}`,
});
spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
try {

@@ -125,9 +146,6 @@ await (0, promises_1.stat)(absSource);

}
const destinationGitClient = new git_utils_1.GitRepository(process.cwd());
await assertDestinationEmpty(destinationGitClient, absDestination);
const tempImportBranch = getTempImportBranch(ref);
const packageManager = (0, package_manager_1.detectPackageManager)(workspace_root_1.workspaceRoot);
const originalPackageWorkspaces = await (0, needs_install_1.getPackagesInPackageManagerWorkspace)(packageManager);
const relativeDestination = (0, path_1.relative)(destinationGitClient.root, absDestination);
await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, importRemoteName);
await (0, prepare_source_repo_1.prepareSourceRepo)(sourceGitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl);
await createTemporaryRemote(destinationGitClient, (0, path_1.join)(sourceRepoPath, '.git'), importRemoteName);

@@ -143,15 +161,65 @@ await (0, merge_remote_source_1.mergeRemoteSource)(destinationGitClient, sourceRemoteUrl, tempImportBranch, destination, importRemoteName, ref);

const { plugins, updatePackageScripts } = await (0, init_v2_1.detectPlugins)(nxJson, options.interactive);
if (packageManager !== sourcePackageManager) {
output_1.output.warn({
title: `Mismatched package managers`,
bodyLines: [
`The source repository is using a different package manager (${sourcePackageManager}) than this workspace (${packageManager}).`,
`This could lead to install issues due to discrepancies in "package.json" features.`,
],
});
}
// If install fails, we should continue since the errors could be resolved later.
let installFailed = false;
if (plugins.length > 0) {
output_1.output.log({ title: 'Installing Plugins' });
(0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
await destinationGitClient.amendCommit();
try {
output_1.output.log({ title: 'Installing Plugins' });
(0, init_v2_1.installPlugins)(workspace_root_1.workspaceRoot, plugins, pmc, updatePackageScripts);
await destinationGitClient.amendCommit();
}
catch (e) {
installFailed = true;
output_1.output.error({
title: `Install failed: ${e.message || 'Unknown error'}`,
bodyLines: [e.stack],
});
}
}
else if (await (0, needs_install_1.needsInstall)(packageManager, originalPackageWorkspaces)) {
try {
output_1.output.log({
title: 'Installing dependencies for imported code',
});
(0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
await destinationGitClient.amendCommit();
}
catch (e) {
installFailed = true;
output_1.output.error({
title: `Install failed: ${e.message || 'Unknown error'}`,
bodyLines: [e.stack],
});
}
}
console.log(await destinationGitClient.showStat());
if (installFailed) {
const pmc = (0, package_manager_1.getPackageManagerCommand)(packageManager);
output_1.output.warn({
title: `The import was successful, but the install failed`,
bodyLines: [
`You may need to run "${pmc.install}" manually to resolve the issue. The error is logged above.`,
],
});
}
await warnOnMissingWorkspacesEntry(packageManager, pmc, relativeDestination);
// When only a subdirectory is imported, there might be devDependencies in the root package.json file
// that needs to be ported over as well.
if (ref) {
output_1.output.log({
title: 'Installing dependencies for imported code',
title: `Check root dependencies`,
bodyLines: [
`"dependencies" and "devDependencies" are not imported from the source repository (${sourceRemoteUrl}).`,
`You may need to add some of those dependencies to this workspace in order to run tasks successfully.`,
],
});
(0, utils_1.runInstall)(workspace_root_1.workspaceRoot, (0, package_manager_1.getPackageManagerCommand)(packageManager));
await destinationGitClient.amendCommit();
}
console.log(await destinationGitClient.showStat());
output_1.output.log({

@@ -183,1 +251,68 @@ title: `Merging these changes into ${(0, command_line_utils_1.getBaseRef)(nxJson)}`,

}
// If the user imports a project that isn't in NPM/Yarn/PNPM workspaces, then its dependencies
// will not be installed. We should warn users and provide instructions on how to fix this.
async function warnOnMissingWorkspacesEntry(pm, pmc, pkgPath) {
if (!(0, package_manager_1.isWorkspacesEnabled)(pm, workspace_root_1.workspaceRoot)) {
output_1.output.warn({
title: `Missing workspaces in package.json`,
bodyLines: pm === 'npm'
? [
`We recommend enabling NPM workspaces to install dependencies for the imported project.`,
`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
`See: https://docs.npmjs.com/cli/using-npm/workspaces`,
]
: pm === 'yarn'
? [
`We recommend enabling Yarn workspaces to install dependencies for the imported project.`,
`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
`See: https://yarnpkg.com/features/workspaces`,
]
: pm === 'bun'
? [
`We recommend enabling Bun workspaces to install dependencies for the imported project.`,
`Add \`"workspaces": ["${pkgPath}"]\` to package.json and run "${pmc.install}".`,
`See: https://bun.sh/docs/install/workspaces`,
]
: [
`We recommend enabling PNPM workspaces to install dependencies for the imported project.`,
`Add the following entry to to pnpm-workspace.yaml and run "${pmc.install}":`,
chalk.bold(`packages:\n - '${pkgPath}'`),
`See: https://pnpm.io/workspaces`,
],
});
}
else {
// Check if the new package is included in existing workspaces entries. If not, warn the user.
let workspaces = null;
if (pm === 'npm' || pm === 'yarn' || pm === 'bun') {
const packageJson = (0, file_utils_1.readPackageJson)();
workspaces = packageJson.workspaces;
}
else if (pm === 'pnpm') {
const yamlPath = (0, path_1.join)(workspace_root_1.workspaceRoot, 'pnpm-workspace.yaml');
if ((0, node_fs_1.existsSync)(yamlPath)) {
const yamlContent = await node_fs_1.promises.readFile(yamlPath, 'utf-8');
const yaml = (0, js_yaml_1.load)(yamlContent);
workspaces = yaml.packages;
}
}
if (workspaces) {
const isPkgIncluded = workspaces.some((w) => (0, minimatch_1.minimatch)(pkgPath, w));
if (!isPkgIncluded) {
const pkgsDir = (0, path_1.dirname)(pkgPath);
output_1.output.warn({
title: `Project missing in workspaces`,
bodyLines: pm === 'npm' || pm === 'yarn' || pm === 'bun'
? [
`The imported project (${pkgPath}) is missing the "workspaces" field in package.json.`,
`Add "${pkgsDir}/*" to workspaces run "${pmc.install}".`,
]
: [
`The imported project (${pkgPath}) is missing the "packages" field in pnpm-workspaces.yaml.`,
`Add "${pkgsDir}/*" to packages run "${pmc.install}".`,
],
});
}
}
}
}
import { GitRepository } from '../../../utils/git-utils';
export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string, originName: string): Promise<void>;
export declare function prepareSourceRepo(gitClient: GitRepository, ref: string, source: string, relativeDestination: string, tempImportBranch: string, sourceRemoteUrl: string): Promise<void>;

@@ -7,95 +7,41 @@ "use strict";

const promises_1 = require("node:fs/promises");
async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl, originName) {
async function prepareSourceRepo(gitClient, ref, source, relativeDestination, tempImportBranch, sourceRemoteUrl) {
const spinner = createSpinner().start(`Fetching ${ref} from ${sourceRemoteUrl}`);
await gitClient.addFetchRemote(originName, ref);
await gitClient.fetch(originName, ref);
spinner.succeed(`Fetched ${ref} from ${sourceRemoteUrl}`);
spinner.start(`Checking out a temporary branch, ${tempImportBranch} based on ${ref}`);
await gitClient.checkout(tempImportBranch, {
new: true,
base: `${originName}/${ref}`,
});
spinner.succeed(`Created a ${tempImportBranch} branch based on ${ref}`);
const relativeSourceDir = (0, path_1.relative)(gitClient.root, (0, path_1.join)(gitClient.root, source));
if (relativeSourceDir !== '') {
if (await gitClient.hasFilterRepoInstalled()) {
spinner.start(`Filtering git history to only include files in ${relativeSourceDir}`);
await gitClient.filterRepo(relativeSourceDir);
}
else {
spinner.start(`Filtering git history to only include files in ${relativeSourceDir} (this might take a few minutes -- install git-filter-repo for faster performance)`);
await gitClient.filterBranch(relativeSourceDir, tempImportBranch);
}
spinner.succeed(`Filtered git history to only include files in ${relativeSourceDir}`);
}
const destinationInSource = (0, path_1.join)(gitClient.root, relativeDestination);
spinner.start(`Moving files and git history to ${destinationInSource}`);
if (relativeSourceDir === '') {
const files = await gitClient.getGitFiles('.');
// The result of filter-branch will contain only the files in the subdirectory at its root.
const files = await gitClient.getGitFiles('.');
try {
await (0, promises_1.rm)(destinationInSource, {
recursive: true,
});
}
catch { }
await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
for (const file of files) {
spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
const newPath = (0, path_1.join)(destinationInSource, file);
await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
try {
await (0, promises_1.rm)(destinationInSource, {
recursive: true,
});
await gitClient.move(file, newPath);
}
catch { }
await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
const gitignores = new Set();
for (const file of files) {
if ((0, path_1.basename)(file) === '.gitignore') {
gitignores.add(file);
continue;
}
spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
const newPath = (0, path_1.join)(destinationInSource, file);
await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
try {
await gitClient.move(file, newPath);
}
catch {
await wait(100);
await gitClient.move(file, newPath);
}
catch {
await wait(100);
await gitClient.move(file, newPath);
}
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
for (const gitignore of gitignores) {
await gitClient.move(gitignore, (0, path_1.join)(destinationInSource, gitignore));
}
await gitClient.amendCommit();
for (const gitignore of gitignores) {
await (0, promises_1.copyFile)((0, path_1.join)(destinationInSource, gitignore), (0, path_1.join)(gitClient.root, gitignore));
}
}
else {
let needsSquash = false;
const needsMove = destinationInSource !== (0, path_1.join)(gitClient.root, source);
if (needsMove) {
try {
await (0, promises_1.rm)(destinationInSource, {
recursive: true,
});
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
needsSquash = true;
}
catch { }
await (0, promises_1.mkdir)(destinationInSource, { recursive: true });
}
const files = await gitClient.getGitFiles('.');
for (const file of files) {
if (file === '.gitignore') {
continue;
}
spinner.start(`Moving files and git history to ${destinationInSource}: ${file}`);
if (!(0, path_1.relative)(source, file).startsWith('..')) {
if (needsMove) {
const newPath = (0, path_1.join)(destinationInSource, file);
await (0, promises_1.mkdir)((0, path_1.dirname)(newPath), { recursive: true });
try {
await gitClient.move(file, newPath);
}
catch {
await wait(100);
await gitClient.move(file, newPath);
}
}
}
else {
await (0, promises_1.rm)((0, path_1.join)(gitClient.root, file), {
recursive: true,
});
}
}
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
if (needsSquash) {
await gitClient.squashLastTwoCommits();
}
}
await gitClient.commit(`chore(repo): move ${source} to ${relativeDestination} to prepare to be imported`);
await gitClient.amendCommit();
spinner.succeed(`${sourceRemoteUrl} has been prepared to be imported into this workspace on a temporary branch: ${tempImportBranch} in ${gitClient.root}`);

@@ -102,0 +48,0 @@ }

@@ -9,3 +9,3 @@ "use strict";

type: 'string',
description: 'The name of an installed plugin to query',
description: 'The name of an installed plugin to query.',
}),

@@ -12,0 +12,0 @@ handler: async (args) => {

@@ -7,3 +7,3 @@ "use strict";

command: 'login [nxCloudUrl]',
describe: 'Login to Nx Cloud',
describe: false,
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs.positional('nxCloudUrl', {

@@ -10,0 +10,0 @@ describe: 'The Nx Cloud URL of the instance you are trying to connect to. If no positional argument is provided, this command will connect to https://cloud.nx.app.',

@@ -7,3 +7,3 @@ "use strict";

command: 'logout',
describe: 'Logout from Nx Cloud',
describe: false,
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs),

@@ -10,0 +10,0 @@ handler: async (args) => {

@@ -33,11 +33,11 @@ "use strict";

.positional('packageAndVersion', {
describe: `The target package and version (e.g, @nx/workspace@16.0.0)`,
describe: `The target package and version (e.g, @nx/workspace@16.0.0).`,
type: 'string',
})
.option('runMigrations', {
describe: `Execute migrations from a file (when the file isn't provided, execute migrations from migrations.json)`,
describe: `Execute migrations from a file (when the file isn't provided, execute migrations from migrations.json).`,
type: 'string',
})
.option('ifExists', {
describe: `Run migrations only if the migrations file exists, if not continues successfully`,
describe: `Run migrations only if the migrations file exists, if not continues successfully.`,
type: 'boolean',

@@ -47,11 +47,11 @@ default: false,

.option('from', {
describe: 'Use the provided versions for packages instead of the ones installed in node_modules (e.g., --from="@nx/react@16.0.0,@nx/js@16.0.0")',
describe: 'Use the provided versions for packages instead of the ones installed in node_modules (e.g., --from="@nx/react@16.0.0,@nx/js@16.0.0").',
type: 'string',
})
.option('to', {
describe: 'Use the provided versions for packages instead of the ones calculated by the migrator (e.g., --to="@nx/react@16.0.0,@nx/js@16.0.0")',
describe: 'Use the provided versions for packages instead of the ones calculated by the migrator (e.g., --to="@nx/react@16.0.0,@nx/js@16.0.0").',
type: 'string',
})
.option('createCommits', {
describe: 'Automatically create a git commit after each migration runs',
describe: 'Automatically create a git commit after each migration runs.',
type: 'boolean',

@@ -62,3 +62,3 @@ alias: ['C'],

.option('commitPrefix', {
describe: 'Commit prefix to apply to the commit for each migration, when --create-commits is enabled',
describe: 'Commit prefix to apply to the commit for each migration, when --create-commits is enabled.',
type: 'string',

@@ -68,3 +68,3 @@ default: defaultCommitPrefix,

.option('interactive', {
describe: 'Enable prompts to confirm whether to collect optional package updates and migrations',
describe: 'Enable prompts to confirm whether to collect optional package updates and migrations.',
type: 'boolean',

@@ -74,3 +74,3 @@ default: false,

.option('excludeAppliedMigrations', {
describe: 'Exclude migrations that should have been applied on previous updates. To be used with --from',
describe: 'Exclude migrations that should have been applied on previous updates. To be used with --from.',
type: 'boolean',

@@ -77,0 +77,0 @@ default: false,

@@ -16,3 +16,3 @@ "use strict";

.option('nxWorkspaceRoot', {
describe: 'The folder where the new workspace is going to be created',
describe: 'The folder where the new workspace is going to be created.',
type: 'string',

@@ -22,3 +22,3 @@ required: true,

.option('interactive', {
describe: 'When false disables interactive input prompts for options',
describe: 'When false disables interactive input prompts for options.',
type: 'boolean',

@@ -25,0 +25,0 @@ default: true,

@@ -84,3 +84,3 @@ "use strict";

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
await (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes), args.verbose);
if (args.deleteVersionPlans === undefined) {

@@ -150,2 +150,11 @@ // default to deleting version plans in this command instead of after versioning

const releaseType = versionPlanSemverReleaseTypeToChangelogType(vp.groupVersionBump);
let githubReferences = [];
let author = undefined;
const parsedCommit = vp.commit
? (0, git_1.parseGitCommit)(vp.commit, true)
: null;
if (parsedCommit) {
githubReferences = parsedCommit.references;
author = parsedCommit.author;
}
const changes = !vp.triggeredByProjects

@@ -158,3 +167,4 @@ ? {

isBreaking: releaseType.isBreaking,
githubReferences: [],
githubReferences,
author,
affectedProjects: '*',

@@ -168,5 +178,5 @@ }

body: '',
// TODO: what about github references?
isBreaking: releaseType.isBreaking,
githubReferences: [],
githubReferences,
author,
affectedProjects: [project],

@@ -308,2 +318,11 @@ };

const releaseType = versionPlanSemverReleaseTypeToChangelogType(bumpForProject);
let githubReferences = [];
let author = undefined;
const parsedCommit = vp.commit
? (0, git_1.parseGitCommit)(vp.commit, true)
: null;
if (parsedCommit) {
githubReferences = parsedCommit.references;
author = parsedCommit.author;
}
return {

@@ -316,4 +335,4 @@ type: releaseType.type,

affectedProjects: Object.keys(vp.projectVersionBumps),
// TODO: can we include github references when using version plans?
githubReferences: [],
githubReferences,
author,
};

@@ -406,2 +425,11 @@ })

const releaseType = versionPlanSemverReleaseTypeToChangelogType(vp.groupVersionBump);
let githubReferences = [];
let author = undefined;
const parsedCommit = vp.commit
? (0, git_1.parseGitCommit)(vp.commit, true)
: null;
if (parsedCommit) {
githubReferences = parsedCommit.references;
author = parsedCommit.author;
}
const changes = !vp.triggeredByProjects

@@ -414,3 +442,4 @@ ? {

isBreaking: releaseType.isBreaking,
githubReferences: [],
githubReferences,
author,
affectedProjects: '*',

@@ -424,5 +453,5 @@ }

body: '',
// TODO: what about github references?
isBreaking: releaseType.isBreaking,
githubReferences: [],
githubReferences,
author,
affectedProjects: [project],

@@ -591,3 +620,3 @@ };

let deletedFiles = [];
if (args.deleteVersionPlans && !args.dryRun) {
if (args.deleteVersionPlans) {
const planFiles = new Set();

@@ -597,3 +626,13 @@ releaseGroups.forEach((group) => {

group.resolvedVersionPlans.forEach((plan) => {
(0, fs_extra_1.removeSync)(plan.absolutePath);
if (!args.dryRun) {
(0, fs_extra_1.removeSync)(plan.absolutePath);
if (args.verbose) {
console.log(`Removing ${plan.relativePath}`);
}
}
else {
if (args.verbose) {
console.log(`Would remove ${plan.relativePath}, but --dry-run was set`);
}
}
planFiles.add(plan.relativePath);

@@ -815,3 +854,5 @@ });

changelogRenderOptions: config.renderOptions,
conventionalCommitsConfig: nxReleaseConfig.conventionalCommits,
conventionalCommitsConfig: releaseGroup.versionPlans
? null
: nxReleaseConfig.conventionalCommits,
dependencyBumps: projectToAdditionalDependencyBumps.get(project.name),

@@ -818,0 +859,0 @@ });

@@ -42,2 +42,3 @@ import { CommandModule } from 'yargs';

tag?: string;
access?: string;
otp?: number;

@@ -48,2 +49,3 @@ };

message?: string;
onlyTouched?: boolean;
};

@@ -50,0 +52,0 @@ export type PlanCheckOptions = BaseNxReleaseArgs & {

@@ -11,3 +11,3 @@ "use strict";

command: 'release',
describe: 'Orchestrate versioning and publishing of applications and libraries',
describe: 'Orchestrate versioning and publishing of applications and libraries.',
builder: (yargs) => (0, shared_options_1.withVerbose)(yargs)

@@ -33,6 +33,6 @@ .command(releaseCommand)

coerce: shared_options_1.parseCSV,
describe: 'Projects to run. (comma/space delimited project names and/or patterns)',
describe: 'Projects to run. (comma/space delimited project names and/or patterns).',
})
.option('dry-run', {
describe: 'Preview the changes without updating files/creating releases',
describe: 'Preview the changes without updating files/creating releases.',
alias: 'd',

@@ -45,3 +45,3 @@ type: 'boolean',

type: 'string',
describe: 'Print the resolved nx release configuration that would be used for the current command and then exit',
describe: 'Print the resolved nx release configuration that would be used for the current command and then exit.',
coerce: (val) => {

@@ -78,3 +78,3 @@ if (val === '') {

command: '$0 [specifier]',
describe: 'Create a version and release for the workspace, generate a changelog, and optionally publish the packages',
describe: 'Create a version and release for the workspace, generate a changelog, and optionally publish the packages.',
builder: (yargs) => withFirstReleaseOptions(yargs)

@@ -88,7 +88,7 @@ .positional('specifier', {

alias: 'y',
description: 'Automatically answer yes to the confirmation prompt for publishing',
description: 'Automatically answer yes to the confirmation prompt for publishing.',
})
.option('skip-publish', {
type: 'boolean',
description: 'Skip publishing by automatically answering no to the confirmation prompt for publishing',
description: 'Skip publishing by automatically answering no to the confirmation prompt for publishing.',
})

@@ -113,3 +113,3 @@ .check((argv) => {

aliases: ['v'],
describe: 'Create a version and release for one or more applications and libraries',
describe: 'Create a version and release for one or more applications and libraries.',
builder: (yargs) => withFirstReleaseOptions(withGitCommitAndGitTagOptions(yargs

@@ -122,3 +122,3 @@ .positional('specifier', {

type: 'string',
describe: 'The optional prerelease identifier to apply to the version. This will only be applied in the case that the specifier argument has been set to `prerelease` OR when conventional commits are enabled, in which case it will modify the resolved specifier from conventional commits to be its prerelease equivalent. E.g. minor -> preminor',
describe: 'The optional prerelease identifier to apply to the version. This will only be applied in the case that the specifier argument has been set to `prerelease` OR when conventional commits are enabled, in which case it will modify the resolved specifier from conventional commits to be its prerelease equivalent. E.g. minor -> preminor.',
default: '',

@@ -142,3 +142,3 @@ })

aliases: ['c'],
describe: 'Generate a changelog for one or more projects, and optionally push to Github',
describe: 'Generate a changelog for one or more projects, and optionally push to Github.',
builder: (yargs) => withFirstReleaseOptions(withGitCommitAndGitTagOptions(yargs

@@ -149,11 +149,11 @@ // Disable default meaning of yargs version for this command

type: 'string',
description: 'The version to create a Github release and changelog for',
description: 'The version to create a Github release and changelog for.',
})
.option('from', {
type: 'string',
description: 'The git reference to use as the start of the changelog. If not set it will attempt to resolve the latest tag and use that',
description: 'The git reference to use as the start of the changelog. If not set it will attempt to resolve the latest tag and use that.',
})
.option('to', {
type: 'string',
description: 'The git reference to use as the end of the changelog',
description: 'The git reference to use as the end of the changelog.',
default: 'HEAD',

@@ -164,3 +164,3 @@ })

type: 'string',
description: 'Interactively modify changelog markdown contents in your code editor before applying the changes. You can set it to be interactive for all changelogs, or only the workspace level, or only the project level',
description: 'Interactively modify changelog markdown contents in your code editor before applying the changes. You can set it to be interactive for all changelogs, or only the workspace level, or only the project level.',
choices: ['all', 'workspace', 'projects'],

@@ -170,3 +170,3 @@ })

type: 'string',
description: 'Alternate git remote in the form {user}/{repo} on which to create the Github release (useful for testing)',
description: 'Alternate git remote in the form {user}/{repo} on which to create the Github release (useful for testing).',
default: 'origin',

@@ -192,15 +192,20 @@ })

aliases: ['p'],
describe: 'Publish a versioned project to a registry',
describe: 'Publish a versioned project to a registry.',
builder: (yargs) => withFirstReleaseOptions((0, shared_options_1.withRunManyOptions)((0, shared_options_1.withOutputStyleOption)(yargs))
.option('registry', {
type: 'string',
description: 'The registry to publish to',
description: 'The registry to publish to.',
})
.option('tag', {
type: 'string',
description: 'The distribution tag to apply to the published package',
description: 'The distribution tag to apply to the published package.',
})
.option('access', {
type: 'string',
choices: ['public', 'restricted'],
description: 'Overrides the access level of the published package. Unscoped packages cannot be set to restricted. See the npm publish documentation for more information.',
})
.option('otp', {
type: 'number',
description: 'A one-time password for publishing to a registry that requires 2FA',
description: 'A one-time password for publishing to a registry that requires 2FA.',
})),

@@ -218,7 +223,4 @@ handler: async (args) => {

aliases: ['pl'],
// TODO: Remove this when docs are added
// Create a plan to pick a new version and generate a changelog entry.
// Hidden for now until the feature is more stable
describe: false,
builder: (yargs) => yargs
describe: 'Create a version plan file to specify the desired semver bump for one or more projects or groups, as well as the relevant changelog entry.',
builder: (yargs) => (0, shared_options_1.withAffectedOptions)(yargs)
.positional('bump', {

@@ -240,3 +242,8 @@ type: 'string',

alias: 'm',
describe: 'Custom message to use for the changelog entry',
describe: 'Custom message to use for the changelog entry.',
})
.option('onlyTouched', {
type: 'boolean',
describe: 'Only include projects that have been affected by the current changes.',
default: true,
}),

@@ -254,6 +261,3 @@ handler: async (args) => {

command: 'plan:check',
// TODO: Remove this when docs are added
// Create a plan to pick a new version and generate a changelog entry.
// Hidden for now until the feature is more stable
describe: false,
describe: 'Ensure that all touched projects have an applicable version plan created for them.',
builder: (yargs) => (0, shared_options_1.withAffectedOptions)(yargs),

@@ -275,3 +279,3 @@ handler: async (args) => {

.option('git-commit', {
describe: 'Whether or not to automatically commit the changes made by this command',
describe: 'Whether or not to automatically commit the changes made by this command.',
type: 'boolean',

@@ -284,7 +288,7 @@ })

.option('git-commit-args', {
describe: 'Additional arguments (added after the --message argument, which may or may not be customized with --git-commit-message) to pass to the `git commit` command invoked behind the scenes',
describe: 'Additional arguments (added after the --message argument, which may or may not be customized with --git-commit-message) to pass to the `git commit` command invoked behind the scenes.',
type: 'string',
})
.option('git-tag', {
describe: 'Whether or not to automatically tag the changes made by this command',
describe: 'Whether or not to automatically tag the changes made by this command.',
type: 'boolean',

@@ -297,3 +301,3 @@ })

.option('git-tag-args', {
describe: 'Additional arguments to pass to the `git tag` command invoked behind the scenes',
describe: 'Additional arguments to pass to the `git tag` command invoked behind the scenes.',
type: 'string',

@@ -300,0 +304,0 @@ })

import { ReleaseType } from 'semver';
import { RawGitCommit } from '../utils/git';
import { ReleaseGroupWithName } from './filter-release-groups';

@@ -15,2 +16,8 @@ export interface VersionPlanFile {

message: string;
/**
* The commit that added the version plan file, will be null if the file was never committed.
* For optimal performance, we don't apply it at the time of reading the raw contents, because
* it hasn't yet passed further validation at that point.
*/
commit: RawGitCommit | null;
}

@@ -20,2 +27,8 @@ export interface GroupVersionPlan extends VersionPlan {

/**
* The commit that added the version plan file, will be null if the file was never committed.
 * For optimal performance, we don't apply it at the time of reading the raw contents, because
 * it hasn't yet passed validation.
*/
commit: RawGitCommit | null;
/**
* Will not be set if the group name was the trigger, otherwise will be a list of

@@ -30,3 +43,3 @@ * all the individual project names explicitly found in the version plan file.

export declare function readRawVersionPlans(): Promise<RawVersionPlan[]>;
export declare function setResolvedVersionPlansOnGroups(rawVersionPlans: RawVersionPlan[], releaseGroups: ReleaseGroupWithName[], allProjectNamesInWorkspace: string[]): ReleaseGroupWithName[];
export declare function setResolvedVersionPlansOnGroups(rawVersionPlans: RawVersionPlan[], releaseGroups: ReleaseGroupWithName[], allProjectNamesInWorkspace: string[], isVerbose: boolean): Promise<ReleaseGroupWithName[]>;
export declare function getVersionPlansAbsolutePath(): string;

@@ -8,2 +8,3 @@ "use strict";

const fs_extra_1 = require("fs-extra");
const node_child_process_1 = require("node:child_process");
const path_1 = require("path");

@@ -39,3 +40,3 @@ const semver_1 = require("semver");

}
function setResolvedVersionPlansOnGroups(rawVersionPlans, releaseGroups, allProjectNamesInWorkspace) {
async function setResolvedVersionPlansOnGroups(rawVersionPlans, releaseGroups, allProjectNamesInWorkspace, isVerbose) {
const groupsByName = releaseGroups.reduce((acc, group) => acc.set(group.name, group), new Map());

@@ -93,2 +94,3 @@ const isDefaultGroup = isDefault(releaseGroups);

groupVersionBump: value,
commit: await getCommitForVersionPlanFile(rawVersionPlan, isVerbose),
});

@@ -136,2 +138,3 @@ }

},
commit: await getCommitForVersionPlanFile(rawVersionPlan, isVerbose),
});

@@ -168,2 +171,3 @@ }

triggeredByProjects: [key],
commit: await getCommitForVersionPlanFile(rawVersionPlan, isVerbose),
});

@@ -193,1 +197,29 @@ }

}
/**
 * Resolve the commit that originally added the given version plan file, by asking
 * git for the first commit that created it (`--diff-filter=A`, newest-first, `-n 1`).
 *
 * Returns a RawGitCommit-shaped object ({ message, shortHash, author, body }),
 * or null when the file has never been committed or when git fails for any reason.
 * Failures are deliberately non-fatal (resolve(null), never reject): the commit is
 * only used to enrich changelog entries, so a missing commit must not break the
 * release command. Diagnostics are printed only when isVerbose is set.
 */
async function getCommitForVersionPlanFile(rawVersionPlan, isVerbose) {
    return new Promise((resolve) => {
        // Quote the pathspec: exec() runs the command through a shell, so an
        // unquoted absolute path containing spaces would be split into multiple
        // pathspecs and the lookup would silently match nothing.
        (0, node_child_process_1.exec)(`git log --diff-filter=A --pretty=format:"%s|%h|%an|%ae|%b" -n 1 -- "${rawVersionPlan.absolutePath}"`, (error, stdout, stderr) => {
            if (error) {
                if (isVerbose) {
                    console.error(`Error executing git command for ${rawVersionPlan.relativePath}: ${error.message}`);
                }
                return resolve(null);
            }
            if (stderr) {
                if (isVerbose) {
                    console.error(`Git command stderr for ${rawVersionPlan.relativePath}: ${stderr}`);
                }
                return resolve(null);
            }
            // Fields are '|'-delimited per the --pretty format above. Pipes inside
            // the body (%b) are recovered by re-joining the tail; NOTE(review): a
            // '|' inside the subject (%s) would still misalign the fields — assumed
            // rare enough for version plan commit messages.
            const [message, shortHash, authorName, authorEmail, ...body] = stdout
                .trim()
                .split('|');
            const commitDetails = {
                message: message || '',
                shortHash: shortHash || '',
                author: { name: authorName || '', email: authorEmail || '' },
                body: body.join('|') || '', // Handle case where body might be empty or contain multiple '|'
            };
            return resolve(commitDetails);
        });
    });
}

@@ -6,9 +6,6 @@ "use strict";

const nx_json_1 = require("../../config/nx-json");
const workspace_projects_1 = require("../../project-graph/affected/locators/workspace-projects");
const file_map_utils_1 = require("../../project-graph/file-map-utils");
const file_utils_1 = require("../../project-graph/file-utils");
const project_graph_1 = require("../../project-graph/project-graph");
const all_file_data_1 = require("../../utils/all-file-data");
const command_line_utils_1 = require("../../utils/command-line-utils");
const ignore_1 = require("../../utils/ignore");
const output_1 = require("../../utils/output");

@@ -20,2 +17,3 @@ const params_1 = require("../../utils/params");

const version_plans_1 = require("./config/version-plans");
const get_touched_projects_for_group_1 = require("./utils/get-touched-projects-for-group");
const print_config_1 = require("./utils/print-config");

@@ -42,3 +40,4 @@ const releasePlanCheckCLIHandler = (args) => (0, params_1.handleErrors)(args.verbose, () => createAPI({})(args));

}
const { error: filterError, releaseGroups, releaseGroupToFilteredProjects, } = (0, filter_release_groups_1.filterReleaseGroups)(projectGraph, nxReleaseConfig, args.projects, args.groups);
// No filtering is applied here, as we want to consider all release groups for plan:check
const { error: filterError, releaseGroups } = (0, filter_release_groups_1.filterReleaseGroups)(projectGraph, nxReleaseConfig);
if (filterError) {

@@ -60,3 +59,3 @@ output_1.output.error(filterError);

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
await (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes), args.verbose);
// Resolve the final values for base, head etc to use when resolving the changes to consider

@@ -81,7 +80,3 @@ const { nxArgs } = (0, command_line_utils_1.splitArgsIntoNxArgsAndOverrides)(args, 'affected', {

const resolvedAllFileData = await (0, all_file_data_1.allFileData)();
/**
* Create a minimal subset of touched projects based on the configured ignore patterns, we only need
* to recompute when the ignorePatternsForPlanCheck differs between release groups.
*/
const serializedIgnorePatternsToTouchedProjects = new Map();
const getTouchedProjectsForGroup = (0, get_touched_projects_for_group_1.createGetTouchedProjectsForGroup)(nxArgs, projectGraph, changedFiles, resolvedAllFileData);
const NOTE_ABOUT_VERBOSE_LOGGING = 'Run with --verbose to see the full list of changed files used for the touched projects logic.';

@@ -108,53 +103,5 @@ let hasErrors = false;

}
// Exclude patterns from .nxignore, .gitignore and explicit version plan config
let serializedIgnorePatterns = '[]';
const ignore = (0, ignore_1.getIgnoreObject)();
if (typeof releaseGroup.versionPlans !== 'boolean' &&
Array.isArray(releaseGroup.versionPlans.ignorePatternsForPlanCheck) &&
releaseGroup.versionPlans.ignorePatternsForPlanCheck.length) {
output_1.output.note({
title: `Applying configured ignore patterns to changed files${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? ` for release group "${releaseGroup.name}"`
: ''}`,
bodyLines: [
...releaseGroup.versionPlans.ignorePatternsForPlanCheck.map((pattern) => ` - ${pattern}`),
],
});
ignore.add(releaseGroup.versionPlans.ignorePatternsForPlanCheck);
serializedIgnorePatterns = JSON.stringify(releaseGroup.versionPlans.ignorePatternsForPlanCheck);
}
let touchedProjects = {};
if (serializedIgnorePatternsToTouchedProjects.has(serializedIgnorePatterns)) {
touchedProjects = serializedIgnorePatternsToTouchedProjects.get(serializedIgnorePatterns);
}
else {
// We only care about directly touched projects, not implicitly affected ones etc
const touchedProjectsArr = await (0, workspace_projects_1.getTouchedProjects)((0, file_utils_1.calculateFileChanges)(changedFiles, resolvedAllFileData, nxArgs, undefined, ignore), projectGraph.nodes);
touchedProjects = touchedProjectsArr.reduce((acc, project) => ({ ...acc, [project]: true }), {});
serializedIgnorePatternsToTouchedProjects.set(serializedIgnorePatterns, touchedProjects);
}
const touchedProjectsUnderReleaseGroup = releaseGroup.projects.filter((project) => touchedProjects[project]);
if (touchedProjectsUnderReleaseGroup.length) {
output_1.output.log({
title: `Touched projects based on changed files${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? ` under release group "${releaseGroup.name}"`
: ''}`,
bodyLines: [
...touchedProjectsUnderReleaseGroup.map((project) => ` - ${project}`),
'',
'NOTE: You can adjust your "versionPlans.ignorePatternsForPlanCheck" config to stop certain files from resulting in projects being classed as touched for the purposes of this command.',
],
});
}
else {
output_1.output.log({
title: `No touched projects found based on changed files${typeof releaseGroup.versionPlans !== 'boolean' &&
Array.isArray(releaseGroup.versionPlans.ignorePatternsForPlanCheck) &&
releaseGroup.versionPlans.ignorePatternsForPlanCheck.length
? ' combined with configured ignore patterns'
: ''}${releaseGroup.name !== config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? ` under release group "${releaseGroup.name}"`
: ''}`,
});
}
const touchedProjectsUnderReleaseGroup = await getTouchedProjectsForGroup(releaseGroup,
// We do not take any --projects or --groups filtering into account for plan:check
releaseGroup.projects, false);
const projectsInResolvedVersionPlans = resolvedVersionPlans.reduce((acc, plan) => {

@@ -161,0 +108,0 @@ if ('projectVersionBumps' in plan) {

@@ -13,2 +13,4 @@ "use strict";

const project_graph_1 = require("../../project-graph/project-graph");
const all_file_data_1 = require("../../utils/all-file-data");
const command_line_utils_1 = require("../../utils/command-line-utils");
const output_1 = require("../../utils/output");

@@ -21,2 +23,3 @@ const params_1 = require("../../utils/params");

const generate_version_plan_content_1 = require("./utils/generate-version-plan-content");
const get_touched_projects_for_group_1 = require("./utils/get-touched-projects-for-group");
const launch_editor_1 = require("./utils/launch-editor");

@@ -50,2 +53,25 @@ const print_changes_1 = require("./utils/print-changes");

}
// If no release groups have version plans enabled, it doesn't make sense to use the plan command only to set yourself up for an error at release time
if (!releaseGroups.some((group) => group.versionPlans === true)) {
if (releaseGroups.length === 1) {
output_1.output.warn({
title: `Version plans are not enabled in your release configuration`,
bodyLines: [
'To enable version plans, set `"versionPlans": true` at the top level of your `"release"` configuration',
],
});
return 0;
}
output_1.output.warn({
title: 'No release groups have version plans enabled',
bodyLines: [
'To enable version plans, set `"versionPlans": true` at the top level of your `"release"` configuration to apply it to all groups, otherwise set it at the release group level',
],
});
return 0;
}
// Resolve the final values for base, head etc to use when resolving the changes to consider
const { nxArgs } = (0, command_line_utils_1.splitArgsIntoNxArgsAndOverrides)(args, 'affected', {
printWarnings: args.verbose,
}, nxJson);
const versionPlanBumps = {};

@@ -57,54 +83,122 @@ const setBumpIfNotNone = (projectOrGroup, version) => {

};
if (releaseGroups[0].name === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP) {
const group = releaseGroups[0];
if (group.projectsRelationship === 'independent') {
for (const project of group.projects) {
setBumpIfNotNone(project, args.bump ||
(await promptForVersion(`How do you want to bump the version of the project "${project}"?`)));
// Changed files are only relevant if considering touched projects
let changedFiles = [];
let getProjectsToVersionForGroup;
if (args.onlyTouched) {
changedFiles = (0, command_line_utils_1.parseFiles)(nxArgs).files;
if (nxArgs.verbose) {
if (changedFiles.length) {
output_1.output.log({
title: `Changed files based on resolved "base" (${nxArgs.base}) and "head" (${nxArgs.head ?? 'HEAD'})`,
bodyLines: changedFiles.map((file) => ` - ${file}`),
});
}
else {
output_1.output.warn({
title: 'No changed files found based on resolved "base" and "head"',
});
}
}
else {
// TODO: use project names instead of the implicit default release group name? (though this might be confusing, as users might think they can just delete one of the project bumps to change the behavior to independent versioning)
setBumpIfNotNone(group.name, args.bump ||
(await promptForVersion(`How do you want to bump the versions of all projects?`)));
}
const resolvedAllFileData = await (0, all_file_data_1.allFileData)();
getProjectsToVersionForGroup = (0, get_touched_projects_for_group_1.createGetTouchedProjectsForGroup)(nxArgs, projectGraph, changedFiles, resolvedAllFileData);
}
else {
for (const group of releaseGroups) {
if (group.projectsRelationship === 'independent') {
for (const project of releaseGroupToFilteredProjects.get(group)) {
if (args.projects?.length) {
/**
* Run plan for all remaining release groups and filtered projects within them
*/
for (const releaseGroup of releaseGroups) {
const releaseGroupName = releaseGroup.name;
const releaseGroupProjectNames = Array.from(releaseGroupToFilteredProjects.get(releaseGroup));
let applicableProjects = releaseGroupProjectNames;
if (args.onlyTouched &&
typeof getProjectsToVersionForGroup === 'function') {
applicableProjects = await getProjectsToVersionForGroup(releaseGroup, releaseGroupProjectNames, true);
}
if (!applicableProjects.length) {
continue;
}
if (releaseGroup.projectsRelationship === 'independent') {
for (const project of applicableProjects) {
setBumpIfNotNone(project, args.bump ||
(await promptForVersion(`How do you want to bump the version of the project "${project}" within group "${group.name}"?`)));
(await promptForVersion(`How do you want to bump the version of the project "${project}"${releaseGroupName === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? ''
: ` within group "${releaseGroupName}"`}?`)));
}
}
else {
setBumpIfNotNone(group.name, args.bump ||
(await promptForVersion(`How do you want to bump the versions of the projects in the group "${group.name}"?`)));
setBumpIfNotNone(releaseGroupName, args.bump ||
(await promptForVersion(`How do you want to bump the versions of ${releaseGroupName === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? 'all projects'
: `the projects in the group "${releaseGroupName}"`}?`)));
}
}
}
if (!Object.keys(versionPlanBumps).length) {
output_1.output.warn({
title: 'No version bumps were selected so no version plan file was created.',
});
// Create a version plan file if applicable
await createVersionPlanFileForBumps(args, versionPlanBumps);
return 0;
}
const versionPlanName = `version-plan-${new Date().getTime()}`;
const versionPlanMessage = args.message || (await promptForMessage(versionPlanName));
const versionPlanFileContent = (0, generate_version_plan_content_1.generateVersionPlanContent)(versionPlanBumps, versionPlanMessage);
const versionPlanFileName = `${versionPlanName}.md`;
if (args.dryRun) {
output_1.output.logSingleLine(`Would create version plan file "${versionPlanFileName}", but --dry-run was set.`);
(0, print_changes_1.printDiff)('', versionPlanFileContent, 1);
/**
* Run plan for all remaining release groups
*/
for (const releaseGroup of releaseGroups) {
const releaseGroupName = releaseGroup.name;
let applicableProjects = releaseGroup.projects;
if (args.onlyTouched &&
typeof getProjectsToVersionForGroup === 'function') {
applicableProjects = await getProjectsToVersionForGroup(releaseGroup, releaseGroup.projects, false);
}
if (!applicableProjects.length) {
continue;
}
if (releaseGroup.projectsRelationship === 'independent') {
for (const project of applicableProjects) {
setBumpIfNotNone(project, args.bump ||
(await promptForVersion(`How do you want to bump the version of the project "${project}"${releaseGroupName === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? ''
: ` within group "${releaseGroupName}"`}?`)));
}
}
else {
setBumpIfNotNone(releaseGroupName, args.bump ||
(await promptForVersion(`How do you want to bump the versions of ${releaseGroupName === config_1.IMPLICIT_DEFAULT_RELEASE_GROUP
? 'all projects'
: `the projects in the group "${releaseGroupName}"`}?`)));
}
}
else {
output_1.output.logSingleLine(`Creating version plan file "${versionPlanFileName}"`);
(0, print_changes_1.printDiff)('', versionPlanFileContent, 1);
const versionPlansAbsolutePath = (0, version_plans_1.getVersionPlansAbsolutePath)();
await (0, fs_extra_1.ensureDir)(versionPlansAbsolutePath);
await (0, fs_extra_1.writeFile)((0, node_path_1.join)(versionPlansAbsolutePath, versionPlanFileName), versionPlanFileContent);
}
// Create a version plan file if applicable
await createVersionPlanFileForBumps(args, versionPlanBumps);
return 0;
};
}
/**
 * Persist the selected version bumps as a new version plan markdown file.
 *
 * When no bumps were selected, warns (with extra guidance when --only-touched
 * filtering may have excluded projects) and returns 0 without writing anything.
 * When --dry-run is set, previews the file contents instead of writing them.
 */
async function createVersionPlanFileForBumps(args, versionPlanBumps) {
    // Guard: nothing selected -> warn and bail out early.
    if (Object.keys(versionPlanBumps).length === 0) {
        const bodyLines = args.onlyTouched
            ? [
                'This might be because no projects have been changed, or projects you expected to release have not been touched',
                'To include all projects, not just those that have been changed, pass --only-touched=false',
                'Alternatively, you can specify alternate --base and --head refs to include only changes from certain commits',
            ]
            : [];
        output_1.output.warn({
            title: 'No version bumps were selected so no version plan file was created.',
            bodyLines,
        });
        return 0;
    }
    // Timestamp-based name keeps plan files unique and chronologically sortable.
    const versionPlanName = `version-plan-${Date.now()}`;
    const versionPlanMessage = args.message || (await promptForMessage(versionPlanName));
    const versionPlanFileContent = (0, generate_version_plan_content_1.generateVersionPlanContent)(versionPlanBumps, versionPlanMessage);
    const versionPlanFileName = `${versionPlanName}.md`;
    if (args.dryRun) {
        // Preview only: show what would be written, then stop.
        output_1.output.logSingleLine(`Would create version plan file "${versionPlanFileName}", but --dry-run was set.`);
        (0, print_changes_1.printDiff)('', versionPlanFileContent, 1);
        return;
    }
    output_1.output.logSingleLine(`Creating version plan file "${versionPlanFileName}"`);
    (0, print_changes_1.printDiff)('', versionPlanFileContent, 1);
    const versionPlansAbsolutePath = (0, version_plans_1.getVersionPlansAbsolutePath)();
    await (0, fs_extra_1.ensureDir)(versionPlansAbsolutePath);
    await (0, fs_extra_1.writeFile)((0, node_path_1.join)(versionPlansAbsolutePath, versionPlanFileName), versionPlanFileContent);
}
async function promptForVersion(message) {

@@ -111,0 +205,0 @@ try {

@@ -106,2 +106,5 @@ "use strict";

}
if (args.access) {
overrides.access = args.access;
}
if (args.dryRun) {

@@ -108,0 +111,0 @@ overrides.dryRun = args.dryRun;

@@ -97,3 +97,3 @@ "use strict";

}
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
await (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes), args.verbose);
const planFiles = new Set();

@@ -100,0 +100,0 @@ releaseGroups.forEach((group) => {

@@ -65,4 +65,4 @@ export interface GitCommitAuthor {

} | null;
export declare function parseGitCommit(commit: RawGitCommit): GitCommit | null;
export declare function parseGitCommit(commit: RawGitCommit, isVersionPlanCommit?: boolean): GitCommit | null;
export declare function getCommitHash(ref: string): Promise<string>;
export declare function getFirstGitCommit(): Promise<string>;

@@ -139,3 +139,3 @@ "use strict";

}
else if (changedTrackedFiles.has(f)) {
else if (changedTrackedFiles.has(f) || dryRun) {
filesToAdd.push(f);

@@ -285,2 +285,26 @@ }

}
/**
 * Pulls pull-request and issue numbers out of a commit message, and always
 * appends the commit's short hash as the final reference.
 */
function extractReferencesFromCommitMessage(message, shortHash) {
    // Pull requests are collected first so issue matches with the same number
    // are not duplicated below.
    const references = Array.from(message.matchAll(PullRequestRE), (m) => ({
        type: 'pull-request',
        value: m[1],
    }));
    for (const m of message.matchAll(IssueRE)) {
        if (references.every((ref) => ref.value !== m[1])) {
            references.push({ type: 'issue', value: m[1] });
        }
    }
    references.push({ value: shortHash, type: 'hash' });
    return references;
}
/**
 * Returns the commit's primary author followed by any co-authors declared in
 * the commit body.
 */
function getAllAuthorsForCommit(commit) {
    // "Co-authored-by:" trailers in the body contribute additional authors
    // (supported by GitHub and other VCS providers).
    const coAuthors = Array.from(commit.body.matchAll(CoAuthoredByRegex), (match) => ({
        name: (match.groups.name || '').trim(),
        email: (match.groups.email || '').trim(),
    }));
    return [commit.author, ...coAuthors];
}
// https://www.conventionalcommits.org/en/v1.0.0/

@@ -294,3 +318,21 @@ // https://regex101.com/r/FSfNvA/1

const RevertHashRE = /This reverts commit (?<hash>[\da-f]{40})./gm;
function parseGitCommit(commit) {
function parseGitCommit(commit, isVersionPlanCommit = false) {
// For version plans, we do not require conventional commits and therefore cannot extract data based on that format
if (isVersionPlanCommit) {
return {
...commit,
description: commit.message,
type: '',
scope: '',
references: extractReferencesFromCommitMessage(commit.message, commit.shortHash),
// The commit message is not the source of truth for a breaking (major) change in version plans, so the value is not relevant
// TODO(v20): Make the current GitCommit interface more clearly tied to conventional commits
isBreaking: false,
authors: getAllAuthorsForCommit(commit),
// Not applicable to version plans
affectedFiles: [],
// Not applicable, a version plan cannot have been added in a commit that also reverts another commit
revertedHashes: [],
};
}
const parsedMessage = parseConventionalCommitsMessage(commit.message);

@@ -304,12 +346,3 @@ if (!parsedMessage) {

// Extract references from message
const references = [];
for (const m of description.matchAll(PullRequestRE)) {
references.push({ type: 'pull-request', value: m[1] });
}
for (const m of description.matchAll(IssueRE)) {
if (!references.some((i) => i.value === m[1])) {
references.push({ type: 'issue', value: m[1] });
}
}
references.push({ value: commit.shortHash, type: 'hash' });
const references = extractReferencesFromCommitMessage(description, commit.shortHash);
// Remove references and normalize

@@ -329,9 +362,3 @@ description = description.replace(PullRequestRE, '').trim();

// Find all authors
const authors = [commit.author];
for (const match of commit.body.matchAll(CoAuthoredByRegex)) {
authors.push({
name: (match.groups.name || '').trim(),
email: (match.groups.email || '').trim(),
});
}
const authors = getAllAuthorsForCommit(commit);
// Extract file changes from commit body

@@ -338,0 +365,0 @@ const affectedFiles = Array.from(commit.body.matchAll(ChangedFileRegex)).reduce((prev, [fullLine, changeType, file1, file2]) =>

@@ -85,3 +85,3 @@ "use strict";

const rawVersionPlans = await (0, version_plans_1.readRawVersionPlans)();
(0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes));
await (0, version_plans_1.setResolvedVersionPlansOnGroups)(rawVersionPlans, releaseGroups, Object.keys(projectGraph.nodes), args.verbose);
}

@@ -88,0 +88,0 @@ else {

@@ -6,3 +6,3 @@ "use strict";

command: 'report',
describe: 'Reports useful version numbers to copy into the Nx issue template',
describe: 'Reports useful version numbers to copy into the Nx issue template.',
handler: async () => {

@@ -9,0 +9,0 @@ await (await Promise.resolve().then(() => require('./report'))).reportHandler();

@@ -22,3 +22,3 @@ "use strict";

.option('onlyWorkspaceData', {
description: 'Clears the workspace data directory. Used by Nx to store cached data about the current workspace (e.g. partial results, incremental data, etc)',
description: 'Clears the workspace data directory. Used by Nx to store cached data about the current workspace (e.g. partial results, incremental data, etc).',
type: 'boolean',

@@ -25,0 +25,0 @@ }),

@@ -9,3 +9,3 @@ "use strict";

command: 'run-many',
describe: 'Run target for multiple listed projects',
describe: 'Run target for multiple listed projects.',
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)((0, shared_options_1.withRunManyOptions)((0, shared_options_1.withOutputStyleOption)((0, shared_options_1.withTargetAndConfigurationOption)((0, shared_options_1.withBatch)(yargs)))), 'run-many'),

@@ -12,0 +12,0 @@ handler: async (args) => {

@@ -30,3 +30,3 @@ "use strict";

command: '$0 <target> [project] [_..]',
describe: 'Run a target for a project',
describe: 'Run a target for a project.',
handler: async (args) => {

@@ -33,0 +33,0 @@ const exitCode = await (0, params_1.handleErrors)(args.verbose ?? process.env.NX_VERBOSE_LOGGING === 'true', async () => {

@@ -9,3 +9,3 @@ "use strict";

command: 'show',
describe: 'Show information about the workspace (e.g., list of projects)',
describe: 'Show information about the workspace (e.g., list of projects).',
builder: (yargs) => yargs

@@ -17,3 +17,3 @@ .command(showProjectsCommand)

type: 'boolean',
description: 'Output JSON',
description: 'Output JSON.',
})

@@ -30,7 +30,7 @@ .example('$0 show projects', 'Show a list of all projects in the workspace')

command: 'projects',
describe: 'Show a list of projects in the workspace',
describe: 'Show a list of projects in the workspace.',
builder: (yargs) => (0, shared_options_1.withVerbose)((0, shared_options_1.withAffectedOptions)(yargs))
.option('affected', {
type: 'boolean',
description: 'Show only affected projects',
description: 'Show only affected projects.',
})

@@ -46,3 +46,3 @@ .option('projects', {

alias: ['t'],
description: 'Show only projects that have a specific target',
description: 'Show only projects that have a specific target.',
coerce: shared_options_1.parseCSV,

@@ -52,3 +52,3 @@ })

type: 'string',
description: 'Select only projects of the given type',
description: 'Select only projects of the given type.',
choices: ['app', 'lib', 'e2e'],

@@ -58,3 +58,3 @@ })

type: 'string',
description: 'Outputs projects with the specified seperator',
description: 'Outputs projects with the specified seperator.',
})

@@ -88,11 +88,11 @@ .implies('untracked', 'affected')

alias: 'p',
description: 'Which project should be viewed?',
description: 'Which project should be viewed?.',
})
.option('web', {
type: 'boolean',
description: 'Show project details in the browser. (default when interactive)',
description: 'Show project details in the browser. (default when interactive).',
})
.option('open', {
type: 'boolean',
description: 'Set to false to prevent the browser from opening when using --web',
description: 'Set to false to prevent the browser from opening when using --web.',
implies: 'web',

@@ -99,0 +99,0 @@ })

@@ -8,3 +8,3 @@ "use strict";

command: 'watch',
describe: 'Watch for changes within projects, and execute commands',
describe: 'Watch for changes within projects, and execute commands.',
builder: (yargs) => (0, documentation_1.linkToNxDevAndExamples)(withWatchOptions(yargs), 'watch'),

@@ -11,0 +11,0 @@ handler: async (args) => {

@@ -6,3 +6,3 @@ import { Argv, ParserConfigurationOptions } from 'yargs';

export declare const defaultYargsParserConfiguration: Partial<ParserConfigurationOptions>;
export declare function withExcludeOption(yargs: Argv): Argv<ExcludeOptions>;
export declare function withExcludeOption<T>(yargs: Argv<T>): Argv<T & ExcludeOptions>;
export interface RunOptions {

@@ -24,2 +24,3 @@ exclude: string;

excludeTaskDependencies: boolean;
skipSync: boolean;
}

@@ -26,0 +27,0 @@ export declare function withRunOptions<T>(yargs: Argv<T>): Argv<T & RunOptions>;

@@ -25,3 +25,3 @@ "use strict";

return yargs.option('exclude', {
describe: 'Exclude certain projects from being processed',
describe: 'Exclude certain projects from being processed.',
type: 'string',

@@ -34,3 +34,3 @@ coerce: parseCSV,

.option('parallel', {
describe: 'Max number of parallel processes [default is 3]',
describe: 'Max number of parallel processes [default is 3].',
type: 'string',

@@ -43,7 +43,7 @@ })

.options('runner', {
describe: 'This is the name of the tasks runner configured in nx.json',
describe: 'This is the name of the tasks runner configured in nx.json.',
type: 'string',
})
.option('prod', {
describe: 'Use the production configuration',
describe: 'Use the production configuration.',
type: 'boolean',

@@ -67,3 +67,3 @@ default: false,

.option('nxBail', {
describe: 'Stop command execution after the first failed task',
describe: 'Stop command execution after the first failed task.',
type: 'boolean',

@@ -73,3 +73,3 @@ default: false,

.option('nxIgnoreCycles', {
describe: 'Ignore cycles in the task graph',
describe: 'Ignore cycles in the task graph.',
type: 'boolean',

@@ -79,3 +79,3 @@ default: false,

.options('skipNxCache', {
describe: 'Rerun the tasks even when the results are available in the cache',
describe: 'Rerun the tasks even when the results are available in the cache.',
type: 'boolean',

@@ -85,6 +85,11 @@ default: false,

.options('excludeTaskDependencies', {
describe: 'Skips running dependent tasks first',
describe: 'Skips running dependent tasks first.',
type: 'boolean',
default: false,
})
.option('skipSync', {
type: 'boolean',
// TODO(leo): add description and make it visible once it is stable
hidden: true,
})
.options('cloud', {

@@ -106,3 +111,3 @@ type: 'boolean',

return withConfiguration(yargs).option('targets', {
describe: 'Tasks to run for affected projects',
describe: 'Tasks to run for affected projects.',
type: 'string',

@@ -118,3 +123,3 @@ alias: ['target', 't'],

return yargs.options('configuration', {
describe: 'This is the configuration to use when performing tasks on projects',
describe: 'This is the configuration to use when performing tasks on projects.',
type: 'string',

@@ -127,3 +132,3 @@ alias: 'c',

.option('verbose', {
describe: 'Prints additional information about the commands (e.g., stack traces)',
describe: 'Prints additional information about the commands (e.g., stack traces).',
type: 'boolean',

@@ -140,3 +145,3 @@ })

type: 'boolean',
describe: 'Run task(s) in batches for executors which support batches',
describe: 'Run task(s) in batches for executors which support batches.',
coerce: (v) => {

@@ -152,3 +157,3 @@ return v || process.env.NX_BATCH_MODE === 'true';

.option('files', {
describe: 'Change the way Nx is calculating the affected command by providing directly changed files, list of files delimited by commas or spaces',
describe: 'Change the way Nx is calculating the affected command by providing directly changed files, list of files delimited by commas or spaces.',
type: 'string',

@@ -159,11 +164,11 @@ requiresArg: true,

.option('uncommitted', {
describe: 'Uncommitted changes',
describe: 'Uncommitted changes.',
type: 'boolean',
})
.option('untracked', {
describe: 'Untracked changes',
describe: 'Untracked changes.',
type: 'boolean',
})
.option('base', {
describe: 'Base of the current branch (usually main)',
describe: 'Base of the current branch (usually main).',
type: 'string',

@@ -173,3 +178,3 @@ requiresArg: true,

.option('head', {
describe: 'Latest commit of the current branch (usually HEAD)',
describe: 'Latest commit of the current branch (usually HEAD).',
type: 'string',

@@ -195,3 +200,3 @@ requiresArg: true,

coerce: parseCSV,
describe: 'Projects to run. (comma/space delimited project names and/or patterns)',
describe: 'Projects to run. (comma/space delimited project names and/or patterns).',
})

@@ -227,11 +232,3 @@ .option('all', {

return yargs.option('output-style', {
describe: `Defines how Nx emits outputs tasks logs
| option | description |
| --- | --- |
| dynamic | use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. |
| static | uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. |
| stream | nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr |
| stream-without-prefixes | nx prefixes the project name the target is running on, use this option remove the project name prefix from output |
`,
describe: `Defines how Nx emits outputs tasks logs. **dynamic**: use dynamic output life cycle, previous content is overwritten or modified as new outputs are added, display minimal logs by default, always show errors. This output format is recommended on your local development environments. **static**: uses static output life cycle, no previous content is rewritten or modified as new outputs are added. This output format is recommened for CI environments. **stream**: nx by default logs output to an internal output stream, enable this option to stream logs to stdout / stderr. **stream-without-prefixes**: nx prefixes the project name the target is running on, use this option remove the project name prefix from output.`,
type: 'string',

@@ -246,7 +243,7 @@ choices,

.option('project', {
describe: 'Target project',
describe: 'Target project.',
type: 'string',
})
.option('help', {
describe: 'Show Help',
describe: 'Show Help.',
type: 'boolean',

@@ -253,0 +250,0 @@ });

@@ -7,1 +7,5 @@ import type { ProjectGraph } from '../../config/project-graph';

export declare function getCachedRegisteredSyncGenerators(): Promise<string[]>;
/**
* @internal
*/
export declare function _getConflictingGeneratorGroups(results: SyncGeneratorChangesResult[]): string[][];

@@ -7,2 +7,3 @@ "use strict";

exports.getCachedRegisteredSyncGenerators = getCachedRegisteredSyncGenerators;
exports._getConflictingGeneratorGroups = _getConflictingGeneratorGroups;
const nx_json_1 = require("../../config/nx-json");

@@ -29,3 +30,2 @@ const tree_1 = require("../../generators/tree");

};
// TODO(leo): check conflicts and reuse the Tree where possible
async function getCachedSyncGeneratorChanges(generators) {

@@ -42,47 +42,11 @@ try {

waitPeriod = 100;
let projects;
let errored = false;
const getProjectsConfigurations = async () => {
if (projects || errored) {
return projects;
}
const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
projects = projectGraph
? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
: null;
errored = error !== undefined;
return projects;
};
return (await Promise.all(generators.map(async (generator) => {
if (scheduledGenerators.has(generator) ||
!syncGeneratorsCacheResultPromises.has(generator)) {
// it's scheduled to run (there are pending changes to process) or
// it's not scheduled and there's no cached result, so run it
const projects = await getProjectsConfigurations();
if (projects) {
log(generator, 'already scheduled or not cached, running it now');
runGenerator(generator, projects);
}
else {
log(generator, 'already scheduled or not cached, project graph errored');
/**
* This should never happen. This is invoked imperatively, and by
* the time it is invoked, the project graph would have already
* been requested. If it errored, it would have been reported and
* this wouldn't have been invoked. We handle it just in case.
*
* Since the project graph would be reported by the relevant
* handlers separately, we just ignore the error, don't cache
* any result and return an empty result, the next time this is
* invoked the process will repeat until it eventually recovers
* when the project graph is fixed.
*/
return Promise.resolve({ changes: [], generatorName: generator });
}
}
else {
log(generator, 'not scheduled and has cached result, returning cached result');
}
return syncGeneratorsCacheResultPromises.get(generator);
}))).flat();
const results = await getFromCacheOrRunGenerators(generators);
const conflicts = _getConflictingGeneratorGroups(results);
if (!conflicts.length) {
// there are no conflicts
return results;
}
// there are conflicts, so we need to re-run the conflicting generators
// using the same tree
return await processConflictingGenerators(conflicts, results);
}

@@ -137,3 +101,3 @@ catch (e) {

for (const generator of scheduledGenerators) {
runGenerator(generator, projects);
syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
}

@@ -155,2 +119,159 @@ await Promise.all(syncGeneratorsCacheResultPromises.values());

}
/**
 * Resolves sync-generator results, preferring cached result promises.
 *
 * A generator is (re-)run when it is currently scheduled (there are pending
 * changes to process) or when no cached result promise exists for it;
 * otherwise the cached promise is reused. All results are awaited together
 * and flattened into a single array.
 *
 * NOTE(review): relies on the module-level `scheduledGenerators` set and
 * `syncGeneratorsCacheResultPromises` map shared with the scheduler.
 */
async function getFromCacheOrRunGenerators(generators) {
    // Project configurations are resolved lazily and memoized so the project
    // graph is requested at most once per invocation, shared by all generators.
    let projects;
    let errored = false;
    const getProjectsConfigurations = async () => {
        if (projects || errored) {
            return projects;
        }
        const { projectGraph, error } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
        projects = projectGraph
            ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
            : null;
        errored = error !== undefined;
        return projects;
    };
    return (await Promise.all(generators.map(async (generator) => {
        if (scheduledGenerators.has(generator) ||
            !syncGeneratorsCacheResultPromises.has(generator)) {
            // it's scheduled to run (there are pending changes to process) or
            // it's not scheduled and there's no cached result, so run it
            const projects = await getProjectsConfigurations();
            if (projects) {
                log(generator, 'already scheduled or not cached, running it now');
                // cache the in-flight promise so concurrent callers share the run
                syncGeneratorsCacheResultPromises.set(generator, runGenerator(generator, projects));
            }
            else {
                log(generator, 'already scheduled or not cached, project graph errored');
                /**
                 * This should never happen. This is invoked imperatively, and by
                 * the time it is invoked, the project graph would have already
                 * been requested. If it errored, it would have been reported and
                 * this wouldn't have been invoked. We handle it just in case.
                 *
                 * Since the project graph would be reported by the relevant
                 * handlers separately, we just ignore the error, don't cache
                 * any result and return an empty result, the next time this is
                 * invoked the process will repeat until it eventually recovers
                 * when the project graph is fixed.
                 */
                return Promise.resolve({ changes: [], generatorName: generator });
            }
        }
        else {
            log(generator, 'not scheduled and has cached result, returning cached result');
        }
        return syncGeneratorsCacheResultPromises.get(generator);
    }))).flat();
}
/**
 * Runs the given conflicting sync generators sequentially against a shared
 * tree so that each generator sees the changes produced by the previous one.
 */
async function runConflictingGenerators(tree, generators) {
    const { projectGraph } = await (0, project_graph_incremental_recomputation_1.getCachedSerializedProjectGraphPromise)();
    const projectConfigs = projectGraph
        ? (0, project_graph_1.readProjectsConfigurationFromProjectGraph)(projectGraph).projects
        : null;
    if (!projectConfigs) {
        /**
         * This should never happen. This is invoked imperatively, and by
         * the time it is invoked, the project graph would have already
         * been requested. If it errored, it would have been reported and
         * this wouldn't have been invoked. We handle it just in case.
         *
         * Since the project graph would be reported by the relevant
         * handlers separately, we just ignore the error.
         */
        return generators.map((generator) => ({
            changes: [],
            generatorName: generator,
        }));
    }
    // Sequential on purpose: all of these generators mutate the same tree.
    const results = [];
    for (const generator of generators) {
        log(generator, 'running it now');
        results.push(await runGenerator(generator, projectConfigs, tree));
    }
    return results;
}
/**
 * Re-runs groups of conflicting generators on a shared tree and merges the
 * outcome with the initial results.
 *
 * For each conflict group, the first generator's changes are replayed onto a
 * fresh tree (so it does not need to run again) and the remaining generators
 * run sequentially on top of it. Results from re-run generators replace
 * their initial results; non-conflicting initial results are kept as-is.
 */
async function processConflictingGenerators(conflicts, initialResults) {
    const conflictRunResults = (await Promise.all(conflicts.map((generators) => {
        const [firstGenerator, ...generatorsToRun] = generators;
        // guaranteed to exist: the conflict groups were derived from initialResults
        const firstGeneratorResult = initialResults.find((r) => r.generatorName === firstGenerator);
        const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generators ${generators.join(',')}`);
        // replay the first generator's recorded changes instead of re-running it
        for (const change of firstGeneratorResult.changes) {
            if (change.type === 'CREATE' || change.type === 'UPDATE') {
                tree.write(change.path, change.content, change.options);
            }
            else if (change.type === 'DELETE') {
                tree.delete(change.path);
            }
        }
        /**
         * Results of conflicting generators are intentionally not cached:
         * because they share a tree, a file may mix output from several
         * generators, and there is no guarantee the same combination of
         * generators will run together again.
         */
        return runConflictingGenerators(tree, generatorsToRun);
    }))).flat();
    /**
     * Re-run results must come first: within a conflict group the last result
     * carries the accumulated changes of the earlier ones. Initial results
     * are appended only for generators that were not part of any re-run.
     */
    const rerunGeneratorNames = new Set(conflictRunResults.map((r) => r.generatorName));
    const mergedResults = [...conflictRunResults];
    for (const result of initialResults) {
        if (!rerunGeneratorNames.has(result.generatorName)) {
            mergedResults.push(result);
        }
    }
    return mergedResults;
}
/**
* @internal
*/
/**
 * Groups generators that changed at least one common file into conflict
 * groups. Generators within a group must be re-run sequentially on a shared
 * tree because their outputs overlap.
 *
 * Grouping is transitive: if A conflicts with B and B conflicts with C, all
 * three end up in one group even if A and C never touch the same file. The
 * returned groups are disjoint, so no generator appears in two groups.
 * (Previously a set intersecting two existing groups was merged only into
 * the first, leaving overlapping groups and causing a generator to be re-run
 * in more than one conflict group.)
 *
 * @internal exported for testing
 */
function _getConflictingGeneratorGroups(results) {
    // map of changed file path -> set of generator names that changed it
    const changedFileToGeneratorMap = new Map();
    for (const result of results) {
        for (const change of result.changes) {
            if (!changedFileToGeneratorMap.has(change.path)) {
                changedFileToGeneratorMap.set(change.path, new Set());
            }
            changedFileToGeneratorMap.get(change.path).add(result.generatorName);
        }
    }
    const conflicts = [];
    for (const generatorSet of changedFileToGeneratorMap.values()) {
        if (generatorSet.size === 1) {
            // only one generator changed this file, no conflict
            continue;
        }
        const generatorsArray = Array.from(generatorSet);
        // collect ALL existing groups sharing a generator with this set so
        // transitively-connected groups get merged rather than left overlapping
        const intersectingGroups = conflicts.filter((group) => generatorsArray.some((generator) => group.has(generator)));
        if (intersectingGroups.length === 0) {
            // no intersecting group, start a new one
            conflicts.push(new Set(generatorsArray));
            continue;
        }
        // merge this set and every other intersecting group into the first one
        const [targetGroup, ...groupsToMerge] = intersectingGroups;
        for (const generator of generatorsArray) {
            targetGroup.add(generator);
        }
        for (const group of groupsToMerge) {
            for (const generator of group) {
                targetGroup.add(generator);
            }
            conflicts.splice(conflicts.indexOf(group), 1);
        }
    }
    return conflicts.map((group) => Array.from(group));
}
function collectAllRegisteredSyncGenerators(projectGraph) {

@@ -199,12 +320,11 @@ const nxJson = (0, nx_json_1.readNxJson)();

}
function runGenerator(generator, projects) {
function runGenerator(generator, projects, tree) {
log('running scheduled generator', generator);
// remove it from the scheduled set
scheduledGenerators.delete(generator);
const tree = new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
// run the generator and cache the result
syncGeneratorsCacheResultPromises.set(generator, (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
tree ??= new tree_1.FsTree(workspace_root_1.workspaceRoot, false, `running sync generator ${generator}`);
return (0, sync_generators_1.runSyncGenerator)(tree, generator, projects).then((result) => {
log(generator, 'changes:', result.changes.map((c) => c.path).join(', '));
return result;
}));
});
}

@@ -211,0 +331,0 @@ function hashProjectGraph(projectGraph) {

@@ -10,5 +10,6 @@ import { Tree } from '../../../generators/tree';

directory?: string;
generateToken?: boolean;
}
export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string>;
export declare function connectToNxCloud(tree: Tree, schema: ConnectToNxCloudOptions, nxJson?: NxJsonConfiguration<string[] | "*">): Promise<string | null>;
declare function connectToNxCloudGenerator(tree: Tree, options: ConnectToNxCloudOptions): Promise<void>;
export default connectToNxCloudGenerator;

@@ -116,26 +116,28 @@ "use strict";

}
const isGitHubDetected = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
let responseFromCreateNxCloudWorkspaceV1;
let responseFromCreateNxCloudWorkspaceV2;
/**
* Do not create an Nx Cloud token if the user is using GitHub and
* is running `nx-connect` AND `token` is undefined (override)
*/
if (!schema.generateToken &&
isGitHubDetected &&
schema.installationSource === 'nx-connect')
return null;
if (process.env.NX_ENABLE_LOGIN === 'true') {
responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
silent: schema.hideFormatLogs,
});
return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
}
else {
const usesGithub = schema.github ?? (await (0, url_shorten_1.repoUsesGithub)(schema.github));
let responseFromCreateNxCloudWorkspaceV1;
let responseFromCreateNxCloudWorkspaceV2;
// do NOT create Nx Cloud token (createNxCloudWorkspace)
// if user is using github and is running nx-connect
if (!(usesGithub && schema.installationSource === 'nx-connect')) {
if (process.env.NX_ENABLE_LOGIN === 'true') {
responseFromCreateNxCloudWorkspaceV2 = await createNxCloudWorkspaceV2(getRootPackageName(tree), schema.installationSource, getNxInitDate());
addNxCloudIdToNxJson(tree, responseFromCreateNxCloudWorkspaceV2?.nxCloudId, schema.directory);
await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
silent: schema.hideFormatLogs,
});
return responseFromCreateNxCloudWorkspaceV2.nxCloudId;
}
else {
responseFromCreateNxCloudWorkspaceV1 = await createNxCloudWorkspaceV1(getRootPackageName(tree), schema.installationSource, getNxInitDate());
addNxCloudOptionsToNxJson(tree, responseFromCreateNxCloudWorkspaceV1?.token, schema.directory);
await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
silent: schema.hideFormatLogs,
});
return responseFromCreateNxCloudWorkspaceV1.token;
}
}
responseFromCreateNxCloudWorkspaceV1 = await createNxCloudWorkspaceV1(getRootPackageName(tree), schema.installationSource, getNxInitDate());
addNxCloudOptionsToNxJson(tree, responseFromCreateNxCloudWorkspaceV1?.token, schema.directory);
await (0, format_changed_files_with_prettier_if_available_1.formatChangedFilesWithPrettierIfAvailable)(tree, {
silent: schema.hideFormatLogs,
});
return responseFromCreateNxCloudWorkspaceV1.token;
}

@@ -142,0 +144,0 @@ }

@@ -24,2 +24,6 @@ {

},
"generateToken": {
"type": "boolean",
"description": "Explicitly asks for a token to be created, do not override existing tokens from Nx Cloud"
},
"github": {

@@ -26,0 +30,0 @@ "type": "boolean",

@@ -21,2 +21,9 @@ "use strict";

...node_module_1.builtinModules.map((x) => `node:${x}`),
// These are missing in the builtinModules list
// See: https://github.com/nodejs/node/issues/42785
// TODO(v20): We should be safe to use `isBuiltin` function instead of keep the set here (https://nodejs.org/api/module.html#moduleisbuiltinmodulename)
'test',
'node:test',
'node:sea',
'node:sqlite',
]);

@@ -269,3 +276,3 @@ function isBuiltinModuleImport(importExpr) {

let dir = (0, node_path_1.dirname)(pathOfFileInPackage);
while (dir !== (0, node_path_1.parse)(dir).root) {
while (dir !== (0, node_path_1.dirname)(dir)) {
const packageJsonPath = (0, node_path_1.join)(dir, 'package.json');

@@ -272,0 +279,0 @@ try {

@@ -276,3 +276,3 @@ "use strict";

}
else if (attempts > 1000) {
else if (attempts > 10000) {
// daemon fails to start, the process probably exited

@@ -279,0 +279,0 @@ // we print the logs and exit the client

@@ -13,2 +13,3 @@ import { DefaultTasksRunnerOptions, RemoteCache } from './default-tasks-runner';

};
export declare function getCache(options: DefaultTasksRunnerOptions): DbCache | Cache;
export declare class DbCache {

@@ -24,2 +25,3 @@ private readonly options;

get(task: Task): Promise<CachedResult | null>;
private applyRemoteCacheResults;
put(task: Task, terminalOutput: string | null, outputs: string[], code: number): Promise<void>;

@@ -26,0 +28,0 @@ copyFilesFromCache(_: string, cachedResult: CachedResult, outputs: string[]): Promise<void>;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = exports.DbCache = void 0;
exports.getCache = getCache;
const workspace_root_1 = require("../utils/workspace-root");

@@ -18,2 +19,12 @@ const fs_extra_1 = require("fs-extra");

const get_cloud_options_1 = require("../nx-cloud/utilities/get-cloud-options");
function getCache(options) {
return process.env.NX_DB_CACHE === 'true'
? new DbCache({
// Remove this in Nx 21
nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
? options.remoteCache
: null,
})
: new Cache(options);
}
class DbCache {

@@ -41,3 +52,3 @@ async setup() {

if (res) {
this.cache.applyRemoteCacheResults(task.hash, res);
this.applyRemoteCacheResults(task.hash, res);
return {

@@ -56,2 +67,5 @@ ...res,

}
applyRemoteCacheResults(hash, res) {
return this.cache.applyRemoteCacheResults(hash, res);
}
async put(task, terminalOutput, outputs, code) {

@@ -58,0 +72,0 @@ return tryAndRetry(async () => {

@@ -123,2 +123,5 @@ "use strict";

let taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
if (nxArgs.skipSync) {
return { projectGraph, taskGraph };
}
// collect unique syncGenerators from the tasks

@@ -138,3 +141,3 @@ const uniqueSyncGenerators = (0, sync_generators_1.collectEnabledTaskSyncGeneratorsFromTaskGraph)(taskGraph, projectGraph, nxJson);

const resultBodyLines = [...(0, sync_generators_1.syncGeneratorResultsToMessageLines)(results), ''];
const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks.';
const fixMessage = 'You can manually run `nx sync` to update your workspace or you can set `sync.applyChanges` to `true` in your `nx.json` to apply the changes automatically when running tasks in interactive environments.';
const willErrorOnCiMessage = 'Please note that this will be an error on CI.';

@@ -141,0 +144,0 @@ if ((0, is_ci_1.isCI)() || !process.stdout.isTTY) {

@@ -18,4 +18,2 @@ "use strict";

const params_1 = require("../utils/params");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const nx_json_1 = require("../config/nx-json");
class TaskOrchestrator {

@@ -32,10 +30,3 @@ // endregion internal state

this.outputStyle = outputStyle;
this.cache = process.env.NX_DB_CACHE === 'true'
? new cache_1.DbCache({
// Remove this in Nx 21
nxCloudRemoteCache: (0, nx_cloud_utils_1.isNxCloudUsed)((0, nx_json_1.readNxJson)())
? this.options.remoteCache
: null,
})
: new cache_1.Cache(this.options);
this.cache = (0, cache_1.getCache)(this.options);
this.forkedProcessTaskRunner = new forked_process_task_runner_1.ForkedProcessTaskRunner(this.options);

@@ -42,0 +33,0 @@ this.tasksSchedule = new tasks_schedule_1.TasksSchedule(this.projectGraph, this.taskGraph, this.options);

@@ -33,2 +33,3 @@ import type { Arguments } from 'yargs';

excludeTaskDependencies?: boolean;
skipSync?: boolean;
}

@@ -35,0 +36,0 @@ export declare function createOverrides(__overrides_unparsed__?: string[]): Record<string, any>;

@@ -1,4 +0,4 @@

import { ExecSyncOptions } from 'child_process';
export declare function cloneFromUpstream(url: string, destination: string, { originName }?: {
export declare function cloneFromUpstream(url: string, destination: string, { originName, depth }?: {
originName: string;
depth?: number;
}): Promise<GitRepository>;

@@ -11,3 +11,2 @@ export declare class GitRepository {

addFetchRemote(remoteName: string, branch: string): Promise<string>;
private execAsync;
showStat(): Promise<string>;

@@ -17,3 +16,2 @@ listBranches(): Promise<string[]>;

reset(ref: string): Promise<string>;
squashLastTwoCommits(): Promise<string>;
mergeUnrelatedHistories(ref: string, message: string): Promise<string>;

@@ -30,11 +28,10 @@ fetch(remote: string, ref?: string): Promise<string>;

deleteGitRemote(name: string): Promise<string>;
deleteBranch(branch: string): Promise<string>;
addGitRemote(name: string, url: string): Promise<string>;
hasFilterRepoInstalled(): Promise<boolean>;
filterRepo(subdirectory: string): Promise<string>;
filterBranch(subdirectory: string, branchName: string): Promise<string>;
private execAsync;
private quotePath;
}
/**
* This is used by the squash editor script to update the rebase file.
*/
export declare function updateRebaseFile(contents: string): string;
export declare function fetchGitRemote(name: string, branch: string, execOptions: ExecSyncOptions): string | Buffer;
/**
* This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.

@@ -41,0 +38,0 @@ */

@@ -5,4 +5,2 @@ "use strict";

exports.cloneFromUpstream = cloneFromUpstream;
exports.updateRebaseFile = updateRebaseFile;
exports.fetchGitRemote = fetchGitRemote;
exports.getGithubSlugOrNull = getGithubSlugOrNull;

@@ -13,5 +11,4 @@ exports.extractUserAndRepoFromGitHubUrl = extractUserAndRepoFromGitHubUrl;

const child_process_1 = require("child_process");
const path_1 = require("path");
const devkit_exports_1 = require("../devkit-exports");
const path_1 = require("path");
const SQUASH_EDITOR = (0, path_1.join)(__dirname, 'squash.js');
function execAsync(command, execOptions) {

@@ -27,5 +24,8 @@ return new Promise((res, rej) => {

}
async function cloneFromUpstream(url, destination, { originName } = { originName: 'origin' }) {
await execAsync(`git clone ${url} ${destination} --depth 1 --origin ${originName}`, {
async function cloneFromUpstream(url, destination, { originName, depth } = {
originName: 'origin',
}) {
await execAsync(`git clone ${url} ${destination} ${depth ? `--depth ${depth}` : ''} --origin ${originName}`, {
cwd: (0, path_1.dirname)(destination),
maxBuffer: 10 * 1024 * 1024,
});

@@ -46,10 +46,5 @@ return new GitRepository(destination);

}
addFetchRemote(remoteName, branch) {
return this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
async addFetchRemote(remoteName, branch) {
return await this.execAsync(`git config --add remote.${remoteName}.fetch "+refs/heads/${branch}:refs/remotes/${remoteName}/${branch}"`);
}
execAsync(command) {
return execAsync(command, {
cwd: this.root,
});
}
async showStat() {

@@ -68,5 +63,7 @@ return await this.execAsync(`git show --stat`);

async getGitFiles(path) {
return (await this.execAsync(`git ls-files ${path}`))
// Use -z to return file names exactly as they are stored in git, separated by NULL (\x00) character.
// This avoids problems with special characters in file names.
return (await this.execAsync(`git ls-files -z ${path}`))
.trim()
.split('\n')
.split('\x00')
.map((s) => s.trim())

@@ -76,52 +73,72 @@ .filter(Boolean);

async reset(ref) {
return this.execAsync(`git reset ${ref} --hard`);
return await this.execAsync(`git reset ${ref} --hard`);
}
async squashLastTwoCommits() {
return this.execAsync(`git -c core.editor="node ${SQUASH_EDITOR}" rebase --interactive --no-autosquash HEAD~2`);
}
async mergeUnrelatedHistories(ref, message) {
return this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
return await this.execAsync(`git merge ${ref} -X ours --allow-unrelated-histories -m "${message}"`);
}
async fetch(remote, ref) {
return this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
return await this.execAsync(`git fetch ${remote}${ref ? ` ${ref}` : ''}`);
}
async checkout(branch, opts) {
return this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
return await this.execAsync(`git checkout ${opts.new ? '-b ' : ' '}${branch}${opts.base ? ' ' + opts.base : ''}`);
}
async move(path, destination) {
return this.execAsync(`git mv "${path}" "${destination}"`);
return await this.execAsync(`git mv ${this.quotePath(path)} ${this.quotePath(destination)}`);
}
async push(ref, remoteName) {
return this.execAsync(`git push -u -f ${remoteName} ${ref}`);
return await this.execAsync(`git push -u -f ${remoteName} ${ref}`);
}
async commit(message) {
return this.execAsync(`git commit -am "${message}"`);
return await this.execAsync(`git commit -am "${message}"`);
}
async amendCommit() {
return this.execAsync(`git commit --amend -a --no-edit`);
return await this.execAsync(`git commit --amend -a --no-edit`);
}
deleteGitRemote(name) {
return this.execAsync(`git remote rm ${name}`);
async deleteGitRemote(name) {
return await this.execAsync(`git remote rm ${name}`);
}
deleteBranch(branch) {
return this.execAsync(`git branch -D ${branch}`);
async addGitRemote(name, url) {
return await this.execAsync(`git remote add ${name} ${url}`);
}
addGitRemote(name, url) {
return this.execAsync(`git remote add ${name} ${url}`);
async hasFilterRepoInstalled() {
try {
await this.execAsync(`git filter-repo --help`);
return true;
}
catch {
return false;
}
}
// git-filter-repo is much faster than filter-branch, but needs to be installed by user
// Use `hasFilterRepoInstalled` to check if it's installed
async filterRepo(subdirectory) {
// filter-repo requires POSIX path to work
const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
return await this.execAsync(`git filter-repo -f --subdirectory-filter ${this.quotePath(posixPath)}`);
}
async filterBranch(subdirectory, branchName) {
// filter-repo requires POSIX path to work
const posixPath = subdirectory.split(path_1.sep).join(path_1.posix.sep);
// We need non-ASCII file names to not be quoted, or else filter-branch will exclude them.
await this.execAsync(`git config core.quotepath false`);
return await this.execAsync(`git filter-branch --subdirectory-filter ${this.quotePath(posixPath)} -- ${branchName}`);
}
execAsync(command) {
return execAsync(command, {
cwd: this.root,
maxBuffer: 10 * 1024 * 1024,
});
}
quotePath(path) {
return process.platform === 'win32'
? // Windows/CMD only understands double-quotes, single-quotes are treated as part of the file name
// Bash and other shells will substitute `$` in file names with a variable value.
`"${path}"`
: // e.g. `git mv "$$file.txt" "libs/a/$$file.txt"` will not work since `$$` is swapped with the PID of the last process.
// Using single-quotes prevents this substitution.
`'${path}'`;
}
}
exports.GitRepository = GitRepository;
/**
* This is used by the squash editor script to update the rebase file.
*/
function updateRebaseFile(contents) {
const lines = contents.split('\n');
const lastCommitIndex = lines.findIndex((line) => line === '') - 1;
lines[lastCommitIndex] = lines[lastCommitIndex].replace('pick', 'fixup');
return lines.join('\n');
}
function fetchGitRemote(name, branch, execOptions) {
return (0, child_process_1.execSync)(`git fetch ${name} ${branch} --depth 1`, execOptions);
}
/**
* This is currently duplicated in Nx Console. Please let @MaxKless know if you make changes here.

@@ -128,0 +145,0 @@ */

@@ -6,3 +6,3 @@ import type { GeneratorCallback } from '../config/misc-interfaces';

import type { ProjectConfiguration } from '../config/workspace-json-project-json';
import { FsTree, type FileChange, type Tree } from '../generators/tree';
import { type FileChange, type Tree } from '../generators/tree';
export type SyncGeneratorResult = void | {

@@ -22,3 +22,3 @@ callback?: GeneratorCallback;

export declare function collectAllRegisteredSyncGenerators(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Promise<string[]>;
export declare function runSyncGenerator(tree: FsTree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
export declare function runSyncGenerator(tree: Tree, generatorSpecifier: string, projects: Record<string, ProjectConfiguration>): Promise<SyncGeneratorChangesResult>;
export declare function collectEnabledTaskSyncGeneratorsFromProjectGraph(projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;

@@ -25,0 +25,0 @@ export declare function collectEnabledTaskSyncGeneratorsFromTaskGraph(taskGraph: TaskGraph, projectGraph: ProjectGraph, nxJson: NxJsonConfiguration): Set<string>;

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc