Socket
Socket
Sign inDemoInstall

nx

Package Overview
Dependencies
Maintainers
8
Versions
1304
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 0.0.0-pr-26816-51699f9 to 0.0.0-pr-26898-0693e21

src/native/browser.js

56

package.json
{
"name": "nx",
"version": "0.0.0-pr-26816-51699f9",
"version": "0.0.0-pr-26898-0693e21",
"private": false,

@@ -40,2 +40,3 @@ "description": "The core Nx plugin contains the core functionality of Nx like the project graph, nx commands and task orchestration.",

"dependencies": {
"@napi-rs/wasm-runtime": "0.2.4",
"@yarnpkg/lockfile": "^1.1.0",

@@ -74,3 +75,3 @@ "@yarnpkg/parsers": "3.0.0-rc.46",

"ora": "5.3.0",
"@nrwl/tao": "0.0.0-pr-26816-51699f9"
"@nrwl/tao": "0.0.0-pr-26898-0693e21"
},

@@ -90,12 +91,12 @@ "peerDependencies": {

"optionalDependencies": {
"@nx/nx-darwin-x64": "0.0.0-pr-26816-51699f9",
"@nx/nx-darwin-arm64": "0.0.0-pr-26816-51699f9",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-26816-51699f9",
"@nx/nx-linux-x64-musl": "0.0.0-pr-26816-51699f9",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-26816-51699f9",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-26816-51699f9",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-26816-51699f9",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-26816-51699f9",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-26816-51699f9",
"@nx/nx-freebsd-x64": "0.0.0-pr-26816-51699f9"
"@nx/nx-darwin-x64": "0.0.0-pr-26898-0693e21",
"@nx/nx-darwin-arm64": "0.0.0-pr-26898-0693e21",
"@nx/nx-linux-x64-gnu": "0.0.0-pr-26898-0693e21",
"@nx/nx-linux-x64-musl": "0.0.0-pr-26898-0693e21",
"@nx/nx-win32-x64-msvc": "0.0.0-pr-26898-0693e21",
"@nx/nx-linux-arm64-gnu": "0.0.0-pr-26898-0693e21",
"@nx/nx-linux-arm64-musl": "0.0.0-pr-26898-0693e21",
"@nx/nx-linux-arm-gnueabihf": "0.0.0-pr-26898-0693e21",
"@nx/nx-win32-arm64-msvc": "0.0.0-pr-26898-0693e21",
"@nx/nx-freebsd-x64": "0.0.0-pr-26898-0693e21"
},

@@ -176,17 +177,20 @@ "nx-migrations": {

"napi": {
"name": "nx",
"package": {
"name": "@nx/nx"
"binaryName": "nx",
"packageName": "@nx/nx",
"wasm": {
"initialMemory": 16384,
"maximumMemory": 32768
},
"triples": {
"additional": [
"aarch64-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"x86_64-unknown-linux-musl",
"x86_64-unknown-freebsd"
]
}
"targets": [
"x86_64-unknown-linux-gnu",
"x86_64-pc-windows-msvc",
"x86_64-apple-darwin",
"aarch64-apple-darwin",
"aarch64-unknown-linux-gnu",
"aarch64-unknown-linux-musl",
"aarch64-pc-windows-msvc",
"armv7-unknown-linux-gnueabihf",
"x86_64-unknown-linux-musl",
"x86_64-unknown-freebsd"
]
},

@@ -193,0 +197,0 @@ "main": "./bin/nx.js",

@@ -322,5 +322,10 @@ "use strict";

exports.generateGraph = generateGraph;
async function startServer(html, environmentJs, host, port = 4211, watchForchanges = true, affected = [], focus = null, groupByFolder = false, exclude = []) {
async function startServer(html, environmentJs, host, port = 4211, watchForChanges = true, affected = [], focus = null, groupByFolder = false, exclude = []) {
let unregisterFileWatcher;
if (watchForchanges) {
if (watchForChanges && !client_1.daemonClient.enabled()) {
output_1.output.warn({
title: 'Nx Daemon is not enabled. Graph will not refresh on file changes.',
});
}
if (watchForChanges && client_1.daemonClient.enabled()) {
unregisterFileWatcher = await createFileWatcher();

@@ -327,0 +332,0 @@ }

@@ -55,3 +55,3 @@ "use strict";

catch {
errors.push('Failed to clean up the native file cache.');
// ignore, deleting the native file cache is not critical and can fail if another process is locking the file
}

@@ -58,0 +58,0 @@ try {

@@ -119,2 +119,8 @@ "use strict";

}
if (client_1.daemonClient.enabled()) {
output_1.output.error({
title: 'Daemon is not running. The watch command is not supported without the Nx Daemon.',
});
process.exit(1);
}
if (args.includeGlobalWorkspaceFiles &&

@@ -121,0 +127,0 @@ args.command.match(projectReplacementRegex)) {

@@ -6,15 +6,37 @@ "use strict";

const output_1 = require("../utils/output");
/**
* Make structure { lib: [dep], dep: [dep1], dep1: [] } from projectName lib and projectGraph
* @param projectGraph
* @param projectName
* @param resolved reference to an object that will contain resolved dependencies
* @returns
*/
const recursiveResolveDeps = (projectGraph, projectName, resolved) => {
if (projectGraph.dependencies[projectName].length === 0) {
// no deps - no resolve
resolved[projectName] = [];
return;
}
// if already resolved - just skip
if (resolved[projectName]) {
return resolved[projectName];
}
// deps string list
const projectDeps = [
...new Set(projectGraph.dependencies[projectName]
.map((projectDep) => projectDep.target)
.filter((projectDep) => projectGraph.nodes[projectDep])).values(),
];
// define
resolved[projectName] = projectDeps;
if (projectDeps.length > 0) {
for (const dep of projectDeps) {
recursiveResolveDeps(projectGraph, dep, resolved);
}
}
};
function createCommandGraph(projectGraph, projectNames, nxArgs) {
const dependencies = {};
for (const projectName of projectNames) {
if (projectGraph.dependencies[projectName].length >= 1) {
dependencies[projectName] = [
...new Set(projectGraph.dependencies[projectName]
.map((projectDep) => projectDep.target)
.filter((projectDep) => projectGraph.nodes[projectDep])).values(),
];
}
else {
dependencies[projectName] = [];
}
recursiveResolveDeps(projectGraph, projectName, dependencies);
}

@@ -21,0 +43,0 @@ const roots = Object.keys(dependencies).filter((d) => dependencies[d].length === 0);

@@ -68,1 +68,2 @@ /// <reference types="node" />

export declare const daemonClient: DaemonClient;
export declare function isDaemonEnabled(): boolean;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.daemonClient = exports.DaemonClient = void 0;
exports.isDaemonEnabled = exports.daemonClient = exports.DaemonClient = void 0;
const workspace_root_1 = require("../../utils/workspace-root");

@@ -16,8 +16,9 @@ const child_process_1 = require("child_process");

const is_ci_1 = require("../../utils/is-ci");
const nx_json_1 = require("../../config/nx-json");
const configuration_1 = require("../../config/configuration");
const promised_based_queue_1 = require("../../utils/promised-based-queue");
const nx_json_1 = require("../../config/nx-json");
const daemon_socket_messenger_1 = require("./daemon-socket-messenger");
const cache_1 = require("../cache");
const error_types_1 = require("../../project-graph/error-types");
const native_1 = require("../../native");
const get_nx_workspace_files_1 = require("../message-types/get-nx-workspace-files");

@@ -68,2 +69,3 @@ const get_context_file_data_1 = require("../message-types/get-context-file-data");

// CI=true,env=true => daemon
// WASM => no daemon because file watching does not work
if (((0, is_ci_1.isCI)() && env !== 'true') ||

@@ -79,2 +81,8 @@ isDocker() ||

}
else if (native_1.IS_WASM) {
output_1.output.warn({
title: 'The Nx Daemon is unsupported in WebAssembly environments. Some things may be slower than or not function as expected.',
});
this._enabled = false;
}
else {

@@ -408,2 +416,6 @@ this._enabled = true;

exports.daemonClient = new DaemonClient();
function isDaemonEnabled() {
return exports.daemonClient.enabled();
}
exports.isDaemonEnabled = isDaemonEnabled;
function isDocker() {

@@ -410,0 +422,0 @@ try {

@@ -135,1 +135,2 @@ /**

export { createProjectFileMapUsingProjectGraph } from './project-graph/file-map-utils';
export { isDaemonEnabled } from './daemon/client/client';

@@ -7,3 +7,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.createProjectFileMapUsingProjectGraph = exports.cacheDir = exports.hashArray = exports.defaultTasksRunner = exports.getOutputsForTargetAndConfiguration = exports.readProjectsConfigurationFromProjectGraph = exports.readCachedProjectGraph = exports.createProjectGraphAsync = exports.reverse = exports.appRootPath = exports.workspaceRoot = exports.normalizePath = exports.joinPathFragments = exports.stripIndents = exports.writeJsonFile = exports.readJsonFile = exports.stripJsonComments = exports.serializeJson = exports.parseJson = exports.updateJson = exports.writeJson = exports.readJson = exports.validateDependency = exports.ProjectGraphBuilder = exports.DependencyType = exports.updateNxJson = exports.readNxJson = exports.globAsync = exports.glob = exports.getProjects = exports.updateProjectConfiguration = exports.removeProjectConfiguration = exports.readProjectConfiguration = exports.addProjectConfiguration = exports.runExecutor = exports.isWorkspacesEnabled = exports.getPackageManagerVersion = exports.detectPackageManager = exports.getPackageManagerCommand = exports.output = exports.logger = exports.createNodesFromFiles = exports.AggregateCreateNodesError = exports.workspaceLayout = void 0;
exports.isDaemonEnabled = exports.createProjectFileMapUsingProjectGraph = exports.cacheDir = exports.hashArray = exports.defaultTasksRunner = exports.getOutputsForTargetAndConfiguration = exports.readProjectsConfigurationFromProjectGraph = exports.readCachedProjectGraph = exports.createProjectGraphAsync = exports.reverse = exports.appRootPath = exports.workspaceRoot = exports.normalizePath = exports.joinPathFragments = exports.stripIndents = exports.writeJsonFile = exports.readJsonFile = exports.stripJsonComments = exports.serializeJson = exports.parseJson = exports.updateJson = exports.writeJson = exports.readJson = exports.validateDependency = exports.ProjectGraphBuilder = exports.DependencyType = exports.updateNxJson = exports.readNxJson = exports.globAsync = exports.glob = exports.getProjects = exports.updateProjectConfiguration = exports.removeProjectConfiguration = exports.readProjectConfiguration = exports.addProjectConfiguration = exports.runExecutor = exports.isWorkspacesEnabled = exports.getPackageManagerVersion = exports.detectPackageManager = exports.getPackageManagerCommand = exports.output = exports.logger = exports.createNodesFromFiles = exports.AggregateCreateNodesError = exports.workspaceLayout = void 0;
var configuration_1 = require("./config/configuration");

@@ -142,1 +142,3 @@ Object.defineProperty(exports, "workspaceLayout", { enumerable: true, get: function () { return configuration_1.workspaceLayout; } });

Object.defineProperty(exports, "createProjectFileMapUsingProjectGraph", { enumerable: true, get: function () { return file_map_utils_1.createProjectFileMapUsingProjectGraph; } });
var client_1 = require("./daemon/client/client");
Object.defineProperty(exports, "isDaemonEnabled", { enumerable: true, get: function () { return client_1.isDaemonEnabled; } });

@@ -1,7 +0,5 @@

/* tslint:disable */
/* auto-generated by NAPI-RS */
/* eslint-disable */
/* auto-generated by NAPI-RS */
export class ExternalObject<T> {
export declare class ExternalObject<T> {
readonly '': {

@@ -12,2 +10,79 @@ readonly '': unique symbol

}
export declare class ChildProcess {
kill(): void
onExit(callback: (message: string) => void): void
onOutput(callback: (message: string) => void): void
}
export declare class HashPlanner {
constructor(nxJson: NxJson, projectGraph: ExternalObject<ProjectGraph>)
getPlans(taskIds: Array<string>, taskGraph: TaskGraph): Record<string, string[]>
getPlansReference(taskIds: Array<string>, taskGraph: TaskGraph): JsExternal
}
export declare class ImportResult {
file: string
sourceProject: string
dynamicImportExpressions: Array<string>
staticImportExpressions: Array<string>
}
export declare class RustPseudoTerminal {
constructor()
runCommand(command: string, commandDir?: string | undefined | null, jsEnv?: Record<string, string> | undefined | null, execArgv?: Array<string> | undefined | null, quiet?: boolean | undefined | null, tty?: boolean | undefined | null): ChildProcess
/**
* This allows us to run a pseudoterminal with a fake node ipc channel
* this makes it possible to be backwards compatible with the old implementation
*/
fork(id: string, forkScript: string, pseudoIpcPath: string, commandDir: string | undefined | null, jsEnv: Record<string, string> | undefined | null, execArgv: Array<string> | undefined | null, quiet: boolean): ChildProcess
}
export declare class TaskHasher {
constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)
hashPlans(hashPlans: ExternalObject<Record<string, Array<HashInstruction>>>, jsEnv: Record<string, string>): NapiDashMap
}
export declare class Watcher {
origin: string
/**
* Creates a new Watcher instance.
* Will always ignore the following directories:
* * .git/
* * node_modules/
* * .nx/
*/
constructor(origin: string, additionalGlobs?: Array<string> | undefined | null, useIgnore?: boolean | undefined | null)
watch(callback: (err: string | null, events: WatchEvent[]) => void): void
stop(): Promise<void>
}
export declare class WorkspaceContext {
workspaceRoot: string
constructor(workspaceRoot: string, cacheDir: string)
getWorkspaceFiles(projectRootMap: Record<string, string>): NxWorkspaceFiles
glob(globs: Array<string>, exclude?: Array<string> | undefined | null): Array<string>
hashFilesMatchingGlob(globs: Array<string>, exclude?: Array<string> | undefined | null): string
incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>
updateProjectFiles(projectRootMappings: ProjectRootMappings, projectFiles: ExternalObject<ProjectFiles>, globalFiles: ExternalObject<Array<FileData>>, updatedFiles: Record<string, string>, deletedFiles: Array<string>): UpdatedWorkspaceFiles
allFileData(): Array<FileData>
getFilesInDirectory(directory: string): Array<string>
}
export declare export function copy(src: string, dest: string): void
export interface DepsOutputsInput {
dependentTasksOutputFiles: string
transitive?: boolean
}
export interface EnvironmentInput {
env: string
}
export declare const enum EventType {
delete = 'delete',
update = 'update',
create = 'create'
}
/**

@@ -17,18 +92,8 @@ * Expands the given entries into a list of existing directories and files.

*/
export function expandOutputs(directory: string, entries: Array<string>): Array<string>
/**
* Expands the given outputs into a list of existing files.
* This is used when hashing outputs
*/
export function getFilesForOutputs(directory: string, entries: Array<string>): Array<string>
export function remove(src: string): void
export function copy(src: string, dest: string): void
export function hashArray(input: Array<string>): string
export function hashFile(file: string): string | null
export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
/**
* Transfer the project graph from the JS world to the Rust world, so that we can pass the project graph via memory quicker
* This wont be needed once the project graph is created in Rust
*/
export function transferProjectGraph(projectGraph: ProjectGraph): ExternalObject<ProjectGraph>
export declare export function expandOutputs(directory: string, entries: Array<string>): Array<string>
export interface ExternalDependenciesInput {
externalDependencies: Array<string>
}
export interface ExternalNode {

@@ -39,20 +104,27 @@ packageName?: string

}
export interface Target {
executor?: string
inputs?: Array<JsInputs>
outputs?: Array<string>
options?: string
configurations?: string
export interface FileData {
file: string
hash: string
}
export interface Project {
root: string
namedInputs?: Record<string, Array<JsInputs>>
tags?: Array<string>
targets: Record<string, Target>
export interface FileMap {
projectFileMap: ProjectFiles
nonProjectFiles: Array<FileData>
}
export interface ProjectGraph {
nodes: Record<string, Project>
dependencies: Record<string, Array<string>>
externalNodes: Record<string, ExternalNode>
export interface FileSetInput {
fileset: string
}
export declare export function findImports(projectFileMap: Record<string, Array<string>>): Array<ImportResult>
/**
* Expands the given outputs into a list of existing files.
* This is used when hashing outputs
*/
export declare export function getFilesForOutputs(directory: string, entries: Array<string>): Array<string>
export declare export function hashArray(input: Array<string>): string
export interface HashDetails {

@@ -62,25 +134,9 @@ value: string

}
export interface HasherOptions {
selectivelyHashTsConfig: boolean
}
export interface Task {
id: string
target: TaskTarget
outputs: Array<string>
projectRoot?: string
}
export interface TaskTarget {
project: string
target: string
configuration?: string
}
export interface TaskGraph {
roots: Array<string>
tasks: Record<string, Task>
dependencies: Record<string, Array<string>>
}
export interface FileData {
file: string
hash: string
}
export declare export function hashFile(file: string): string | null
export interface InputsInput {

@@ -91,18 +147,5 @@ input: string

}
export interface FileSetInput {
fileset: string
}
export interface RuntimeInput {
runtime: string
}
export interface EnvironmentInput {
env: string
}
export interface ExternalDependenciesInput {
externalDependencies: Array<string>
}
export interface DepsOutputsInput {
dependentTasksOutputFiles: string
transitive?: boolean
}
export const IS_WASM: boolean
/** Stripped version of the NxJson interface for use in rust */

@@ -112,16 +155,3 @@ export interface NxJson {

}
export const enum EventType {
delete = 'delete',
update = 'update',
create = 'create'
}
export interface WatchEvent {
path: string
type: EventType
}
/** Public NAPI error codes that are for Node */
export const enum WorkspaceErrors {
ParseError = 'ParseError',
Generic = 'Generic'
}
export interface NxWorkspaceFiles {

@@ -132,2 +162,3 @@ projectFileMap: ProjectFiles

}
export interface NxWorkspaceFilesExternals {

@@ -138,63 +169,72 @@ projectFiles: ExternalObject<ProjectFiles>

}
export interface UpdatedWorkspaceFiles {
fileMap: FileMap
externalReferences: NxWorkspaceFilesExternals
export interface Project {
root: string
namedInputs?: Record<string, Array<JsInputs>>
tags?: Array<string>
targets: Record<string, Target>
}
export interface FileMap {
projectFileMap: ProjectFiles
nonProjectFiles: Array<FileData>
export interface ProjectGraph {
nodes: Record<string, Project>
dependencies: Record<string, Array<string>>
externalNodes: Record<string, ExternalNode>
}
export function testOnlyTransferFileMap(projectFiles: Record<string, Array<FileData>>, nonProjectFiles: Array<FileData>): NxWorkspaceFilesExternals
export class ImportResult {
file: string
sourceProject: string
dynamicImportExpressions: Array<string>
staticImportExpressions: Array<string>
export declare export function remove(src: string): void
export interface RuntimeInput {
runtime: string
}
export class ChildProcess {
kill(): void
onExit(callback: (message: string) => void): void
onOutput(callback: (message: string) => void): void
export interface Target {
executor?: string
inputs?: Array<JsInputs>
outputs?: Array<string>
options?: string
configurations?: string
}
export class RustPseudoTerminal {
constructor()
runCommand(command: string, commandDir?: string | undefined | null, jsEnv?: Record<string, string> | undefined | null, execArgv?: Array<string> | undefined | null, quiet?: boolean | undefined | null, tty?: boolean | undefined | null): ChildProcess
/**
* This allows us to run a pseudoterminal with a fake node ipc channel
* this makes it possible to be backwards compatible with the old implementation
*/
fork(id: string, forkScript: string, pseudoIpcPath: string, commandDir: string | undefined | null, jsEnv: Record<string, string> | undefined | null, execArgv: Array<string> | undefined | null, quiet: boolean): ChildProcess
export interface Task {
id: string
target: TaskTarget
outputs: Array<string>
projectRoot?: string
}
export class HashPlanner {
constructor(nxJson: NxJson, projectGraph: ExternalObject<ProjectGraph>)
getPlans(taskIds: Array<string>, taskGraph: TaskGraph): Record<string, string[]>
getPlansReference(taskIds: Array<string>, taskGraph: TaskGraph): JsExternal
export interface TaskGraph {
roots: Array<string>
tasks: Record<string, Task>
dependencies: Record<string, Array<string>>
}
export class TaskHasher {
constructor(workspaceRoot: string, projectGraph: ExternalObject<ProjectGraph>, projectFileMap: ExternalObject<ProjectFiles>, allWorkspaceFiles: ExternalObject<Array<FileData>>, tsConfig: Buffer, tsConfigPaths: Record<string, Array<string>>, options?: HasherOptions | undefined | null)
hashPlans(hashPlans: ExternalObject<Record<string, Array<HashInstruction>>>, jsEnv: Record<string, string>): NapiDashMap
export interface TaskTarget {
project: string
target: string
configuration?: string
}
export class Watcher {
origin: string
/**
* Creates a new Watcher instance.
* Will always ignore the following directories:
* * .git/
* * node_modules/
* * .nx/
*/
constructor(origin: string, additionalGlobs?: Array<string> | undefined | null, useIgnore?: boolean | undefined | null)
watch(callback: (err: string | null, events: WatchEvent[]) => void): void
stop(): Promise<void>
export declare export function testOnlyTransferFileMap(projectFiles: Record<string, Array<FileData>>, nonProjectFiles: Array<FileData>): NxWorkspaceFilesExternals
/**
* Transfer the project graph from the JS world to the Rust world, so that we can pass the project graph via memory quicker
* This wont be needed once the project graph is created in Rust
*/
export declare export function transferProjectGraph(projectGraph: ProjectGraph): ExternalObject<ProjectGraph>
export interface UpdatedWorkspaceFiles {
fileMap: FileMap
externalReferences: NxWorkspaceFilesExternals
}
export class WorkspaceContext {
workspaceRoot: string
constructor(workspaceRoot: string, cacheDir: string)
getWorkspaceFiles(projectRootMap: Record<string, string>): NxWorkspaceFiles
glob(globs: Array<string>, exclude?: Array<string> | undefined | null): Array<string>
hashFilesMatchingGlob(globs: Array<string>, exclude?: Array<string> | undefined | null): string
incrementalUpdate(updatedFiles: Array<string>, deletedFiles: Array<string>): Record<string, string>
updateProjectFiles(projectRootMappings: ProjectRootMappings, projectFiles: ExternalObject<ProjectFiles>, globalFiles: ExternalObject<Array<FileData>>, updatedFiles: Record<string, string>, deletedFiles: Array<string>): UpdatedWorkspaceFiles
allFileData(): Array<FileData>
getFilesInDirectory(directory: string): Array<string>
export interface WatchEvent {
path: string
type: EventType
}
/** Public NAPI error codes that are for Node */
export declare const enum WorkspaceErrors {
ParseError = 'ParseError',
Generic = 'Generic'
}

@@ -7,2 +7,18 @@ const { join, basename } = require('path');

// WASI is still experimental and throws a warning when used
// We spawn many many processes so the warning gets printed a lot
// We have a different warning elsewhere to warn people using WASI
const originalEmit = process.emit;
process.emit = function (eventName, eventData) {
if (
eventName === `warning` &&
typeof eventData === `object` &&
eventData?.name === `ExperimentalWarning` &&
eventData?.message?.includes(`WASI`)
) {
return false;
}
return originalEmit.apply(process, arguments);
};
const nxPackages = new Set([

@@ -9,0 +25,0 @@ '@nx/nx-android-arm64',

@@ -1,245 +0,360 @@

const { existsSync, readFileSync } = require('fs')
const { join } = require('path')
// prettier-ignore
/* eslint-disable */
/* auto-generated by NAPI-RS */
const { platform, arch } = process
const { readFileSync } = require('fs')
let nativeBinding = null
let localFileExisted = false
let loadError = null
const loadErrors = []
function isMusl() {
// For Node 10
if (!process.report || typeof process.report.getReport !== 'function') {
try {
const lddPath = require('child_process').execSync('which ldd').toString().trim();
return readFileSync(lddPath, 'utf8').includes('musl')
} catch (e) {
const isMusl = () => {
let musl = false
if (process.platform === 'linux') {
musl = isMuslFromFilesystem()
if (musl === null) {
musl = isMuslFromReport()
}
if (musl === null) {
musl = isMuslFromChildProcess()
}
}
return musl
}
const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-')
const isMuslFromFilesystem = () => {
try {
return readFileSync('/usr/bin/ldd', 'utf-8').includes('musl')
} catch {
return null
}
}
const isMuslFromReport = () => {
const report = typeof process.report.getReport === 'function' ? process.report.getReport() : null
if (!report) {
return null
}
if (report.header && report.header.glibcVersionRuntime) {
return false
}
if (Array.isArray(report.sharedObjects)) {
if (report.sharedObjects.some(isFileMusl)) {
return true
}
} else {
const { glibcVersionRuntime } = process.report.getReport().header
return !glibcVersionRuntime
}
return false
}
switch (platform) {
case 'android':
switch (arch) {
case 'arm64':
localFileExisted = existsSync(join(__dirname, 'nx.android-arm64.node'))
const isMuslFromChildProcess = () => {
try {
return require('child_process').execSync('ldd --version', { encoding: 'utf8' }).includes('musl')
} catch (e) {
// If we reach this case, we don't know if the system is musl or not, so is better to just fallback to false
return false
}
}
function requireNative() {
if (process.platform === 'android') {
if (process.arch === 'arm64') {
try {
return require('./nx.android-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-android-arm64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm') {
try {
return require('./nx.android-arm-eabi.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-android-arm-eabi')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Android ${process.arch}`))
}
} else if (process.platform === 'win32') {
if (process.arch === 'x64') {
try {
return require('./nx.win32-x64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-win32-x64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'ia32') {
try {
return require('./nx.win32-ia32-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-win32-ia32-msvc')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./nx.win32-arm64-msvc.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-win32-arm64-msvc')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Windows: ${process.arch}`))
}
} else if (process.platform === 'darwin') {
try {
return require('./nx.darwin-universal.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-darwin-universal')
} catch (e) {
loadErrors.push(e)
}
if (process.arch === 'x64') {
try {
return require('./nx.darwin-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-darwin-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./nx.darwin-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-darwin-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on macOS: ${process.arch}`))
}
} else if (process.platform === 'freebsd') {
if (process.arch === 'x64') {
try {
return require('./nx.freebsd-x64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-freebsd-x64')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 'arm64') {
try {
return require('./nx.freebsd-arm64.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-freebsd-arm64')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on FreeBSD: ${process.arch}`))
}
} else if (process.platform === 'linux') {
if (process.arch === 'x64') {
if (isMusl()) {
try {
if (localFileExisted) {
nativeBinding = require('./nx.android-arm64.node')
} else {
nativeBinding = require('@nx/nx-android-arm64')
}
} catch (e) {
loadError = e
}
break
case 'arm':
localFileExisted = existsSync(join(__dirname, 'nx.android-arm-eabi.node'))
return require('./nx.linux-x64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-x64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
if (localFileExisted) {
nativeBinding = require('./nx.android-arm-eabi.node')
} else {
nativeBinding = require('@nx/nx-android-arm-eabi')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Android ${arch}`)
}
break
case 'win32':
switch (arch) {
case 'x64':
localFileExisted = existsSync(
join(__dirname, 'nx.win32-x64-msvc.node')
)
return require('./nx.linux-x64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-x64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm64') {
if (isMusl()) {
try {
if (localFileExisted) {
nativeBinding = require('./nx.win32-x64-msvc.node')
} else {
nativeBinding = require('@nx/nx-win32-x64-msvc')
}
} catch (e) {
loadError = e
}
break
case 'ia32':
localFileExisted = existsSync(
join(__dirname, 'nx.win32-ia32-msvc.node')
)
return require('./nx.linux-arm64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-arm64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
if (localFileExisted) {
nativeBinding = require('./nx.win32-ia32-msvc.node')
} else {
nativeBinding = require('@nx/nx-win32-ia32-msvc')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'nx.win32-arm64-msvc.node')
)
return require('./nx.linux-arm64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-arm64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'arm') {
if (isMusl()) {
try {
if (localFileExisted) {
nativeBinding = require('./nx.win32-arm64-msvc.node')
} else {
nativeBinding = require('@nx/nx-win32-arm64-msvc')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Windows: ${arch}`)
}
break
case 'darwin':
localFileExisted = existsSync(join(__dirname, 'nx.darwin-universal.node'))
try {
if (localFileExisted) {
nativeBinding = require('./nx.darwin-universal.node')
return require('./nx.linux-arm-musleabihf.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-arm-musleabihf')
} catch (e) {
loadErrors.push(e)
}
} else {
nativeBinding = require('@nx/nx-darwin-universal')
try {
return require('./nx.linux-arm-gnueabihf.node')
} catch (e) {
loadErrors.push(e)
}
break
} catch {}
switch (arch) {
case 'x64':
localFileExisted = existsSync(join(__dirname, 'nx.darwin-x64.node'))
try {
return require('@nx/nx-linux-arm-gnueabihf')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'riscv64') {
if (isMusl()) {
try {
if (localFileExisted) {
nativeBinding = require('./nx.darwin-x64.node')
} else {
nativeBinding = require('@nx/nx-darwin-x64')
}
} catch (e) {
loadError = e
}
break
case 'arm64':
localFileExisted = existsSync(
join(__dirname, 'nx.darwin-arm64.node')
)
return require('./nx.linux-riscv64-musl.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-riscv64-musl')
} catch (e) {
loadErrors.push(e)
}
} else {
try {
if (localFileExisted) {
nativeBinding = require('./nx.darwin-arm64.node')
} else {
nativeBinding = require('@nx/nx-darwin-arm64')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on macOS: ${arch}`)
return require('./nx.linux-riscv64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-riscv64-gnu')
} catch (e) {
loadErrors.push(e)
}
}
} else if (process.arch === 'ppc64') {
try {
return require('./nx.linux-ppc64-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-ppc64-gnu')
} catch (e) {
loadErrors.push(e)
}
} else if (process.arch === 's390x') {
try {
return require('./nx.linux-s390x-gnu.node')
} catch (e) {
loadErrors.push(e)
}
try {
return require('@nx/nx-linux-s390x-gnu')
} catch (e) {
loadErrors.push(e)
}
} else {
loadErrors.push(new Error(`Unsupported architecture on Linux: ${process.arch}`))
}
break
case 'freebsd':
if (arch !== 'x64') {
throw new Error(`Unsupported architecture on FreeBSD: ${arch}`)
} else {
loadErrors.push(new Error(`Unsupported OS: ${process.platform}, architecture: ${process.arch}`))
}
}
nativeBinding = requireNative()
if (!nativeBinding || process.env.NAPI_RS_FORCE_WASI) {
try {
nativeBinding = require('./nx.wasi.cjs')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
console.error(err)
}
localFileExisted = existsSync(join(__dirname, 'nx.freebsd-x64.node'))
}
if (!nativeBinding) {
try {
if (localFileExisted) {
nativeBinding = require('./nx.freebsd-x64.node')
} else {
nativeBinding = require('@nx/nx-freebsd-x64')
nativeBinding = require('@nx/nx-wasm32-wasi')
} catch (err) {
if (process.env.NAPI_RS_FORCE_WASI) {
console.error(err)
}
} catch (e) {
loadError = e
}
break
case 'linux':
switch (arch) {
case 'x64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'nx.linux-x64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./nx.linux-x64-musl.node')
} else {
nativeBinding = require('@nx/nx-linux-x64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'nx.linux-x64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./nx.linux-x64-gnu.node')
} else {
nativeBinding = require('@nx/nx-linux-x64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm64':
if (isMusl()) {
localFileExisted = existsSync(
join(__dirname, 'nx.linux-arm64-musl.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./nx.linux-arm64-musl.node')
} else {
nativeBinding = require('@nx/nx-linux-arm64-musl')
}
} catch (e) {
loadError = e
}
} else {
localFileExisted = existsSync(
join(__dirname, 'nx.linux-arm64-gnu.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./nx.linux-arm64-gnu.node')
} else {
nativeBinding = require('@nx/nx-linux-arm64-gnu')
}
} catch (e) {
loadError = e
}
}
break
case 'arm':
localFileExisted = existsSync(
join(__dirname, 'nx.linux-arm-gnueabihf.node')
)
try {
if (localFileExisted) {
nativeBinding = require('./nx.linux-arm-gnueabihf.node')
} else {
nativeBinding = require('@nx/nx-linux-arm-gnueabihf')
}
} catch (e) {
loadError = e
}
break
default:
throw new Error(`Unsupported architecture on Linux: ${arch}`)
}
break
default:
throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`)
}
}
if (!nativeBinding) {
if (loadError) {
throw loadError
if (loadErrors.length > 0) {
// TODO Link to documentation with potential fixes
// - The package owner could build/publish bindings for this arch
// - The user may need to bundle the correct files
// - The user may need to re-install node_modules to get new packages
throw new Error('Failed to load native binding', { cause: loadErrors })
}

@@ -249,21 +364,20 @@ throw new Error(`Failed to load native binding`)

const { expandOutputs, getFilesForOutputs, remove, copy, hashArray, hashFile, ImportResult, findImports, transferProjectGraph, ChildProcess, RustPseudoTerminal, HashPlanner, TaskHasher, EventType, Watcher, WorkspaceContext, WorkspaceErrors, testOnlyTransferFileMap } = nativeBinding
module.exports.expandOutputs = expandOutputs
module.exports.getFilesForOutputs = getFilesForOutputs
module.exports.remove = remove
module.exports.copy = copy
module.exports.hashArray = hashArray
module.exports.hashFile = hashFile
module.exports.ImportResult = ImportResult
module.exports.findImports = findImports
module.exports.transferProjectGraph = transferProjectGraph
module.exports.ChildProcess = ChildProcess
module.exports.RustPseudoTerminal = RustPseudoTerminal
module.exports.HashPlanner = HashPlanner
module.exports.TaskHasher = TaskHasher
module.exports.EventType = EventType
module.exports.Watcher = Watcher
module.exports.WorkspaceContext = WorkspaceContext
module.exports.WorkspaceErrors = WorkspaceErrors
module.exports.testOnlyTransferFileMap = testOnlyTransferFileMap
module.exports.ChildProcess = nativeBinding.ChildProcess
module.exports.HashPlanner = nativeBinding.HashPlanner
module.exports.ImportResult = nativeBinding.ImportResult
module.exports.RustPseudoTerminal = nativeBinding.RustPseudoTerminal
module.exports.TaskHasher = nativeBinding.TaskHasher
module.exports.Watcher = nativeBinding.Watcher
module.exports.WorkspaceContext = nativeBinding.WorkspaceContext
module.exports.copy = nativeBinding.copy
module.exports.EventType = nativeBinding.EventType
module.exports.expandOutputs = nativeBinding.expandOutputs
module.exports.findImports = nativeBinding.findImports
module.exports.getFilesForOutputs = nativeBinding.getFilesForOutputs
module.exports.hashArray = nativeBinding.hashArray
module.exports.hashFile = nativeBinding.hashFile
module.exports.IS_WASM = nativeBinding.IS_WASM
module.exports.remove = nativeBinding.remove
module.exports.testOnlyTransferFileMap = nativeBinding.testOnlyTransferFileMap
module.exports.transferProjectGraph = nativeBinding.transferProjectGraph
module.exports.WorkspaceErrors = nativeBinding.WorkspaceErrors
export declare function shortenedCloudUrl(installationSource: string, accessToken?: string, usesGithub?: boolean): Promise<string>;
export declare function repoUsesGithub(github?: boolean): Promise<boolean>;
export declare function removeTrailingSlash(apiUrl: string): string;
export declare function getURLifShortenFailed(usesGithub: boolean, githubSlug: string, apiUrl: string, source: string, accessToken?: string): string;
export declare function getNxCloudVersion(apiUrl: string): Promise<string | null>;
export declare function removeVersionModifier(versionString: string): string;
export declare function versionIsValid(version: string): boolean;
export declare function compareCleanCloudVersions(version1: string, version2: string): number;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.repoUsesGithub = exports.shortenedCloudUrl = void 0;
exports.compareCleanCloudVersions = exports.versionIsValid = exports.removeVersionModifier = exports.getNxCloudVersion = exports.getURLifShortenFailed = exports.removeTrailingSlash = exports.repoUsesGithub = exports.shortenedCloudUrl = void 0;
const devkit_exports_1 = require("../../devkit-exports");
const git_utils_1 = require("../../utils/git-utils");
const semver_1 = require("semver");
async function shortenedCloudUrl(installationSource, accessToken, usesGithub) {
const githubSlug = (0, git_utils_1.getGithubSlugOrNull)();
const apiUrl = removeTrailingSlash(process.env.NX_CLOUD_API || process.env.NRWL_API || `https://cloud.nx.app`);
const version = await getNxCloudVersion(apiUrl);
if (version && (0, semver_1.lt)(removeVersionModifier(version), '2406.11.5')) {
try {
const version = await getNxCloudVersion(apiUrl);
if ((version && compareCleanCloudVersions(version, '2406.11.5') < 0) ||
!version) {
return apiUrl;
}
}
catch (e) {
devkit_exports_1.logger.verbose(`Failed to get Nx Cloud version.
${e}`);
return apiUrl;

@@ -47,2 +54,3 @@ }

}
exports.removeTrailingSlash = removeTrailingSlash;
function getSource(installationSource) {

@@ -73,2 +81,3 @@ if (installationSource.includes('nx-init')) {

}
exports.getURLifShortenFailed = getURLifShortenFailed;
async function getInstallationSupportsGitHub(apiUrl) {

@@ -94,5 +103,9 @@ try {

const version = removeVersionModifier(response.data.version);
const isValid = versionIsValid(version);
if (!version) {
throw new Error('Failed to extract version from response.');
}
if (!isValid) {
throw new Error(`Invalid version format: ${version}`);
}
return version;

@@ -103,7 +116,41 @@ }

${e}`);
return null;
}
}
exports.getNxCloudVersion = getNxCloudVersion;
/**
 * Strips any trailing modifier from an Nx Cloud version string.
 * Cloud versions are formatted YYMM.DD.BuildNumber with an optional
 * modifier attached by '.' or '-' (e.g. "2406.13.5.hotfix2" or
 * "2406.13.5-beta"); only the first three numeric segments are kept.
 */
function removeVersionModifier(versionString) {
    // '.' and '-' both delimit the modifier, so split on either.
    const segments = versionString.split(/[.-]/);
    return segments.slice(0, 3).join('.');
}
exports.removeVersionModifier = removeVersionModifier;
/**
 * Checks that a (modifier-free) Cloud version string matches the strict
 * YYMM.DD.BuildNumber shape. Every segment is required, including the
 * build number; partial versions like "2406.11" are rejected.
 */
function versionIsValid(version) {
    return /^\d{4}\.\d{2}\.\d+$/.test(version);
}
exports.versionIsValid = versionIsValid;
/**
 * Compares two clean (modifier-free) Cloud versions in YYMM.DD.BuildNumber
 * format, segment by segment from most to least significant.
 *
 * @returns 1 if version1 is newer, -1 if version2 is newer, 0 if equal.
 */
function compareCleanCloudVersions(version1, version2) {
    const toParts = (v) => v.split('.').map((segment) => parseInt(segment, 10));
    const left = toParts(version1);
    const right = toParts(version2);
    // Index 0 = yearMonth, 1 = day, 2 = build number.
    for (let i = 0; i < 3; i++) {
        if (left[i] !== right[i]) {
            return left[i] > right[i] ? 1 : -1;
        }
    }
    return 0;
}
exports.compareCleanCloudVersions = compareCleanCloudVersions;

@@ -82,9 +82,3 @@ "use strict";

exports.getSwcTranspiler = getSwcTranspiler;
const registered = new Set();
function getTsNodeTranspiler(compilerOptions) {
// Just return if transpiler was already registered before.
const registrationKey = JSON.stringify(compilerOptions);
if (registered.has(registrationKey)) {
return () => { };
}
const { register } = require('ts-node');

@@ -98,3 +92,2 @@ // ts-node doesn't provide a cleanup method

});
registered.add(registrationKey);
const { transpiler, swc } = service.options;

@@ -152,2 +145,3 @@ // Don't warn if a faster transpiler is enabled

}
const registered = new Map();
function getTranspiler(compilerOptions, tsConfigRaw) {

@@ -166,9 +160,31 @@ const preferTsNode = process.env.NX_PREFER_TS_NODE === 'true';

compilerOptions.skipLibCheck = true;
if (swcNodeInstalled && !preferTsNode) {
return () => getSwcTranspiler(compilerOptions);
// Just return if transpiler was already registered before.
const registrationKey = JSON.stringify(compilerOptions);
const registrationEntry = registered.get(registrationKey);
if (registered.has(registrationKey)) {
registrationEntry.refCount++;
return registrationEntry.cleanup;
}
// We can fall back on ts-node if it's available
if (tsNodeInstalled) {
const tsNodeOptions = filterRecognizedTsConfigTsNodeOptions(tsConfigRaw).recognized;
return () => getTsNodeTranspiler(compilerOptions);
const _getTranspiler = swcNodeInstalled && !preferTsNode
? getSwcTranspiler
: tsNodeInstalled
? // We can fall back on ts-node if it's available
getTsNodeTranspiler
: undefined;
if (_getTranspiler) {
const transpilerCleanup = _getTranspiler(compilerOptions);
const currRegistrationEntry = {
refCount: 1,
cleanup: () => {
return () => {
currRegistrationEntry.refCount--;
if (currRegistrationEntry.refCount === 0) {
registered.delete(registrationKey);
transpilerCleanup();
}
};
},
};
registered.set(registrationKey, currRegistrationEntry);
return currRegistrationEntry.cleanup;
}

@@ -175,0 +191,0 @@ }

@@ -28,3 +28,3 @@ import { PluginConfiguration } from '../../config/nx-json';

]>;
export declare function loadNxPlugins(plugins: PluginConfiguration[], root?: string): Promise<[LoadedNxPlugin[], () => void]>;
export declare function loadNxPlugins(plugins: PluginConfiguration[], root?: string): Promise<readonly [LoadedNxPlugin[], () => void]>;
export declare function getDefaultPlugins(root: string): Promise<string[]>;

@@ -72,14 +72,15 @@ "use strict";

async function loadNxPlugins(plugins, root = workspace_root_1.workspaceRoot) {
const result = [];
const loadingMethod = process.env.NX_ISOLATE_PLUGINS === 'true'
performance.mark('loadNxPlugins:start');
const loadingMethod = process.env.NX_ISOLATE_PLUGINS !== 'false'
? isolation_1.loadNxPluginInIsolation
: loader_1.loadNxPlugin;
plugins = await normalizePlugins(plugins, root);
const result = new Array(plugins?.length);
const cleanupFunctions = [];
for (const plugin of plugins) {
await Promise.all(plugins.map(async (plugin, idx) => {
const [loadedPluginPromise, cleanup] = await loadingMethod(plugin, root);
result.push(loadedPluginPromise);
result[idx] = loadedPluginPromise;
cleanupFunctions.push(cleanup);
}
return [
}));
const ret = [
await Promise.all(result),

@@ -95,2 +96,5 @@ () => {

];
performance.mark('loadNxPlugins:end');
performance.measure('loadNxPlugins', 'loadNxPlugins:start', 'loadNxPlugins:end');
return ret;
}

@@ -112,3 +116,2 @@ exports.loadNxPlugins = loadNxPlugins;

(0, path_1.join)(__dirname, '../../plugins/js'),
(0, path_1.join)(__dirname, '../../plugins/target-defaults/target-defaults-plugin'),
...((0, angular_json_1.shouldMergeAngularProjects)(root, false)

@@ -115,0 +118,0 @@ ? [(0, path_1.join)(__dirname, '../../adapter/angular-json')]

@@ -87,3 +87,3 @@ /// <reference types="node" />

success: false;
error: string;
error: Error;
tx: string;

@@ -90,0 +90,0 @@ };

@@ -238,3 +238,3 @@ "use strict";

], {
stdio: process.stdout.isTTY ? 'inherit' : 'ignore',
stdio: 'inherit',
env,

@@ -241,0 +241,0 @@ detached: true,

@@ -6,4 +6,4 @@ "use strict";

const serializable_error_1 = require("../../../utils/serializable-error");
const consume_messages_from_socket_1 = require("../../../utils/consume-messages-from-socket");
const net_1 = require("net");
const consume_messages_from_socket_1 = require("../../../utils/consume-messages-from-socket");
const fs_1 = require("fs");

@@ -121,3 +121,7 @@ if (process.env.NX_PERF_LOGGING === 'true') {

type: 'createMetadataResult',
payload: { success: false, error: e.stack, tx },
payload: {
success: false,
error: (0, serializable_error_1.createSerializableError)(e),
tx,
},
};

@@ -124,0 +128,0 @@ }

import { NxJsonConfiguration, TargetDefaults } from '../../config/nx-json';
import { ProjectGraphExternalNode } from '../../config/project-graph';
import { ProjectConfiguration, ProjectMetadata, TargetConfiguration, TargetMetadata } from '../../config/workspace-json-project-json';
import { ONLY_MODIFIES_EXISTING_TARGET } from '../../plugins/target-defaults/symbols';
import { LoadedNxPlugin } from '../plugins/internal-api';
export type SourceInformation = [file: string | null, plugin: string];
export type ConfigurationSourceMaps = Record<string, Record<string, SourceInformation>>;
export declare function mergeProjectConfigurationIntoRootMap(projectRootMap: Record<string, ProjectConfiguration>, project: ProjectConfiguration & {
targets?: Record<string, TargetConfiguration & {
[ONLY_MODIFIES_EXISTING_TARGET]?: boolean;
}>;
}, configurationSourceMaps?: ConfigurationSourceMaps, sourceInformation?: SourceInformation, skipTargetNormalization?: boolean): void;
export declare function mergeProjectConfigurationIntoRootMap(projectRootMap: Record<string, ProjectConfiguration>, project: ProjectConfiguration, configurationSourceMaps?: ConfigurationSourceMaps, sourceInformation?: SourceInformation, skipTargetNormalization?: boolean): void;
export declare function mergeMetadata<T = ProjectMetadata | TargetMetadata>(sourceMap: Record<string, [file: string, plugin: string]>, sourceInformation: [file: string, plugin: string], baseSourceMapPath: string, metadata: T, matchingMetadata?: T): T;

@@ -47,2 +42,3 @@ export type ConfigurationResult = {

export declare function validateProject(project: ProjectConfiguration, knownProjects: Record<string, ProjectConfiguration>): void;
export declare function mergeTargetDefaultWithTargetDefinition(targetName: string, project: ProjectConfiguration, targetDefault: Partial<TargetConfiguration>, sourceMap: Record<string, SourceInformation>): TargetConfiguration;
/**

@@ -72,2 +68,8 @@ * Merges two targets.

export declare function readTargetDefaultsForTarget(targetName: string, targetDefaults: TargetDefaults, executor?: string): TargetDefaults[string];
/**
 * Expands `command` syntactic sugar and replaces tokens in options.
* @param target The target to normalize
* @param project The project that the target belongs to
* @returns The normalized target configuration
*/
export declare function normalizeTarget(target: TargetConfiguration, project: ProjectConfiguration): TargetConfiguration<any>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.normalizeTarget = exports.readTargetDefaultsForTarget = exports.resolveNxTokensInOptions = exports.isCompatibleTarget = exports.mergeTargetConfigurations = exports.validateProject = exports.readProjectConfigurationsFromRootMap = exports.createProjectConfigurations = exports.mergeMetadata = exports.mergeProjectConfigurationIntoRootMap = void 0;
exports.normalizeTarget = exports.readTargetDefaultsForTarget = exports.resolveNxTokensInOptions = exports.isCompatibleTarget = exports.mergeTargetConfigurations = exports.mergeTargetDefaultWithTargetDefinition = exports.validateProject = exports.readProjectConfigurationsFromRootMap = exports.createProjectConfigurations = exports.mergeMetadata = exports.mergeProjectConfigurationIntoRootMap = void 0;
const logger_1 = require("../../utils/logger");
const fileutils_1 = require("../../utils/fileutils");
const workspace_root_1 = require("../../utils/workspace-root");
const symbols_1 = require("../../plugins/target-defaults/symbols");
const minimatch_1 = require("minimatch");

@@ -123,11 +122,5 @@ const path_1 = require("path");

const target = project.targets?.[targetName];
if (sourceMap && !target?.[symbols_1.ONLY_MODIFIES_EXISTING_TARGET]) {
if (sourceMap) {
sourceMap[`targets.${targetName}`] = sourceInformation;
}
// If ONLY_MODIFIES_EXISTING_TARGET is true, and its not on the matching project
// we shouldn't merge its info into the graph
if (target?.[symbols_1.ONLY_MODIFIES_EXISTING_TARGET] &&
!matchingProject.targets?.[targetName]) {
continue;
}
const normalizedTarget = skipTargetNormalization

@@ -137,5 +130,2 @@ ? target

const mergedTarget = mergeTargetConfigurations(normalizedTarget, matchingProject.targets?.[targetName], sourceMap, sourceInformation, `targets.${targetName}`);
// We don't want the symbol to live on past the merge process
if (mergedTarget?.[symbols_1.ONLY_MODIFIES_EXISTING_TARGET])
delete mergedTarget?.[symbols_1.ONLY_MODIFIES_EXISTING_TARGET];
updatedProjectConfiguration.targets[targetName] = mergedTarget;

@@ -258,4 +248,6 @@ }

}
const innerStackTrace = ' ' + e.stack.split('\n').join('\n ');
errorBodyLines.push(innerStackTrace);
}
error.message = errorBodyLines.join('\n');
error.stack = errorBodyLines.join('\n');
// This represents a single plugin erroring out with a hard error.

@@ -269,3 +261,3 @@ errors.push(error);

return Promise.all(results).then((results) => {
const { projectRootMap, externalNodes, rootMap, configurationSourceMaps } = mergeCreateNodesResults(results, errors);
const { projectRootMap, externalNodes, rootMap, configurationSourceMaps } = mergeCreateNodesResults(results, nxJson, errors);
perf_hooks_1.performance.mark('build-project-configs:end');

@@ -294,3 +286,3 @@ perf_hooks_1.performance.measure('build-project-configs', 'build-project-configs:start', 'build-project-configs:end');

exports.createProjectConfigurations = createProjectConfigurations;
function mergeCreateNodesResults(results, errors) {
function mergeCreateNodesResults(results, nxJsonConfiguration, errors) {
perf_hooks_1.performance.mark('createNodes:merge - start');

@@ -304,5 +296,2 @@ const projectRootMap = {};

const sourceInfo = [file, pluginName];
if (result[symbols_1.OVERRIDE_SOURCE_FILE]) {
sourceInfo[0] = result[symbols_1.OVERRIDE_SOURCE_FILE];
}
for (const node in projectNodes) {

@@ -331,3 +320,3 @@ // Handles `{projects: {'libs/foo': undefined}}`.

try {
validateAndNormalizeProjectRootMap(projectRootMap);
validateAndNormalizeProjectRootMap(projectRootMap, nxJsonConfiguration, configurationSourceMaps);
}

@@ -411,3 +400,3 @@ catch (e) {

exports.readProjectConfigurationsFromRootMap = readProjectConfigurationsFromRootMap;
function validateAndNormalizeProjectRootMap(projectRootMap) {
function validateAndNormalizeProjectRootMap(projectRootMap, nxJsonConfiguration, sourceMaps = {}) {
// Name -> Project, used to validate that all projects have unique names

@@ -445,20 +434,3 @@ const projects = {};

}
for (const targetName in project.targets) {
project.targets[targetName] = normalizeTarget(project.targets[targetName], project);
if (
// If the target has no executor or command, it doesn't do anything
!project.targets[targetName].executor &&
!project.targets[targetName].command) {
// But it may have dependencies that do something
if (project.targets[targetName].dependsOn &&
project.targets[targetName].dependsOn.length > 0) {
project.targets[targetName].executor = 'nx:noop';
}
else {
// If it does nothing, and has no depenencies,
// we can remove it.
delete project.targets[targetName];
}
}
}
normalizeTargets(project, sourceMaps, nxJsonConfiguration);
}

@@ -473,2 +445,29 @@ if (conflicts.size > 0) {

}
/**
 * Normalizes every target of a project in place:
 * 1. expands `command` sugar / tokens via normalizeTarget,
 * 2. layers matching nx.json targetDefaults on top (when compatible),
 * 3. prunes targets that do nothing — a target with no executor and no
 *    command is either turned into 'nx:noop' (if it has dependencies) or
 *    removed entirely.
 */
function normalizeTargets(project, sourceMaps, nxJsonConfiguration) {
    for (const targetName in project.targets) {
        project.targets[targetName] = normalizeTarget(project.targets[targetName], project);
        const normalized = project.targets[targetName];
        const defaults = readTargetDefaultsForTarget(targetName, nxJsonConfiguration.targetDefaults, normalized.executor);
        // Defaults are only applied when they exist and are compatible
        // with the target's executor.
        if (defaults && isCompatibleTarget(normalized, defaults)) {
            project.targets[targetName] = mergeTargetDefaultWithTargetDefinition(targetName, project, normalizeTarget(defaults, project), sourceMaps[project.root]);
        }
        const finalTarget = project.targets[targetName];
        if (finalTarget.executor || finalTarget.command) {
            continue;
        }
        // No executor/command: keep it as a noop if it still fans out to
        // dependencies, otherwise drop it.
        if (finalTarget.dependsOn && finalTarget.dependsOn.length > 0) {
            finalTarget.executor = 'nx:noop';
        }
        else {
            delete project.targets[targetName];
        }
    }
}
function validateProject(project,

@@ -495,2 +494,71 @@ // name -> project

exports.validateProject = validateProject;
/**
 * Decides whether a targetDefaults value may overwrite the existing value
 * recorded at `key` in the project's source map.
 *
 * - No source info recorded: nothing claims the value, defaults apply.
 * - Value set by an Nx core plugin (plugin name starting with 'nx/'):
 *   the user-facing configuration wins, defaults do NOT apply.
 * - Value set by anything else (third-party plugin): defaults apply on top.
 */
function targetDefaultShouldBeApplied(key, sourceMap) {
    const sourceInfo = sourceMap[key];
    if (!sourceInfo) {
        return true;
    }
    const plugin = sourceInfo[1];
    return !plugin?.startsWith('nx/');
}
/**
 * Layers an nx.json `targetDefaults` entry on top of a project's own target
 * definition, returning a new configuration. Every value written from the
 * defaults is recorded in `sourceMap` as ['nx.json', 'nx/target-defaults'].
 *
 * A default only overwrites an existing value when the source map says that
 * value came from an Nx core plugin (see targetDefaultShouldBeApplied) or
 * when the project simply doesn't define it.
 *
 * @param targetName   name of the target being merged (used for source-map keys)
 * @param project      project whose `targets[targetName]` is the base definition
 * @param targetDefault the defaults to layer on (options/configurations/other keys)
 * @param sourceMap    per-project source map; mutated in place as values are applied
 * @returns a new target configuration; the project's own definition is not mutated
 */
function mergeTargetDefaultWithTargetDefinition(targetName, project, targetDefault, sourceMap) {
    const targetDefinition = project.targets[targetName] ?? {};
    // Deep copy via JSON round-trip so the merge never mutates the project's
    // own definition. NOTE(review): this assumes target configs are plain
    // JSON data (functions/undefined would be dropped) — confirm upstream.
    const result = JSON.parse(JSON.stringify(targetDefinition));
    for (const key in targetDefault) {
        switch (key) {
            // 'options' and 'configurations' are merged key-by-key; any other
            // top-level key (executor, cache, inputs, ...) is replaced wholesale.
            case 'options': {
                const normalizedDefaults = resolveNxTokensInOptions(targetDefault.options, project, targetName);
                for (const optionKey in normalizedDefaults) {
                    const sourceMapKey = `targets.${targetName}.options.${optionKey}`;
                    // NOTE(review): reads targetDefinition.options[optionKey]
                    // without guarding targetDefinition.options — assumes the
                    // definition always has an options object when option
                    // defaults exist; confirm normalization guarantees this.
                    if (targetDefinition.options[optionKey] === undefined ||
                        targetDefaultShouldBeApplied(sourceMapKey, sourceMap)) {
                        result.options[optionKey] = targetDefault.options[optionKey];
                        sourceMap[sourceMapKey] = ['nx.json', 'nx/target-defaults'];
                    }
                }
                break;
            }
            case 'configurations': {
                // Create the configurations container (and record its source)
                // if the project didn't define any configurations itself.
                if (!result.configurations) {
                    result.configurations = {};
                    sourceMap[`targets.${targetName}.configurations`] = [
                        'nx.json',
                        'nx/target-defaults',
                    ];
                }
                for (const configuration in targetDefault.configurations) {
                    if (!result.configurations[configuration]) {
                        result.configurations[configuration] = {};
                        sourceMap[`targets.${targetName}.configurations.${configuration}`] =
                            ['nx.json', 'nx/target-defaults'];
                    }
                    const normalizedConfigurationDefaults = resolveNxTokensInOptions(targetDefault.configurations[configuration], project, targetName);
                    for (const configurationKey in normalizedConfigurationDefaults) {
                        const sourceMapKey = `targets.${targetName}.configurations.${configuration}.${configurationKey}`;
                        // Optional chaining here (unlike the 'options' branch)
                        // tolerates a definition without this configuration.
                        if (targetDefinition.configurations?.[configuration]?.[configurationKey] === undefined ||
                            targetDefaultShouldBeApplied(sourceMapKey, sourceMap)) {
                            result.configurations[configuration][configurationKey] =
                                targetDefault.configurations[configuration][configurationKey];
                            sourceMap[sourceMapKey] = ['nx.json', 'nx/target-defaults'];
                        }
                    }
                }
                break;
            }
            default: {
                // Scalar/whole-value default (e.g. executor, cache, inputs):
                // applied only if undefined in the project or overridable.
                const sourceMapKey = `targets.${targetName}.${key}`;
                if (targetDefinition[key] === undefined ||
                    targetDefaultShouldBeApplied(sourceMapKey, sourceMap)) {
                    result[key] = targetDefault[key];
                    sourceMap[sourceMapKey] = ['nx.json', 'nx/target-defaults'];
                }
                break;
            }
        }
    }
    return result;
}
exports.mergeTargetDefaultWithTargetDefinition = mergeTargetDefaultWithTargetDefinition;
/**

@@ -514,9 +582,2 @@ * Merges two targets.

const isCompatible = isCompatibleTarget(baseTarget ?? {}, target);
// If the targets are not compatible, we would normally overwrite the old target
// with the new one. However, we have a special case for targets that have the
// ONLY_MODIFIES_EXISTING_TARGET symbol set. This prevents the merged target
// equaling info that should have only been used to modify the existing target.
if (!isCompatible && target[symbols_1.ONLY_MODIFIES_EXISTING_TARGET]) {
return baseTarget;
}
if (!isCompatible && projectConfigSourceMap) {

@@ -695,2 +756,8 @@ // if the target is not compatible, we will simply override the options

}
/**
 * Expands `command` syntactic sugar and replaces tokens in options.
* @param target The target to normalize
* @param project The project that the target belongs to
* @returns The normalized target configuration
*/
function normalizeTarget(target, project) {

@@ -697,0 +764,0 @@ target = resolveCommandSyntacticSugar(target, project.root);

@@ -14,6 +14,7 @@ import { ProjectGraph, ProjectGraphProjectNode } from '../config/project-graph';

};
private readonly allTargetNames;
constructor(extraTargetDependencies: TargetDependencies, projectGraph: ProjectGraph);
processTasks(projectNames: string[], targets: string[], configuration: string, overrides: Object, excludeTaskDependencies: boolean): string[];
processTask(task: Task, projectUsedToDeriveDependencies: string, configuration: string, overrides: Object): void;
private processTasksForMatchingProjects;
private processTasksForMultipleProjects;
private processTasksForSingleProject;

@@ -20,0 +21,0 @@ private processTasksForDependencies;

@@ -6,3 +6,2 @@ "use strict";

const project_graph_utils_1 = require("../utils/project-graph-utils");
const find_matching_projects_1 = require("../utils/find-matching-projects");
const output_1 = require("../utils/output");

@@ -16,2 +15,10 @@ class ProcessTasks {

this.dependencies = {};
const allTargetNames = new Set();
for (const projectName in projectGraph.nodes) {
const project = projectGraph.nodes[projectName];
for (const targetName in project.data.targets ?? {}) {
allTargetNames.add(targetName);
}
}
this.allTargetNames = Array.from(allTargetNames);
}

@@ -63,3 +70,3 @@ processTasks(projectNames, targets, configuration, overrides, excludeTaskDependencies) {

this.seen.add(seenKey);
const dependencyConfigs = (0, utils_1.getDependencyConfigs)({ project: task.target.project, target: task.target.target }, this.extraTargetDependencies, this.projectGraph);
const dependencyConfigs = (0, utils_1.getDependencyConfigs)({ project: task.target.project, target: task.target.target }, this.extraTargetDependencies, this.projectGraph, this.allTargetNames);
for (const dependencyConfig of dependencyConfigs) {

@@ -70,22 +77,3 @@ const taskOverrides = dependencyConfig.params === 'forward'

if (dependencyConfig.projects) {
/** LERNA SUPPORT START - Remove in v20 */
// Lerna uses `dependencies` in `prepNxOptions`, so we need to maintain
// support for it until lerna can be updated to use the syntax.
//
// This should have been removed in v17, but the updates to lerna had not
// been made yet.
//
// TODO(@agentender): Remove this part in v20
if (typeof dependencyConfig.projects === 'string') {
if (dependencyConfig.projects === 'self') {
this.processTasksForSingleProject(task, task.target.project, dependencyConfig, configuration, taskOverrides, overrides);
continue;
}
else if (dependencyConfig.projects === 'dependencies') {
this.processTasksForDependencies(projectUsedToDeriveDependencies, dependencyConfig, configuration, task, taskOverrides, overrides);
continue;
}
}
/** LERNA SUPPORT END - Remove in v17 */
this.processTasksForMatchingProjects(dependencyConfig, configuration, task, taskOverrides, overrides);
this.processTasksForMultipleProjects(dependencyConfig, configuration, task, taskOverrides, overrides);
}

@@ -100,16 +88,12 @@ else if (dependencyConfig.dependencies) {

}
processTasksForMatchingProjects(dependencyConfig, configuration, task, taskOverrides, overrides) {
const targetProjectSpecifiers = typeof dependencyConfig.projects === 'string'
? [dependencyConfig.projects]
: dependencyConfig.projects;
const matchingProjects = (0, find_matching_projects_1.findMatchingProjects)(targetProjectSpecifiers, this.projectGraph.nodes);
if (matchingProjects.length === 0) {
processTasksForMultipleProjects(dependencyConfig, configuration, task, taskOverrides, overrides) {
if (dependencyConfig.projects.length === 0) {
output_1.output.warn({
title: `\`dependsOn\` is misconfigured for ${task.target.project}:${task.target.target}`,
bodyLines: [
`Project patterns "${targetProjectSpecifiers}" does not match any projects.`,
`Project patterns "${dependencyConfig.projects}" does not match any projects.`,
],
});
}
for (const projectName of matchingProjects) {
for (const projectName of dependencyConfig.projects) {
this.processTasksForSingleProject(task, projectName, dependencyConfig, configuration, taskOverrides, overrides);

@@ -116,0 +100,0 @@ }

@@ -142,2 +142,5 @@ "use strict";

function supportedPtyPlatform() {
if (native_1.IS_WASM) {
return false;
}
if (process.platform !== 'win32') {

@@ -144,0 +147,0 @@ return true;

@@ -1,8 +0,8 @@

import { TasksRunner } from './tasks-runner';
import { NxArgs } from '../utils/command-line-utils';
import { LifeCycle } from './life-cycle';
import { NxJsonConfiguration } from '../config/nx-json';
import { ProjectGraph, ProjectGraphProjectNode } from '../config/project-graph';
import { NxJsonConfiguration } from '../config/nx-json';
import { Task, TaskGraph } from '../config/task-graph';
import { TargetDependencyConfig } from '../config/workspace-json-project-json';
import { NxArgs } from '../utils/command-line-utils';
import { LifeCycle } from './life-cycle';
import { TasksRunner } from './tasks-runner';
export declare function runCommand(projectsToRun: ProjectGraphProjectNode[], projectGraph: ProjectGraph, { nxJson }: {

@@ -9,0 +9,0 @@ nxJson: NxJsonConfiguration;

@@ -5,24 +5,24 @@ "use strict";

const path_1 = require("path");
const workspace_root_1 = require("../utils/workspace-root");
const nx_json_1 = require("../config/nx-json");
const client_1 = require("../daemon/client/client");
const create_task_hasher_1 = require("../hasher/create-task-hasher");
const hash_task_1 = require("../hasher/hash-task");
const fileutils_1 = require("../utils/fileutils");
const is_ci_1 = require("../utils/is-ci");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const output_1 = require("../utils/output");
const utils_1 = require("./utils");
const params_1 = require("../utils/params");
const workspace_root_1 = require("../utils/workspace-root");
const create_task_graph_1 = require("./create-task-graph");
const life_cycle_1 = require("./life-cycle");
const dynamic_run_many_terminal_output_life_cycle_1 = require("./life-cycles/dynamic-run-many-terminal-output-life-cycle");
const dynamic_run_one_terminal_output_life_cycle_1 = require("./life-cycles/dynamic-run-one-terminal-output-life-cycle");
const static_run_many_terminal_output_life_cycle_1 = require("./life-cycles/static-run-many-terminal-output-life-cycle");
const static_run_one_terminal_output_life_cycle_1 = require("./life-cycles/static-run-one-terminal-output-life-cycle");
const store_run_information_life_cycle_1 = require("./life-cycles/store-run-information-life-cycle");
const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");
const task_timings_life_cycle_1 = require("./life-cycles/task-timings-life-cycle");
const dynamic_run_many_terminal_output_life_cycle_1 = require("./life-cycles/dynamic-run-many-terminal-output-life-cycle");
const task_profiling_life_cycle_1 = require("./life-cycles/task-profiling-life-cycle");
const is_ci_1 = require("../utils/is-ci");
const dynamic_run_one_terminal_output_life_cycle_1 = require("./life-cycles/dynamic-run-one-terminal-output-life-cycle");
const nx_json_1 = require("../config/nx-json");
const create_task_graph_1 = require("./create-task-graph");
const task_graph_utils_1 = require("./task-graph-utils");
const params_1 = require("../utils/params");
const hash_task_1 = require("../hasher/hash-task");
const client_1 = require("../daemon/client/client");
const store_run_information_life_cycle_1 = require("./life-cycles/store-run-information-life-cycle");
const create_task_hasher_1 = require("../hasher/create-task-hasher");
const task_history_life_cycle_1 = require("./life-cycles/task-history-life-cycle");
const nx_cloud_utils_1 = require("../utils/nx-cloud-utils");
const utils_1 = require("./utils");
async function getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides) {

@@ -65,3 +65,3 @@ const { runnerOptions } = getRunner(nxArgs, nxJson);

}
function createTaskGraphAndValidateCycles(projectGraph, extraTargetDependencies, projectNames, nxArgs, overrides, extraOptions) {
function createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies, projectNames, nxArgs, overrides, extraOptions) {
const taskGraph = (0, create_task_graph_1.createTaskGraph)(projectGraph, extraTargetDependencies, projectNames, nxArgs.targets, nxArgs.configuration, overrides, extraOptions.excludeTaskDependencies);

@@ -85,2 +85,3 @@ const cycle = (0, task_graph_utils_1.findCycle)(taskGraph);

}
(0, task_graph_utils_1.validateAtomizedTasks)(taskGraph, projectGraph);
return taskGraph;

@@ -91,3 +92,3 @@ }

const projectNames = projectsToRun.map((t) => t.name);
const taskGraph = createTaskGraphAndValidateCycles(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
const taskGraph = createTaskGraphAndRunValidations(projectGraph, extraTargetDependencies ?? {}, projectNames, nxArgs, overrides, extraOptions);
const tasks = Object.values(taskGraph.tasks);

@@ -94,0 +95,0 @@ const { lifeCycle, renderIsDone } = await getTerminalOutputLifeCycle(initiatingProject, projectNames, tasks, nxArgs, nxJson, overrides);

@@ -1,2 +0,4 @@

export declare function findCycle(taskGraph: {
import { ProjectGraph } from '../config/project-graph';
import { TaskGraph } from '../config/task-graph';
export declare function findCycle(graph: {
dependencies: Record<string, string[]>;

@@ -8,1 +10,2 @@ }): string[] | null;

}): void;
export declare function validateAtomizedTasks(taskGraph: TaskGraph, projectGraph: ProjectGraph): void;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.makeAcyclic = exports.findCycle = void 0;
exports.validateAtomizedTasks = exports.makeAcyclic = exports.findCycle = void 0;
const output_1 = require("../utils/output");
function _findCycle(graph, id, visited, path) {

@@ -17,9 +18,9 @@ if (visited[id])

}
function findCycle(taskGraph) {
/**
 * Searches the task graph for a dependency cycle.
 *
 * NOTE(review): reconstructed from interleaved old/new diff lines — the old
 * `taskGraph`-named copy of each loop was removed; the closing `return null`
 * is implied by the hunk context (`return cycle;`).
 *
 * @param graph object with a `dependencies` record: task id -> dependency ids
 * @returns the cycle as a list of task ids, or `null` when the graph is acyclic
 */
function findCycle(graph) {
    // Mark every node unvisited so _findCycle explores each node at most once
    // across all DFS starts.
    const visited = {};
    for (const t of Object.keys(graph.dependencies)) {
        visited[t] = false;
    }
    // Start a DFS from every node; the helper returns the offending path as
    // soon as a back-edge is found.
    for (const t of Object.keys(graph.dependencies)) {
        const cycle = _findCycle(graph, t, visited, [t]);
        if (cycle)
            return cycle;
    }
    return null;
}

@@ -57,1 +58,29 @@ return cycle;

exports.makeAcyclic = makeAcyclic;
/**
 * Guards against running atomizer-backed tasks outside of Nx Cloud
 * distribution: prints an error and exits the process with code 1 when such
 * tasks would run locally. No-op when validation is explicitly skipped via
 * NX_SKIP_ATOMIZER_VALIDATION or when running inside distributed execution.
 *
 * @param taskGraph    graph whose `tasks` record is scanned
 * @param projectGraph used to look up each task's target metadata
 */
function validateAtomizedTasks(taskGraph, projectGraph) {
    if (process.env['NX_SKIP_ATOMIZER_VALIDATION']) {
        return;
    }
    // A task "uses the atomizer" when its target metadata points back at a
    // non-atomized target.
    const usesAtomizer = (task) => {
        const targetConfig = projectGraph.nodes[task.target.project]?.data?.targets?.[task.target.target];
        return targetConfig?.metadata?.nonAtomizedTarget !== undefined;
    };
    const atomizedTasks = Object.values(taskGraph.tasks).filter(usesAtomizer);
    // Either of these env vars indicates we are already inside Nx Cloud
    // distributed task execution, where atomized tasks are expected.
    const runningInDTE = process.env['NX_CLOUD_DISTRIBUTED_EXECUTION_ID'] ||
        process.env['NX_AGENT_NAME'];
    if (atomizedTasks.length === 0 || runningInDTE) {
        return;
    }
    const linkLine = 'Learn more at https://nx.dev/ci/features/split-e2e-tasks#use-atomizer-only-with-nx-cloud-distribution';
    if (atomizedTasks.length === 1) {
        output_1.output.error({
            title: `The ${atomizedTasks[0].id} task uses the atomizer and should only be run with Nx Cloud distribution.`,
            bodyLines: [linkLine],
        });
    }
    else {
        output_1.output.error({
            title: `The following tasks use the atomizer and should only be run with Nx Cloud distribution:`,
            bodyLines: [
                `${atomizedTasks.map((task) => task.id).join(', ')}`,
                linkLine,
            ],
        });
    }
    process.exit(1);
}
exports.validateAtomizedTasks = validateAtomizedTasks;

@@ -267,2 +267,10 @@ "use strict";

}
else if (targetConfiguration.executor === 'nx:noop') {
(0, fs_1.writeFileSync)(temporaryOutputPath, '');
results.push({
task,
status: 'success',
terminalOutput: '',
});
}
else {

@@ -269,0 +277,0 @@ // cache prep

@@ -5,8 +5,21 @@ import { Task, TaskGraph } from '../config/task-graph';

import { CustomHasher, ExecutorConfig } from '../config/misc-interfaces';
export type NormalizedTargetDependencyConfig = TargetDependencyConfig & {
projects: string[];
};
export declare function getDependencyConfigs({ project, target }: {
project: string;
target: string;
}, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, projectGraph: ProjectGraph): TargetDependencyConfig[] | undefined;
export declare function expandDependencyConfigSyntaxSugar(dependencyConfigString: string, graph: ProjectGraph): TargetDependencyConfig;
}, extraTargetDependencies: Record<string, (TargetDependencyConfig | string)[]>, projectGraph: ProjectGraph, allTargetNames: string[]): NormalizedTargetDependencyConfig[] | undefined;
export declare function normalizeDependencyConfigDefinition(definition: string | TargetDependencyConfig, currentProject: string, graph: ProjectGraph, allTargetNames: string[]): NormalizedTargetDependencyConfig[];
export declare function normalizeDependencyConfigProjects(dependencyConfig: TargetDependencyConfig, currentProject: string, graph: ProjectGraph): NormalizedTargetDependencyConfig;
export declare function expandDependencyConfigSyntaxSugar(dependencyConfigString: string | TargetDependencyConfig, graph: ProjectGraph): TargetDependencyConfig;
export declare function expandWildcardTargetConfiguration(dependencyConfig: NormalizedTargetDependencyConfig, allTargetNames: string[]): NormalizedTargetDependencyConfig[];
export declare function readProjectAndTargetFromTargetString(targetString: string, projects: Record<string, ProjectGraphProjectNode>): {
projects?: string[];
target: string;
};
export declare function getOutputs(p: Record<string, ProjectGraphProjectNode>, target: Task['target'], overrides: Task['overrides']): string[];
export declare function normalizeTargetDependencyWithStringProjects(dependencyConfig: TargetDependencyConfig): Omit<TargetDependencyConfig, 'projects'> & {
projects: string[];
};
declare class InvalidOutputsError extends Error {

@@ -13,0 +26,0 @@ outputs: string[];

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.unparse = exports.isCacheableTask = exports.shouldStreamOutput = exports.getSerializedArgsForTask = exports.getPrintableCommandArgsForTask = exports.getCliPath = exports.calculateReverseDeps = exports.removeIdsFromGraph = exports.removeTasksFromTaskGraph = exports.getCustomHasher = exports.getExecutorForTask = exports.getExecutorNameForTask = exports.getTargetConfigurationForTask = exports.interpolate = exports.getOutputsForTargetAndConfiguration = exports.transformLegacyOutputs = exports.validateOutputs = exports.getOutputs = exports.expandDependencyConfigSyntaxSugar = exports.getDependencyConfigs = void 0;
const output_1 = require("../utils/output");
exports.unparse = exports.isCacheableTask = exports.shouldStreamOutput = exports.getSerializedArgsForTask = exports.getPrintableCommandArgsForTask = exports.getCliPath = exports.calculateReverseDeps = exports.removeIdsFromGraph = exports.removeTasksFromTaskGraph = exports.getCustomHasher = exports.getExecutorForTask = exports.getExecutorNameForTask = exports.getTargetConfigurationForTask = exports.interpolate = exports.getOutputsForTargetAndConfiguration = exports.transformLegacyOutputs = exports.validateOutputs = exports.normalizeTargetDependencyWithStringProjects = exports.getOutputs = exports.readProjectAndTargetFromTargetString = exports.expandWildcardTargetConfiguration = exports.expandDependencyConfigSyntaxSugar = exports.normalizeDependencyConfigProjects = exports.normalizeDependencyConfigDefinition = exports.getDependencyConfigs = void 0;
const path_1 = require("path");

@@ -14,24 +13,31 @@ const posix_1 = require("path/posix");

const project_graph_1 = require("../project-graph/project-graph");
function getDependencyConfigs({ project, target }, extraTargetDependencies, projectGraph) {
const find_matching_projects_1 = require("../utils/find-matching-projects");
const minimatch_1 = require("minimatch");
/**
 * Resolves the `dependsOn` configuration for a project's target into a flat
 * list of normalized dependency configs (syntax sugar desugared, `projects`
 * resolved, wildcard target names expanded).
 *
 * NOTE(review): reconstructed from interleaved old/new diff lines — the old
 * inline `.map` + projects/dependencies validation loop was replaced in the
 * new version by a single flatMap over `normalizeDependencyConfigDefinition`.
 *
 * @param {{project: string, target: string}} param0 the task's project/target
 * @param extraTargetDependencies fallback dependsOn map (programmatic invocations)
 * @param projectGraph  graph used to read the project's target configuration
 * @param allTargetNames every known target name, for wildcard expansion
 * @returns normalized dependency configs for the target
 */
function getDependencyConfigs({ project, target }, extraTargetDependencies, projectGraph, allTargetNames) {
    const dependencyConfigs = (projectGraph.nodes[project].data?.targets[target]?.dependsOn ??
        // This is passed into `run-command` from programmatic invocations
        extraTargetDependencies[target] ??
        []).flatMap((config) => normalizeDependencyConfigDefinition(config, project, projectGraph, allTargetNames));
    return dependencyConfigs;
}
exports.getDependencyConfigs = getDependencyConfigs;
/**
 * Normalizes one `dependsOn` entry (string shorthand or object form) into
 * zero or more fully-expanded dependency configs: the shorthand is desugared,
 * the `projects` list is resolved, and glob target names are expanded.
 */
function normalizeDependencyConfigDefinition(definition, currentProject, graph, allTargetNames) {
    const desugared = expandDependencyConfigSyntaxSugar(definition, graph);
    const withProjects = normalizeDependencyConfigProjects(desugared, currentProject, graph);
    return expandWildcardTargetConfiguration(withProjects, allTargetNames);
}
exports.normalizeDependencyConfigDefinition = normalizeDependencyConfigDefinition;
// Resolves the `projects` field of a dependency config into a concrete list
// of project names, mutating `dependencyConfig` in place and returning it.
function normalizeDependencyConfigProjects(dependencyConfig, currentProject, graph) {
    // Converts the legacy string forms of `projects` ('self', 'dependencies',
    // or a single project name) into array/boolean form.
    // NOTE(review): this helper mutates `dependencyConfig` and is expected to
    // return the same object; in the 'dependencies' branch it currently
    // returns undefined, which would make the `.projects` read below throw —
    // verify against `normalizeTargetDependencyWithStringProjects`.
    const noStringConfig = normalizeTargetDependencyWithStringProjects(dependencyConfig);
    if (noStringConfig.projects) {
        // Expand the configured project patterns into the matching project
        // names present in the graph.
        dependencyConfig.projects = (0, find_matching_projects_1.findMatchingProjects)(noStringConfig.projects, graph.nodes);
    }
    else if (!noStringConfig.dependencies) {
        // Neither `projects` nor `dependencies` was specified: the dependency
        // defaults to a target on the current project itself.
        dependencyConfig.projects = [currentProject];
    }
    return dependencyConfig;
}
exports.normalizeDependencyConfigProjects = normalizeDependencyConfigProjects;
/**
 * Expands shorthand `dependsOn` string syntax into an explicit config object:
 *   - '^build'        -> { target: 'build', dependencies: true }
 *   - 'build'         -> { target: 'build' }
 *   - 'proj:build'    -> { projects: ['proj'], target: 'build' } (when `proj` exists)
 * Non-string configs are returned unchanged.
 *
 * NOTE(review): the middle of this function was elided by a diff hunk; the
 * `^`-prefix destructuring is reconstructed from the hunk's context line
 * (`? [true, dependencyConfigString.substring(1)]`).
 */
function expandDependencyConfigSyntaxSugar(dependencyConfigString, graph) {
    if (typeof dependencyConfigString !== 'string') {
        return dependencyConfigString;
    }
    // A leading `^` means "this target on this project's dependencies".
    const [dependencies, targetString] = dependencyConfigString.startsWith('^')
        ? [true, dependencyConfigString.substring(1)]
        : [false, dependencyConfigString];
    if (dependencies) {
        return {
            target: targetString,
            dependencies: true,
        };
    }
    const { projects, target } = readProjectAndTargetFromTargetString(targetString, graph.nodes);
    return projects ? { projects, target } : { target };
}
// A WeakMap keyed by the `allTargetNames` array lets the garbage collector
// drop the cached results once that array is no longer referenced elsewhere.
const patternResultCache = new WeakMap();
/**
 * Expands a glob-style `target` (e.g. `e2e-ci--*`) in a dependency config
 * into one config per matching target name. Configs whose target contains no
 * glob characters pass through as a single-element array. Results are
 * memoized per (allTargetNames, pattern) pair.
 */
function expandWildcardTargetConfiguration(dependencyConfig, allTargetNames) {
    const pattern = dependencyConfig.target;
    const looksLikeGlob = find_matching_projects_1.GLOB_CHARACTERS.some((char) => pattern.includes(char));
    if (!looksLikeGlob) {
        return [dependencyConfig];
    }
    let perPatternCache = patternResultCache.get(allTargetNames);
    if (!perPatternCache) {
        perPatternCache = new Map();
        patternResultCache.set(allTargetNames, perPatternCache);
    }
    const memoized = perPatternCache.get(pattern);
    if (memoized) {
        return memoized;
    }
    const matchesPattern = minimatch_1.minimatch.filter(pattern);
    const expanded = allTargetNames
        .filter((name) => matchesPattern(name))
        .map((name) => ({
        ...dependencyConfig,
        target: name,
    }));
    perPatternCache.set(pattern, expanded);
    return expanded;
}
exports.expandWildcardTargetConfiguration = expandWildcardTargetConfiguration;
/**
 * Splits a `dependsOn` target string into project and target parts,
 * supporting both `project:target` and `target:with:colons` syntax.
 *
 * NOTE(review): reconstructed from interleaved old/new diff lines — the old
 * version's object-literal return (referencing `graph.nodes`) was removed in
 * favor of the branch-per-case form below.
 *
 * @param targetString the raw string after any `^` prefix has been stripped
 * @param projects     record of known project nodes, used to disambiguate
 * @returns `{ target }` or `{ projects: [project], target }`
 */
function readProjectAndTargetFromTargetString(targetString, projects) {
    // Support for both `project:target` and `target:with:colons` syntax
    const [maybeProject, ...segments] = (0, split_target_1.splitByColons)(targetString);
    if (!segments.length) {
        // if no additional segments are provided, then the string references
        // a target of the same project
        return { target: maybeProject };
    }
    else if (maybeProject in projects) {
        // Only the first segment could be a project. If it is, the rest is a target.
        return { projects: [maybeProject], target: segments.join(':') };
    }
    else {
        // The first segment is not a known project, so the whole string was a
        // target whose name contains colons.
        return { target: targetString };
    }
}
exports.expandDependencyConfigSyntaxSugar = expandDependencyConfigSyntaxSugar;
exports.readProjectAndTargetFromTargetString = readProjectAndTargetFromTargetString;
function getOutputs(p, target, overrides) {

@@ -69,2 +107,28 @@ return getOutputsForTargetAndConfiguration(target, overrides, p[target.project]);

exports.getOutputs = getOutputs;
/**
 * Normalizes the legacy string form of `projects` on a dependency config to
 * the modern array/boolean form, mutating the config in place:
 *   - 'self'         -> `projects` removed (caller defaults to current project)
 *   - 'dependencies' -> `dependencies: true`, `projects` removed
 *   - any other name -> wrapped in a single-element array
 *
 * Always returns the (mutated) config object.
 */
function normalizeTargetDependencyWithStringProjects(dependencyConfig) {
    if (typeof dependencyConfig.projects === 'string') {
        /** LERNA SUPPORT START - Remove in v20 */
        // Lerna uses `dependencies` in `prepNxOptions`, so we need to maintain
        // support for it until lerna can be updated to use the syntax.
        //
        // This should have been removed in v17, but the updates to lerna had not
        // been made yet.
        //
        // TODO(@agentender): Remove this part in v20
        if (dependencyConfig.projects === 'self') {
            delete dependencyConfig.projects;
        }
        else if (dependencyConfig.projects === 'dependencies') {
            dependencyConfig.dependencies = true;
            delete dependencyConfig.projects;
            // BUG FIX: this branch previously did a bare `return;`, yielding
            // `undefined`; callers (normalizeDependencyConfigProjects) read
            // `.projects` on the result and would throw a TypeError. Return
            // the mutated config like every other path does.
            return dependencyConfig;
            /** LERNA SUPPORT END - Remove in v20 */
        }
        else {
            dependencyConfig.projects = [dependencyConfig.projects];
        }
    }
    return dependencyConfig;
}
exports.normalizeTargetDependencyWithStringProjects = normalizeTargetDependencyWithStringProjects;
class InvalidOutputsError extends Error {

@@ -137,3 +201,3 @@ constructor(outputs, invalidOutputs) {

const options = {
...targetConfiguration.options,
...targetConfiguration?.options,
...targetConfiguration?.configurations?.[configuration],

@@ -140,0 +204,0 @@ ...overrides,

import type { ProjectGraphProjectNode } from '../config/project-graph';
/**
* The presence of these characters in a string indicates that it might be a glob pattern.
*/
export declare const GLOB_CHARACTERS: string[];
/**
* Find matching project names given a list of potential project names or globs.

@@ -4,0 +8,0 @@ *

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getMatchingStringsWithCache = exports.findMatchingProjects = void 0;
exports.getMatchingStringsWithCache = exports.findMatchingProjects = exports.GLOB_CHARACTERS = void 0;
const minimatch_1 = require("minimatch");

@@ -11,4 +11,7 @@ const validPatternTypes = [

];
const globCharacters = ['*', '|', '{', '}', '(', ')'];
/**
* The presence of these characters in a string indicates that it might be a glob pattern.
*/
exports.GLOB_CHARACTERS = ['*', '|', '{', '}', '(', ')'];
/**
* Find matching project names given a list of potential project names or globs.

@@ -114,3 +117,3 @@ *

}
if (!globCharacters.some((c) => pattern.value.includes(c))) {
if (!exports.GLOB_CHARACTERS.some((c) => pattern.value.includes(c))) {
return;

@@ -140,3 +143,3 @@ }

}
if (!globCharacters.some((c) => pattern.value.includes(c))) {
if (!exports.GLOB_CHARACTERS.some((c) => pattern.value.includes(c))) {
continue;

@@ -143,0 +146,0 @@ }

@@ -6,3 +6,2 @@ "use strict";

const project_json_1 = require("../plugins/project-json/build-nodes/project-json");
const target_defaults_plugin_1 = require("../plugins/target-defaults/target-defaults-plugin");
const PackageJsonWorkspacesPlugin = require("../plugins/package-json-workspaces");

@@ -18,3 +17,2 @@ /**

: []),
target_defaults_plugin_1.default,
PackageJsonWorkspacesPlugin,

@@ -21,0 +19,0 @@ project_json_1.default,

@@ -20,3 +20,9 @@ "use strict";

return createSerializableError(v);
// Support for AggregateCreateNodesError
}
else if (Array.isArray(v) &&
v.length === 2 &&
v[1] instanceof Error) {
return [v[0], createSerializableError(v[1])];
}
return v;

@@ -23,0 +29,0 @@ });

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc