Huge News! Announcing our $40M Series B led by Abstract Ventures. Learn More
Socket
Sign in · Demo · Install
Socket

@rushstack/heft

Package Overview
Dependencies
Maintainers
3
Versions
358
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@rushstack/heft - npm Package Compare versions

Comparing version 0.49.0-rc.2 to 0.49.0

lib/cli/actions/BuildAction.d.ts

1119

dist/heft.d.ts
import { AsyncParallelHook } from 'tapable';
import { CommandLineChoiceListParameter } from '@rushstack/ts-command-line';
import { CommandLineChoiceParameter } from '@rushstack/ts-command-line';
import { AsyncSeriesBailHook } from 'tapable';
import { AsyncSeriesHook } from 'tapable';
import { AsyncSeriesWaterfallHook } from 'tapable';
import { CommandLineAction } from '@rushstack/ts-command-line';
import { CommandLineFlagParameter } from '@rushstack/ts-command-line';
import { CommandLineIntegerListParameter } from '@rushstack/ts-command-line';
import { CommandLineIntegerParameter } from '@rushstack/ts-command-line';
import { CommandLineParameter } from '@rushstack/ts-command-line';
import { CommandLineParser } from '@rushstack/ts-command-line';
import { CommandLineStringListParameter } from '@rushstack/ts-command-line';
import { CommandLineStringParameter } from '@rushstack/ts-command-line';
import { FileLocationStyle } from '@rushstack/node-core-library';
import { IBaseCommandLineDefinition } from '@rushstack/ts-command-line';
import { ICommandLineChoiceDefinition } from '@rushstack/ts-command-line';
import { ICommandLineChoiceListDefinition } from '@rushstack/ts-command-line';
import { ICommandLineFlagDefinition } from '@rushstack/ts-command-line';
import { ICommandLineIntegerDefinition } from '@rushstack/ts-command-line';
import { ICommandLineStringDefinition } from '@rushstack/ts-command-line';
import { ICommandLineStringListDefinition } from '@rushstack/ts-command-line';
import { IFileErrorFormattingOptions } from '@rushstack/node-core-library';
import { IPackageJson } from '@rushstack/node-core-library';
import { ITerminal } from '@rushstack/node-core-library';
import { ITerminalProvider } from '@rushstack/node-core-library';
import { JsonSchema } from '@rushstack/node-core-library';
import { RigConfig } from '@rushstack/rig-package';
import { Terminal } from '@rushstack/node-core-library';
import { SyncHook } from 'tapable';
/**
 * The "build" stage of a Heft run, implemented on the common StageBase machinery
 * with BuildStageHooks, IBuildStageProperties, and IBuildStageOptions.
 */
declare class BuildStage extends StageBase<BuildStageHooks, IBuildStageProperties, IBuildStageOptions> {
constructor(heftConfiguration: HeftConfiguration, loggingManager: LoggingManager);
/**
 * Defines the standard build CLI parameters (see IBuildStageStandardParameters:
 * production, locales, lite, typescriptMaxWriteParallelism, maxOldSpaceSize)
 * on the provided command line action.
 */
static defineStageStandardParameters(action: CommandLineAction): IBuildStageStandardParameters;
/**
 * Converts parsed standard parameters into build stage options. The watchMode and
 * serveMode members are omitted from the result because they are not derived from
 * these standard parameters.
 */
static getOptionsFromStandardParameters(standardParameters: IBuildStageStandardParameters): Omit<IBuildStageOptions, 'watchMode' | 'serveMode'>;
protected getDefaultStagePropertiesAsync(options: IBuildStageOptions): Promise<IBuildStageProperties>;
protected executeInnerAsync(): Promise<void>;
private _runSubstageWithLoggingAsync;
}
/**
* A cancellation token. Can be used to signal that an ongoing process has either been cancelled
* or timed out.
*
* @remarks This class will eventually be removed once the `AbortSignal` API is available in
* the lowest supported LTS version of Node.js. See here for more information:
* https://nodejs.org/docs/latest-v16.x/api/globals.html#class-abortsignal
*
* @beta
* @public
*/
export declare class CancellationToken {
private readonly _isCancelled;
private readonly _cancellationTokenSource;
/** @internal */
constructor(options?: _ICancellationTokenOptions);
/**
 * Hooks for the "build" stage. Each member is a tapable SyncHook that fires with
 * the context object of the corresponding build substage.
 */
export declare class BuildStageHooks extends StageHooksBase<IBuildStageProperties> {
readonly preCompile: SyncHook<IPreCompileSubstage>;
readonly compile: SyncHook<ICompileSubstage>;
readonly bundle: SyncHook<IBundleSubstage>;
readonly postBuild: SyncHook<IPostBuildSubstage>;
}
/**
 * Base class for the hooks exposed by every build substage.
 * @public
 */
export declare class BuildSubstageHooksBase {
/** Fired to perform the substage's work; taps run via a tapable AsyncParallelHook. */
readonly run: AsyncParallelHook;
}
/**
 * Hooks for the "bundle" build substage.
 * @public
 */
export declare class BundleSubstageHooks extends BuildSubstageHooksBase {
/**
 * Waterfall hook allowing taps to produce/transform the Webpack configuration
 * before bundling (see IBundleSubstageProperties.webpackConfiguration).
 * NOTE(review): the payload is typed `unknown`, presumably to avoid a hard
 * dependency on webpack's types -- confirm against upstream.
 */
readonly configureWebpack: AsyncSeriesWaterfallHook<unknown>;
/** Fired after the configureWebpack hook has completed. */
readonly afterConfigureWebpack: AsyncSeriesHook;
}
/**
 * The "clean" stage of a Heft run, implemented on the common StageBase machinery
 * with CleanStageHooks, ICleanStageProperties, and ICleanStageOptions.
 */
declare class CleanStage extends StageBase<CleanStageHooks, ICleanStageProperties, ICleanStageOptions> {
constructor(heftConfiguration: HeftConfiguration, loggingManager: LoggingManager);
protected getDefaultStagePropertiesAsync(options: ICleanStageOptions): Promise<ICleanStageProperties>;
protected executeInnerAsync(): Promise<void>;
}
/**
 * Hooks for the "clean" stage.
 * @public
 */
export declare class CleanStageHooks extends StageHooksBase<ICleanStageProperties> {
/** Fired to perform the clean; taps run via a tapable AsyncParallelHook. */
readonly run: AsyncParallelHook;
}
/**
 * Hooks for the "compile" build substage.
 * @public
 */
export declare class CompileSubstageHooks extends BuildSubstageHooksBase {
/**
 * NOTE(review): this getter appears to be a diff-rendering artifact leaked in
 * from the CancellationToken/CancellationTokenSource declarations elsewhere in
 * this file -- a cancellation getter does not belong on a hooks class. Verify
 * against the upstream declaration before relying on it.
 */
get isCancelled(): boolean;
/**
 * The `afterCompile` event is fired exactly once, after the "compile" stage completes its first
 * operation. The "bundle" stage will not begin until all event handlers have resolved their
 * promises. The behavior of this event is the same in watch mode and non-watch mode.
 */
readonly afterCompile: AsyncParallelHook;
/**
 * NOTE(review): like isCancelled above, this getter appears to be a
 * CancellationToken diff artifact -- confirm against upstream.
 */
get onCancelledPromise(): Promise<void>;
/**
 * The `afterRecompile` event is only used in watch mode. It fires whenever the compiler's outputs
 * have been rebuilt. The initial compilation fires the `afterCompile` event only, and then all
 * subsequent iterations fire the `afterRecompile` event only. Heft does not wait for the
 * `afterRecompile` promises to resolve.
 */
readonly afterRecompile: AsyncParallelHook;
}
/**
 * The set of value types that a custom action parameter may produce.
 * @beta
 */
export declare type CustomActionParameterType = string | boolean | number | ReadonlyArray<string> | undefined;
/**
* A cancellation token source. Produces cancellation tokens that can be used to signal that
* an ongoing process has either been cancelled or timed out.
*
* @remarks This class will eventually be removed once the `AbortController` API is available
* in the lowest supported LTS version of Node.js. See here for more information:
* https://nodejs.org/docs/latest-v16.x/api/globals.html#class-abortcontroller
*
* @beta
* Command line utilities provided for Heft plugin developers.
*/
export declare class CancellationTokenSource {
private readonly _cancellationToken;
private readonly _cancellationPromise;
private _resolveCancellationPromise;
private _isCancelled;
constructor(options?: ICancellationTokenSourceOptions);
export declare class HeftCommandLine {
private readonly _commandLineParser;
private readonly _terminal;
/**
* Whether or not the token has been cancelled.
* @internal
*/
get isCancelled(): boolean;
constructor(commandLineParser: CommandLineParser, terminal: ITerminal);
/**
* Obtain the cancellation token produced by this source.
* Utility method used by Heft plugins to register a choice type parameter.
*/
get token(): CancellationToken;
/** @internal */
get _onCancelledPromise(): Promise<void>;
registerChoiceParameter(options: IHeftRegisterParameterOptions<ICommandLineChoiceDefinition>): IHeftChoiceParameter;
/**
* Cancel the token provided by the source.
* Utility method used by Heft plugins to register a choiceList type parameter.
*/
cancel(): void;
registerChoiceListParameter(options: IHeftRegisterParameterOptions<ICommandLineChoiceListDefinition>): IHeftChoiceListParameter;
/**
* Utility method used by Heft plugins to register a flag type parameter.
*/
registerFlagParameter(options: IHeftRegisterParameterOptions<ICommandLineFlagDefinition>): IHeftFlagParameter;
/**
* Utility method used by Heft plugins to register an integer type parameter.
*/
registerIntegerParameter(options: IHeftRegisterParameterOptions<ICommandLineIntegerDefinition>): IHeftIntegerParameter;
/**
* Utility method used by Heft plugins to register a string type parameter.
*/
registerStringParameter(options: IHeftRegisterParameterOptions<ICommandLineStringDefinition>): IHeftStringParameter;
/**
* Utility method used by Heft plugins to register a stringList type parameter.
*/
registerStringListParameter(options: IHeftRegisterParameterOptions<ICommandLineStringListDefinition>): IHeftStringListParameter;
private _registerParameter;
private _getActions;
private _verifyUniqueParameterName;
private _verifyParametersProcessed;
}
export { CommandLineChoiceListParameter }
export { CommandLineChoiceParameter }
export { CommandLineFlagParameter }
export { CommandLineIntegerListParameter }
export { CommandLineIntegerParameter }
export { CommandLineParameter }
export { CommandLineStringListParameter }
export { CommandLineStringParameter }
/**
* Glob a set of files and return a list of paths that match the provided patterns.
*
* @param patterns - Glob patterns to match against.
* @param options - Options that are used when globbing the set of files.
*
* @public
*/
export declare type GlobFn = (pattern: string | string[], options?: IGlobOptions | undefined) => Promise<string[]>;
/**
* @public
*/
export declare class HeftConfiguration {
private _buildFolderPath;
private _projectConfigFolderPath;
private _cacheFolderPath;
private _tempFolderPath;
private _buildFolder;
private _projectHeftDataFolder;
private _projectConfigFolder;
private _buildCacheFolder;
private _rigConfig;
private _globalTerminal;
private _terminalProvider;
private _rigPackageResolver;
/**
* Project build folder path. This is the folder containing the project's package.json file.
* Project build folder. This is the folder containing the project's package.json file.
*/
get buildFolderPath(): string;
get buildFolder(): string;
/**
* The path to the project's "config" folder.
* The path to the project's ".heft" folder.
*/
get projectConfigFolderPath(): string;
get projectHeftDataFolder(): string;
/**
* The project's cache folder.
*
* @remarks This folder exists at \<project root\>/.cache. In general, this folder is used to store
* cached output from tasks under task-specific subfolders, and is not intended to be directly
* written to. Instead, plugins should write to the directory provided by
* HeftTaskSession.taskCacheFolderPath
* The path to the project's "config" folder.
*/
get cacheFolderPath(): string;
get projectConfigFolder(): string;
/**
* The project's temporary folder.
* The project's build cache folder.
*
* @remarks This folder exists at \<project root\>/temp. In general, this folder is used to store temporary
* output from tasks under task-specific subfolders, and is not intended to be directly written to.
* Instead, plugins should write to the directory provided by HeftTaskSession.taskTempFolderPath
* This folder exists at \<project root\>/.heft/build-cache. TypeScript's output
* goes into this folder and then is either copied or linked to the final output folder
*/
get tempFolderPath(): string;
get buildCacheFolder(): string;
/**

@@ -141,9 +171,5 @@ * The rig.json configuration for this project, if present.

/**
* The rig package resolver, which can be used to rig-resolve a requested package.
*/
get rigPackageResolver(): IRigPackageResolver;
/**
* Terminal instance to facilitate logging.
*/
get globalTerminal(): Terminal;
get globalTerminal(): ITerminal;
/**

@@ -173,602 +199,363 @@ * Terminal provider for the provided terminal.

/**
 * Hooks covering the Heft tool's overall lifecycle.
 * @internal
 */
export declare class _HeftLifecycleHooks {
// Presumably fired when the Heft tool starts up -- confirm against upstream.
// Taps run via a tapable AsyncParallelHook.
toolStart: AsyncParallelHook;
}
/**
* Options for the cancellation token.
*
* @internal
* @public
*/
export declare interface _ICancellationTokenOptions {
export declare class HeftSession {
private readonly _loggingManager;
private readonly _options;
private readonly _getIsDebugMode;
readonly hooks: IHeftSessionHooks;
/**
* A cancellation token source to use for the token.
*
* @internal
*/
cancellationTokenSource?: CancellationTokenSource;
readonly metricsCollector: _MetricsCollector;
/**
* A static cancellation state. Mutually exclusive with `cancellationTokenSource`.
* If true, CancellationToken.isCancelled will always return true. Otherwise,
* CancellationToken.isCancelled will always return false.
* If set to true, the build is running with the --debug flag
*/
get debugMode(): boolean;
/** @beta */
readonly registerAction: RegisterAction;
/**
* @beta
* {@inheritDoc HeftCommandLine}
*/
readonly commandLine: HeftCommandLine;
/**
* Call this function to receive a callback with the plugin if and after the specified plugin
* has been applied. This is used to tap hooks on another plugin.
*
* @beta
*/
readonly requestAccessToPluginByName: RequestAccessToPluginByNameCallback;
/**
* @internal
*/
isCancelled?: boolean;
}
/**
* Options for the cancellation token source.
*
* @beta
*/
export declare interface ICancellationTokenSourceOptions {
constructor(options: IHeftSessionOptions, internalSessionOptions: IInternalHeftSessionOptions);
/**
* Amount of time in milliseconds to wait before cancelling the token.
* Call this function to request a logger with the specified name.
*/
delayMs?: number;
requestScopedLogger(loggerName: string): ScopedLogger;
}
/**
* The state of a changed file.
*
* @public
*/
export declare interface IChangedFileState {
/**
* Whether or not the file is a source file. A source file is determined to be any file
* that is not ignored by Git.
*
* @public
*/
readonly isSourceFile: boolean;
/**
* A version hash of a specific file properties that can be used to determine if a
* file has changed. The version hash will change when any of the following properties
* are changed:
* - path
* - file size
* - content last modified date (mtime)
* - metadata last modified date (ctime)
*
* @remarks The initial state of the version hash is "0", which should only ever be
* returned on the first incremental run of the task. When a file is deleted, the
* version hash will be undefined.
*
* @public
*/
readonly version: string | undefined;
/**
 * Stage context for the "build" stage, binding BuildStageHooks to
 * IBuildStageProperties via the common IStageContext shape.
 * @public
 */
export declare interface IBuildStageContext extends IStageContext<BuildStageHooks, IBuildStageProperties> {
}
/**
 * Options configuring the "build" stage. Not exported; populated from the CLI
 * parameters defined in IBuildStageStandardParameters.
 */
declare interface IBuildStageOptions {
/** True when the production flag was passed (see IBuildStageStandardParameters.productionFlag). */
production: boolean;
/** True when the lite flag was passed (see IBuildStageStandardParameters.liteFlag). */
lite: boolean;
/** Locales supplied via the locales parameter, if any. */
locales?: ReadonlyArray<string>;
// Presumably forwarded to Node's --max-old-space-size heap setting -- confirm.
maxOldSpaceSize?: string;
watchMode: boolean;
serveMode: boolean;
/** Upper bound on TypeScript's parallel write operations, if specified. */
typescriptMaxWriteParallelism?: number;
}
/**
* Used to specify a selection of files to copy from a specific source folder to one
* or more destination folders.
*
* @public
*/
export declare interface ICopyOperation extends IFileSelectionSpecifier {
export declare interface IBuildStageProperties {
production: boolean;
lite: boolean;
locales?: ReadonlyArray<string>;
maxOldSpaceSize?: string;
watchMode: boolean;
serveMode: boolean;
webpackStats?: unknown;
/**
* Absolute paths to folders which files or folders should be copied to.
* @beta
*/
destinationFolders: string[];
isTypeScriptProject?: boolean;
/**
* Copy only the file and discard the relative path from the source folder.
* @beta
*/
flatten?: boolean;
emitFolderNameForTests?: string;
/**
* Hardlink files instead of copying.
*
* @remarks
* If the sourcePath is a folder, the contained directory structure will be re-created
* and all files will be individually hardlinked. This means that folders will be new
* filesystem entities and will have separate folder metadata, while the contained files
* will maintain normal hardlink behavior. This is done since folders do not have a
* cross-platform equivalent of a hardlink, and since file symlinks provide fundamentally
* different functionality in comparison to hardlinks.
* @beta
*/
hardlink?: boolean;
emitExtensionForTests?: '.js' | '.cjs' | '.mjs';
}
/**
 * The standard CLI parameters defined by BuildStage.defineStageStandardParameters()
 * and consumed by BuildStage.getOptionsFromStandardParameters().
 */
declare interface IBuildStageStandardParameters {
productionFlag: CommandLineFlagParameter;
localesParameter: CommandLineStringListParameter;
liteFlag: CommandLineFlagParameter;
typescriptMaxWriteParallelismParameter: CommandLineIntegerParameter;
maxOldSpaceSizeParameter: CommandLineStringParameter;
}
/**
* Used to specify a selection of source files to delete from the specified source folder.
*
* @public
*/
export declare interface IDeleteOperation extends IFileSelectionSpecifier {
export declare interface IBuildSubstage<TBuildSubstageHooks extends BuildSubstageHooksBase, TBuildSubstageProperties extends object> {
hooks: TBuildSubstageHooks;
properties: TBuildSubstageProperties;
}
/**
* Used to specify a selection of one or more files.
*
* @public
*/
export declare interface IFileSelectionSpecifier {
/**
* Absolute path to the target. The provided sourcePath can be to a file or a folder. If
* fileExtensions, excludeGlobs, or includeGlobs are specified, the sourcePath is assumed
* to be a folder. If it is not a folder, an error will be thrown.
*/
sourcePath: string;
/**
* File extensions that should be included from the source folder. Only supported when the sourcePath
* is a folder.
*/
fileExtensions?: string[];
/**
* Globs that should be explicitly excluded. This takes precedence over globs listed in "includeGlobs" and
* files that match the file extensions provided in "fileExtensions". Only supported when the sourcePath
* is a folder.
*/
excludeGlobs?: string[];
/**
* Globs that should be explicitly included. Only supported when the sourcePath is a folder.
*/
includeGlobs?: string[];
export declare interface IBundleSubstage extends IBuildSubstage<BundleSubstageHooks, IBundleSubstageProperties> {
}
/**
* A supported subset of options used when globbing files.
*
* @public
*/
export declare interface IGlobOptions {
export declare interface IBundleSubstageProperties {
/**
* Current working directory that the glob pattern will be applied to.
* If webpack is used, this will be set to the version of the webpack package
*/
cwd?: string;
webpackVersion?: string | undefined;
/**
* Whether or not the returned file paths should be absolute.
*
* @defaultValue false
* If webpack is used, this will be set to the version of the webpack-dev-server package
*/
absolute?: boolean;
webpackDevServerVersion?: string | undefined;
/**
* Patterns to ignore when globbing.
* The configuration used by the Webpack plugin. This must be populated
* for Webpack to run. If webpackConfigFilePath is specified,
* this will be populated automatically with the exports of the
* config file referenced in that property.
*/
ignore?: string[];
/**
* Whether or not to include dot files when globbing.
*
* @defaultValue false
*/
dot?: boolean;
webpackConfiguration?: unknown;
}
/**
* @internal
* @public
*/
export declare interface _IHeftConfigurationInitializationOptions {
/**
* The working directory the tool was executed in.
*/
cwd: string;
/**
* Terminal instance to facilitate logging.
*/
terminalProvider: ITerminalProvider;
export declare interface ICleanStageContext extends IStageContext<CleanStageHooks, ICleanStageProperties> {
}
/** Options configuring the "clean" stage. Not exported; consumed by CleanStage. */
declare interface ICleanStageOptions {
// If true, cache folders are deleted as well; presumably driven by the
// `--clean-cache` flag -- confirm against the CLI wiring.
deleteCache?: boolean;
}
/**
* The default parameters provided by Heft.
*
* @public
*/
export declare interface IHeftDefaultParameters {
/**
* Whether or not the `--clean` flag was passed to Heft.
*
* @public
*/
readonly clean: boolean;
/**
* Whether or not the `--clean-cache` flag was passed to Heft.
*
* @public
*/
readonly cleanCache: boolean;
/**
* Whether or not the `--debug` flag was passed to Heft.
*
* @public
*/
readonly debug: boolean;
/**
* Whether or not the `--verbose` flag was passed to the Heft action.
*
* @public
*/
readonly verbose: boolean;
/**
* Whether or not the `--production` flag was passed to the Heft action.
*
* @public
*/
readonly production: boolean;
/**
* The locales provided to the Heft action via the `--locales` parameter.
*
* @public
*/
readonly locales: Iterable<string>;
/**
* Whether or not the Heft action is running in watch mode.
*/
readonly watch: boolean;
export declare interface ICleanStageProperties {
deleteCache: boolean;
pathsToDelete: Set<string>;
}
/**
* Options provided to the clean hook.
*
* @public
*/
export declare interface IHeftLifecycleCleanHookOptions {
/**
* Add delete operations, which will be performed at the beginning of Heft execution.
*
* @public
*/
addDeleteOperations: (...deleteOperations: IDeleteOperation[]) => void;
export declare interface ICompileSubstage extends IBuildSubstage<CompileSubstageHooks, ICompileSubstageProperties> {
}
/**
* Hooks that are available to the lifecycle plugin.
*
* @public
*/
export declare interface IHeftLifecycleHooks {
/**
* The `clean` hook is called at the beginning of Heft execution. It can be used to clean up
* any files or folders that may be produced by the plugin. To use it, call
* `clean.tapPromise(<pluginName>, <callback>)`.
*
* @public
*/
clean: AsyncParallelHook<IHeftLifecycleCleanHookOptions>;
/**
* The `toolStart` hook is called at the beginning of Heft execution, after the `clean` hook. It is
* called before any phases have begun to execute. To use it, call
* `toolStart.tapPromise(<pluginName>, <callback>)`.
*
* @public
*/
toolStart: AsyncParallelHook<IHeftLifecycleToolStartHookOptions>;
/**
* The `toolFinish` hook is called at the end of Heft execution. It is called after all phases have
* completed execution. To use it, call
* `toolFinish.tapPromise(<pluginName>, <callback>)`.
*
* @public
*/
toolFinish: AsyncParallelHook<IHeftLifecycleToolFinishHookOptions>;
recordMetrics: AsyncParallelHook<IHeftRecordMetricsHookOptions>;
export declare interface ICompileSubstageProperties {
typescriptMaxWriteParallelism: number | undefined;
}
/**
 * Options used to register a custom command line action.
 * @beta
 */
export declare interface ICustomActionOptions<TParameters> {
/** The name of the custom action. */
actionName: string;
/** Documentation text for the action. */
documentation: string;
/** Optional short summary for the action. */
summary?: string;
/**
 * Parameter definitions, keyed by the property name that receives the parsed
 * value in the callback's TParameters object.
 */
parameters?: {
[K in keyof TParameters]: ICustomActionParameter<TParameters[K]>;
};
/** Invoked when the action runs; may return a promise. */
callback: (parameters: TParameters) => void | Promise<void>;
}
/**
 * Maps a parameter's TypeScript value type to the matching custom action
 * parameter definition: boolean -> flag, number -> integer, string -> string,
 * ReadonlyArray<string> -> stringList. Any other type resolves to `never`.
 * @beta
 */
export declare type ICustomActionParameter<TParameter> = TParameter extends boolean ? ICustomActionParameterFlag : TParameter extends number ? ICustomActionParameterInteger : TParameter extends string ? ICustomActionParameterString : TParameter extends ReadonlyArray<string> ? ICustomActionParameterStringList : never;
/**
 * Common properties shared by every custom action parameter definition.
 * @beta
 */
export declare interface ICustomActionParameterBase<TParameter> {
/** Discriminant identifying the concrete parameter kind. */
kind: 'flag' | 'integer' | 'string' | 'stringList';
/** The parameter's long name, e.g. `"--production"`. */
parameterLongName: string;
/** Documentation text for the parameter. */
description: string;
}
/**
 * A custom action parameter parsed as a boolean flag.
 * @beta
 */
export declare interface ICustomActionParameterFlag extends ICustomActionParameterBase<boolean> {
kind: 'flag';
}
/**
 * A custom action parameter parsed as an integer.
 * @beta
 */
export declare interface ICustomActionParameterInteger extends ICustomActionParameterBase<number> {
kind: 'integer';
}
/**
 * A custom action parameter parsed as a string.
 * @beta
 */
export declare interface ICustomActionParameterString extends ICustomActionParameterBase<string> {
kind: 'string';
}
/**
 * A custom action parameter parsed as a list of strings.
 * @beta
 */
export declare interface ICustomActionParameterStringList extends ICustomActionParameterBase<ReadonlyArray<string>> {
kind: 'stringList';
}
/**
* The interface that Heft lifecycle plugins must implement. Lifecycle plugins are used to provide
* functionality that affects the lifecycle of the Heft run. As such, they do not belong to any particular
* Heft phase.
* The base action configuration that all custom action configuration files
* should inherit from.
*
* @public
*/
export declare interface IHeftLifecyclePlugin<TOptions = void> extends IHeftPlugin<IHeftLifecycleSession, TOptions> {
export declare interface IHeftActionConfiguration {
}
/**
* The lifecycle session is responsible for providing session-specific information to Heft lifecycle
* plugins. The session provides access to the hooks that Heft will run as part of lifecycle execution,
* as well as access to parameters provided via the CLI. The session is also how you request access to
* other lifecycle plugins.
* Options to be used when retrieving the action configuration.
*
* @public
*/
export declare interface IHeftLifecycleSession {
export declare interface IHeftActionConfigurationOptions {
/**
* The hooks available to the lifecycle plugin.
*
* @public
* Whether or not arrays should be merged across Heft action configuration files.
*/
readonly hooks: IHeftLifecycleHooks;
mergeArrays?: boolean;
}
/**
* @beta
* The base set of utility values provided in every object returned when registering a parameter.
*/
export declare interface IHeftBaseParameter<TValue, TCommandLineDefinition extends IBaseCommandLineDefinition> {
/**
* Contains default parameters provided by Heft, as well as CLI parameters requested by the lifecycle
* plugin.
*
* @public
* The value specified on the command line for this parameter.
*/
readonly parameters: IHeftParameters;
readonly value?: TValue;
/**
* The cache folder for the lifecycle plugin. This folder is unique for each lifecycle plugin,
* and will not be cleaned when Heft is run with `--clean`. However, it will be cleaned when
* Heft is run with `--clean` and `--clean-cache`.
* If true, then the user has invoked Heft with a command line action that supports this parameter
* (as defined by the {@link IParameterAssociatedActionNames.associatedActionNames} option).
*
* @public
*/
readonly cacheFolderPath: string;
/**
* The temp folder for the lifecycle plugin. This folder is unique for each lifecycle plugin,
* and will be cleaned when Heft is run with `--clean`.
* @remarks
* For example, if `build` is one of the associated action names for `--my-integer-parameter`,
* then `actionAssociated` will be true if the user invokes `heft build`.
*
* @public
* To test whether the parameter was actually included (e.g. `heft build --my-integer-parameter 123`),
* verify the {@link IHeftBaseParameter.value} property is not `undefined`.
*/
readonly tempFolderPath: string;
readonly actionAssociated: boolean;
/**
* The scoped logger for the lifecycle plugin. Messages logged with this logger will be prefixed
* with the plugin name, in the format "[lifecycle:<pluginName>]". It is highly recommended that
* writing to the console be performed via the logger, as it will ensure that logging messages
* are labeled with the source of the message.
*
* @public
* The options {@link IHeftRegisterParameterOptions} used to create and register the parameter with
* a Heft command line action.
*/
readonly logger: IScopedLogger;
/**
* Set a a callback which will be called if and after the specified plugin has been applied.
* This can be used to tap hooks on another lifecycle plugin that exists within the same phase.
*
* @public
*/
requestAccessToPluginByName<T extends object>(pluginToAccessPackage: string, pluginToAccessName: string, pluginApply: (pluginAccessor: T) => void): void;
readonly definition: IHeftRegisterParameterOptions<TCommandLineDefinition>;
}
/**
* Options provided to the toolFinish hook.
*
* @public
* @beta
* The object returned when registering a choiceList type parameter.
*/
export declare interface IHeftLifecycleToolFinishHookOptions {
}
export declare type IHeftChoiceListParameter = IHeftBaseParameter<readonly string[], ICommandLineChoiceListDefinition>;
/**
* Options provided to the toolStart hook.
*
* @public
* @beta
* The object returned when registering a choice type parameter.
*/
export declare interface IHeftLifecycleToolStartHookOptions {
}
export declare type IHeftChoiceParameter = IHeftBaseParameter<string, ICommandLineChoiceDefinition>;
/**
* Parameters provided to a Heft plugin.
*
* @public
* @internal
*/
export declare interface IHeftParameters extends IHeftDefaultParameters {
export declare interface _IHeftConfigurationInitializationOptions {
/**
* Get a choice parameter that has been defined in heft-plugin.json.
*
* @public
* The working directory the tool was executed in.
*/
getChoiceParameter(parameterLongName: string): CommandLineChoiceParameter;
cwd: string;
/**
* Get a choice list parameter that has been defined in heft-plugin.json.
*
* @public
* Terminal instance to facilitate logging.
*/
getChoiceListParameter(parameterLongName: string): CommandLineChoiceListParameter;
/**
* Get a flag parameter that has been defined in heft-plugin.json.
*
* @public
*/
getFlagParameter(parameterLongName: string): CommandLineFlagParameter;
/**
* Get an integer parameter that has been defined in heft-plugin.json.
*
* @public
*/
getIntegerParameter(parameterLongName: string): CommandLineIntegerParameter;
/**
* Get an integer list parameter that has been defined in heft-plugin.json.
*
* @public
*/
getIntegerListParameter(parameterLongName: string): CommandLineIntegerListParameter;
/**
* Get a string parameter that has been defined in heft-plugin.json.
*
* @public
*/
getStringParameter(parameterLongName: string): CommandLineStringParameter;
/**
* Get a string list parameter that has been defined in heft-plugin.json.
*
* @public
*/
getStringListParameter(parameterLongName: string): CommandLineStringListParameter;
terminalProvider: ITerminalProvider;
}
/**
* The interface used for all Heft plugins.
*
* @public
* @beta
* The object returned when registering a flag type parameter.
*/
export declare interface IHeftPlugin<TSession extends IHeftLifecycleSession | IHeftTaskSession = IHeftLifecycleSession | IHeftTaskSession, TOptions = void> {
/**
* The accessor provided by the plugin. This accessor can be obtained by other plugins within the same
* phase by calling `session.requestAccessToPlugin(...)`, and is used by other plugins to interact with
* hooks or properties provided by the host plugin.
*/
readonly accessor?: object;
/**
* Apply the plugin to the session. Plugins are expected to hook into session hooks to provide plugin
* implementation. The `apply(...)` method is called once per phase.
*
* @param session - The session to apply the plugin to.
* @param heftConfiguration - The Heft configuration.
* @param pluginOptions - Options for the plugin, specified in heft.json.
*/
apply(session: TSession, heftConfiguration: HeftConfiguration, pluginOptions?: TOptions): void;
}
export declare type IHeftFlagParameter = IHeftBaseParameter<boolean, ICommandLineFlagDefinition>;
/**
* @public
* @beta
* The object returned when registering an integer type parameter.
*/
export declare interface IHeftRecordMetricsHookOptions {
/**
* @public
*/
metricName: string;
/**
* @public
*/
metricData: IMetricsData;
export declare type IHeftIntegerParameter = IHeftBaseParameter<number, ICommandLineIntegerDefinition>;
/**
 * Container pairing the internal Heft lifecycle with its hooks.
 * @internal
 */
export declare interface _IHeftLifecycle {
hooks: _HeftLifecycleHooks;
}
/**
* Hooks that are available to the task plugin.
*
* @public
*/
export declare interface IHeftTaskHooks {
/**
* The `run` hook is called after all dependency task executions have completed during a normal
* run, or during a watch mode run when no `runIncremental` hook is provided. It is where the
* plugin can perform its work. To use it, call `run.tapPromise(<pluginName>, <callback>)`.
*
* @public
*/
readonly run: AsyncParallelHook<IHeftTaskRunHookOptions>;
/**
* If provided, the `runIncremental` hook is called after all dependency task executions have completed
* during a watch mode run. It is where the plugin can perform incremental work. To use it, call
* `run.tapPromise(<pluginName>, <callback>)`.
*/
readonly runIncremental: AsyncParallelHook<IHeftTaskRunIncrementalHookOptions>;
export declare interface IHeftPlugin<TOptions = void> {
readonly pluginName: string;
readonly optionsSchema?: JsonSchema;
readonly accessor?: object;
apply(heftSession: HeftSession, heftConfiguration: HeftConfiguration, options?: TOptions): void;
}
/**
* The interface that Heft task plugins must implement. Task plugins are used to provide the implementation
* of a specific task.
*
* @public
* @beta
* The options object provided to the command line parser when registering a parameter
* in addition to the action names used to associate the parameter with.
*/
export declare interface IHeftTaskPlugin<TOptions = void> extends IHeftPlugin<IHeftTaskSession, TOptions> {
}
export declare type IHeftRegisterParameterOptions<TCommandLineDefinition extends IBaseCommandLineDefinition> = TCommandLineDefinition & IParameterAssociatedActionNames;
/**
* Options provided to the `run` hook.
*
* @public
*/
export declare interface IHeftTaskRunHookOptions {
/**
* Add copy operations to be performed during the `run` hook. These operations will be
* performed after the task `run` hook has completed.
*
* @public
*/
readonly addCopyOperations: (copyOperations: ICopyOperation[]) => void;
/**
* Add delete operations to be performed during the `run` hook. These operations will be
* performed after the task `run` hook has completed.
*
* @public
*/
readonly addDeleteOperations: (deleteOperations: IDeleteOperation[]) => void;
export declare interface IHeftSessionHooks {
metricsCollector: MetricsCollectorHooks;
/** @internal */
heftLifecycle: SyncHook<_IHeftLifecycle>;
build: SyncHook<IBuildStageContext>;
clean: SyncHook<ICleanStageContext>;
test: SyncHook<ITestStageContext>;
}
/**
* Options provided to the 'runIncremental' hook.
*
* @public
*/
export declare interface IHeftTaskRunIncrementalHookOptions extends IHeftTaskRunHookOptions {
declare interface IHeftSessionOptions {
plugin: IHeftPlugin;
/**
* Add copy operations to be performed during the `runIncremental` hook. These operations will
* be performed after the task `runIncremental` hook has completed.
*
* @public
*/
readonly addCopyOperations: (copyOperations: IIncrementalCopyOperation[]) => void;
/**
* A map of changed files to the corresponding change state. This can be used to track which
* files have been changed during an incremental build. This map is populated with all changed
* files, including files that are not source files. When an incremental build completes
* successfully, the map is cleared and only files changed after the incremental build will be
* included in the map.
*/
readonly changedFiles: ReadonlyMap<string, IChangedFileState>;
/**
* Glob the map of changed files and return the subset of changed files that match the provided
* globs.
*/
readonly globChangedFilesAsync: GlobFn;
/**
* A cancellation token that is used to signal that the incremental build is cancelled. This
* can be used to stop incremental operations early and allow for a new incremental build to
* be started.
*
* @beta
*/
readonly cancellationToken: CancellationToken;
requestAccessToPluginByName: RequestAccessToPluginByNameCallback;
}
/**
* The task session is responsible for providing session-specific information to Heft task plugins.
* The session provides access to the hooks that Heft will run as part of task execution, as well as
* access to parameters provided via the CLI. The session is also how you request access to other task
* plugins.
*
* @public
* @beta
* The object returned when registering a stringList type parameter.
*/
export declare interface IHeftTaskSession {
/**
* The name of the task. This is defined in "heft.json".
*
* @public
*/
readonly taskName: string;
/**
* The hooks available to the task plugin.
*
* @public
*/
readonly hooks: IHeftTaskHooks;
/**
* Contains default parameters provided by Heft, as well as CLI parameters requested by the task
* plugin.
*
* @public
*/
readonly parameters: IHeftParameters;
/**
* The cache folder for the task. This folder is unique for each task, and will not be
* cleaned when Heft is run with `--clean`. However, it will be cleaned when Heft is run
* with `--clean` and `--clean-cache`.
*
* @public
*/
readonly cacheFolderPath: string;
/**
* The temp folder for the task. This folder is unique for each task, and will be cleaned
* when Heft is run with `--clean`.
*
* @public
*/
readonly tempFolderPath: string;
/**
* The scoped logger for the task. Messages logged with this logger will be prefixed with
* the phase and task name, in the format "[<phaseName>:<taskName>]". It is highly recommended
* that writing to the console be performed via the logger, as it will ensure that logging messages
* are labeled with the source of the message.
*
* @public
*/
readonly logger: IScopedLogger;
/**
* Set a a callback which will be called if and after the specified plugin has been applied.
* This can be used to tap hooks on another plugin that exists within the same phase.
*
* @public
*/
requestAccessToPluginByName<T extends object>(pluginToAccessPackage: string, pluginToAccessName: string, pluginApply: (pluginAccessor: T) => void): void;
}
export declare type IHeftStringListParameter = IHeftBaseParameter<readonly string[], ICommandLineStringListDefinition>;
/**
* Used to specify a selection of files to copy from a specific source folder to one
* or more destination folders.
*
* @public
* @beta
* The object returned when registering a string type parameter.
*/
export declare interface IIncrementalCopyOperation extends ICopyOperation {
/**
* If true, the file will be copied only if the source file is contained in the
* IHeftTaskRunIncrementalHookOptions.changedFiles map.
*/
onlyIfChanged?: boolean;
export declare type IHeftStringParameter = IHeftBaseParameter<string, ICommandLineStringDefinition>;
/**
* @internal
*/
declare interface IInternalHeftSessionOptions {
heftLifecycleHook: SyncHook<_IHeftLifecycle>;
buildStage: BuildStage;
cleanStage: CleanStage;
testStage: TestStage;
metricsCollector: _MetricsCollector;
loggingManager: LoggingManager;
getIsDebugMode(): boolean;
registerAction: RegisterAction;
commandLine: HeftCommandLine;
}
declare interface ILoggingManagerOptions {
terminalProvider: ITerminalProvider;
}
/**

@@ -817,2 +604,14 @@ * @public

/**
* @beta
* The configuration interface for associating a parameter definition with a Heft
* command line action in {@link IHeftRegisterParameterOptions}.
*/
export declare interface IParameterAssociatedActionNames {
/**
* A string list of one or more action names to associate the parameter with.
*/
associatedActionNames: string[];
}
/**
* @internal

@@ -826,23 +625,11 @@ */

/**
* Rig resolves requested tools from the project's Heft rig.
*
* @remarks For more information on rig resolution, see
* https://rushstack.io/pages/heft/rig_packages/#3-riggable-dependencies
*
* @public
*/
export declare interface IRigPackageResolver {
resolvePackageAsync(packageName: string, terminal: ITerminal): Promise<string>;
export declare interface IPostBuildSubstage extends IBuildSubstage<BuildSubstageHooksBase, {}> {
}
/**
* Interface used by scripts that are run by the RunScriptPlugin.
*
* @beta
* @public
*/
export declare interface IRunScript {
/**
* The method that is called by the RunScriptPlugin to run the script.
*/
runAsync: (options: IRunScriptOptions) => Promise<void>;
export declare interface IPreCompileSubstage extends IBuildSubstage<BuildSubstageHooksBase, {}> {
}

@@ -855,26 +642,16 @@

*/
export declare interface IRunScriptOptions {
heftTaskSession: IHeftTaskSession;
export declare interface IRunScriptOptions<TStageProperties> {
scopedLogger: ScopedLogger;
heftConfiguration: HeftConfiguration;
runOptions: IHeftTaskRunHookOptions;
scriptOptions: Record<string, unknown>;
debugMode: boolean;
properties: TStageProperties;
scriptOptions: Record<string, any>;
}
/**
* A logger which is used to emit errors and warnings to the console, as well as to write
* to the console. Messaged emitted by the scoped logger are prefixed with the name of the
* scoped logger.
*
* @public
*/
export declare interface IScopedLogger {
readonly terminal: ITerminal;
/**
* The name of the scoped logger. Logging messages will be prefixed with this name.
*/
readonly loggerName: string;
/**
* The terminal used to write messages to the console.
*/
readonly terminal: Terminal;
/**
* Call this function to emit an error to the heft runtime.

@@ -889,3 +666,50 @@ */

declare interface IScopedLoggerOptions {
requestingPlugin: IHeftPlugin;
loggerName: string;
terminalProvider: ITerminalProvider;
getShouldPrintStacks: () => boolean;
errorHasBeenEmittedCallback: () => void;
}
/**
* @public
*/
export declare interface IStageContext<TStageHooks extends StageHooksBase<TStageProperties>, TStageProperties extends object> {
hooks: TStageHooks;
properties: TStageProperties;
}
/**
* @public
*/
export declare interface ITestStageContext extends IStageContext<TestStageHooks, ITestStageProperties> {
}
declare interface ITestStageOptions {
watchMode: boolean;
}
/**
* @public
*/
export declare interface ITestStageProperties {
watchMode: boolean;
}
declare class LoggingManager {
private _options;
private _scopedLoggers;
private _shouldPrintStacks;
private _hasAnyErrors;
get errorsHaveBeenEmitted(): boolean;
constructor(options: ILoggingManagerOptions);
enablePrintStacks(): void;
requestScopedLogger(plugin: IHeftPlugin, loggerName: string): ScopedLogger;
getErrorStrings(fileLocationStyle?: FileLocationStyle): string[];
getWarningStrings(fileErrorFormat?: FileLocationStyle): string[];
static getErrorMessage(error: Error, options?: IFileErrorFormattingOptions): string;
}
/**
* @internal

@@ -895,3 +719,4 @@ * A simple performance metrics collector. A plugin is required to pipe data anywhere.

export declare class _MetricsCollector {
readonly recordMetricsHook: AsyncParallelHook<IHeftRecordMetricsHookOptions>;
readonly hooks: MetricsCollectorHooks;
private _hasBeenTornDown;
private _startTimeMs;

@@ -909,5 +734,117 @@ /**

*/
recordAsync(command: string, performanceData?: Partial<_IPerformanceData>, parameters?: Record<string, string>): Promise<void>;
record(command: string, performanceData?: Partial<_IPerformanceData>, parameters?: Record<string, string>): void;
/**
* Flushes all pending logged metrics.
*/
flushAsync(): Promise<void>;
/**
* Flushes all pending logged metrics and closes the MetricsCollector instance.
*/
flushAndTeardownAsync(): Promise<void>;
}
/**
* Tap these hooks to record build metrics, to a file, for example.
*
* @public
*/
export declare class MetricsCollectorHooks {
/**
* This hook is called when a metric is recorded.
*/
recordMetric: SyncHook<string, IMetricsData>;
/**
* This hook is called when collected metrics should be flushed
*/
flush: AsyncParallelHook;
/**
* This hook is called when collected metrics should be flushed and no more metrics will be collected.
*/
flushAndTeardown: AsyncParallelHook;
}
/** @beta */
export declare type RegisterAction = <TParameters>(action: ICustomActionOptions<TParameters>) => void;
/**
* @beta
*/
export declare type RequestAccessToPluginByNameCallback = (pluginToAccessName: string, pluginApply: (pluginAccessor: object) => void) => void;
/**
* @public
*/
export declare class ScopedLogger implements IScopedLogger {
private readonly _options;
private readonly _errors;
private readonly _warnings;
private get _shouldPrintStacks();
get errors(): ReadonlyArray<Error>;
get warnings(): ReadonlyArray<Error>;
/**
* @internal
*/
readonly _requestingPlugin: IHeftPlugin;
readonly loggerName: string;
readonly terminalProvider: ITerminalProvider;
readonly terminal: ITerminal;
/**
* @internal
*/
constructor(options: IScopedLoggerOptions);
/**
* {@inheritdoc IScopedLogger.emitError}
*/
emitError(error: Error): void;
/**
* {@inheritdoc IScopedLogger.emitWarning}
*/
emitWarning(warning: Error): void;
}
declare abstract class StageBase<TStageHooks extends StageHooksBase<TStageProperties>, TStageProperties extends object, TStageOptions> {
readonly stageInitializationHook: SyncHook<IStageContext<TStageHooks, TStageProperties>>;
protected readonly heftConfiguration: HeftConfiguration;
protected readonly loggingManager: LoggingManager;
protected readonly globalTerminal: ITerminal;
protected stageOptions: TStageOptions;
protected stageProperties: TStageProperties;
protected stageHooks: TStageHooks;
private readonly _innerHooksType;
constructor(heftConfiguration: HeftConfiguration, loggingManager: LoggingManager, innerHooksType: new () => TStageHooks);
initializeAsync(stageOptions: TStageOptions): Promise<void>;
executeAsync(): Promise<void>;
protected abstract getDefaultStagePropertiesAsync(options: TStageOptions): Promise<TStageProperties>;
protected abstract executeInnerAsync(): Promise<void>;
}
/**
* @public
*/
export declare abstract class StageHooksBase<TStageProperties extends object> {
/**
* This hook allows the stage's execution to be completely overridden. Only the last-registered plugin
* with an override hook provided applies.
*
* @beta
*/
readonly overrideStage: AsyncSeriesBailHook<TStageProperties>;
readonly loadStageConfiguration: AsyncSeriesHook;
readonly afterLoadStageConfiguration: AsyncSeriesHook;
}
declare class TestStage extends StageBase<TestStageHooks, ITestStageProperties, ITestStageOptions> {
constructor(heftConfiguration: HeftConfiguration, loggingManager: LoggingManager);
protected getDefaultStagePropertiesAsync(options: ITestStageOptions): Promise<ITestStageProperties>;
protected executeInnerAsync(): Promise<void>;
}
/**
* @public
*/
export declare class TestStageHooks extends StageHooksBase<ITestStageProperties> {
readonly run: AsyncParallelHook;
readonly configureTest: AsyncSeriesHook;
}
export { }

@@ -8,5 +8,5 @@ // This file is read by tools that parse documentation comments conforming to the TSDoc standard.

"packageName": "@microsoft/api-extractor",
"packageVersion": "7.33.6"
"packageVersion": "7.33.7"
}
]
}

@@ -1,20 +0,7 @@

import { CommandLineAction } from '@rushstack/ts-command-line';
import type { IHeftAction, IHeftActionOptions } from './IHeftAction';
import type { HeftPhase } from '../../pluginFramework/HeftPhase';
export declare class CleanAction extends CommandLineAction implements IHeftAction {
readonly watch: boolean;
private readonly _internalHeftSession;
private readonly _terminal;
private readonly _metricsCollector;
private readonly _verboseFlag;
private readonly _toParameter;
private readonly _toExceptParameter;
private readonly _onlyParameter;
private readonly _cleanCacheFlag;
private _selectedPhases;
constructor(options: IHeftActionOptions);
get selectedPhases(): ReadonlySet<HeftPhase>;
protected onExecute(): Promise<void>;
private _cleanFilesAsync;
import { HeftActionBase, IHeftActionBaseOptions } from './HeftActionBase';
export declare class CleanAction extends HeftActionBase {
private readonly _deleteCacheFlag;
constructor(options: IHeftActionBaseOptions);
protected actionExecuteAsync(): Promise<void>;
}
//# sourceMappingURL=CleanAction.d.ts.map

@@ -6,74 +6,26 @@ "use strict";

exports.CleanAction = void 0;
const ts_command_line_1 = require("@rushstack/ts-command-line");
const Constants_1 = require("../../utilities/Constants");
const RunAction_1 = require("./RunAction");
const DeleteFilesPlugin_1 = require("../../plugins/DeleteFilesPlugin");
const HeftActionRunner_1 = require("../HeftActionRunner");
const CancellationToken_1 = require("../../pluginFramework/CancellationToken");
class CleanAction extends ts_command_line_1.CommandLineAction {
const HeftActionBase_1 = require("./HeftActionBase");
class CleanAction extends HeftActionBase_1.HeftActionBase {
constructor(options) {
super({
actionName: 'clean',
documentation: 'Clean the project, removing temporary task folders and specified clean paths.',
summary: 'Clean the project, removing temporary task folders and specified clean paths.'
summary: 'Clean the project',
documentation: ''
}, options);
this._deleteCacheFlag = this.defineFlagParameter({
parameterLongName: '--clear-cache',
description: "If this flag is provided, the compiler cache will also be cleared. This isn't dangerous, " +
'but may lead to longer compile times'
});
this.watch = false;
this._terminal = options.terminal;
this._metricsCollector = options.metricsCollector;
this._internalHeftSession = options.internalHeftSession;
const { toParameter, toExceptParameter, onlyParameter } = (0, RunAction_1.definePhaseScopingParameters)(this);
this._toParameter = toParameter;
this._toExceptParameter = toExceptParameter;
this._onlyParameter = onlyParameter;
this._verboseFlag = this.defineFlagParameter({
parameterLongName: Constants_1.Constants.verboseParameterLongName,
parameterShortName: Constants_1.Constants.verboseParameterShortName,
description: 'If specified, log information useful for debugging.'
});
this._cleanCacheFlag = this.defineFlagParameter({
parameterLongName: Constants_1.Constants.cleanCacheParameterLongName,
description: 'If specified, clean the cache directories in addition to the temp directories and provided ' +
'clean operations.'
});
}
get selectedPhases() {
if (!this._selectedPhases) {
if (this._onlyParameter.values.length ||
this._toParameter.values.length ||
this._toExceptParameter.values.length) {
this._selectedPhases = (0, RunAction_1.expandPhases)(this._onlyParameter, this._toParameter, this._toExceptParameter, this._internalHeftSession, this._terminal);
}
else {
// No selected phases, clean everything
this._selectedPhases = this._internalHeftSession.phases;
}
}
return this._selectedPhases;
async actionExecuteAsync() {
const cleanStage = this.stages.cleanStage;
const cleanStageOptions = {
deleteCache: this._deleteCacheFlag.value
};
await cleanStage.initializeAsync(cleanStageOptions);
await cleanStage.executeAsync();
}
async onExecute() {
const { heftConfiguration } = this._internalHeftSession;
const cancellationToken = new CancellationToken_1.CancellationToken();
(0, HeftActionRunner_1.initializeHeft)(heftConfiguration, this._terminal, this._verboseFlag.value);
await (0, HeftActionRunner_1.runWithLoggingAsync)(this._cleanFilesAsync.bind(this), this, this._internalHeftSession.loggingManager, this._terminal, this._metricsCollector, cancellationToken);
}
async _cleanFilesAsync() {
const deleteOperations = [];
for (const phase of this.selectedPhases) {
// Add the temp folder and cache folder (if requested) for each task
const phaseSession = this._internalHeftSession.getSessionForPhase(phase);
for (const task of phase.tasks) {
const taskSession = phaseSession.getSessionForTask(task);
deleteOperations.push({ sourcePath: taskSession.tempFolderPath });
if (this._cleanCacheFlag.value) {
deleteOperations.push({ sourcePath: taskSession.cacheFolderPath });
}
}
// Add the manually specified clean operations
deleteOperations.push(...phase.cleanFiles);
}
// Delete the files
await (0, DeleteFilesPlugin_1.deleteFilesAsync)(deleteOperations, this._terminal);
}
}
exports.CleanAction = CleanAction;
//# sourceMappingURL=CleanAction.js.map
import { CommandLineParser } from '@rushstack/ts-command-line';
import { type ITerminal } from '@rushstack/node-core-library';
import { ITerminal } from '@rushstack/node-core-library';
export declare class HeftCommandLineParser extends CommandLineParser {
readonly globalTerminal: ITerminal;
private _terminalProvider;
private _terminal;
private _loggingManager;
private _metricsCollector;
private _pluginManager;
private _heftConfiguration;
private _internalHeftSession;
private _heftLifecycleHook;
private _preInitializationArgumentValues;
private readonly _unmanagedFlag;
private readonly _debugFlag;
private readonly _unmanagedFlag;
private readonly _debug;
private readonly _terminalProvider;
private readonly _loggingManager;
private readonly _metricsCollector;
private readonly _heftConfiguration;
private readonly _pluginsParameter;
get isDebug(): boolean;
get terminal(): ITerminal;
constructor();
execute(args?: string[]): Promise<boolean>;
private _checkForUpgradeAsync;
protected onExecute(): Promise<void>;
private _normalizeCwd;
private _getPreInitializationArgumentValues;
private _initializePluginsAsync;
private _reportErrorAndSetExitCode;
}
//# sourceMappingURL=HeftCommandLineParser.d.ts.map

@@ -6,13 +6,22 @@ "use strict";

exports.HeftCommandLineParser = void 0;
const argparse_1 = require("argparse");
const ts_command_line_1 = require("@rushstack/ts-command-line");
const node_core_library_1 = require("@rushstack/node-core-library");
const argparse_1 = require("argparse");
const tapable_1 = require("tapable");
const MetricsCollector_1 = require("../metrics/MetricsCollector");
const CleanAction_1 = require("./actions/CleanAction");
const BuildAction_1 = require("./actions/BuildAction");
const StartAction_1 = require("./actions/StartAction");
const TestAction_1 = require("./actions/TestAction");
const PluginManager_1 = require("../pluginFramework/PluginManager");
const HeftConfiguration_1 = require("../configuration/HeftConfiguration");
const InternalHeftSession_1 = require("../pluginFramework/InternalHeftSession");
const CleanStage_1 = require("../stages/CleanStage");
const BuildStage_1 = require("../stages/BuildStage");
const TestStage_1 = require("../stages/TestStage");
const LoggingManager_1 = require("../pluginFramework/logging/LoggingManager");
const CustomAction_1 = require("./actions/CustomAction");
const Constants_1 = require("../utilities/Constants");
const CleanAction_1 = require("./actions/CleanAction");
const PhaseAction_1 = require("./actions/PhaseAction");
const RunAction_1 = require("./actions/RunAction");
const HeftLifecycle_1 = require("../pluginFramework/HeftLifecycle");
const HeftCommandLine_1 = require("./HeftCommandLine");
class HeftCommandLineParser extends ts_command_line_1.CommandLineParser {

@@ -24,12 +33,4 @@ constructor() {

});
// Initialize the debug flag as a parameter on the tool itself
this._debugFlag = this.defineFlagParameter({
parameterLongName: Constants_1.Constants.debugParameterLongName,
description: 'Show the full call stack if an error occurs while executing the tool'
});
// Initialize the unmanaged flag as a parameter on the tool itself. While this parameter
// is only used during version selection, we need to support parsing it here so that we
// don't throw due to an unrecognized parameter.
this._unmanagedFlag = this.defineFlagParameter({
parameterLongName: Constants_1.Constants.unmanagedParameterLongName,
parameterLongName: '--unmanaged',
description: 'Disables the Heft version selector: When Heft is invoked via the shell path, normally it' +

@@ -40,14 +41,19 @@ " will examine the project's package.json dependencies and try to use the locally installed version" +

});
// Pre-initialize with known argument values to determine state of "--debug"
const preInitializationArgumentValues = this._getPreInitializationArgumentValues();
this._debug = !!preInitializationArgumentValues.debug;
// Enable debug and verbose logging if the "--debug" flag is set
this._terminalProvider = new node_core_library_1.ConsoleTerminalProvider({
debugEnabled: this._debug,
verboseEnabled: this._debug
this._debugFlag = this.defineFlagParameter({
parameterLongName: Constants_1.Constants.debugParameterLongName,
description: 'Show the full call stack if an error occurs while executing the tool'
});
this.globalTerminal = new node_core_library_1.Terminal(this._terminalProvider);
this._loggingManager = new LoggingManager_1.LoggingManager({ terminalProvider: this._terminalProvider });
if (this._debug) {
// Enable printing stacktraces if the "--debug" flag is set
this._pluginsParameter = this.defineStringListParameter({
parameterLongName: Constants_1.Constants.pluginParameterLongName,
argumentName: 'PATH',
description: 'Used to specify Heft plugins.'
});
this._preInitializationArgumentValues = this._getPreInitializationArgumentValues();
this._terminalProvider = new node_core_library_1.ConsoleTerminalProvider();
this._terminal = new node_core_library_1.Terminal(this._terminalProvider);
this._metricsCollector = new MetricsCollector_1.MetricsCollector();
this._loggingManager = new LoggingManager_1.LoggingManager({
terminalProvider: this._terminalProvider
});
if (this.isDebug) {
this._loggingManager.enablePrintStacks();

@@ -60,34 +66,61 @@ node_core_library_1.InternalError.breakInDebugger = true;

});
this._metricsCollector = new MetricsCollector_1.MetricsCollector();
const stages = {
buildStage: new BuildStage_1.BuildStage(this._heftConfiguration, this._loggingManager),
cleanStage: new CleanStage_1.CleanStage(this._heftConfiguration, this._loggingManager),
testStage: new TestStage_1.TestStage(this._heftConfiguration, this._loggingManager)
};
const actionOptions = {
terminal: this._terminal,
loggingManager: this._loggingManager,
metricsCollector: this._metricsCollector,
heftConfiguration: this._heftConfiguration,
stages
};
this._heftLifecycleHook = new tapable_1.SyncHook(['heftLifecycle']);
this._internalHeftSession = new InternalHeftSession_1.InternalHeftSession(Object.assign(Object.assign({ getIsDebugMode: () => this.isDebug }, stages), { heftLifecycleHook: this._heftLifecycleHook, loggingManager: this._loggingManager, metricsCollector: this._metricsCollector, registerAction: (options) => {
const action = new CustomAction_1.CustomAction(options, actionOptions);
this.addAction(action);
}, commandLine: new HeftCommandLine_1.HeftCommandLine(this, this._terminal) }));
this._pluginManager = new PluginManager_1.PluginManager({
terminal: this._terminal,
heftConfiguration: this._heftConfiguration,
internalHeftSession: this._internalHeftSession
});
const cleanAction = new CleanAction_1.CleanAction(actionOptions);
const buildAction = new BuildAction_1.BuildAction(actionOptions);
const startAction = new StartAction_1.StartAction(actionOptions);
const testAction = new TestAction_1.TestAction(actionOptions);
this.addAction(cleanAction);
this.addAction(buildAction);
this.addAction(startAction);
this.addAction(testAction);
}
get isDebug() {
return !!this._preInitializationArgumentValues.debug;
}
get terminal() {
return this._terminal;
}
async execute(args) {
// Defensively set the exit code to 1 so if the tool crashes for whatever reason,
// we'll have a nonzero exit code.
// Defensively set the exit code to 1 so if the tool crashes for whatever reason, we'll have a nonzero exit code.
process.exitCode = 1;
this._terminalProvider.verboseEnabled = this.isDebug;
try {
this._normalizeCwd();
const internalHeftSession = await InternalHeftSession_1.InternalHeftSession.initializeAsync({
debug: this._debug,
heftConfiguration: this._heftConfiguration,
loggingManager: this._loggingManager,
metricsCollector: this._metricsCollector
});
const actionOptions = {
internalHeftSession: internalHeftSession,
terminal: this.globalTerminal,
loggingManager: this._loggingManager,
metricsCollector: this._metricsCollector,
heftConfiguration: this._heftConfiguration
await this._checkForUpgradeAsync();
await this._heftConfiguration._checkForRigAsync();
if (this._heftConfiguration.rigConfig.rigFound) {
const rigProfileFolder = await this._heftConfiguration.rigConfig.getResolvedProfileFolderAsync();
const relativeRigFolderPath = node_core_library_1.Path.formatConcisely({
pathToConvert: rigProfileFolder,
baseFolder: this._heftConfiguration.buildFolder
});
this._terminal.writeLine(`Using rig configuration from ${relativeRigFolderPath}`);
}
await this._initializePluginsAsync();
const heftLifecycle = {
hooks: new HeftLifecycle_1.HeftLifecycleHooks()
};
// Add the clean action, the run action, and the individual phase actions
this.addAction(new CleanAction_1.CleanAction(actionOptions));
this.addAction(new RunAction_1.RunAction(actionOptions));
for (const phase of internalHeftSession.phases) {
this.addAction(new PhaseAction_1.PhaseAction(Object.assign(Object.assign({}, actionOptions), { phase })));
}
// Add the watch variant of the run action and the individual phase actions
this.addAction(new RunAction_1.RunAction(Object.assign(Object.assign({}, actionOptions), { watch: true })));
for (const phase of internalHeftSession.phases) {
this.addAction(new PhaseAction_1.PhaseAction(Object.assign(Object.assign({}, actionOptions), { phase, watch: true })));
}
this._heftLifecycleHook.call(heftLifecycle);
await heftLifecycle.hooks.toolStart.promise();
return await super.execute(args);

@@ -100,5 +133,15 @@ }

}
async _checkForUpgradeAsync() {
// The .heft/clean.json file is a fairly reliable heuristic for detecting projects created prior to
// the big config file redesign with Heft 0.14.0
if (await node_core_library_1.FileSystem.existsAsync('.heft/clean.json')) {
this._terminal.writeErrorLine('\nThis project has a ".heft/clean.json" file, which is now obsolete as of Heft 0.14.0.');
this._terminal.writeLine('\nFor instructions for migrating config files, please read UPGRADING.md in the @rushstack/heft package folder.\n');
throw new node_core_library_1.AlreadyReportedError();
}
}
async onExecute() {
try {
await super.onExecute();
await this._metricsCollector.flushAndTeardownAsync();
}

@@ -112,7 +155,8 @@ catch (e) {

_normalizeCwd() {
const buildFolder = this._heftConfiguration.buildFolderPath;
const buildFolder = this._heftConfiguration.buildFolder;
this._terminal.writeLine(`Project build folder is "${buildFolder}"`);
const currentCwd = process.cwd();
if (currentCwd !== buildFolder) {
// Update the CWD to the project's build root. Some tools, like Jest, use process.cwd()
this.globalTerminal.writeVerboseLine(`CWD is "${currentCwd}". Normalizing to "${buildFolder}".`);
this._terminal.writeVerboseLine(`CWD is "${currentCwd}". Normalizing to project build folder.`);
// If `process.cwd()` and `buildFolder` differ only by casing on Windows, the chdir operation will not fix the casing, which is the entire purpose of the exercise.

@@ -126,25 +170,27 @@ // As such, chdir to a different directory first. That directory needs to exist, so use the parent of the current directory.

_getPreInitializationArgumentValues(args = process.argv) {
if (!this._debugFlag) {
// The `this._debugFlag` parameter (the parameter itself, not its value)
// has not yet been defined. Parameters need to be defined before we
// try to evaluate any parameters. This is to ensure that the
// `--debug` flag is defined correctly before we do this not-so-rigorous
// parameter parsing.
throw new node_core_library_1.InternalError('onDefineParameters() has not yet been called.');
}
// This is a rough parsing of the --debug parameter
// This is a rough parsing of the --plugin parameters
const parser = new argparse_1.ArgumentParser({ addHelp: false });
parser.addArgument(this._pluginsParameter.longName, { dest: 'plugins', action: 'append' });
parser.addArgument(this._debugFlag.longName, { dest: 'debug', action: 'storeTrue' });
parser.addArgument(this._unmanagedFlag.longName, { dest: 'unmanaged', action: 'storeTrue' });
const [result] = parser.parseKnownArgs(args);
return result;
}
async _initializePluginsAsync() {
this._pluginManager.initializeDefaultPlugins();
await this._pluginManager.initializePluginsFromConfigFileAsync();
const pluginSpecifiers = this._preInitializationArgumentValues.plugins || [];
for (const pluginSpecifier of pluginSpecifiers) {
this._pluginManager.initializePlugin(pluginSpecifier);
}
this._pluginManager.afterInitializeAllPlugins();
}
async _reportErrorAndSetExitCode(error) {
if (!(error instanceof node_core_library_1.AlreadyReportedError)) {
this.globalTerminal.writeErrorLine(error.toString());
this._terminal.writeErrorLine(error.toString());
}
if (this._debug) {
this.globalTerminal.writeLine();
this.globalTerminal.writeErrorLine(error.stack);
if (this.isDebug) {
this._terminal.writeLine();
this._terminal.writeErrorLine(error.stack);
}
await this._metricsCollector.flushAndTeardownAsync();
if (!process.exitCode || process.exitCode > 0) {

@@ -151,0 +197,0 @@ process.exit(process.exitCode);

@@ -1,4 +0,3 @@

import { Terminal, ITerminalProvider, IPackageJson } from '@rushstack/node-core-library';
import { type ITerminalProvider, type IPackageJson, type ITerminal } from '@rushstack/node-core-library';
import { RigConfig } from '@rushstack/rig-package';
import { type IRigPackageResolver } from './RigPackageResolver';
/**

@@ -18,38 +17,50 @@ * @internal

/**
* The base action configuration that all custom action configuration files
* should inherit from.
*
* @public
*/
export interface IHeftActionConfiguration {
}
/**
* Options to be used when retrieving the action configuration.
*
* @public
*/
export interface IHeftActionConfigurationOptions {
/**
* Whether or not arrays should be merged across Heft action configuration files.
*/
mergeArrays?: boolean;
}
/**
* @public
*/
export declare class HeftConfiguration {
private _buildFolderPath;
private _projectConfigFolderPath;
private _cacheFolderPath;
private _tempFolderPath;
private _buildFolder;
private _projectHeftDataFolder;
private _projectConfigFolder;
private _buildCacheFolder;
private _rigConfig;
private _globalTerminal;
private _terminalProvider;
private _rigPackageResolver;
/**
* Project build folder path. This is the folder containing the project's package.json file.
* Project build folder. This is the folder containing the project's package.json file.
*/
get buildFolderPath(): string;
get buildFolder(): string;
/**
* The path to the project's "config" folder.
* The path to the project's ".heft" folder.
*/
get projectConfigFolderPath(): string;
get projectHeftDataFolder(): string;
/**
* The project's cache folder.
*
* @remarks This folder exists at \<project root\>/.cache. In general, this folder is used to store
* cached output from tasks under task-specific subfolders, and is not intended to be directly
* written to. Instead, plugins should write to the directory provided by
* HeftTaskSession.taskCacheFolderPath
* The path to the project's "config" folder.
*/
get cacheFolderPath(): string;
get projectConfigFolder(): string;
/**
* The project's temporary folder.
* The project's build cache folder.
*
* @remarks This folder exists at \<project root\>/temp. In general, this folder is used to store temporary
* output from tasks under task-specific subfolders, and is not intended to be directly written to.
* Instead, plugins should write to the directory provided by HeftTaskSession.taskTempFolderPath
* This folder exists at \<project root\>/.heft/build-cache. TypeScript's output
* goes into this folder and then is either copied or linked to the final output folder
*/
get tempFolderPath(): string;
get buildCacheFolder(): string;
/**

@@ -60,9 +71,5 @@ * The rig.json configuration for this project, if present.

/**
* The rig package resolver, which can be used to rig-resolve a requested package.
*/
get rigPackageResolver(): IRigPackageResolver;
/**
* Terminal instance to facilitate logging.
*/
get globalTerminal(): Terminal;
get globalTerminal(): ITerminal;
/**

@@ -69,0 +76,0 @@ * Terminal provider for the provided terminal.

@@ -34,3 +34,2 @@ "use strict";

const Constants_1 = require("../utilities/Constants");
const RigPackageResolver_1 = require("./RigPackageResolver");
/**

@@ -42,42 +41,36 @@ * @public

/**
* Project build folder path. This is the folder containing the project's package.json file.
* Project build folder. This is the folder containing the project's package.json file.
*/
get buildFolderPath() {
return this._buildFolderPath;
get buildFolder() {
return this._buildFolder;
}
/**
* The path to the project's "config" folder.
* The path to the project's ".heft" folder.
*/
get projectConfigFolderPath() {
if (!this._projectConfigFolderPath) {
this._projectConfigFolderPath = path.join(this.buildFolderPath, Constants_1.Constants.projectConfigFolderName);
get projectHeftDataFolder() {
if (!this._projectHeftDataFolder) {
this._projectHeftDataFolder = path.join(this.buildFolder, Constants_1.Constants.projectHeftFolderName);
}
return this._projectConfigFolderPath;
return this._projectHeftDataFolder;
}
/**
* The project's cache folder.
*
* @remarks This folder exists at \<project root\>/.cache. In general, this folder is used to store
* cached output from tasks under task-specific subfolders, and is not intended to be directly
* written to. Instead, plugins should write to the directory provided by
* HeftTaskSession.taskCacheFolderPath
* The path to the project's "config" folder.
*/
get cacheFolderPath() {
if (!this._cacheFolderPath) {
this._cacheFolderPath = path.join(this.buildFolderPath, Constants_1.Constants.cacheFolderName);
get projectConfigFolder() {
if (!this._projectConfigFolder) {
this._projectConfigFolder = path.join(this.buildFolder, Constants_1.Constants.projectConfigFolderName);
}
return this._cacheFolderPath;
return this._projectConfigFolder;
}
/**
* The project's temporary folder.
* The project's build cache folder.
*
* @remarks This folder exists at \<project root\>/temp. In general, this folder is used to store temporary
* output from tasks under task-specific subfolders, and is not intended to be directly written to.
* Instead, plugins should write to the directory provided by HeftTaskSession.taskTempFolderPath
* This folder exists at \<project root\>/.heft/build-cache. TypeScript's output
* goes into this folder and then is either copied or linked to the final output folder
*/
get tempFolderPath() {
if (!this._tempFolderPath) {
this._tempFolderPath = path.join(this._buildFolderPath, Constants_1.Constants.tempFolderName);
get buildCacheFolder() {
if (!this._buildCacheFolder) {
this._buildCacheFolder = path.join(this.projectHeftDataFolder, Constants_1.Constants.buildCacheFolderName);
}
return this._tempFolderPath;
return this._buildCacheFolder;
}

@@ -94,15 +87,2 @@ /**

/**
* The rig package resolver, which can be used to rig-resolve a requested package.
*/
get rigPackageResolver() {
if (!this._rigPackageResolver) {
this._rigPackageResolver = new RigPackageResolver_1.RigPackageResolver({
buildFolder: this.buildFolderPath,
projectPackageJson: this.projectPackageJson,
rigConfig: this.rigConfig
});
}
return this._rigPackageResolver;
}
/**
* Terminal instance to facilitate logging.

@@ -129,3 +109,3 @@ */

get projectPackageJson() {
return node_core_library_1.PackageJsonLookup.instance.tryLoadPackageJsonFor(this.buildFolderPath);
return node_core_library_1.PackageJsonLookup.instance.tryLoadPackageJsonFor(this.buildFolder);
}

@@ -138,5 +118,3 @@ /**

if (!this._rigConfig) {
this._rigConfig = await rig_package_1.RigConfig.loadForProjectFolderAsync({
projectFolderPath: this._buildFolderPath
});
this._rigConfig = await rig_package_1.RigConfig.loadForProjectFolderAsync({ projectFolderPath: this._buildFolder });
}

@@ -151,3 +129,3 @@ }

if (packageJsonPath) {
let buildFolderPath = path.dirname(packageJsonPath);
let buildFolder = path.dirname(packageJsonPath);
// The CWD path's casing may be incorrect on a case-insensitive filesystem. Some tools, like Jest

@@ -157,4 +135,4 @@ // expect the casing of the project path to be correct and produce unexpected behavior when the casing

// This ensures the casing of the project folder is correct.
buildFolderPath = (0, true_case_path_1.trueCasePathSync)(buildFolderPath);
configuration._buildFolderPath = buildFolderPath;
buildFolder = (0, true_case_path_1.trueCasePathSync)(buildFolder);
configuration._buildFolder = buildFolder;
}

@@ -161,0 +139,0 @@ else {

@@ -1,15 +0,14 @@

export { HeftConfiguration, IHeftConfigurationInitializationOptions as _IHeftConfigurationInitializationOptions } from './configuration/HeftConfiguration';
export { IRigPackageResolver } from './configuration/RigPackageResolver';
export { IHeftPlugin, IHeftTaskPlugin, IHeftLifecyclePlugin } from './pluginFramework/IHeftPlugin';
export { CancellationTokenSource, CancellationToken, ICancellationTokenSourceOptions, ICancellationTokenOptions as _ICancellationTokenOptions } from './pluginFramework/CancellationToken';
export { IHeftParameters, IHeftDefaultParameters } from './pluginFramework/HeftParameterManager';
export { IHeftLifecycleSession, IHeftLifecycleHooks, IHeftLifecycleCleanHookOptions, IHeftLifecycleToolStartHookOptions, IHeftLifecycleToolFinishHookOptions } from './pluginFramework/HeftLifecycleSession';
export { IHeftTaskSession, IHeftTaskHooks, IHeftTaskRunHookOptions, IHeftTaskRunIncrementalHookOptions, IChangedFileState } from './pluginFramework/HeftTaskSession';
export { ICopyOperation, IIncrementalCopyOperation } from './plugins/CopyFilesPlugin';
export { IDeleteOperation } from './plugins/DeleteFilesPlugin';
export { IRunScript, IRunScriptOptions } from './plugins/RunScriptPlugin';
export { IFileSelectionSpecifier, IGlobOptions, GlobFn } from './plugins/FileGlobSpecifier';
export { IHeftRecordMetricsHookOptions, IMetricsData, IPerformanceData as _IPerformanceData, MetricsCollector as _MetricsCollector } from './metrics/MetricsCollector';
export { IScopedLogger } from './pluginFramework/logging/ScopedLogger';
export type { CommandLineParameter, CommandLineChoiceListParameter, CommandLineChoiceParameter, CommandLineFlagParameter, CommandLineIntegerListParameter, CommandLineIntegerParameter, CommandLineStringListParameter, CommandLineStringParameter } from '@rushstack/ts-command-line';
export { IHeftPlugin } from './pluginFramework/IHeftPlugin';
export { HeftConfiguration, IHeftActionConfiguration, IHeftActionConfigurationOptions, IHeftConfigurationInitializationOptions as _IHeftConfigurationInitializationOptions } from './configuration/HeftConfiguration';
export { HeftSession, IHeftSessionHooks, RequestAccessToPluginByNameCallback, RegisterAction } from './pluginFramework/HeftSession';
export { MetricsCollectorHooks, IMetricsData, IPerformanceData as _IPerformanceData, MetricsCollector as _MetricsCollector } from './metrics/MetricsCollector';
export { ScopedLogger, IScopedLogger } from './pluginFramework/logging/ScopedLogger';
export { ICustomActionOptions, ICustomActionParameterFlag, ICustomActionParameterInteger, ICustomActionParameterString, ICustomActionParameterStringList, ICustomActionParameterBase, ICustomActionParameter, CustomActionParameterType } from './cli/actions/CustomAction';
export { HeftCommandLine, IHeftBaseParameter, IHeftChoiceParameter, IHeftChoiceListParameter, IHeftFlagParameter, IHeftIntegerParameter, IHeftStringParameter, IHeftStringListParameter, IParameterAssociatedActionNames, IHeftRegisterParameterOptions } from './cli/HeftCommandLine';
export { StageHooksBase, IStageContext } from './stages/StageBase';
export { BuildStageHooks, BuildSubstageHooksBase, CompileSubstageHooks, BundleSubstageHooks, IBuildStageContext, IBuildStageProperties, IBuildSubstage, IBundleSubstage, IBundleSubstageProperties, ICompileSubstage, ICompileSubstageProperties, IPostBuildSubstage, IPreCompileSubstage } from './stages/BuildStage';
export { ICleanStageProperties, CleanStageHooks, ICleanStageContext } from './stages/CleanStage';
export { ITestStageProperties, TestStageHooks, ITestStageContext } from './stages/TestStage';
export { IHeftLifecycle as _IHeftLifecycle, HeftLifecycleHooks as _HeftLifecycleHooks } from './pluginFramework/HeftLifecycle';
export { IRunScriptOptions } from './plugins/RunScriptPlugin';
//# sourceMappingURL=index.d.ts.map

@@ -5,10 +5,29 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports._MetricsCollector = exports.CancellationToken = exports.CancellationTokenSource = exports.HeftConfiguration = void 0;
exports._HeftLifecycleHooks = exports.TestStageHooks = exports.CleanStageHooks = exports.BundleSubstageHooks = exports.CompileSubstageHooks = exports.BuildSubstageHooksBase = exports.BuildStageHooks = exports.StageHooksBase = exports.HeftCommandLine = exports.ScopedLogger = exports._MetricsCollector = exports.MetricsCollectorHooks = exports.HeftSession = exports.HeftConfiguration = void 0;
var HeftConfiguration_1 = require("./configuration/HeftConfiguration");
Object.defineProperty(exports, "HeftConfiguration", { enumerable: true, get: function () { return HeftConfiguration_1.HeftConfiguration; } });
var CancellationToken_1 = require("./pluginFramework/CancellationToken");
Object.defineProperty(exports, "CancellationTokenSource", { enumerable: true, get: function () { return CancellationToken_1.CancellationTokenSource; } });
Object.defineProperty(exports, "CancellationToken", { enumerable: true, get: function () { return CancellationToken_1.CancellationToken; } });
var HeftSession_1 = require("./pluginFramework/HeftSession");
Object.defineProperty(exports, "HeftSession", { enumerable: true, get: function () { return HeftSession_1.HeftSession; } });
var MetricsCollector_1 = require("./metrics/MetricsCollector");
Object.defineProperty(exports, "MetricsCollectorHooks", { enumerable: true, get: function () { return MetricsCollector_1.MetricsCollectorHooks; } });
Object.defineProperty(exports, "_MetricsCollector", { enumerable: true, get: function () { return MetricsCollector_1.MetricsCollector; } });
var ScopedLogger_1 = require("./pluginFramework/logging/ScopedLogger");
Object.defineProperty(exports, "ScopedLogger", { enumerable: true, get: function () { return ScopedLogger_1.ScopedLogger; } });
var HeftCommandLine_1 = require("./cli/HeftCommandLine");
Object.defineProperty(exports, "HeftCommandLine", { enumerable: true, get: function () { return HeftCommandLine_1.HeftCommandLine; } });
// Stages
var StageBase_1 = require("./stages/StageBase");
Object.defineProperty(exports, "StageHooksBase", { enumerable: true, get: function () { return StageBase_1.StageHooksBase; } });
var BuildStage_1 = require("./stages/BuildStage");
Object.defineProperty(exports, "BuildStageHooks", { enumerable: true, get: function () { return BuildStage_1.BuildStageHooks; } });
Object.defineProperty(exports, "BuildSubstageHooksBase", { enumerable: true, get: function () { return BuildStage_1.BuildSubstageHooksBase; } });
Object.defineProperty(exports, "CompileSubstageHooks", { enumerable: true, get: function () { return BuildStage_1.CompileSubstageHooks; } });
Object.defineProperty(exports, "BundleSubstageHooks", { enumerable: true, get: function () { return BuildStage_1.BundleSubstageHooks; } });
var CleanStage_1 = require("./stages/CleanStage");
Object.defineProperty(exports, "CleanStageHooks", { enumerable: true, get: function () { return CleanStage_1.CleanStageHooks; } });
var TestStage_1 = require("./stages/TestStage");
Object.defineProperty(exports, "TestStageHooks", { enumerable: true, get: function () { return TestStage_1.TestStageHooks; } });
// Other hooks
var HeftLifecycle_1 = require("./pluginFramework/HeftLifecycle");
Object.defineProperty(exports, "_HeftLifecycleHooks", { enumerable: true, get: function () { return HeftLifecycle_1.HeftLifecycleHooks; } });
//# sourceMappingURL=index.js.map

@@ -1,2 +0,2 @@

import { AsyncParallelHook } from 'tapable';
import { AsyncParallelHook, SyncHook } from 'tapable';
/**

@@ -44,13 +44,19 @@ * @public

/**
* Tap these hooks to record build metrics, to a file, for example.
*
* @public
*/
export interface IHeftRecordMetricsHookOptions {
export declare class MetricsCollectorHooks {
/**
* @public
* This hook is called when a metric is recorded.
*/
metricName: string;
recordMetric: SyncHook<string, IMetricsData>;
/**
* @public
* This hook is called when collected metrics should be flushed
*/
metricData: IMetricsData;
flush: AsyncParallelHook;
/**
* This hook is called when collected metrics should be flushed and no more metrics will be collected.
*/
flushAndTeardown: AsyncParallelHook;
}

@@ -69,3 +75,4 @@ /**

export declare class MetricsCollector {
readonly recordMetricsHook: AsyncParallelHook<IHeftRecordMetricsHookOptions>;
readonly hooks: MetricsCollectorHooks;
private _hasBeenTornDown;
private _startTimeMs;

@@ -83,4 +90,12 @@ /**

*/
recordAsync(command: string, performanceData?: Partial<IPerformanceData>, parameters?: Record<string, string>): Promise<void>;
record(command: string, performanceData?: Partial<IPerformanceData>, parameters?: Record<string, string>): void;
/**
* Flushes all pending logged metrics.
*/
flushAsync(): Promise<void>;
/**
* Flushes all pending logged metrics and closes the MetricsCollector instance.
*/
flushAndTeardownAsync(): Promise<void>;
}
//# sourceMappingURL=MetricsCollector.d.ts.map

@@ -28,3 +28,3 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.MetricsCollector = void 0;
exports.MetricsCollector = exports.MetricsCollectorHooks = void 0;
const os = __importStar(require("os"));

@@ -35,2 +35,27 @@ const tapable_1 = require("tapable");

/**
* Tap these hooks to record build metrics, to a file, for example.
*
* @public
*/
class MetricsCollectorHooks {
constructor() {
/**
* This hook is called when a metric is recorded.
*/
this.recordMetric = new tapable_1.SyncHook([
'metricName',
'metricsData'
]);
/**
* This hook is called when collected metrics should be flushed
*/
this.flush = new tapable_1.AsyncParallelHook();
/**
* This hook is called when collected metrics should be flushed and no more metrics will be collected.
*/
this.flushAndTeardown = new tapable_1.AsyncParallelHook();
}
}
exports.MetricsCollectorHooks = MetricsCollectorHooks;
/**
* @internal

@@ -41,3 +66,4 @@ * A simple performance metrics collector. A plugin is required to pipe data anywhere.

constructor() {
this.recordMetricsHook = new tapable_1.AsyncParallelHook(['recordMetricsHookOptions']);
this.hooks = new MetricsCollectorHooks();
this._hasBeenTornDown = false;
}

@@ -57,6 +83,9 @@ /**

*/
async recordAsync(command, performanceData, parameters) {
record(command, performanceData, parameters) {
if (this._startTimeMs === undefined) {
throw new node_core_library_1.InternalError('MetricsCollector has not been initialized with setStartTime() yet');
}
if (this._hasBeenTornDown) {
throw new node_core_library_1.InternalError('MetricsCollector has been torn down.');
}
if (!command) {

@@ -66,3 +95,3 @@ throw new node_core_library_1.InternalError('The command name must be specified.');

const filledPerformanceData = Object.assign({ taskTotalExecutionMs: (perf_hooks_1.performance.now() - this._startTimeMs) / 1000 }, (performanceData || {}));
const metricData = {
const metricsData = {
command: command,

@@ -78,9 +107,25 @@ encounteredError: filledPerformanceData.encounteredError,

};
await this.recordMetricsHook.promise({
metricName: 'inner_loop_heft',
metricData
});
this.hooks.recordMetric.call('inner_loop_heft', metricsData);
}
/**
* Flushes all pending logged metrics.
*/
async flushAsync() {
if (this._hasBeenTornDown) {
throw new Error('MetricsCollector has been torn down.');
}
await this.hooks.flush.promise();
}
/**
* Flushes all pending logged metrics and closes the MetricsCollector instance.
*/
async flushAndTeardownAsync() {
if (this._hasBeenTornDown) {
throw new Error('MetricsCollector has already been torn down.');
}
await this.hooks.flushAndTeardown.promise();
this._hasBeenTornDown = true;
}
}
exports.MetricsCollector = MetricsCollector;
//# sourceMappingURL=MetricsCollector.js.map

@@ -1,25 +0,10 @@

import { HeftPluginHost } from './HeftPluginHost';
import type { InternalHeftSession } from './InternalHeftSession';
import type { IHeftConfigurationJsonPluginSpecifier } from '../utilities/CoreConfigFiles';
import type { HeftLifecyclePluginDefinition } from '../configuration/HeftPluginDefinition';
import { HeftLifecycleSession, type IHeftLifecycleHooks, type IHeftLifecycleSession } from './HeftLifecycleSession';
export interface IHeftLifecycleContext {
lifecycleSession?: HeftLifecycleSession;
pluginOptions?: object;
import { AsyncParallelHook } from 'tapable';
/** @internal */
export interface IHeftLifecycle {
hooks: HeftLifecycleHooks;
}
export declare class HeftLifecycle extends HeftPluginHost {
private readonly _internalHeftSession;
private readonly _lifecyclePluginSpecifiers;
private readonly _lifecycleHooks;
private readonly _lifecycleContextByDefinition;
private readonly _lifecyclePluginsByDefinition;
private _isInitialized;
get hooks(): IHeftLifecycleHooks;
get pluginDefinitions(): Iterable<HeftLifecyclePluginDefinition>;
constructor(internalHeftSession: InternalHeftSession, lifecyclePluginSpecifiers: IHeftConfigurationJsonPluginSpecifier[]);
protected applyPluginsInternalAsync(): Promise<void>;
ensureInitializedAsync(): Promise<void>;
getSessionForPluginDefinitionAsync(pluginDefinition: HeftLifecyclePluginDefinition): Promise<IHeftLifecycleSession>;
private _getLifecyclePluginForPluginDefinitionAsync;
/** @internal */
export declare class HeftLifecycleHooks {
toolStart: AsyncParallelHook;
}
//# sourceMappingURL=HeftLifecycle.d.ts.map

@@ -5,131 +5,11 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.HeftLifecycle = void 0;
exports.HeftLifecycleHooks = void 0;
const tapable_1 = require("tapable");
const node_core_library_1 = require("@rushstack/node-core-library");
const HeftPluginConfiguration_1 = require("../configuration/HeftPluginConfiguration");
const HeftPluginHost_1 = require("./HeftPluginHost");
const HeftLifecycleSession_1 = require("./HeftLifecycleSession");
class HeftLifecycle extends HeftPluginHost_1.HeftPluginHost {
constructor(internalHeftSession, lifecyclePluginSpecifiers) {
super();
this._lifecycleContextByDefinition = new Map();
this._lifecyclePluginsByDefinition = new Map();
this._isInitialized = false;
this._internalHeftSession = internalHeftSession;
this._lifecyclePluginSpecifiers = lifecyclePluginSpecifiers;
this._lifecycleHooks = {
clean: new tapable_1.AsyncParallelHook(),
toolStart: new tapable_1.AsyncParallelHook(),
toolFinish: new tapable_1.AsyncParallelHook(),
recordMetrics: internalHeftSession.metricsCollector.recordMetricsHook
};
/** @internal */
class HeftLifecycleHooks {
constructor() {
this.toolStart = new tapable_1.AsyncParallelHook();
}
get hooks() {
return this._lifecycleHooks;
}
get pluginDefinitions() {
if (!this._isInitialized) {
throw new node_core_library_1.InternalError('HeftLifecycle.ensureInitializedAsync() must be called before accessing HeftLifecycle.pluginDefinitions.');
}
return this._lifecycleContextByDefinition.keys();
}
async applyPluginsInternalAsync() {
await this.ensureInitializedAsync();
// Load up all plugins concurrently
const loadPluginPromises = [];
for (const [pluginDefinition, lifecycleContext] of this._lifecycleContextByDefinition) {
if (!lifecycleContext.lifecycleSession) {
// Generate the plugin-specific session
lifecycleContext.lifecycleSession = new HeftLifecycleSession_1.HeftLifecycleSession({
debug: this._internalHeftSession.debug,
heftConfiguration: this._internalHeftSession.heftConfiguration,
loggingManager: this._internalHeftSession.loggingManager,
metricsCollector: this._internalHeftSession.metricsCollector,
logger: this._internalHeftSession.loggingManager.requestScopedLogger(`lifecycle:${pluginDefinition.pluginName}`),
lifecycleHooks: this.hooks,
lifecycleParameters: this._internalHeftSession.parameterManager.getParametersForPlugin(pluginDefinition),
pluginDefinition: pluginDefinition,
pluginHost: this
});
}
loadPluginPromises.push(this._getLifecyclePluginForPluginDefinitionAsync(pluginDefinition, lifecycleContext.lifecycleSession));
}
// Promise.all maintains the order of the input array
const plugins = await Promise.all(loadPluginPromises);
// Iterate through and apply the plugins
let pluginIndex = 0;
for (const [pluginDefinition, lifecycleContext] of this._lifecycleContextByDefinition) {
const lifecyclePlugin = plugins[pluginIndex++];
try {
// Apply the plugin. We know the session should exist because we generated it above.
lifecyclePlugin.apply(lifecycleContext.lifecycleSession, this._internalHeftSession.heftConfiguration, lifecycleContext.pluginOptions);
}
catch (error) {
throw new Error(`Error applying plugin ${JSON.stringify(pluginDefinition.pluginName)} from package ` +
`${JSON.stringify(pluginDefinition.pluginPackageName)}: ${error}`);
}
}
// Do a second pass to apply the plugin access requests for each plugin
pluginIndex = 0;
for (const [pluginDefinition] of this._lifecycleContextByDefinition) {
const lifecyclePlugin = plugins[pluginIndex++];
this.resolvePluginAccessRequests(lifecyclePlugin, pluginDefinition);
}
}
async ensureInitializedAsync() {
if (!this._isInitialized) {
this._isInitialized = true;
// Load up all plugin configurations concurrently
const pluginConfigurationPromises = [];
for (const pluginSpecifier of this._lifecyclePluginSpecifiers) {
const { pluginPackageRoot, pluginPackage } = pluginSpecifier;
pluginConfigurationPromises.push(HeftPluginConfiguration_1.HeftPluginConfiguration.loadFromPackageAsync(pluginPackageRoot, pluginPackage));
}
// Promise.all maintains the order of the input array
const pluginConfigurations = await Promise.all(pluginConfigurationPromises);
// Iterate through and generate the lifecycle context for each plugin
let pluginConfigurationIndex = 0;
for (const pluginSpecifier of this._lifecyclePluginSpecifiers) {
const pluginConfiguration = pluginConfigurations[pluginConfigurationIndex++];
const pluginDefinition = pluginConfiguration.getPluginDefinitionBySpecifier(pluginSpecifier);
// Ensure the plugin is a lifecycle plugin
if (!pluginConfiguration.lifecyclePluginDefinitions.has(pluginDefinition)) {
throw new Error(`Plugin ${JSON.stringify(pluginDefinition.pluginName)} from package ` +
`${JSON.stringify(pluginSpecifier.pluginPackage)} is not a lifecycle plugin.`);
}
// Ensure there are no duplicate plugin names within the same package
if (this._lifecycleContextByDefinition.has(pluginDefinition)) {
throw new Error(`Lifecycle plugin ${JSON.stringify(pluginDefinition.pluginName)} from package ` +
`${JSON.stringify(pluginSpecifier.pluginPackage)} cannot be specified more than once.`);
}
// Validate the plugin options
const pluginOptions = pluginSpecifier.options;
pluginDefinition.validateOptions(pluginOptions);
// Partially populate the context. The session will be populated while applying the plugins.
const lifecycleContext = { pluginOptions };
this._lifecycleContextByDefinition.set(pluginDefinition, lifecycleContext);
}
}
}
async getSessionForPluginDefinitionAsync(pluginDefinition) {
await this.ensureInitializedAsync();
const lifecycleContext = this._lifecycleContextByDefinition.get(pluginDefinition);
if (!lifecycleContext) {
throw new node_core_library_1.InternalError(`Could not find lifecycle context for plugin ${JSON.stringify(pluginDefinition.pluginName)}.`);
}
if (!lifecycleContext.lifecycleSession) {
throw new node_core_library_1.InternalError(`Lifecycle session for plugin ${JSON.stringify(pluginDefinition.pluginName)} has not been created yet.`);
}
return lifecycleContext.lifecycleSession;
}
async _getLifecyclePluginForPluginDefinitionAsync(pluginDefinition, lifecycleSession) {
let lifecyclePlugin = this._lifecyclePluginsByDefinition.get(pluginDefinition);
if (!lifecyclePlugin) {
lifecyclePlugin = await pluginDefinition.loadPluginAsync(lifecycleSession.logger);
this._lifecyclePluginsByDefinition.set(pluginDefinition, lifecyclePlugin);
}
return lifecyclePlugin;
}
}
exports.HeftLifecycle = HeftLifecycle;
exports.HeftLifecycleHooks = HeftLifecycleHooks;
//# sourceMappingURL=HeftLifecycle.js.map

@@ -1,43 +0,13 @@

import type { HeftConfiguration } from '../configuration/HeftConfiguration';
import type { IHeftTaskSession } from './HeftTaskSession';
import type { IHeftLifecycleSession } from './HeftLifecycleSession';
import { JsonSchema } from '@rushstack/node-core-library';
import { HeftConfiguration } from '../configuration/HeftConfiguration';
import { HeftSession } from './HeftSession';
/**
* The interface used for all Heft plugins.
*
* @public
*/
export interface IHeftPlugin<TSession extends IHeftLifecycleSession | IHeftTaskSession = IHeftLifecycleSession | IHeftTaskSession, TOptions = void> {
/**
* The accessor provided by the plugin. This accessor can be obtained by other plugins within the same
* phase by calling `session.requestAccessToPlugin(...)`, and is used by other plugins to interact with
* hooks or properties provided by the host plugin.
*/
export interface IHeftPlugin<TOptions = void> {
readonly pluginName: string;
readonly optionsSchema?: JsonSchema;
readonly accessor?: object;
/**
* Apply the plugin to the session. Plugins are expected to hook into session hooks to provide plugin
* implementation. The `apply(...)` method is called once per phase.
*
* @param session - The session to apply the plugin to.
* @param heftConfiguration - The Heft configuration.
* @param pluginOptions - Options for the plugin, specified in heft.json.
*/
apply(session: TSession, heftConfiguration: HeftConfiguration, pluginOptions?: TOptions): void;
apply(heftSession: HeftSession, heftConfiguration: HeftConfiguration, options?: TOptions): void;
}
/**
* The interface that Heft lifecycle plugins must implement. Lifecycle plugins are used to provide
* functionality that affects the lifecycle of the Heft run. As such, they do not belong to any particular
* Heft phase.
*
* @public
*/
export interface IHeftLifecyclePlugin<TOptions = void> extends IHeftPlugin<IHeftLifecycleSession, TOptions> {
}
/**
* The interface that Heft task plugins must implement. Task plugins are used to provide the implementation
* of a specific task.
*
* @public
*/
export interface IHeftTaskPlugin<TOptions = void> extends IHeftPlugin<IHeftTaskSession, TOptions> {
}
//# sourceMappingURL=IHeftPlugin.d.ts.map

@@ -1,41 +0,35 @@

import { HeftLifecycle } from './HeftLifecycle';
import { HeftPhaseSession } from './HeftPhaseSession';
import { HeftPhase } from './HeftPhase';
import type { MetricsCollector } from '../metrics/MetricsCollector';
import type { LoggingManager } from './logging/LoggingManager';
import type { HeftConfiguration } from '../configuration/HeftConfiguration';
import type { HeftParameterManager } from './HeftParameterManager';
import { SyncHook } from 'tapable';
import { IHeftPlugin } from './IHeftPlugin';
import { HeftSession, RegisterAction } from './HeftSession';
import { BuildStage } from '../stages/BuildStage';
import { CleanStage } from '../stages/CleanStage';
import { TestStage } from '../stages/TestStage';
import { MetricsCollector } from '../metrics/MetricsCollector';
import { LoggingManager } from './logging/LoggingManager';
import { IHeftLifecycle } from './HeftLifecycle';
import { HeftCommandLine } from '../cli/HeftCommandLine';
/**
* @internal
*/
export interface IInternalHeftSessionOptions {
heftConfiguration: HeftConfiguration;
heftLifecycleHook: SyncHook<IHeftLifecycle>;
buildStage: BuildStage;
cleanStage: CleanStage;
testStage: TestStage;
metricsCollector: MetricsCollector;
loggingManager: LoggingManager;
metricsCollector: MetricsCollector;
debug: boolean;
getIsDebugMode(): boolean;
registerAction: RegisterAction;
commandLine: HeftCommandLine;
}
export interface IHeftSessionWatchOptions {
ignoredSourceFileGlobs: readonly string[];
forbiddenSourceFileGlobs: readonly string[];
}
/**
* @internal
*/
export declare class InternalHeftSession {
private readonly _phaseSessionsByPhase;
private readonly _heftConfigurationJson;
private _lifecycle;
private _phases;
private _phasesByName;
private _parameterManager;
private _watchOptions;
readonly heftConfiguration: HeftConfiguration;
readonly loggingManager: LoggingManager;
readonly metricsCollector: MetricsCollector;
readonly debug: boolean;
private constructor();
static initializeAsync(options: IInternalHeftSessionOptions): Promise<InternalHeftSession>;
get parameterManager(): HeftParameterManager;
set parameterManager(value: HeftParameterManager);
get lifecycle(): HeftLifecycle;
get phases(): ReadonlySet<HeftPhase>;
get phasesByName(): ReadonlyMap<string, HeftPhase>;
get watchOptions(): IHeftSessionWatchOptions;
getSessionForPhase(phase: HeftPhase): HeftPhaseSession;
private _ensurePhases;
private readonly _options;
private _pluginHooks;
constructor(options: IInternalHeftSessionOptions);
getSessionForPlugin(thisPlugin: IHeftPlugin): HeftSession;
applyPluginHooks(plugin: IHeftPlugin): void;
}
//# sourceMappingURL=InternalHeftSession.d.ts.map

@@ -6,90 +6,38 @@ "use strict";

exports.InternalHeftSession = void 0;
const node_core_library_1 = require("@rushstack/node-core-library");
const Constants_1 = require("../utilities/Constants");
const HeftLifecycle_1 = require("./HeftLifecycle");
const HeftPhaseSession_1 = require("./HeftPhaseSession");
const HeftPhase_1 = require("./HeftPhase");
const CoreConfigFiles_1 = require("../utilities/CoreConfigFiles");
function* getAllTasks(phases) {
for (const phase of phases) {
yield* phase.tasks;
}
}
const FORBIDDEN_SOURCE_FILE_GLOBS = ['package.json', '.gitingore', 'config/**/*', '.rush/**/*'];
const tapable_1 = require("tapable");
const HeftSession_1 = require("./HeftSession");
/**
* @internal
*/
class InternalHeftSession {
constructor(heftConfigurationJson, options) {
this._phaseSessionsByPhase = new Map();
this.heftConfiguration = options.heftConfiguration;
this.loggingManager = options.loggingManager;
this.metricsCollector = options.metricsCollector;
this.debug = options.debug;
this._heftConfigurationJson = heftConfigurationJson;
constructor(options) {
this._pluginHooks = new Map();
this._options = options;
}
static async initializeAsync(options) {
// Initialize the rig. Must be done before the HeftConfiguration.rigConfig is used.
await options.heftConfiguration._checkForRigAsync();
const heftConfigurationJson = await CoreConfigFiles_1.CoreConfigFiles.loadHeftConfigurationFileForProjectAsync(options.heftConfiguration.globalTerminal, options.heftConfiguration.buildFolderPath, options.heftConfiguration.rigConfig);
const internalHeftSession = new InternalHeftSession(heftConfigurationJson, options);
// Initialize the lifecycle and the tasks. This will ensure that we throw an error if a plugin is improperly
// specified, or if the options provided to a plugin are invalid. We will avoid loading the actual plugins
// until they are needed.
await internalHeftSession.lifecycle.ensureInitializedAsync();
const tasks = getAllTasks(internalHeftSession.phases);
await node_core_library_1.Async.forEachAsync(tasks, async (task) => {
await task.ensureInitializedAsync();
}, { concurrency: Constants_1.Constants.maxParallelism });
return internalHeftSession;
getSessionForPlugin(thisPlugin) {
return new HeftSession_1.HeftSession({
plugin: thisPlugin,
requestAccessToPluginByName: (pluginToAccessName, pluginApplyFn) => {
let pluginHook = this._pluginHooks.get(pluginToAccessName);
if (!pluginHook) {
pluginHook = new tapable_1.SyncHook(['pluginAccessor']);
this._pluginHooks.set(pluginToAccessName, pluginHook);
}
pluginHook.tap(thisPlugin.pluginName, pluginApplyFn);
}
}, this._options);
}
get parameterManager() {
if (!this._parameterManager) {
throw new node_core_library_1.InternalError('A parameter manager for the session has not been provided.');
}
return this._parameterManager;
}
set parameterManager(value) {
this._parameterManager = value;
}
get lifecycle() {
if (!this._lifecycle) {
this._lifecycle = new HeftLifecycle_1.HeftLifecycle(this, this._heftConfigurationJson.heftPlugins || []);
}
return this._lifecycle;
}
get phases() {
this._ensurePhases();
return this._phases;
}
get phasesByName() {
this._ensurePhases();
return this._phasesByName;
}
get watchOptions() {
var _a, _b;
if (!this._watchOptions) {
this._watchOptions = {
ignoredSourceFileGlobs: ((_a = this._heftConfigurationJson.watchOptions) === null || _a === void 0 ? void 0 : _a.ignoredSourceFileGlobs) || [],
forbiddenSourceFileGlobs: [
...FORBIDDEN_SOURCE_FILE_GLOBS,
...(((_b = this._heftConfigurationJson.watchOptions) === null || _b === void 0 ? void 0 : _b.forbiddenSourceFileGlobs) || [])
]
};
}
return this._watchOptions;
}
getSessionForPhase(phase) {
let phaseSession = this._phaseSessionsByPhase.get(phase);
if (!phaseSession) {
phaseSession = new HeftPhaseSession_1.HeftPhaseSession({ internalHeftSession: this, phase });
this._phaseSessionsByPhase.set(phase, phaseSession);
}
return phaseSession;
}
_ensurePhases() {
if (!this._phases || !this._phasesByName) {
this._phasesByName = new Map();
for (const [phaseName, phaseSpecifier] of Object.entries(this._heftConfigurationJson.phasesByName || {})) {
const phase = new HeftPhase_1.HeftPhase(this, phaseName, phaseSpecifier);
this._phasesByName.set(phaseName, phase);
applyPluginHooks(plugin) {
const pluginHook = this._pluginHooks.get(plugin.pluginName);
const accessor = plugin.accessor;
if (pluginHook && pluginHook.taps.length > 0) {
if (!accessor) {
const accessingPlugins = new Set(pluginHook.taps.map((x) => x.name));
throw new Error(`Plugin "${plugin.pluginName}" does not provide an accessor property, so it does not provide ` +
`access to other plugins. Plugins requesting access to "${plugin.pluginName}: ` +
Array.from(accessingPlugins).join(', '));
}
this._phases = new Set(this._phasesByName.values());
else {
pluginHook.call(accessor);
}
}

@@ -96,0 +44,0 @@ }

@@ -0,1 +1,2 @@

import { IHeftPlugin } from '../IHeftPlugin';
import { ScopedLogger } from './ScopedLogger';

@@ -14,4 +15,3 @@ import { FileLocationStyle, ITerminalProvider, IFileErrorFormattingOptions } from '@rushstack/node-core-library';

enablePrintStacks(): void;
resetScopedLoggerErrorsAndWarnings(): void;
requestScopedLogger(loggerName: string): ScopedLogger;
requestScopedLogger(plugin: IHeftPlugin, loggerName: string): ScopedLogger;
getErrorStrings(fileLocationStyle?: FileLocationStyle): string[];

@@ -18,0 +18,0 @@ getWarningStrings(fileErrorFormat?: FileLocationStyle): string[];

@@ -21,15 +21,11 @@ "use strict";

}
resetScopedLoggerErrorsAndWarnings() {
this._hasAnyErrors = false;
for (const scopedLogger of this._scopedLoggers.values()) {
scopedLogger.resetErrorsAndWarnings();
}
}
requestScopedLogger(loggerName) {
requestScopedLogger(plugin, loggerName) {
const existingScopedLogger = this._scopedLoggers.get(loggerName);
if (existingScopedLogger) {
throw new Error(`A named logger with name ${JSON.stringify(loggerName)} has already been requested.`);
throw new Error(`A named logger with name "${loggerName}" has already been requested ` +
`by plugin "${existingScopedLogger._requestingPlugin.pluginName}".`);
}
else {
const scopedLogger = new ScopedLogger_1.ScopedLogger({
requestingPlugin: plugin,
loggerName,

@@ -36,0 +32,0 @@ terminalProvider: this._options.terminalProvider,

@@ -1,19 +0,16 @@

import { Terminal, ITerminalProvider } from '@rushstack/node-core-library';
import { type ITerminal, ITerminalProvider } from '@rushstack/node-core-library';
import { IHeftPlugin } from '../IHeftPlugin';
/**
 * Constructor options for {@link ScopedLogger}.
 */
export interface IScopedLoggerOptions {
    /** The plugin that requested this logger; used to attribute duplicate-logger errors to a plugin name. */
    requestingPlugin: IHeftPlugin;
    /** Name of the scoped logger; console messages are prefixed with "[loggerName] ". */
    loggerName: string;
    /** The underlying terminal provider that the logger's prefixed terminal wraps. */
    terminalProvider: ITerminalProvider;
    /** Returns true when emitted errors/warnings should include stack traces. */
    getShouldPrintStacks: () => boolean;
    /** Invoked when an error is emitted — presumably so the owning manager can track failure state; TODO confirm against LoggingManager. */
    errorHasBeenEmittedCallback: () => void;
}
/**
* A logger which is used to emit errors and warnings to the console, as well as to write
* to the console. Messaged emitted by the scoped logger are prefixed with the name of the
* scoped logger.
*
* @public
*/
export interface IScopedLogger {
readonly terminal: ITerminal;
/**
* The name of the scoped logger. Logging messages will be prefixed with this name.
*/
readonly loggerName: string;
/**
* The terminal used to write messages to the console.
*/
readonly terminal: Terminal;
/**
* Call this function to emit an error to the heft runtime.

@@ -27,18 +24,19 @@ */

}
/**
 * Constructor options for {@link ScopedLogger}.
 */
export interface IScopedLoggerOptions {
    /** Name of the scoped logger; console messages are prefixed with "[loggerName] ". */
    loggerName: string;
    /** The underlying terminal provider that the logger's prefixed terminal wraps. */
    terminalProvider: ITerminalProvider;
    /** Returns true when emitted errors/warnings should include stack traces. */
    getShouldPrintStacks: () => boolean;
    /** Invoked when an error is emitted — presumably so the owning manager can track failure state; TODO confirm against LoggingManager. */
    errorHasBeenEmittedCallback: () => void;
}
/**
* @public
*/
export declare class ScopedLogger implements IScopedLogger {
private readonly _options;
private _errors;
private _warnings;
private readonly _errors;
private readonly _warnings;
private get _shouldPrintStacks();
get errors(): ReadonlyArray<Error>;
get warnings(): ReadonlyArray<Error>;
/**
* @internal
*/
readonly _requestingPlugin: IHeftPlugin;
readonly loggerName: string;
readonly terminalProvider: ITerminalProvider;
readonly terminal: Terminal;
readonly terminal: ITerminal;
/**

@@ -56,7 +54,3 @@ * @internal

emitWarning(warning: Error): void;
/**
* Reset the errors and warnings for this scoped logger.
*/
resetErrorsAndWarnings(): void;
}
//# sourceMappingURL=ScopedLogger.d.ts.map

@@ -9,2 +9,5 @@ "use strict";

const LoggingManager_1 = require("./LoggingManager");
/**
* @public
*/
class ScopedLogger {

@@ -18,2 +21,3 @@ /**

this._options = options;
this._requestingPlugin = options.requestingPlugin;
this.loggerName = options.loggerName;

@@ -24,3 +28,2 @@ this.terminalProvider = new PrefixProxyTerminalProvider_1.PrefixProxyTerminalProvider(options.terminalProvider, `[${this.loggerName}] `);

get _shouldPrintStacks() {
// TODO: Consider dumping stacks and more verbose logging to a file
return this._options.getShouldPrintStacks();

@@ -54,11 +57,4 @@ }

}
/**
* Reset the errors and warnings for this scoped logger.
*/
resetErrorsAndWarnings() {
this._errors = [];
this._warnings = [];
}
}
exports.ScopedLogger = ScopedLogger;
//# sourceMappingURL=ScopedLogger.js.map

@@ -1,56 +0,38 @@

import { type GlobFn, type IFileSelectionSpecifier } from './FileGlobSpecifier';
import type { HeftConfiguration } from '../configuration/HeftConfiguration';
import type { IHeftTaskPlugin } from '../pluginFramework/IHeftPlugin';
import type { IHeftTaskSession } from '../pluginFramework/HeftTaskSession';
import type { IScopedLogger } from '../pluginFramework/logging/ScopedLogger';
/**
* Used to specify a selection of files to copy from a specific source folder to one
* or more destination folders.
*
* @public
*/
export interface ICopyOperation extends IFileSelectionSpecifier {
import { ScopedLogger } from '../pluginFramework/logging/ScopedLogger';
import { HeftEventPluginBase } from '../pluginFramework/HeftEventPluginBase';
import { HeftSession } from '../pluginFramework/HeftSession';
import { HeftConfiguration } from '../configuration/HeftConfiguration';
import { IExtendedSharedCopyConfiguration, IHeftEventActions, IHeftConfigurationCopyFilesEventAction, HeftEvent } from '../utilities/CoreConfigFiles';
import { IBuildStageProperties } from '../stages/BuildStage';
export interface IResolvedDestinationCopyConfiguration extends IExtendedSharedCopyConfiguration {
/**
* Absolute paths to folders which files or folders should be copied to.
* Fully-qualified folder paths to which files should be copied.
*/
destinationFolders: string[];
/**
* Copy only the file and discard the relative path from the source folder.
*/
flatten?: boolean;
/**
* Hardlink files instead of copying.
*
* @remarks
* If the sourcePath is a folder, the contained directory structure will be re-created
* and all files will be individually hardlinked. This means that folders will be new
* filesystem entities and will have separate folder metadata, while the contained files
* will maintain normal hardlink behavior. This is done since folders do not have a
* cross-platform equivalent of a hardlink, and since file symlinks provide fundamentally
* different functionality in comparison to hardlinks.
*/
hardlink?: boolean;
resolvedDestinationFolderPaths: string[];
}
/**
* Used to specify a selection of files to copy from a specific source folder to one
* or more destination folders.
*
* @public
*/
export interface IIncrementalCopyOperation extends ICopyOperation {
/**
 * Options consumed by CopyFilesPlugin.runCopyAsync.
 */
export interface ICopyFilesOptions {
    /** Project build folder; each configuration's sourceFolder and destination folders are resolved against it. */
    buildFolder: string;
    /** The copy configurations to execute, with destination folder paths already resolved. */
    copyConfigurations: IResolvedDestinationCopyConfiguration[];
    /** Logger used to report copy/link counts and watch-mode activity. */
    logger: ScopedLogger;
    /** When true, after the initial copy a chokidar watcher re-copies (or deletes) assets as they change. */
    watchMode: boolean;
}
/**
 * Result tallies from a copy operation.
 */
export interface ICopyFilesResult {
    /** Number of files physically copied (non-hardlink descriptors). */
    copiedFileCount: number;
    /** Number of files created as hardlinks rather than copies. */
    linkedFileCount: number;
}
export declare class CopyFilesPlugin extends HeftEventPluginBase<IHeftConfigurationCopyFilesEventAction> {
readonly pluginName: string;
protected eventActionName: keyof IHeftEventActions;
protected loggerName: string;
/**
* If true, the file will be copied only if the source file is contained in the
* IHeftTaskRunIncrementalHookOptions.changedFiles map.
* @override
*/
onlyIfChanged?: boolean;
protected handleBuildEventActionsAsync(heftEvent: HeftEvent, heftEventActions: IHeftConfigurationCopyFilesEventAction[], logger: ScopedLogger, heftSession: HeftSession, heftConfiguration: HeftConfiguration, properties: IBuildStageProperties): Promise<void>;
private _runCopyFilesForHeftEventActions;
protected runCopyAsync(options: ICopyFilesOptions): Promise<void>;
private _copyFilesAsync;
private _getCopyFileDescriptorsAsync;
private _getIncludedGlobPatterns;
private _runWatchAsync;
}
/**
 * Options provided to CopyFilesPlugin via heft.json plugin configuration.
 */
interface ICopyFilesPluginOptions {
    /** The file-copy operations to register with the task's run/runIncremental hooks. */
    copyOperations: ICopyOperation[];
}
export declare function copyFilesAsync(copyOperations: ICopyOperation[], logger: IScopedLogger): Promise<void>;
export declare function copyIncrementalFilesAsync(copyOperations: ICopyOperation[], globChangedFilesAsyncFn: GlobFn, isFirstRun: boolean, logger: IScopedLogger): Promise<void>;
export default class CopyFilesPlugin implements IHeftTaskPlugin<ICopyFilesPluginOptions> {
apply(taskSession: IHeftTaskSession, heftConfiguration: HeftConfiguration, pluginOptions: ICopyFilesPluginOptions): void;
}
export {};
//# sourceMappingURL=CopyFilesPlugin.d.ts.map

@@ -31,148 +31,195 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.copyIncrementalFilesAsync = exports.copyFilesAsync = void 0;
exports.CopyFilesPlugin = void 0;
const chokidar = __importStar(require("chokidar"));
const path = __importStar(require("path"));
const fast_glob_1 = __importDefault(require("fast-glob"));
const perf_hooks_1 = require("perf_hooks");
const node_core_library_1 = require("@rushstack/node-core-library");
const HeftAsync_1 = require("../utilities/HeftAsync");
const HeftEventPluginBase_1 = require("../pluginFramework/HeftEventPluginBase");
const Constants_1 = require("../utilities/Constants");
const FileGlobSpecifier_1 = require("./FileGlobSpecifier");
/**
 * Executes the provided copy operations: expands each operation's globs with the
 * standard globber, then performs the resulting copies/hardlinks.
 */
async function copyFilesAsync(copyOperations, logger) {
    await _copyFilesInnerAsync(await _getCopyDescriptorsAsync(copyOperations, fast_glob_1.default), logger);
}
exports.copyFilesAsync = copyFilesAsync;
/**
 * Executes the provided copy operations incrementally. On the first run the normal
 * globber is used so that non-watched files are still copied; on subsequent runs the
 * supplied changed-files globber restricts the copy to files that actually changed.
 */
async function copyIncrementalFilesAsync(copyOperations, globChangedFilesAsyncFn, isFirstRun, logger) {
    let globFn;
    if (isFirstRun) {
        // First run: glob everything so files outside the watch set are copied too.
        globFn = fast_glob_1.default;
    }
    else {
        globFn = globChangedFilesAsyncFn;
    }
    const descriptors = await _getCopyDescriptorsAsync(copyOperations, globFn);
    await _copyFilesInnerAsync(descriptors, logger);
}
exports.copyIncrementalFilesAsync = copyIncrementalFilesAsync;
async function _getCopyDescriptorsAsync(copyConfigurations, globFn) {
const processedCopyDescriptors = [];
// Create a map to deduplicate and prevent double-writes
// resolvedDestinationFilePath -> descriptor
const destinationCopyDescriptors = new Map();
await node_core_library_1.Async.forEachAsync(copyConfigurations, async (copyConfiguration) => {
var _a, _b, _c;
let sourceFolder;
let sourceFilePaths;
if (!((_a = copyConfiguration.fileExtensions) === null || _a === void 0 ? void 0 : _a.length) &&
!((_b = copyConfiguration.includeGlobs) === null || _b === void 0 ? void 0 : _b.length) &&
!((_c = copyConfiguration.excludeGlobs) === null || _c === void 0 ? void 0 : _c.length)) {
sourceFolder = path.dirname(copyConfiguration.sourcePath);
if (copyConfiguration.hardlink) {
// Specify a glob to match all files in the folder, since folders cannot be hardlinked.
// Perform globbing from one folder up, so that we create the folder in the destination.
try {
sourceFilePaths = await (0, FileGlobSpecifier_1.getFilePathsAsync)(Object.assign(Object.assign({}, copyConfiguration), { sourcePath: sourceFolder, includeGlobs: [`${path.basename(copyConfiguration.sourcePath)}/**/*`] }), globFn);
}
catch (error) {
if (node_core_library_1.FileSystem.isNotDirectoryError(error)) {
// The source path is a file, not a folder. Handled below.
}
else {
throw error;
}
}
class CopyFilesPlugin extends HeftEventPluginBase_1.HeftEventPluginBase {
constructor() {
super(...arguments);
this.pluginName = 'CopyFilesPlugin';
this.eventActionName = 'copyFiles';
this.loggerName = 'copy-files';
}
/**
* @override
*/
async handleBuildEventActionsAsync(heftEvent, heftEventActions, logger, heftSession, heftConfiguration, properties) {
await this._runCopyFilesForHeftEventActions(heftEventActions, logger, heftConfiguration);
}
async _runCopyFilesForHeftEventActions(heftEventActions, logger, heftConfiguration) {
const copyConfigurations = [];
for (const copyFilesEventAction of heftEventActions) {
for (const copyOperation of copyFilesEventAction.copyOperations) {
copyConfigurations.push(Object.assign(Object.assign({}, copyOperation), { resolvedDestinationFolderPaths: copyOperation.destinationFolders.map((destinationFolder) => path.join(heftConfiguration.buildFolder, destinationFolder)) }));
}
// Still not set, either it's not a hardlink or it's a file.
if (!sourceFilePaths) {
sourceFilePaths = new Set([copyConfiguration.sourcePath]);
}
}
else {
// Assume the source path is a folder
sourceFolder = copyConfiguration.sourcePath;
sourceFilePaths = await (0, FileGlobSpecifier_1.getFilePathsAsync)(copyConfiguration, globFn);
await this.runCopyAsync({
buildFolder: heftConfiguration.buildFolder,
copyConfigurations,
logger,
watchMode: false
});
}
async runCopyAsync(options) {
const { logger, buildFolder, copyConfigurations } = options;
const startTime = perf_hooks_1.performance.now();
const copyDescriptors = await this._getCopyFileDescriptorsAsync(buildFolder, copyConfigurations);
if (copyDescriptors.length === 0) {
// No need to run copy and print to console
return;
}
// Dedupe and throw if a double-write is detected
for (const destinationFolderPath of copyConfiguration.destinationFolders) {
for (const sourceFilePath of sourceFilePaths) {
const sourceFileRelativePath = path.relative(sourceFolder, sourceFilePath);
// Only include the relative path from the sourceFolder if flatten is false
const resolvedDestinationPath = path.resolve(destinationFolderPath, copyConfiguration.flatten ? path.basename(sourceFileRelativePath) : sourceFileRelativePath);
// Throw if a duplicate copy target with a different source or options is specified
const existingDestinationCopyDescriptor = destinationCopyDescriptors.get(resolvedDestinationPath);
if (existingDestinationCopyDescriptor) {
if (existingDestinationCopyDescriptor.sourcePath === sourceFilePath &&
existingDestinationCopyDescriptor.hardlink === !!copyConfiguration.hardlink) {
// Found a duplicate, avoid adding again
continue;
const { copiedFileCount, linkedFileCount } = await this._copyFilesAsync(copyDescriptors);
const duration = perf_hooks_1.performance.now() - startTime;
logger.terminal.writeLine(`Copied ${copiedFileCount} file${copiedFileCount === 1 ? '' : 's'} and ` +
`linked ${linkedFileCount} file${linkedFileCount === 1 ? '' : 's'} in ${Math.round(duration)}ms`);
// Then enter watch mode if requested
if (options.watchMode) {
HeftAsync_1.HeftAsync.runWatcherWithErrorHandling(async () => await this._runWatchAsync(options), logger);
}
}
async _copyFilesAsync(copyDescriptors) {
if (copyDescriptors.length === 0) {
return { copiedFileCount: 0, linkedFileCount: 0 };
}
let copiedFileCount = 0;
let linkedFileCount = 0;
await node_core_library_1.Async.forEachAsync(copyDescriptors, async (copyDescriptor) => {
if (copyDescriptor.hardlink) {
linkedFileCount++;
await node_core_library_1.FileSystem.createHardLinkAsync({
linkTargetPath: copyDescriptor.sourceFilePath,
newLinkPath: copyDescriptor.destinationFilePath,
alreadyExistsBehavior: node_core_library_1.AlreadyExistsBehavior.Overwrite
});
}
else {
copiedFileCount++;
await node_core_library_1.FileSystem.copyFileAsync({
sourcePath: copyDescriptor.sourceFilePath,
destinationPath: copyDescriptor.destinationFilePath,
alreadyExistsBehavior: node_core_library_1.AlreadyExistsBehavior.Overwrite
});
}
}, { concurrency: Constants_1.Constants.maxParallelism });
return {
copiedFileCount,
linkedFileCount
};
}
async _getCopyFileDescriptorsAsync(buildFolder, copyConfigurations) {
const processedCopyDescriptors = [];
// Create a map to deduplicate and prevent double-writes
// resolvedDestinationFilePath -> descriptor
const destinationCopyDescriptors = new Map();
for (const copyConfiguration of copyConfigurations) {
// Resolve the source folder path which is where the glob will be run from
const resolvedSourceFolderPath = path.resolve(buildFolder, copyConfiguration.sourceFolder);
const sourceFileRelativePaths = new Set(await (0, fast_glob_1.default)(this._getIncludedGlobPatterns(copyConfiguration), {
cwd: resolvedSourceFolderPath,
ignore: copyConfiguration.excludeGlobs,
dot: true,
onlyFiles: true
}));
// Dedupe and throw if a double-write is detected
for (const destinationFolderPath of copyConfiguration.resolvedDestinationFolderPaths) {
for (const sourceFileRelativePath of sourceFileRelativePaths) {
// Only include the relative path from the sourceFolder if flatten is false
const resolvedSourceFilePath = path.join(resolvedSourceFolderPath, sourceFileRelativePath);
const resolvedDestinationFilePath = path.resolve(destinationFolderPath, copyConfiguration.flatten ? '.' : path.dirname(sourceFileRelativePath), path.basename(sourceFileRelativePath));
// Throw if a duplicate copy target with a different source or options is specified
const existingDestinationCopyDescriptor = destinationCopyDescriptors.get(resolvedDestinationFilePath);
if (existingDestinationCopyDescriptor) {
if (existingDestinationCopyDescriptor.sourceFilePath === resolvedSourceFilePath &&
existingDestinationCopyDescriptor.hardlink === !!copyConfiguration.hardlink) {
// Found a duplicate, avoid adding again
continue;
}
throw new Error(`Cannot copy different files to the same destination "${resolvedDestinationFilePath}"`);
}
throw new Error(`Cannot copy multiple files to the same destination "${resolvedDestinationPath}".`);
// Finally, default hardlink to false, add to the result, and add to the map for deduping
const processedCopyDescriptor = {
sourceFilePath: resolvedSourceFilePath,
destinationFilePath: resolvedDestinationFilePath,
hardlink: !!copyConfiguration.hardlink
};
processedCopyDescriptors.push(processedCopyDescriptor);
destinationCopyDescriptors.set(resolvedDestinationFilePath, processedCopyDescriptor);
}
// Finally, default hardlink to false, add to the result, and add to the map for deduping
const processedCopyDescriptor = {
sourcePath: sourceFilePath,
destinationPath: resolvedDestinationPath,
hardlink: !!copyConfiguration.hardlink
};
processedCopyDescriptors.push(processedCopyDescriptor);
destinationCopyDescriptors.set(resolvedDestinationPath, processedCopyDescriptor);
}
}
}, { concurrency: Constants_1.Constants.maxParallelism });
// We're done with the map, grab the values and return
return processedCopyDescriptors;
}
async function _copyFilesInnerAsync(copyDescriptors, logger) {
if (copyDescriptors.length === 0) {
return;
// We're done with the map, grab the values and return
return processedCopyDescriptors;
}
let copiedFolderOrFileCount = 0;
let linkedFileCount = 0;
await node_core_library_1.Async.forEachAsync(copyDescriptors, async (copyDescriptor) => {
if (copyDescriptor.hardlink) {
linkedFileCount++;
await node_core_library_1.FileSystem.createHardLinkAsync({
linkTargetPath: copyDescriptor.sourcePath,
newLinkPath: copyDescriptor.destinationPath,
alreadyExistsBehavior: node_core_library_1.AlreadyExistsBehavior.Overwrite
});
logger.terminal.writeVerboseLine(`Linked "${copyDescriptor.sourcePath}" to "${copyDescriptor.destinationPath}".`);
_getIncludedGlobPatterns(copyConfiguration) {
const patternsToGlob = new Set();
// Glob file extensions with a specific glob to increase perf
const escapedFileExtensions = new Set();
for (const fileExtension of copyConfiguration.fileExtensions || []) {
let escapedFileExtension;
if (fileExtension.charAt(0) === '.') {
escapedFileExtension = fileExtension.substr(1);
}
else {
escapedFileExtension = fileExtension;
}
escapedFileExtension = fast_glob_1.default.escapePath(escapedFileExtension);
escapedFileExtensions.add(escapedFileExtension);
}
else {
copiedFolderOrFileCount++;
await node_core_library_1.FileSystem.copyFilesAsync({
sourcePath: copyDescriptor.sourcePath,
destinationPath: copyDescriptor.destinationPath,
alreadyExistsBehavior: node_core_library_1.AlreadyExistsBehavior.Overwrite
});
logger.terminal.writeVerboseLine(`Copied "${copyDescriptor.sourcePath}" to "${copyDescriptor.destinationPath}".`);
if (escapedFileExtensions.size > 1) {
patternsToGlob.add(`**/*.{${Array.from(escapedFileExtensions).join(',')}}`);
}
}, { concurrency: Constants_1.Constants.maxParallelism });
const folderOrFilesPlural = copiedFolderOrFileCount === 1 ? '' : 's';
logger.terminal.writeLine(`Copied ${copiedFolderOrFileCount} folder${folderOrFilesPlural} or file${folderOrFilesPlural} and ` +
`linked ${linkedFileCount} file${linkedFileCount === 1 ? '' : 's'}`);
}
function _resolveCopyOperationPaths(heftConfiguration, copyOperations) {
for (const copyOperation of copyOperations) {
if (!path.isAbsolute(copyOperation.sourcePath)) {
copyOperation.sourcePath = path.resolve(heftConfiguration.buildFolderPath, copyOperation.sourcePath);
else if (escapedFileExtensions.size === 1) {
patternsToGlob.add(`**/*.${Array.from(escapedFileExtensions)[0]}`);
}
const destinationFolders = [];
for (const destinationFolder of copyOperation.destinationFolders) {
if (!path.isAbsolute(destinationFolder)) {
destinationFolders.push(path.resolve(heftConfiguration.buildFolderPath, destinationFolder));
// Now include the other globs as well
for (const include of copyConfiguration.includeGlobs || []) {
patternsToGlob.add(include);
}
return Array.from(patternsToGlob);
}
async _runWatchAsync(options) {
const { buildFolder, copyConfigurations, logger } = options;
for (const copyConfiguration of copyConfigurations) {
// Obtain the glob patterns to provide to the watcher
const globsToWatch = this._getIncludedGlobPatterns(copyConfiguration);
if (globsToWatch.length) {
const resolvedSourceFolderPath = path.join(buildFolder, copyConfiguration.sourceFolder);
const watcher = chokidar.watch(globsToWatch, {
cwd: resolvedSourceFolderPath,
ignoreInitial: true,
ignored: copyConfiguration.excludeGlobs
});
const copyAsset = async (relativeAssetPath) => {
const { copiedFileCount, linkedFileCount } = await this._copyFilesAsync(copyConfiguration.resolvedDestinationFolderPaths.map((resolvedDestinationFolderPath) => {
return {
sourceFilePath: path.join(resolvedSourceFolderPath, relativeAssetPath),
destinationFilePath: path.join(resolvedDestinationFolderPath, copyConfiguration.flatten ? path.basename(relativeAssetPath) : relativeAssetPath),
hardlink: !!copyConfiguration.hardlink
};
}));
logger.terminal.writeLine(copyConfiguration.hardlink
? `Linked ${linkedFileCount} file${linkedFileCount === 1 ? '' : 's'}`
: `Copied ${copiedFileCount} file${copiedFileCount === 1 ? '' : 's'}`);
};
const deleteAsset = async (relativeAssetPath) => {
const deletePromises = copyConfiguration.resolvedDestinationFolderPaths.map((resolvedDestinationFolderPath) => node_core_library_1.FileSystem.deleteFileAsync(path.resolve(resolvedDestinationFolderPath, relativeAssetPath)));
await Promise.all(deletePromises);
logger.terminal.writeLine(`Deleted ${deletePromises.length} file${deletePromises.length === 1 ? '' : 's'}`);
};
watcher.on('add', copyAsset);
watcher.on('change', copyAsset);
watcher.on('unlink', deleteAsset);
}
}
copyOperation.destinationFolders = destinationFolders;
}
}
const PLUGIN_NAME = 'copy-files-plugin';
class CopyFilesPlugin {
apply(taskSession, heftConfiguration, pluginOptions) {
// TODO: Remove once improved heft-config-file is used to resolve paths
_resolveCopyOperationPaths(heftConfiguration, pluginOptions.copyOperations);
taskSession.hooks.run.tapPromise(PLUGIN_NAME, async (runOptions) => {
runOptions.addCopyOperations(pluginOptions.copyOperations);
return new Promise(() => {
/* never resolve */
});
taskSession.hooks.runIncremental.tapPromise(PLUGIN_NAME, async (runIncrementalOptions) => {
runIncrementalOptions.addCopyOperations(pluginOptions.copyOperations.map((copyOperation) => {
return Object.assign(Object.assign({}, copyOperation), { onlyIfChanged: true });
}));
});
}
}
exports.default = CopyFilesPlugin;
exports.CopyFilesPlugin = CopyFilesPlugin;
//# sourceMappingURL=CopyFilesPlugin.js.map

@@ -1,7 +0,8 @@

import type { IHeftTaskPlugin } from '../pluginFramework/IHeftPlugin';
import type { HeftConfiguration } from '../configuration/HeftConfiguration';
import type { IHeftTaskSession } from '../pluginFramework/HeftTaskSession';
import { HeftSession } from '../pluginFramework/HeftSession';
import { HeftConfiguration } from '../configuration/HeftConfiguration';
import { IHeftPlugin } from '../pluginFramework/IHeftPlugin';
export interface INodeServicePluginCompleteConfiguration {
commandName: string;
ignoreMissingScript: boolean;
waitBeforeRestartMs: number;
waitForTerminateMs: number;

@@ -12,13 +13,12 @@ waitForKillMs: number;

}
export default class NodeServicePlugin implements IHeftTaskPlugin {
export declare class NodeServicePlugin implements IHeftPlugin {
readonly pluginName: string;
private static readonly _isWindows;
private _logger;
private _activeChildProcess;
private _childProcessExitPromise;
private _childProcessExitPromiseResolveFn;
private _childProcessExitPromiseRejectFn;
private _state;
private _logger;
/**
* The state machine schedules at most one setInterval() timeout at any given time. It is for:
*
* - waitBeforeRestartMs in State.Stopped
* - waitForTerminateMs in State.Stopping

@@ -29,2 +29,7 @@ * - waitForKillMs in State.Killing

/**
* Used by _scheduleRestart(). The process will be automatically restarted when performance.now()
* exceeds this time.
*/
private _restartTime;
/**
* The data read from the node-service.json config file, or "undefined" if the file is missing.

@@ -41,13 +46,23 @@ */

private _shellCommand;
/**
* This is set to true when the child process terminates unexpectedly (for example, something like
* "the service listening port is already in use" or "unable to authenticate to the database").
* Rather than attempting to restart in a potentially endless loop, instead we will wait until "watch mode"
* recompiles the project.
*/
private _childProcessFailed;
private _pluginEnabled;
apply(taskSession: IHeftTaskSession, heftConfiguration: HeftConfiguration): void;
apply(heftSession: HeftSession, heftConfiguration: HeftConfiguration): void;
private _loadStageConfiguration;
private _runCommandAsync;
private _stopChildAsync;
private _compileHooks_afterEachCompile;
private _restartChild;
private _formatCodeOrSignal;
private _stopChild;
private _transitionToKilling;
private _transitionToStopped;
private _startChild;
private _scheduleRestart;
private _clearTimeout;
private _formatCodeOrSignal;
private _trapUnhandledException;
}
//# sourceMappingURL=NodeServicePlugin.d.ts.map

@@ -28,7 +28,9 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.NodeServicePlugin = void 0;
const child_process = __importStar(require("child_process"));
const process = __importStar(require("process"));
const perf_hooks_1 = require("perf_hooks");
const node_core_library_1 = require("@rushstack/node-core-library");
const CoreConfigFiles_1 = require("../utilities/CoreConfigFiles");
const PLUGIN_NAME = 'node-service-plugin';
const PLUGIN_NAME = 'NodeServicePlugin';
var State;

@@ -61,2 +63,3 @@ (function (State) {

constructor() {
this.pluginName = PLUGIN_NAME;
this._state = State.Stopped;

@@ -66,2 +69,3 @@ /**

*
* - waitBeforeRestartMs in State.Stopped
* - waitForTerminateMs in State.Stopping

@@ -72,78 +76,189 @@ * - waitForKillMs in State.Killing

/**
* Used by _scheduleRestart(). The process will be automatically restarted when performance.now()
* exceeds this time.
*/
this._restartTime = undefined;
/**
* The data read from the node-service.json config file, or "undefined" if the file is missing.
*/
this._rawConfiguration = undefined;
/**
* This is set to true when the child process terminates unexpectedly (for example, something like
* "the service listening port is already in use" or "unable to authenticate to the database").
* Rather than attempting to restart in a potentially endless loop, instead we will wait until "watch mode"
* recompiles the project.
*/
this._childProcessFailed = false;
this._pluginEnabled = false;
this._compileHooks_afterEachCompile = () => {
this._trapUnhandledException(() => {
// We've recompiled, so try launching again
this._childProcessFailed = false;
if (this._state === State.Stopped) {
// If we are already stopped, then extend the timeout
this._scheduleRestart(this._configuration.waitBeforeRestartMs);
}
else {
this._stopChild();
}
});
};
}
apply(taskSession, heftConfiguration) {
// Set this immediately to make it available to the internal methods that use it
this._logger = taskSession.logger;
taskSession.hooks.run.tapPromise(PLUGIN_NAME, async () => {
taskSession.logger.terminal.writeWarningLine('Node services can only be run in watch mode.');
apply(heftSession, heftConfiguration) {
this._logger = heftSession.requestScopedLogger('node-service');
heftSession.hooks.build.tap(PLUGIN_NAME, (build) => {
if (!build.properties.serveMode) {
// This plugin is only used with "heft start"
return;
}
build.hooks.loadStageConfiguration.tapPromise(PLUGIN_NAME, async () => {
await this._loadStageConfiguration(heftConfiguration);
if (this._pluginEnabled) {
build.hooks.postBuild.tap(PLUGIN_NAME, (bundle) => {
bundle.hooks.run.tapPromise(PLUGIN_NAME, async () => {
await this._runCommandAsync(heftSession, heftConfiguration);
});
});
build.hooks.compile.tap(PLUGIN_NAME, (compile) => {
compile.hooks.afterCompile.tap(PLUGIN_NAME, this._compileHooks_afterEachCompile);
compile.hooks.afterRecompile.tap(PLUGIN_NAME, this._compileHooks_afterEachCompile);
});
}
});
});
taskSession.hooks.runIncremental.tapPromise(PLUGIN_NAME, async (runIncrementalOptions) => {
await this._runCommandAsync(taskSession, heftConfiguration);
});
}
async _loadStageConfiguration(taskSession, heftConfiguration) {
if (!this._rawConfiguration) {
this._rawConfiguration =
await CoreConfigFiles_1.CoreConfigFiles.nodeServiceConfigurationFile.tryLoadConfigurationFileForProjectAsync(taskSession.logger.terminal, heftConfiguration.buildFolderPath, heftConfiguration.rigConfig);
// defaults
this._configuration = {
commandName: 'serve',
ignoreMissingScript: false,
waitForTerminateMs: 2000,
waitForKillMs: 2000
};
// TODO: @rushstack/heft-config-file should be able to read a *.defaults.json file
if (this._rawConfiguration) {
this._pluginEnabled = true;
if (this._rawConfiguration.commandName !== undefined) {
this._configuration.commandName = this._rawConfiguration.commandName;
async _loadStageConfiguration(heftConfiguration) {
this._rawConfiguration =
await CoreConfigFiles_1.CoreConfigFiles.nodeServiceConfigurationLoader.tryLoadConfigurationFileForProjectAsync(this._logger.terminal, heftConfiguration.buildFolder, heftConfiguration.rigConfig);
// defaults
this._configuration = {
commandName: 'serve',
ignoreMissingScript: false,
waitBeforeRestartMs: 2000,
waitForTerminateMs: 2000,
waitForKillMs: 2000
};
// TODO: @rushstack/heft-config-file should be able to read a *.defaults.json file
if (this._rawConfiguration) {
this._pluginEnabled = true;
if (this._rawConfiguration.commandName !== undefined) {
this._configuration.commandName = this._rawConfiguration.commandName;
}
if (this._rawConfiguration.ignoreMissingScript !== undefined) {
this._configuration.ignoreMissingScript = this._rawConfiguration.ignoreMissingScript;
}
if (this._rawConfiguration.waitBeforeRestartMs !== undefined) {
this._configuration.waitBeforeRestartMs = this._rawConfiguration.waitBeforeRestartMs;
}
if (this._rawConfiguration.waitForTerminateMs !== undefined) {
this._configuration.waitForTerminateMs = this._rawConfiguration.waitForTerminateMs;
}
if (this._rawConfiguration.waitForKillMs !== undefined) {
this._configuration.waitForKillMs = this._rawConfiguration.waitForKillMs;
}
this._shellCommand = (heftConfiguration.projectPackageJson.scripts || {})[this._configuration.commandName];
if (this._shellCommand === undefined) {
if (this._configuration.ignoreMissingScript) {
this._logger.terminal.writeLine(`The plugin is disabled because the project's package.json` +
` does not have a "${this._configuration.commandName}" script`);
}
if (this._rawConfiguration.ignoreMissingScript !== undefined) {
this._configuration.ignoreMissingScript = this._rawConfiguration.ignoreMissingScript;
else {
throw new Error(`The node-service task cannot start because the project's package.json ` +
`does not have a "${this._configuration.commandName}" script`);
}
if (this._rawConfiguration.waitForTerminateMs !== undefined) {
this._configuration.waitForTerminateMs = this._rawConfiguration.waitForTerminateMs;
this._pluginEnabled = false;
}
}
else {
this._logger.terminal.writeVerboseLine('The plugin is disabled because its config file was not found: ' +
CoreConfigFiles_1.CoreConfigFiles.nodeServiceConfigurationLoader.projectRelativeFilePath);
}
}
async _runCommandAsync(heftSession, heftConfiguration) {
this._logger.terminal.writeLine(`Starting Node service...`);
this._restartChild();
}
_restartChild() {
if (this._state !== State.Stopped) {
throw new node_core_library_1.InternalError('Invalid state');
}
this._state = State.Running;
this._clearTimeout();
this._logger.terminal.writeLine('Invoking command: ' + JSON.stringify(this._shellCommand));
this._activeChildProcess = child_process.spawn(this._shellCommand, Object.assign({ shell: true, stdio: ['inherit', 'inherit', 'inherit'] }, node_core_library_1.SubprocessTerminator.RECOMMENDED_OPTIONS));
node_core_library_1.SubprocessTerminator.killProcessTreeOnExit(this._activeChildProcess, node_core_library_1.SubprocessTerminator.RECOMMENDED_OPTIONS);
const childPid = this._activeChildProcess.pid;
this._logger.terminal.writeVerboseLine(`Started service process #${childPid}`);
this._activeChildProcess.on('close', (code, signal) => {
this._trapUnhandledException(() => {
// The 'close' event is emitted after a process has ended and the stdio streams of a child process
// have been closed. This is distinct from the 'exit' event, since multiple processes might share the
// same stdio streams. The 'close' event will always emit after 'exit' was already emitted,
// or 'error' if the child failed to spawn.
if (this._state === State.Running) {
this._logger.terminal.writeWarningLine(`The service process #${childPid} terminated unexpectedly` +
this._formatCodeOrSignal(code, signal));
this._childProcessFailed = true;
this._transitionToStopped();
return;
}
if (this._rawConfiguration.waitForKillMs !== undefined) {
this._configuration.waitForKillMs = this._rawConfiguration.waitForKillMs;
if (this._state === State.Stopping || this._state === State.Killing) {
this._logger.terminal.writeVerboseLine(`The service process #${childPid} terminated successfully` +
this._formatCodeOrSignal(code, signal));
this._transitionToStopped();
return;
}
this._shellCommand = (heftConfiguration.projectPackageJson.scripts || {})[this._configuration.commandName];
if (this._shellCommand === undefined) {
if (this._configuration.ignoreMissingScript) {
taskSession.logger.terminal.writeLine(`The plugin is disabled because the project's package.json` +
` does not have a "${this._configuration.commandName}" script`);
}
else {
throw new Error(`The node-service task cannot start because the project's package.json ` +
`does not have a "${this._configuration.commandName}" script`);
}
this._pluginEnabled = false;
});
});
// This is event only fires for Node.js >= 15.x
this._activeChildProcess.on('spawn', () => {
this._trapUnhandledException(() => {
// Print a newline to separate the service's STDOUT from Heft's output
console.log();
});
});
this._activeChildProcess.on('exit', (code, signal) => {
this._trapUnhandledException(() => {
this._logger.terminal.writeVerboseLine(`The service process fired its "exit" event` + this._formatCodeOrSignal(code, signal));
});
});
this._activeChildProcess.on('error', (err) => {
this._trapUnhandledException(() => {
// "The 'error' event is emitted whenever:
// 1. The process could not be spawned, or
// 2. The process could not be killed, or
// 3. Sending a message to the child process failed.
//
// The 'exit' event may or may not fire after an error has occurred. When listening to both the 'exit'
// and 'error' events, guard against accidentally invoking handler functions multiple times."
if (this._state === State.Running) {
this._logger.terminal.writeErrorLine(`Failed to start: ` + err.toString());
this._childProcessFailed = true;
this._transitionToStopped();
return;
}
}
else {
taskSession.logger.terminal.writeVerboseLine('The plugin is disabled because its config file was not found: ' +
CoreConfigFiles_1.CoreConfigFiles.nodeServiceConfigurationFile.projectRelativeFilePath);
}
}
if (this._state === State.Stopping) {
this._logger.terminal.writeWarningLine(`The service process #${childPid} rejected the shutdown signal: ` + err.toString());
this._transitionToKilling();
return;
}
if (this._state === State.Killing) {
this._logger.terminal.writeErrorLine(`The service process #${childPid} could not be killed: ` + err.toString());
this._transitionToStopped();
return;
}
});
});
}
async _runCommandAsync(taskSession, heftConfiguration) {
await this._loadStageConfiguration(taskSession, heftConfiguration);
if (!this._pluginEnabled) {
return;
_formatCodeOrSignal(code, signal) {
if (signal) {
return ` (signal=${code})`;
}
this._logger.terminal.writeLine(`Starting Node service...`);
await this._stopChildAsync();
this._startChild();
if (typeof code === 'number') {
return ` (exit code ${code})`;
}
return '';
}
async _stopChildAsync() {
_stopChild() {
if (this._state !== State.Running) {
if (this._childProcessExitPromise) {
// If we have an active process but are not in the running state, we must be in the process of
// terminating or the process is already stopped.
await this._childProcessExitPromise;
}
return;

@@ -161,2 +276,3 @@ }

this._state = State.Stopping;
this._clearTimeout();
this._logger.terminal.writeVerboseLine('Sending SIGTERM to gracefully shut down the service process');

@@ -168,17 +284,11 @@ // Passing a negative PID terminates the entire group instead of just the one process.

this._timeout = setTimeout(() => {
try {
if (this._state !== State.Stopped) {
this._logger.terminal.writeWarningLine('The service process is taking too long to terminate');
this._transitionToKilling();
}
}
catch (e) {
this._childProcessExitPromiseRejectFn(e);
}
this._timeout = undefined;
this._logger.terminal.writeWarningLine('The service process is taking too long to terminate');
this._transitionToKilling();
}, this._configuration.waitForTerminateMs);
}
await this._childProcessExitPromise;
}
_transitionToKilling() {
this._state = State.Killing;
this._clearTimeout();
if (!this._activeChildProcess) {

@@ -192,11 +302,5 @@ // All the code paths that set _activeChildProcess=undefined should also leave the Running state

this._timeout = setTimeout(() => {
try {
if (this._state !== State.Stopped) {
this._logger.terminal.writeErrorLine('Abandoning the service process because it could not be killed');
this._transitionToStopped();
}
}
catch (e) {
this._childProcessExitPromiseRejectFn(e);
}
this._timeout = undefined;
this._logger.terminal.writeErrorLine('Abandoning the service process because it could not be killed');
this._transitionToStopped();
}, this._configuration.waitForKillMs);

@@ -209,88 +313,24 @@ }

this._activeChildProcess = undefined;
this._childProcessExitPromiseResolveFn();
// Once we have stopped, schedule a restart
if (!this._childProcessFailed) {
this._scheduleRestart(this._configuration.waitBeforeRestartMs);
}
else {
this._logger.terminal.writeLine('The service process has failed. Waiting for watch mode to recompile before restarting...');
}
}
_startChild() {
if (this._state !== State.Stopped) {
throw new node_core_library_1.InternalError('Invalid state');
_scheduleRestart(msFromNow) {
const newTime = perf_hooks_1.performance.now() + msFromNow;
if (this._restartTime !== undefined && newTime < this._restartTime) {
return;
}
this._state = State.Running;
this._restartTime = newTime;
this._logger.terminal.writeVerboseLine(`Sleeping for ${msFromNow} milliseconds`);
this._clearTimeout();
this._logger.terminal.writeLine(`Invoking command: "${this._shellCommand}"`);
const childProcess = child_process.spawn(this._shellCommand, Object.assign({ shell: true }, node_core_library_1.SubprocessTerminator.RECOMMENDED_OPTIONS));
node_core_library_1.SubprocessTerminator.killProcessTreeOnExit(childProcess, node_core_library_1.SubprocessTerminator.RECOMMENDED_OPTIONS);
const childPid = childProcess.pid;
this._logger.terminal.writeVerboseLine(`Started service process #${childPid}`);
// Create a promise that resolves when the child process exits
this._childProcessExitPromise = new Promise((resolve, reject) => {
var _a, _b;
this._childProcessExitPromiseResolveFn = resolve;
this._childProcessExitPromiseRejectFn = reject;
(_a = childProcess.stdout) === null || _a === void 0 ? void 0 : _a.on('data', (data) => {
this._logger.terminal.write(data.toString());
});
(_b = childProcess.stderr) === null || _b === void 0 ? void 0 : _b.on('data', (data) => {
this._logger.terminal.writeError(data.toString());
});
childProcess.on('close', (code, signal) => {
try {
// The 'close' event is emitted after a process has ended and the stdio streams of a child process
// have been closed. This is distinct from the 'exit' event, since multiple processes might share the
// same stdio streams. The 'close' event will always emit after 'exit' was already emitted,
// or 'error' if the child failed to spawn.
if (this._state === State.Running) {
this._logger.terminal.writeWarningLine(`The service process #${childPid} terminated unexpectedly` +
this._formatCodeOrSignal(code, signal));
this._transitionToStopped();
return;
}
if (this._state === State.Stopping || this._state === State.Killing) {
this._logger.terminal.writeVerboseLine(`The service process #${childPid} terminated successfully` +
this._formatCodeOrSignal(code, signal));
this._transitionToStopped();
return;
}
}
catch (e) {
reject(e);
}
});
childProcess.on('exit', (code, signal) => {
try {
this._logger.terminal.writeVerboseLine(`The service process fired its "exit" event` + this._formatCodeOrSignal(code, signal));
}
catch (e) {
reject(e);
}
});
childProcess.on('error', (err) => {
try {
// "The 'error' event is emitted whenever:
// 1. The process could not be spawned, or
// 2. The process could not be killed, or
// 3. Sending a message to the child process failed.
//
// The 'exit' event may or may not fire after an error has occurred. When listening to both the 'exit'
// and 'error' events, guard against accidentally invoking handler functions multiple times."
if (this._state === State.Running) {
this._logger.terminal.writeErrorLine(`Failed to start: ` + err.toString());
this._transitionToStopped();
return;
}
if (this._state === State.Stopping) {
this._logger.terminal.writeWarningLine(`The service process #${childPid} rejected the shutdown signal: ` + err.toString());
this._transitionToKilling();
return;
}
if (this._state === State.Killing) {
this._logger.terminal.writeErrorLine(`The service process #${childPid} could not be killed: ` + err.toString());
this._transitionToStopped();
return;
}
}
catch (e) {
reject(e);
}
});
});
this._activeChildProcess = childProcess;
this._timeout = setTimeout(() => {
this._timeout = undefined;
this._restartTime = undefined;
this._logger.terminal.writeVerboseLine('Time to restart');
this._restartChild();
}, Math.max(0, this._restartTime - perf_hooks_1.performance.now()));
}

@@ -303,14 +343,16 @@ _clearTimeout() {

}
_formatCodeOrSignal(code, signal) {
if (signal) {
return ` (signal=${code})`;
_trapUnhandledException(action) {
try {
action();
}
if (typeof code === 'number') {
return ` (exit code ${code})`;
catch (error) {
this._logger.emitError(error);
this._logger.terminal.writeErrorLine('An unexpected error occurred');
// TODO: Provide a Heft facility for this
process.exit(1);
}
return '';
}
}
exports.default = NodeServicePlugin;
exports.NodeServicePlugin = NodeServicePlugin;
NodeServicePlugin._isWindows = process.platform === 'win32';
//# sourceMappingURL=NodeServicePlugin.js.map

@@ -1,8 +0,8 @@

import type { HeftConfiguration } from '../configuration/HeftConfiguration';
import type { IHeftTaskPlugin } from '../pluginFramework/IHeftPlugin';
import type { IHeftTaskSession, IHeftTaskRunHookOptions } from '../pluginFramework/HeftTaskSession';
interface IRunScriptPluginOptions {
scriptPath: string;
scriptOptions: Record<string, unknown>;
}
import { HeftEventPluginBase } from '../pluginFramework/HeftEventPluginBase';
import { ScopedLogger } from '../pluginFramework/logging/ScopedLogger';
import { HeftSession } from '../pluginFramework/HeftSession';
import { HeftConfiguration } from '../configuration/HeftConfiguration';
import { IHeftEventActions, IHeftConfigurationRunScriptEventAction, HeftEvent } from '../utilities/CoreConfigFiles';
import { IBuildStageProperties } from '../stages/BuildStage';
import { ITestStageProperties } from '../stages/TestStage';
/**

@@ -13,24 +13,23 @@ * Options provided to scripts that are run using the RunScriptPlugin.

*/
export interface IRunScriptOptions {
heftTaskSession: IHeftTaskSession;
export interface IRunScriptOptions<TStageProperties> {
scopedLogger: ScopedLogger;
heftConfiguration: HeftConfiguration;
runOptions: IHeftTaskRunHookOptions;
scriptOptions: Record<string, unknown>;
debugMode: boolean;
properties: TStageProperties;
scriptOptions: Record<string, any>;
}
/**
* Interface used by scripts that are run by the RunScriptPlugin.
*
* @beta
*/
export interface IRunScript {
export declare class RunScriptPlugin extends HeftEventPluginBase<IHeftConfigurationRunScriptEventAction> {
readonly pluginName: string;
protected readonly eventActionName: keyof IHeftEventActions;
protected readonly loggerName: string;
/**
* The method that is called by the RunScriptPlugin to run the script.
* @override
*/
runAsync: (options: IRunScriptOptions) => Promise<void>;
protected handleBuildEventActionsAsync(heftEvent: HeftEvent, runScriptEventActions: IHeftConfigurationRunScriptEventAction[], logger: ScopedLogger, heftSession: HeftSession, heftConfiguration: HeftConfiguration, properties: IBuildStageProperties): Promise<void>;
/**
* @override
*/
protected handleTestEventActionsAsync(heftEvent: HeftEvent, runScriptEventActions: IHeftConfigurationRunScriptEventAction[], logger: ScopedLogger, heftSession: HeftSession, heftConfiguration: HeftConfiguration, properties: ITestStageProperties): Promise<void>;
private _runScriptsForHeftEventActions;
}
export default class RunScriptPlugin implements IHeftTaskPlugin<IRunScriptPluginOptions> {
apply(heftTaskSession: IHeftTaskSession, heftConfiguration: HeftConfiguration, pluginOptions: IRunScriptPluginOptions): void;
private _runScriptAsync;
}
export {};
//# sourceMappingURL=RunScriptPlugin.d.ts.map
"use strict";
// Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT license.
// See LICENSE in the project root for license information.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
const PLUGIN_NAME = 'run-script-plugin';
class RunScriptPlugin {
apply(heftTaskSession, heftConfiguration, pluginOptions) {
heftTaskSession.hooks.run.tapPromise(PLUGIN_NAME, async (runOptions) => {
await this._runScriptAsync(heftTaskSession, heftConfiguration, pluginOptions, runOptions);
});
exports.RunScriptPlugin = void 0;
const node_core_library_1 = require("@rushstack/node-core-library");
const HeftEventPluginBase_1 = require("../pluginFramework/HeftEventPluginBase");
const Constants_1 = require("../utilities/Constants");
class RunScriptPlugin extends HeftEventPluginBase_1.HeftEventPluginBase {
constructor() {
super(...arguments);
this.pluginName = 'RunScriptPlugin';
this.eventActionName = 'runScript';
this.loggerName = 'run-script';
}
async _runScriptAsync(heftTaskSession, heftConfiguration, pluginOptions, runOptions) {
// The scriptPath property should be fully resolved since it is included in the resolution logic used by
// HeftConfiguration
const resolvedModulePath = pluginOptions.scriptPath;
const runScript = await Promise.resolve().then(() => __importStar(require(resolvedModulePath)));
if (!runScript.runAsync) {
throw new Error(`The script at ${JSON.stringify(resolvedModulePath)} doesn\'t export a "runAsync" function.`);
}
const runScriptOptions = {
heftTaskSession,
heftConfiguration,
runOptions,
scriptOptions: pluginOptions.scriptOptions
};
await runScript.runAsync(runScriptOptions);
/**
* @override
*/
async handleBuildEventActionsAsync(heftEvent, runScriptEventActions, logger, heftSession, heftConfiguration, properties) {
await this._runScriptsForHeftEventActions(runScriptEventActions, logger, heftSession, heftConfiguration, properties);
}
/**
* @override
*/
async handleTestEventActionsAsync(heftEvent, runScriptEventActions, logger, heftSession, heftConfiguration, properties) {
await this._runScriptsForHeftEventActions(runScriptEventActions, logger, heftSession, heftConfiguration, properties);
}
async _runScriptsForHeftEventActions(runScriptEventActions, logger, heftSession, heftConfiguration, properties) {
await node_core_library_1.Async.forEachAsync(runScriptEventActions, async (runScriptEventAction) => {
// The scriptPath property should be fully resolved since it is included in the resolution logic used by
// HeftConfiguration
const resolvedModulePath = runScriptEventAction.scriptPath;
// Use the HeftEvent.actionId field for the logger since this should identify the HeftEvent that the
// script is sourced from. This is also a bit more user-friendly and customizable than simply using
// the script name for the logger. We will also prefix the logger name with the plugin name to clarify
// that the output is coming from the RunScriptPlugin.
const scriptLogger = heftSession.requestScopedLogger(`${logger.loggerName}:${runScriptEventAction.actionId}`);
const runScript = require(resolvedModulePath);
if (runScript.run && runScript.runAsync) {
throw new Error(`The script at "${resolvedModulePath}" exports both a "run" and a "runAsync" function`);
}
else if (!runScript.run && !runScript.runAsync) {
throw new Error(`The script at "${resolvedModulePath}" doesn\'t export a "run" or a "runAsync" function`);
}
const runScriptOptions = {
scopedLogger: scriptLogger,
debugMode: heftSession.debugMode,
scriptOptions: runScriptEventAction.scriptOptions,
heftConfiguration,
properties
};
if (runScript.run) {
runScript.run(runScriptOptions);
}
else if (runScript.runAsync) {
await runScript.runAsync(runScriptOptions);
}
}, { concurrency: Constants_1.Constants.maxParallelism });
}
}
exports.default = RunScriptPlugin;
exports.RunScriptPlugin = RunScriptPlugin;
//# sourceMappingURL=RunScriptPlugin.js.map

@@ -11,80 +11,2 @@ {

"items": { "$ref": "#/definitions/anything" }
},
"heft-plugin": {
"description": "A plugin that can be used to extend Heft functionality.",
"type": "object",
"required": ["pluginPackage"],
"additionalProperties": false,
"properties": {
"pluginPackage": {
"description": "The plugin package name.",
"type": "string",
"pattern": "[^\\\\]"
},
"pluginName": {
"description": "Name of the plugin to load from the specified package. If not specified and the plugin package provides a single plugin, that plugin will be loaded.",
"type": "string",
"pattern": "^[a-z][a-z0-9]*([-][a-z0-9]+)*$"
},
"options": {
"description": "Options to pass to the plugin.",
"type": "object"
}
}
},
"heft-event": {
"description": "An event that can be used to extend Heft functionality.",
"type": "object",
"required": ["eventKind"],
"additionalProperties": false,
"properties": {
"eventKind": {
"description": "Name of the event to trigger.",
"type": "string",
"enum": ["copyFiles", "deleteFiles", "runScript", "nodeService"]
},
"options": {
"description": "Options to pass to the event.",
"type": "object"
}
}
},
"delete-operation": {
"type": "object",
"additionalProperties": false,
"properties": {
"sourcePath": {
"title": "Source Path",
"type": "string",
"description": "The target file or folder, relative to the project root. Settings such as \"includeGlobs\" and \"excludeGlobs\" will be resolved relative to this folder. If no globs or file extensions are specified, the file or folder will be deleted.",
"pattern": "[^\\\\]"
},
"fileExtensions": {
"type": "array",
"description": "If specified, this option recursively scans all folders under \"sourcePath\" and includes any files that match the specified extensions. (If \"fileExtensions\" and \"includeGlobs\" are both specified, their selections are added together.)",
"items": {
"type": "string",
"pattern": "^\\.[A-z0-9-_.]*[A-z0-9-_]+$"
}
},
"excludeGlobs": {
"type": "array",
"description": "A list of glob patterns that exclude files or folders from being copied. The paths are resolved relative to \"sourcePath\". These exclusions eliminate items that were selected by the \"includeGlobs\" or \"fileExtensions\" setting.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"includeGlobs": {
"type": "array",
"description": "A list of glob patterns that select files to be copied. The paths are resolved relative to \"sourcePath\".",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
}
}
}

@@ -106,110 +28,185 @@ },

"watchOptions": {
"title": "Watch Mode Options",
"type": "object",
"description": "Global options used by Heft when running in watch mode.",
"properties": {
"ignoredSourceFileGlobs": {
"title": "Ignored Source File Globs",
"type": "array",
"description": "Glob patterns for source files that should be ignored when watching for changes. Changes to these files will still be detected and recorded by Heft, but will not trigger a rebuild.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"forbiddenSourceFileGlobs": {
"title": "Forbidden Source File Globs",
"type": "array",
"description": "Glob patterns for source files that should break the build when watching for changes. Changes to these files will cause Heft to exit with an error. Changes in project \"package.json\" files and \"config\" and \".rush\" folders are always forbidden and do not need to be included in this list.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
}
}
},
"heftPlugins": {
"eventActions": {
"type": "array",
"description": "List of Heft plugins that are used by a project.",
"items": { "$ref": "#/definitions/heft-plugin" }
},
"description": "An array of actions (such as deleting files or folders) that should occur during a Heft run.",
"phasesByName": {
"type": "object",
"description": "Heft phases that can be run during an execution of Heft.",
"additionalProperties": false,
"patternProperties": {
"^[a-z][a-z0-9]*([-][a-z0-9]+)*$": {
"description": "Defines a Heft phase.",
"type": "object",
"additionalProperties": false,
"properties": {
"phaseDescription": {
"description": "A description of the purpose of the Heft phase.",
"type": "string"
},
"items": {
"type": "object",
"required": ["actionKind", "heftEvent", "actionId"],
"allOf": [
{
"properties": {
"actionKind": {
"type": "string",
"description": "The kind of built-in operation that should be performed.",
"enum": ["deleteGlobs", "copyFiles", "runScript"]
},
"phaseDependencies": {
"type": "array",
"description": "List of phase names that must be run before this phase.",
"items": {
"heftEvent": {
"type": "string",
"pattern": "^[a-z][a-z0-9]*([-][a-z0-9]+)*$"
"description": "The Heft stage when this action should be performed. Note that heft.json event actions are scheduled after any plugin tasks have processed the event. For example, a \"compile\" event action will be performed after the TypeScript compiler has been invoked.",
"enum": ["clean", "pre-compile", "compile", "bundle", "post-build", "test"]
},
"actionId": {
"type": "string",
"description": "A user-defined tag whose purpose is to allow configs to replace/delete handlers that were added by other configs."
}
},
}
},
{
"oneOf": [
{
"required": ["globsToDelete"],
"properties": {
"actionKind": {
"type": "string",
"enum": ["deleteGlobs"]
},
"cleanFiles": {
"description": "List of delete operations to perform when cleaning at the beginning of phase execution.",
"type": "array",
"items": { "$ref": "#/definitions/delete-operation" }
},
"heftEvent": {
"type": "string",
"enum": ["clean", "pre-compile", "compile", "bundle", "post-build"]
},
"tasksByName": {
"description": "Heft tasks that are run during an execution of the Heft phase.",
"type": "object",
"additionalProperties": false,
"patternProperties": {
"^[a-z][a-z0-9]*([-][a-z0-9]+)*$": {
"type": "object",
"description": "Defines a Heft task.",
"additionalProperties": false,
"oneOf": [
{
"required": ["taskPlugin"],
"globsToDelete": {
"type": "array",
"description": "Glob patterns to be deleted. The paths are resolved relative to the project folder.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
}
}
},
{
"required": ["copyOperations"],
"properties": {
"actionKind": {
"type": "string",
"enum": ["copyFiles"]
},
"heftEvent": {
"type": "string",
"enum": ["pre-compile", "compile", "bundle", "post-build"]
},
"copyOperations": {
"type": "array",
"description": "An array of copy operations to run perform during the specified Heft event.",
"items": {
"type": "object",
"required": ["sourceFolder", "destinationFolders"],
"properties": {
"taskPlugin": { "$ref": "#/definitions/heft-plugin" }
"sourceFolder": {
"type": "string",
"description": "The base folder that files will be copied from, relative to the project root. Settings such as \"includeGlobs\" and \"excludeGlobs\" will be resolved relative to this folder. NOTE: Assigning \"sourceFolder\" does not by itself select any files to be copied.",
"pattern": "[^\\\\]"
},
"destinationFolders": {
"type": "array",
"description": "One or more folders that files will be copied into, relative to the project root. If you specify more than one destination folder, Heft will read the input files only once, using streams to efficiently write multiple outputs.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"fileExtensions": {
"type": "array",
"description": "If specified, this option recursively scans all folders under \"sourceFolder\" and includes any files that match the specified extensions. (If \"fileExtensions\" and \"includeGlobs\" are both specified, their selections are added together.)",
"items": {
"type": "string",
"pattern": "^\\.[A-z0-9-_.]*[A-z0-9-_]+$"
}
},
"excludeGlobs": {
"type": "array",
"description": "A list of glob patterns that exclude files/folders from being copied. The paths are resolved relative to \"sourceFolder\". These exclusions eliminate items that were selected by the \"includeGlobs\" or \"fileExtensions\" setting.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"includeGlobs": {
"type": "array",
"description": "A list of glob patterns that select files to be copied. The paths are resolved relative to \"sourceFolder\".",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"flatten": {
"type": "boolean",
"description": "Normally, when files are selected under a child folder, a corresponding folder will be created in the destination folder. Specify flatten=true to discard the source path and copy all matching files to the same folder. If two files have the same name an error will be reported. The default value is false."
},
"hardlink": {
"type": "boolean",
"description": "If true, filesystem hard links will be created instead of copying the file. Depending on the operating system, this may be faster. (But note that it may cause unexpected behavior if a tool modifies the link.) The default value is false."
}
}
},
{
"required": ["taskEvent"],
"properties": {
"taskEvent": { "$ref": "#/definitions/heft-event" }
}
}
],
"properties": {
"taskPlugin": {
"description": "A plugin that can be used to extend Heft functionality.",
"type": "object"
},
"taskEvent": {
"description": "An event that can be used to extend Heft functionality.",
"type": "object"
},
}
}
},
{
"required": ["scriptPath"],
"properties": {
"actionKind": {
"type": "string",
"enum": ["runScript"]
},
"taskDependencies": {
"type": "array",
"description": "List of task names that must be run before this task.",
"items": {
"type": "string",
"pattern": "^[a-z][a-z0-9]*([-][a-z0-9]+)*$"
}
"heftEvent": {
"type": "string",
"enum": ["pre-compile", "compile", "bundle", "post-build", "test"]
},
"scriptPath": {
"type": "string",
"description": "Path to the script that will be run, relative to the project root.",
"items": {
"type": "string",
"pattern": "[^\\\\]"
}
},
"scriptOptions": {
"type": "object",
"description": "Optional parameters that will be passed to the script at runtime.",
"patternProperties": {
"^.*$": { "$ref": "#/definitions/anything" }
}
}
}
}
}
]
}
]
}
},
"heftPlugins": {
"type": "array",
"description": "Defines heft plugins that are used by a project.",
"items": {
"type": "object",
"required": ["plugin"],
"properties": {
"plugin": {
"description": "Path to the plugin package, relative to the project root.",
"type": "string",
"pattern": "[^\\\\]"
},
"options": {
"type": "object"
}
}

@@ -216,0 +213,0 @@ }

@@ -30,2 +30,7 @@ {

"waitBeforeRestartMs": {
"description": "Customizes the number of milliseconds to wait before restarting the child process, as measured from when the previous process exited. If this interval is too small, then the new process may start while the developer is still saving changes, or while other monitoring processes are still holding OS locks.",
"type": "number"
},
"waitForTerminateMs": {

@@ -32,0 +37,0 @@ "description": "Customizes the number of milliseconds to wait for the child process to be terminated (SIGTERM) before forcibly killing it.",

@@ -15,5 +15,5 @@ "use strict";

.catch((error) => {
parser.globalTerminal.writeErrorLine(error.toString());
parser.terminal.writeErrorLine(error.toString());
process.exit(1);
});
//# sourceMappingURL=start.js.map

@@ -32,3 +32,2 @@ "use strict";

const fs = __importStar(require("fs"));
const Constants_1 = require("./utilities/Constants");
const HEFT_PACKAGE_NAME = '@rushstack/heft';

@@ -67,12 +66,10 @@ // Excerpted from PackageJsonLookup.tryGetPackageFolderFor()

function tryStartLocalHeft() {
if (process.argv.indexOf(Constants_1.Constants.unmanagedParameterLongName) >= 0) {
console.log(`Bypassing the Heft version selector because ${JSON.stringify(Constants_1.Constants.unmanagedParameterLongName)} ` +
'was specified.');
if (process.argv.indexOf('--unmanaged') >= 0) {
console.log('(Bypassing the Heft version selector because "--unmanaged" was specified.)');
console.log();
return false;
}
else if (process.argv.indexOf(Constants_1.Constants.debugParameterLongName) >= 0) {
else if (process.argv.indexOf('--debug') >= 0) {
// The unmanaged flag could be undiscoverable if it's not in their locally installed version
console.log('Searching for a locally installed version of Heft. Use the ' +
`${JSON.stringify(Constants_1.Constants.unmanagedParameterLongName)} flag if you want to avoid this.`);
console.log('Searching for a locally installed version of Heft. Use the --unmanaged flag if you want to avoid this');
}

@@ -106,2 +103,4 @@ // Find the package.json file that governs the current folder location

}
console.log(`Using local Heft from ${heftFolder}`);
console.log();
}

@@ -108,0 +107,0 @@ catch (error) {

/**
 * Well-known string constants shared across Heft: project folder/file names and
 * the long/short names of Heft's command-line parameters.  The concrete values
 * are assigned in Constants.js (e.g. projectHeftFolderName = ".heft",
 * debugParameterLongName = "--debug").
 */
export declare class Constants {
// --- Folder and file names ---
static projectHeftFolderName: string;
static projectConfigFolderName: string;
static cacheFolderName: string;
static tempFolderName: string;
static heftConfigurationFilename: string;
static nodeServiceConfigurationFilename: string;
// --- Command-line parameter names (long forms start with "--", short with "-") ---
static cleanParameterLongName: string;
static cleanCacheParameterLongName: string;
static buildCacheFolderName: string;
static pluginParameterLongName: string;
static debugParameterLongName: string;
static localesParameterLongName: string;
static onlyParameterLongName: string;
static onlyParameterShortName: string;
static productionParameterLongName: string;
static toParameterLongName: string;
static toParameterShortName: string;
static toExceptParameterLongName: string;
static toExceptParameterShortName: string;
static unmanagedParameterLongName: string;
static verboseParameterLongName: string;
static verboseParameterShortName: string;
// NOTE(review): presumably caps Heft's concurrent task execution — confirm at usage sites
static maxParallelism: number;
}
//# sourceMappingURL=Constants.d.ts.map

@@ -9,22 +9,8 @@ "use strict";

// Static property values for the Constants class (class body not visible in this excerpt).
exports.Constants = Constants;
// Folder and file names used by Heft projects
Constants.projectHeftFolderName = '.heft';
Constants.projectConfigFolderName = 'config';
Constants.cacheFolderName = '.cache';
Constants.tempFolderName = 'temp';
Constants.heftConfigurationFilename = 'heft.json';
Constants.nodeServiceConfigurationFilename = 'node-service.json';
// Command-line parameter long/short names
Constants.cleanParameterLongName = '--clean';
Constants.cleanCacheParameterLongName = '--clean-cache';
Constants.buildCacheFolderName = 'build-cache';
Constants.pluginParameterLongName = '--plugin';
Constants.debugParameterLongName = '--debug';
Constants.localesParameterLongName = '--locales';
Constants.onlyParameterLongName = '--only';
Constants.onlyParameterShortName = '-o';
Constants.productionParameterLongName = '--production';
Constants.toParameterLongName = '--to';
Constants.toParameterShortName = '-t';
Constants.toExceptParameterLongName = '--to-except';
Constants.toExceptParameterShortName = '-T';
Constants.unmanagedParameterLongName = '--unmanaged';
Constants.verboseParameterLongName = '--verbose';
Constants.verboseParameterShortName = '-v';
// NOTE(review): presumably an upper bound on concurrent operations — confirm at usage sites
Constants.maxParallelism = 100;
//# sourceMappingURL=Constants.js.map
import { ConfigurationFile } from '@rushstack/heft-config-file';
import { ITerminal } from '@rushstack/node-core-library';
import type { RigConfig } from '@rushstack/rig-package';
import type { IDeleteOperation } from '../plugins/DeleteFilesPlugin';
import type { INodeServicePluginConfiguration } from '../plugins/NodeServicePlugin';
export declare type HeftEventKind = 'copyFiles' | 'deleteFiles' | 'runScript' | 'nodeService';
export interface IHeftConfigurationJsonEventSpecifier {
eventKind: HeftEventKind;
options?: object;
import { IApiExtractorPluginConfiguration } from '../plugins/ApiExtractorPlugin/ApiExtractorPlugin';
import { ITypeScriptConfigurationJson } from '../plugins/TypeScriptPlugin/TypeScriptPlugin';
import { HeftConfiguration } from '../configuration/HeftConfiguration';
import { INodeServicePluginConfiguration } from '../plugins/NodeServicePlugin';
/**
 * The built-in lifecycle events that "heft.json" eventActions can attach to.
 * Each member's value is the literal string used in the config file.
 */
export declare enum HeftEvent {
clean = "clean",
preCompile = "pre-compile",
compile = "compile",
bundle = "bundle",
postBuild = "post-build",
test = "test"
}
export interface IHeftConfigurationJsonPluginSpecifier {
pluginPackage: string;
pluginPackageRoot: string;
pluginName?: string;
options?: object;
export interface IHeftConfigurationJsonEventActionBase {
actionKind: string;
heftEvent: 'clean' | 'pre-compile' | 'compile' | 'bundle' | 'post-build' | 'test';
actionId: string;
}
export interface IHeftConfigurationJsonTaskSpecifier {
taskDependencies?: string[];
taskEvent?: IHeftConfigurationJsonEventSpecifier;
taskPlugin?: IHeftConfigurationJsonPluginSpecifier;
export interface IHeftConfigurationDeleteGlobsEventAction extends IHeftConfigurationJsonEventActionBase {
actionKind: 'deleteGlobs';
globsToDelete: string[];
}
export interface IHeftConfigurationJsonTasks {
[taskName: string]: IHeftConfigurationJsonTaskSpecifier;
export interface IHeftConfigurationRunScriptEventAction extends IHeftConfigurationJsonEventActionBase {
actionKind: 'runScript';
scriptPath: string;
scriptOptions: Record<string, any>;
}
export interface IHeftConfigurationJsonPhaseSpecifier {
phaseDescription?: string;
phaseDependencies?: string[];
cleanFiles?: IDeleteOperation[];
tasksByName?: IHeftConfigurationJsonTasks;
/**
 * Options common to all copy operations configured in "heft.json": which files
 * to select (fileExtensions / includeGlobs / excludeGlobs) and how to copy
 * them (flatten / hardlink).
 */
export interface ISharedCopyConfiguration {
/**
 * File extensions that should be copied from the source folder to the destination folder(s)
 */
fileExtensions?: string[];
/**
 * Globs that should be explicitly excluded. This takes precedence over globs listed in "includeGlobs" and
 * files that match the file extensions provided in "fileExtensions".
 */
excludeGlobs?: string[];
/**
 * Globs that should be explicitly included.
 */
includeGlobs?: string[];
/**
 * Copy only the file and discard the relative path from the source folder.
 */
flatten?: boolean;
/**
 * Hardlink files instead of copying.
 */
hardlink?: boolean;
}
export interface IHeftConfigurationJsonPhases {
[phaseName: string]: IHeftConfigurationJsonPhaseSpecifier;
/**
 * A fully-specified copy operation: the shared selection options plus explicit
 * source and destination folders, as used by the "copyFiles" event action.
 */
export interface IExtendedSharedCopyConfiguration extends ISharedCopyConfiguration {
/**
 * The folder from which files should be copied, relative to the project root. For example, "src".
 */
sourceFolder: string;
/**
 * Folder(s) to which files should be copied, relative to the project root. For example ["lib", "lib-cjs"].
 */
destinationFolders: string[];
}
export interface IHeftConfigurationJsonWatchOptions {
ignoredSourceFileGlobs?: string[];
forbiddenSourceFileGlobs?: string[];
export interface IHeftConfigurationCopyFilesEventAction extends IHeftConfigurationJsonEventActionBase {
actionKind: 'copyFiles';
copyOperations: IExtendedSharedCopyConfiguration[];
}
export interface IHeftConfigurationJsonPluginSpecifier {
plugin: string;
options?: object;
}
/**
 * Shape of the parsed "config/heft.json" file.
 */
export interface IHeftConfigurationJson {
eventActions?: IHeftConfigurationJsonEventActionBase[]; // hooks distinguished by their "actionKind" field
heftPlugins?: IHeftConfigurationJsonPluginSpecifier[]; // plugin packages to load for this project
phasesByName?: IHeftConfigurationJsonPhases;
watchOptions?: IHeftConfigurationJsonWatchOptions;
}
/**
 * Event actions from "heft.json", grouped by action kind and bucketed by the
 * HeftEvent that triggers them (built by getConfigConfigFileEventActionsAsync).
 */
export interface IHeftEventActions {
copyFiles: Map<HeftEvent, IHeftConfigurationCopyFilesEventAction[]>;
deleteGlobs: Map<HeftEvent, IHeftConfigurationDeleteGlobsEventAction[]>;
runScript: Map<HeftEvent, IHeftConfigurationRunScriptEventAction[]>;
}
export declare class CoreConfigFiles {
private static _heftConfigFileLoader;
private static _heftConfigFileEventActionsCache;
private static _apiExtractorTaskConfigurationLoader;
private static _typeScriptConfigurationFileLoader;
private static _nodeServiceConfigurationLoader;

@@ -49,5 +88,23 @@ /**

*/
static loadHeftConfigurationFileForProjectAsync(terminal: ITerminal, projectPath: string, rigConfig?: RigConfig | undefined): Promise<IHeftConfigurationJson>;
static get nodeServiceConfigurationFile(): ConfigurationFile<INodeServicePluginConfiguration>;
static get heftConfigFileLoader(): ConfigurationFile<IHeftConfigurationJson>;
/**
* Gets the eventActions from config/heft.json
*/
static getConfigConfigFileEventActionsAsync(terminal: ITerminal, heftConfiguration: HeftConfiguration): Promise<IHeftEventActions>;
/**
* Returns the loader for the `config/api-extractor-task.json` config file.
*/
static get apiExtractorTaskConfigurationLoader(): ConfigurationFile<IApiExtractorPluginConfiguration>;
/**
* Returns the loader for the `config/typescript.json` config file.
*/
static get typeScriptConfigurationFileLoader(): ConfigurationFile<ITypeScriptConfigurationJson>;
/**
* Returns the loader for the `config/node-service.json` config file.
*/
static get nodeServiceConfigurationLoader(): ConfigurationFile<INodeServicePluginConfiguration>;
private static _addEventActionToMap;
private static _parseHeftEvent;
private static _inheritArray;
}
//# sourceMappingURL=CoreConfigFiles.d.ts.map

@@ -28,7 +28,17 @@ "use strict";

Object.defineProperty(exports, "__esModule", { value: true });
exports.CoreConfigFiles = void 0;
exports.CoreConfigFiles = exports.HeftEvent = void 0;
const path = __importStar(require("path"));
const heft_config_file_1 = require("@rushstack/heft-config-file");
const node_core_library_1 = require("@rushstack/node-core-library");
const Constants_1 = require("./Constants");
// Runtime counterpart of the HeftEvent declaration: maps enum members to the
// event-name strings that appear in "heft.json" (standard TypeScript enum emit).
var HeftEvent;
(function (HeftEvent) {
// Part of the 'clean' stage
HeftEvent["clean"] = "clean";
// Part of the 'build' stage
HeftEvent["preCompile"] = "pre-compile";
HeftEvent["compile"] = "compile";
HeftEvent["bundle"] = "bundle";
HeftEvent["postBuild"] = "post-build";
// Part of the 'test' stage
HeftEvent["test"] = "test";
})(HeftEvent = exports.HeftEvent || (exports.HeftEvent = {}));
class CoreConfigFiles {

@@ -38,34 +48,18 @@ /**

*/
static async loadHeftConfigurationFileForProjectAsync(terminal, projectPath, rigConfig) {
static get heftConfigFileLoader() {
if (!CoreConfigFiles._heftConfigFileLoader) {
const pluginPackageResolver = (configurationFilePath, propertyName, propertyValue) => {
const configurationFileDirectory = path.dirname(configurationFilePath);
return node_core_library_1.Import.resolvePackage({
packageName: propertyValue,
baseFolderPath: configurationFileDirectory
});
};
const schemaPath = path.join(__dirname, '..', 'schemas', 'heft.schema.json');
CoreConfigFiles._heftConfigFileLoader = new heft_config_file_1.ConfigurationFile({
projectRelativeFilePath: `${Constants_1.Constants.projectConfigFolderName}/${Constants_1.Constants.heftConfigurationFilename}`,
projectRelativeFilePath: 'config/heft.json',
jsonSchemaPath: schemaPath,
propertyInheritanceDefaults: {
array: { inheritanceType: heft_config_file_1.InheritanceType.append },
object: { inheritanceType: heft_config_file_1.InheritanceType.merge }
propertyInheritance: {
heftPlugins: {
inheritanceType: heft_config_file_1.InheritanceType.append
}
},
jsonPathMetadata: {
// Use a custom resolver for the plugin packages, since the NodeResolve algorithm will resolve to the
// package.json exports/module property, which may or may not exist.
'$.heftPlugins.*.pluginPackage': {
pathResolutionMethod: heft_config_file_1.PathResolutionMethod.custom,
customResolver: pluginPackageResolver
'$.heftPlugins.*.plugin': {
pathResolutionMethod: heft_config_file_1.PathResolutionMethod.NodeResolve
},
// Use a custom resolver for the plugin packages, since the NodeResolve algorithm will resolve to the
// package.json exports/module property, which may or may not exist.
'$.phasesByName.*.tasksByName.*.taskPlugin.pluginPackage': {
pathResolutionMethod: heft_config_file_1.PathResolutionMethod.custom,
customResolver: pluginPackageResolver
},
// Special handling for "runScript" task events to resolve the script path
'$.phasesByName.*.tasksByName[?(@.taskEvent && @.taskEvent.eventKind == "runScript")].taskEvent.options.scriptPath': {
'$.eventActions.[?(@.actionKind==="runScript")].scriptPath': {
pathResolutionMethod: heft_config_file_1.PathResolutionMethod.resolvePathRelativeToProjectRoot

@@ -76,27 +70,84 @@ }

}
const configurationFile = await CoreConfigFiles._heftConfigFileLoader.loadConfigurationFileForProjectAsync(terminal, projectPath, rigConfig);
// The pluginPackage field was resolved to the root of the package, but we also want to have
// the original plugin package name in the config file. Gather all the plugin specifiers so we can
// add the original data ourselves.
const pluginSpecifiers = [
...(configurationFile.heftPlugins || [])
];
for (const { tasksByName } of Object.values(configurationFile.phasesByName || {})) {
for (const { taskPlugin } of Object.values(tasksByName || {})) {
if (taskPlugin) {
pluginSpecifiers.push(taskPlugin);
return CoreConfigFiles._heftConfigFileLoader;
}
/**
* Gets the eventActions from config/heft.json
*/
static async getConfigConfigFileEventActionsAsync(terminal, heftConfiguration) {
let result = CoreConfigFiles._heftConfigFileEventActionsCache.get(heftConfiguration);
if (!result) {
const heftConfigJson = await CoreConfigFiles.heftConfigFileLoader.tryLoadConfigurationFileForProjectAsync(terminal, heftConfiguration.buildFolder, heftConfiguration.rigConfig);
result = {
copyFiles: new Map(),
deleteGlobs: new Map(),
runScript: new Map()
};
CoreConfigFiles._heftConfigFileEventActionsCache.set(heftConfiguration, result);
for (const eventAction of (heftConfigJson === null || heftConfigJson === void 0 ? void 0 : heftConfigJson.eventActions) || []) {
switch (eventAction.actionKind) {
case 'copyFiles': {
CoreConfigFiles._addEventActionToMap(eventAction, result.copyFiles);
break;
}
case 'deleteGlobs': {
CoreConfigFiles._addEventActionToMap(eventAction, result.deleteGlobs);
break;
}
case 'runScript': {
CoreConfigFiles._addEventActionToMap(eventAction, result.runScript);
break;
}
default: {
throw new Error(`Unknown heft eventAction actionKind "${eventAction.actionKind}" in ` +
`"${CoreConfigFiles.heftConfigFileLoader.getObjectSourceFilePath(eventAction)}" `);
}
}
}
}
for (const pluginSpecifier of pluginSpecifiers) {
const pluginPackageName = CoreConfigFiles._heftConfigFileLoader.getPropertyOriginalValue({
parentObject: pluginSpecifier,
propertyName: 'pluginPackage'
});
pluginSpecifier.pluginPackageRoot = pluginSpecifier.pluginPackage;
pluginSpecifier.pluginPackage = pluginPackageName;
return result;
}
/**
* Returns the loader for the `config/api-extractor-task.json` config file.
*/
static get apiExtractorTaskConfigurationLoader() {
if (!CoreConfigFiles._apiExtractorTaskConfigurationLoader) {
const schemaPath = path.resolve(__dirname, '..', 'schemas', 'api-extractor-task.schema.json');
CoreConfigFiles._apiExtractorTaskConfigurationLoader =
new heft_config_file_1.ConfigurationFile({
projectRelativeFilePath: 'config/api-extractor-task.json',
jsonSchemaPath: schemaPath
});
}
return configurationFile;
return CoreConfigFiles._apiExtractorTaskConfigurationLoader;
}
static get nodeServiceConfigurationFile() {
/**
 * Returns the loader for the `config/typescript.json` config file.
 */
static get typeScriptConfigurationFileLoader() {
// Lazily construct the loader on first access and cache it for the process lifetime.
if (!CoreConfigFiles._typeScriptConfigurationFileLoader) {
const schemaPath = path.resolve(__dirname, '..', 'schemas', 'typescript.schema.json');
CoreConfigFiles._typeScriptConfigurationFileLoader =
new heft_config_file_1.ConfigurationFile({
projectRelativeFilePath: 'config/typescript.json',
jsonSchemaPath: schemaPath,
propertyInheritance: {
// When a rig/base file is extended, the array fields of "staticAssetsToCopy"
// are concatenated (via _inheritArray) instead of being overwritten.
staticAssetsToCopy: {
inheritanceType: heft_config_file_1.InheritanceType.custom,
inheritanceFunction: (currentObject, parentObject) => {
const result = {};
CoreConfigFiles._inheritArray(result, 'fileExtensions', currentObject, parentObject);
CoreConfigFiles._inheritArray(result, 'includeGlobs', currentObject, parentObject);
CoreConfigFiles._inheritArray(result, 'excludeGlobs', currentObject, parentObject);
return result;
}
}
}
});
}
return CoreConfigFiles._typeScriptConfigurationFileLoader;
}
/**
* Returns the loader for the `config/node-service.json` config file.
*/
static get nodeServiceConfigurationLoader() {
if (!CoreConfigFiles._nodeServiceConfigurationLoader) {

@@ -106,3 +157,3 @@ const schemaPath = path.resolve(__dirname, '..', 'schemas', 'node-service.schema.json');

new heft_config_file_1.ConfigurationFile({
projectRelativeFilePath: `${Constants_1.Constants.projectConfigFolderName}/${Constants_1.Constants.nodeServiceConfigurationFilename}`,
projectRelativeFilePath: 'config/node-service.json',
jsonSchemaPath: schemaPath

@@ -113,4 +164,48 @@ });

}
static _addEventActionToMap(eventAction, map) {
const heftEvent = CoreConfigFiles._parseHeftEvent(eventAction);
let eventArray = map.get(heftEvent);
if (!eventArray) {
eventArray = [];
map.set(heftEvent, eventArray);
}
eventArray.push(eventAction);
}
static _parseHeftEvent(eventAction) {
switch (eventAction.heftEvent) {
case 'clean':
return HeftEvent.clean;
case 'pre-compile':
return HeftEvent.preCompile;
case 'compile':
return HeftEvent.compile;
case 'bundle':
return HeftEvent.bundle;
case 'post-build':
return HeftEvent.postBuild;
case 'test':
return HeftEvent.test;
default:
throw new Error(`Unknown heft event "${eventAction.heftEvent}" in ` +
` "${CoreConfigFiles.heftConfigFileLoader.getObjectSourceFilePath(eventAction)}".`);
}
}
static _inheritArray(resultObject, propertyName, currentObject, parentObject) {
let newValue;
if (currentObject[propertyName] && parentObject[propertyName]) {
newValue = [
...currentObject[propertyName],
...parentObject[propertyName]
];
}
else {
newValue = currentObject[propertyName] || parentObject[propertyName];
}
if (newValue !== undefined) {
resultObject[propertyName] = newValue;
}
}
}
exports.CoreConfigFiles = CoreConfigFiles;
CoreConfigFiles._heftConfigFileEventActionsCache = new Map();
//# sourceMappingURL=CoreConfigFiles.js.map
{
"name": "@rushstack/heft",
"version": "0.49.0-rc.2",
"version": "0.49.0",
"description": "Build all your JavaScript projects the same way: A way that works.",

@@ -31,4 +31,4 @@ "keywords": [

"dependencies": {
"@rushstack/heft-config-file": "0.11.3",
"@rushstack/node-core-library": "3.53.2",
"@rushstack/heft-config-file": "0.11.4",
"@rushstack/node-core-library": "3.53.3",
"@rushstack/rig-package": "0.3.17",

@@ -40,4 +40,6 @@ "@rushstack/ts-command-line": "4.13.1",

"fast-glob": "~3.2.4",
"git-repo-info": "~2.1.0",
"ignore": "~5.1.6",
"glob": "~7.0.5",
"glob-escape": "~0.0.2",
"prettier": "~2.3.0",
"semver": "~7.3.0",
"tapable": "1.1.3",

@@ -47,5 +49,3 @@ "true-case-path": "~2.2.1"

"devDependencies": {
"@microsoft/api-extractor": "7.33.6",
"@nodelib/fs.scandir": "2.1.5",
"@nodelib/fs.stat": "2.0.5",
"@microsoft/api-extractor": "7.33.7",
"@rushstack/eslint-config": "3.1.1",

@@ -55,4 +55,9 @@ "@rushstack/heft": "0.48.0",

"@types/argparse": "1.0.38",
"@types/eslint": "8.2.0",
"@types/glob": "7.1.1",
"@types/heft-jest": "1.0.1",
"@types/node": "12.20.24",
"@types/semver": "7.3.5",
"colors": "~1.2.1",
"tslint": "~5.20.1",
"typescript": "~4.8.4"

@@ -59,0 +64,0 @@ },

# Upgrade notes for @rushstack/heft
### Heft 0.49.0-rc.1
Multi-phase Heft is a complete re-write of the `@rushstack/heft` project with the intention of being more closely compatible with multi-phase Rush builds. In addition, this update brings greater customizability and improved parallel process handling to Heft.
Some key areas that were improved with the updated version of Heft include:
- Developer-defined order of execution for Heft plugins and Heft events
- Partial execution of Heft actions via scoping parameters like `--to` or `--only`
- A simplified plugin API for developers making Heft plugins
- Explicit definition of Heft plugins via "heft-plugin.json"
- Native support for defining multiple plugins within a single plugin package
- Improved handling of plugin parameters
- Native support for incremental watch-mode in Heft actions
- Reduced overhead and performance improvements
- Much more!
#### Heft Tasks
Heft tasks are the smallest unit of work specified in "heft.json". Tasks can either implement _a single plugin_, or _a single Heft event_. Heft tasks may take dependencies on other tasks within the same phase, and all task dependencies must complete execution before dependent tasks can execute.
Heft events are essentially built-in plugins that can be used to provide the implementation of a Heft task. Available Heft events include:
- `copyFiles`
- `deleteFiles`
- `runScript`
- `nodeService`
#### Heft Phases
Heft phases are a collection of tasks that will run when executing a phase. Phases act as a logical collection of tasks that would reasonably (but not necessarily) map to a Rush phase. Heft phases may take dependencies on other phases, and when executing multiple phases, all selected phases must complete execution before dependent phases can execute.
#### Heft Actions
Using similar expansion logic to Rush, execution of a selection of Heft phases can be done through the use of the `heft run` action. This action executes a set of selected phases in order of phase dependency. If the selected phases are not dependencies, they will be executed in parallel. Selection parameters include:
- `--only` - Execute the specified phase
- `--to` - Execute the specified phase and all its dependencies
Additionally, task- and phase-specific parameters may be provided to the `heft run` action by appending `-- <parameters>` to the command. For example, `heft run --only build -- --clean` will run only the `build` phase and will run a clean before executing the phase.
In addition, Heft will generate actions for each phase specified in the "heft.json" configuration. These actions are executed by running `heft <phaseName>` and run Heft to the specified phase, including all phase dependencies. As such, these inferred Heft actions are equivalent to running `heft run --to <phaseName>`, and are intended as a CLI shorthand.
#### Watch Mode
Watch mode is now a first-class feature in Heft. Watch mode actions are created for all Heft actions. For example, to run "build" and "test" phases in watch mode, use either of the commands `heft test-watch` or `heft run-watch --to test`. When running in watch mode, Heft will start a file watcher and watch for changes, restarting the watch loop when changes to source files are detected. The list of changed files will be provided to the plugin for incremental processing.
#### Heft Plugins
##### Heft Lifecycle Plugins
Heft lifecycle plugins provide the implementation for certain lifecycle-related hooks. These plugins will be used across all Heft phases, and as such should be rarely used outside of a few specific cases (such as for metrics reporting). Heft lifecycle plugins provide an `apply` method, and here plugins can hook into the following Tapable hooks:
- `toolStart` - Used to provide plugin-related functionality at the start of Heft execution
- `toolFinish` - Used to provide plugin-related functionality at the end of Heft execution, after all tasks are finished
- `recordMetrics` - Used to provide metrics information about the Heft run to the plugin after all tasks are finished
##### Heft Task Plugins
Heft task plugins provide the implementation for Heft tasks. Heft plugins provide an `apply` method, and here plugins can hook into the following Tapable hooks:
- `run` - Used to provide plugin-related task functionality
- `runIncremental` - Used to provide plugin-related task functionality when in watch mode. A list of modified files is provided for the plugin to process. If no `runIncremental` implementation is provided, Heft will fall back to using the `run` hook as usual.
##### Heft Cross-Plugin Interaction
Heft plugins can use the `requestAccessToPluginByName` API to access the requested plugin accessors. Accessors are objects provided by plugins for external use and are the ideal place to share plugin-specific information or hooks used to provide additional plugin functionality.
Access requests are fulfilled at the beginning of phase execution, prior to `clean` hook execution. If the requested plugin does not provide an accessor, an error will be thrown noting the plugin with the missing accessor. However, if the plugin requested is not present at all, the access request will silently fail. This is done to allow for non-required integrations with external plugins. For this reason, it is important to implement cross-plugin interaction in such a way as to expect this case and to handle it gracefully, or to throw a helpful error.
Plugins available for access are restricted based on scope. For lifecycle plugins, you may request access to any other lifecycle plugin added to the Heft configuration. For task plugins, you may request access to any other task plugin residing within the same phase in the Heft configuration.
#### heft.json
The "heft.json" file is where phases and tasks are defined. Since it contains the relationships between the phases and tasks, it defines the order of operations for the execution of a Heft action.
##### Lifecycle Plugin Specification
Lifecycle plugins are specified in the top-level `heftPlugins` array. Plugins can be referenced by providing a package name and a plugin name. Optionally, if a package contains only a single plugin, a plugin can be referenced by providing only the package name and Heft will resolve to the only exported plugin. Lifecycle plugins can also be provided options to modify the default behavior.
```json
{
"$schema": "https://developer.microsoft.com/json-schemas/heft/heft.schema.json",
"extends": "base-project/config/heft.json",
"heftPlugins": [
{
"packageName": "@rushstack/heft-metrics-reporter",
"options": {
"disableMetrics": true
}
},
{
"packageName": "@rushstack/heft-initialization-plugin",
"pluginName": "my-lifecycle-plugin"
}
]
}
```
##### Phase, Task, and Task Plugin Specification
All phases are defined within the top-level `phasesByName` property. Each phase may specify `phaseDependencies` to define the order of phase execution when running a selection of Heft phases. Phases may also provide the `cleanFiles` option, which accepts an array of deletion operations to perform when running with the `--clean` flag.
Within the phase specification, `tasksByName` defines all tasks that run while executing a phase. Each task may specify `taskDependencies` to define the order of task execution. All tasks defined in `taskDependencies` must exist within the same phase. For CLI-availability reasons, phase names, task names, plugin names, and parameter scopes, must be `kabob-cased`.
The following is an example "heft.json" file defining both a "build" and a "test" phase:
```json
{
"$schema": "https://developer.microsoft.com/json-schemas/heft/heft.schema.json",
"extends": "base-project/config/heft.json",
// "heftPlugins" can be used alongside "phasesByName"
"heftPlugins": [
{
"packageName": "@rushstack/heft-metrics-reporter"
}
],
// "phasesByName" defines all phases, and each phase defines tasks to be run
"phasesByName": {
"build": {
"phaseDescription": "Transpile and run a linter against build output",
"cleanFiles": [
{
"sourcePath": "temp-build-output"
}
],
// "tasksByName" defines all tasks within a phase
"tasksByName": {
"typescript": {
"taskPlugin": {
"pluginPackage": "@rushstack/heft-typescript-plugin"
}
},
"lint": {
"taskDependencies": [ "typescript" ],
"taskPlugin": {
"pluginPackage": "@rushstack/heft-lint-plugin",
"pluginName": "eslint"
}
},
"copy-assets": {
"taskEvent": {
"eventKind": "copyFiles",
"options": {
"copyOperations": [
{
"sourceFolder": "src/assets",
"destinationFolders": [ "dist/assets" ]
}
]
}
}
}
}
},
"test": {
"phaseDependencies": [ "build" ],
"phaseDescription": "Run Jest tests, if provided.",
"tasksByName": {
"jest": {
"taskPlugin": {
"pluginPackage": "@rushstack/heft-jest-plugin"
}
}
}
}
}
}
```
##### Watch Mode Options in "heft.json"
Since watch mode and the associated file watcher are now directly managed by Heft, some watch mode options have been provided to modify behavior when encountering modified source files:
```json
{
"watchOptions": {
"ignoredSourceFileGlobs": [
"**/*.snap"
],
"forbiddenSourceFileGlobs": [
"tsconfig.json"
]
}
}
```
These options allow developers to tell Heft to ignore when specific source file changes are made (for example, when checked-in snapshot files are updated by Jest) or to break the Heft build when specific source file changes are made (for example, modifying a configuration file that is only loaded on the first execution). Notably, these properties are global and apply to all watch mode actions.
##### Property Inheritance in "heft.json"
Previously, common properties between a "heft.json" file its extended base file would merge arrays and overwrite objects. Now, both arrays and objects will merge, allowing for simplified use of the "heft.json" file when customizing extended base configurations.
Additionally, we now provide merge behavior overrides to allow modifying extended configurations more dynamically. This is done by using inline markup properties that define inheritance behavior. For example, assume that we are extending a file with a previously defined "property1" value that is a keyed object, and a "property2" value that is an array object:
```json
{
"$schema": "...",
"$extends": "...",
"$property1.inheritanceType": "override | merge",
"property1": {
"$subProperty1.inheritanceType": "override | merge",
"subProperty1": { ... },
"$subProperty2.inheritanceType": "override | append",
"subProperty2": [ ... ]
},
"$property2.inheritanceType": "override | append",
"property2": [ ... ]
}
```
Once an object is set to a `inheritanceType` of override, all sub-property `inheritanceType` values will be ignored, since the top-most object already overrides all sub-properties.
One thing to note is that different `inheritanceType` verbs are used for the merging of keyed objects and arrays. This is to make it explicit that arrays will be appended as-is, and no additional processing (eg. deduping if the array is intended to be a set) is done during merge. If such behavior is required, it can be done on the implementation side. Deduping arrays within the @rushstack/heft-config-file package doesn't quite make sense, since deduping arrays of non-primitive objects is not easily defined.
##### Example "heft.json" Comparison
###### "heft.json" in `@rushstack/heft@0.49.0-rc.1`
```json
{
"$schema": "https://developer.microsoft.com/json-schemas/heft/heft.schema.json",
"watchOptions": {
"ignoredSourceFileGlobs": [
"**/*.snap"
]
},
"phasesByName": {
"build": {
"cleanFiles": [
{ "sourcePath": "dist" },
{ "sourcePath": "lib" }
],
"tasksByName": {
"typescript": {
"taskPlugin": {
"pluginPackage": "@rushstack/heft-typescript-plugin"
}
},
"lint": {
"taskDependencies": ["typescript"],
"taskPlugin": {
"pluginPackage": "@rushstack/heft-lint-plugin"
}
},
"api-extractor": {
"taskDependencies": ["typescript"],
"taskPlugin": {
"pluginPackage": "@rushstack/heft-api-extractor-plugin"
}
}
}
},
"test": {
"phaseDependencies": ["build"],
"tasksByName": {
"jest": {
"taskPlugin": {
"pluginPackage": "@rushstack/heft-jest-plugin"
}
}
}
}
}
}
```
###### "heft.json" in `@rushstack/heft@0.48.8`
```json
{
"$schema": "https://developer.microsoft.com/json-schemas/heft/heft.schema.json",
"eventActions": [
{
"actionKind": "deleteGlobs",
"heftEvent": "clean",
"actionId": "defaultClean",
"globsToDelete": ["dist", "lib", "lib-commonjs", "temp"]
}
],
"heftPlugins": [
{ "plugin": "@rushstack/heft-jest-plugin" }
]
}
```
*NOTE: This "heft.json" file is simple due to the implicitly included plugins, which must now be added by developers or consumed via a rig.*
#### heft-plugin.json
The new heft-plugin.json file is a new, required manifest file specified at the root of all Heft plugin packages. This file is used for multiple purposes, including the definition of all contained lifecycle or task plugins, the definition of all plugin-specific CLI parameters, and providing an optional schema file to validate plugin options that can be passed via "heft.json".
The following is an example "heft-plugin.json" file defining a lifecycle plugin and a task plugin:
```json
{
"$schema": "https://developer.microsoft.com/json-schemas/heft/heft-plugin.schema.json",
"lifecyclePlugins": [
{
"pluginName": "my-lifecycle-plugin",
"entryPoint": "./lib/MyLifecyclePlugin.js",
"optionsSchema": "./lib/schemas/mylifecycleplugin.schema.json",
"parameterScope": "my-lifecycle",
"parameters": [
{
"parameterKind": "string",
"longName": "--my-string",
"description": "…",
"argumentName": "ARG_NAME",
"required": false
}
]
}
],
"taskPlugins": [
{
"pluginName": "my-task-plugin",
"entryPoint": "./lib/MyTaskPlugin.js",
"optionsSchema": "./lib/schemas/mytaskplugin.schema.json",
"parameterScope": "my-task",
"parameters": [
{
"parameterKind": "string",
"longName": "--my-other-string",
"description": "…",
"argumentName": "ARG_NAME",
"required": false
}
]
}
]
}
```
##### Defining Plugin CLI Parameters
Defining CLI parameters is now only possible via "heft-plugin.json", and defined parameters can be consumed in plugins via the `HeftTaskSession.parameters` API. Additionally, all plugin parameters for the selected Heft phases are now discoverable on the CLI when using the `--help` argument (ex. `heft test --help` or `heft run --to test -- --help`).
These parameters can be automatically "de-duped" on the CLI using an optionally-provided `parameterScope`. By default, parameters defined in "heft-plugin.json" will be available on the CLI using `--<parameterName>` and `--<parameterScope>:<parameterName>`. When multiple plugins provide the same parameter, only the latter parameter will be available on the CLI in order to "de-dupe" conflicting parameters. For example, if PluginA with parameter scope "PluginA" defines `--parameter`, and PluginB with parameter scope "PluginB" also defines `--parameter`, the parameters will _only_ be available as `--PluginA:parameter` and `--PluginB:parameter`.
#### Updating "heft.json"
In updating to the new version of Heft, "heft.json" files will need to be updated to define the flow of your Heft run. This is a big change in behavior since legacy Heft defined a strict set of hooks, any of which could be tied into by any plugin. When converting to the new "heft.json" format, special attention should be paid to the order-of-operations.
An important note on upgrading to the new version of Heft is that legacy Heft included a few plugins by default which have since been externalized. Due to this change, these default plugins need to be manually included in your Heft project. These plugins include:
- `@rushstack/heft-typescript-plugin`
- `@rushstack/heft-lint-plugin`
- `@rushstack/heft-api-extractor-plugin`
To simplify upgrading to the new version of Heft, usage of rigs is encouraged since rigs help centralize changes to Heft configurations in one location. The above plugins are included in the Rushstack-provided `@rushstack/heft-node-rig` and `@rushstack/heft-web-rig` packages.
#### Updating Heft Plugins
In updating to the new version of Heft, plugins will also need to be updated for compatibility. Some of the more notable API changes include:
- "heft.json" format completely changed. See above for more information on "heft.json"
- "heft-plugin.json" manifest file must accompany any plugin package. If no "heft-plugin.json" file is found, Heft will throw an error. See above for more information on "heft-plugin.json"
- Plugin classes must have parameterless constructors, and must be the default export of the file pointed to by the `entryPoint` property in "heft-plugin.json"
- Schema files for options provided in "heft.json" can now be specified using the `optionsSchema` property in "heft-plugin.json" and they will be validated by Heft
- Parameters are now defined in "heft-plugin.json" and are consumed in the plugin via the `IHeftTaskSession.parameters` or `IHeftLifecycleSession.parameters` property. *NOTE: Other than the default Heft-included parameters, only parameters defined by the calling plugin are accessible*
- Plugins can no longer define their own actions. If a plugin deserves its own action, a dedicated phase should be added to the consumer's "heft.json"
- The `runScript` Heft event has been modified to only accept a `runAsync` method, and the properties have been updated to reflect what is available to normal Heft task plugins
- Path-related variables have been renamed to clarify they are paths (ex. `HeftConfiguration.buildFolder` is now `HeftConfiguration.buildFolderPath`)
- The `runIncremental` hook can now be utilized to add native incremental watch mode support
- The `clean` hook was removed in favor of the `cleanFiles` option in "heft.json" in order to make it obvious what files are being cleaned and when
- The `folderNameForTests` and `extensionForTests` properties are now specified in the `@rushstack/heft-jest-plugin` options in "heft.json" instead of in "typescript.json"
#### Testing on Your Own Project
The new version of Heft and all related plugins are available in the following prerelease packages:
- `@rushstack/heft@0.49.0-rc.1`
- `@rushstack/heft-typescript-plugin@0.1.0-rc.1`
- `@rushstack/heft-lint-plugin@0.1.0-rc.1`
- `@rushstack/heft-api-extractor-plugin@0.1.0-rc.1`
- `@rushstack/heft-jest-plugin@0.4.0-rc.1`
- `@rushstack/heft-sass-plugin@0.8.0-rc.1`
- `@rushstack/heft-storybook-plugin@0.2.0-rc.1`
- `@rushstack/heft-webpack4-plugin@0.6.0-rc.1`
- `@rushstack/heft-webpack5-plugin@0.6.0-rc.1`
- `@rushstack/heft-dev-cert-plugin@0.3.0-rc.1`
Additionally, Rushstack-provided rigs have been updated to be compatible with the new version of Heft:
- `@rushstack/heft-node-rig@1.12.0-rc.0`
- `@rushstack/heft-web-rig@0.13.0-rc.0`
If you have any issues with the prerelease packages or the new changes to Heft, please [file an issue](https://github.com/microsoft/rushstack/issues/new?assignees=&labels=&template=heft.md&title=%5Bheft%2Frc%2f0%5D+).
#### Known Issues
- TypeScript solution mode is not supported in watch mode
- TypeScript plugin copies all static assets when in watch mode
### Heft 0.35.0

@@ -368,0 +4,0 @@

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc