Huge News!Announcing our $40M Series B led by Abstract Ventures.Learn More
Socket
Sign inDemoInstall
Socket

@yarnpkg/core

Package Overview
Dependencies
Maintainers
5
Versions
158
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@yarnpkg/core - npm Package Compare versions

Comparing version 2.4.0 to 3.0.0-rc.1

38

lib/Cache.js

@@ -8,9 +8,9 @@ "use strict";

const libzip_1 = require("@yarnpkg/libzip");
const fs_1 = tslib_1.__importDefault(require("fs"));
const fs_1 = (0, tslib_1.__importDefault)(require("fs"));
const MessageName_1 = require("./MessageName");
const Report_1 = require("./Report");
const hashUtils = tslib_1.__importStar(require("./hashUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const CACHE_VERSION = 7;
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const CACHE_VERSION = 8;
class Cache {

@@ -91,2 +91,3 @@ constructor(cacheCwd, { configuration, immutable = configuration.get(`enableImmutableCache`), check = false }) {

}
await fslib_2.xfs.mkdirPromise(this.mirrorCwd || this.cwd, { recursive: true });
}

@@ -141,3 +142,3 @@ async fetchPackageFromCache(locator, expectedChecksum, { onHit, onMiss, loader, skipIntegrityCheck }) {

zipFs.saveAndClose();
return realPath;
return { source: `loader`, path: realPath };
}

@@ -147,3 +148,3 @@ const tempDir = await fslib_2.xfs.mktempPromise();

await fslib_2.xfs.copyFilePromise(mirrorPath, tempPath, fs_1.default.constants.COPYFILE_FICLONE);
return tempPath;
return { source: `mirror`, path: tempPath };
};

@@ -155,15 +156,21 @@ const loadPackage = async () => {

throw new Report_1.ReportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `Cache entry required but missing for ${structUtils.prettyLocator(this.configuration, locator)}`);
const originalPath = await loadPackageThroughMirror();
await fslib_2.xfs.chmodPromise(originalPath, 0o644);
const { path: cachePathTemp, source: packageSource } = await loadPackageThroughMirror();
await fslib_2.xfs.chmodPromise(cachePathTemp, 0o644);
// Do this before moving the file so that we don't pollute the cache with corrupted archives
const checksum = await validateFile(originalPath);
const checksum = await validateFile(cachePathTemp);
const cachePath = this.getLocatorPath(locator, checksum);
if (!cachePath)
throw new Error(`Assertion failed: Expected the cache path to be available`);
let mirrorPathTemp = null;
if (packageSource !== `mirror` && mirrorPath !== null) {
const tempDir = await fslib_2.xfs.mktempPromise();
mirrorPathTemp = fslib_2.ppath.join(tempDir, this.getVersionFilename(locator));
await fslib_2.xfs.copyFilePromise(cachePathTemp, mirrorPathTemp, fs_1.default.constants.COPYFILE_FICLONE);
}
return await this.writeFileWithLock(cachePath, async () => {
return await this.writeFileWithLock(mirrorPath, async () => {
return await this.writeFileWithLock(packageSource === `mirror` ? null : mirrorPath, async () => {
// Doing a move is important to ensure atomic writes (todo: cross-drive?)
await fslib_2.xfs.movePromise(originalPath, cachePath);
if (mirrorPath !== null)
await fslib_2.xfs.copyFilePromise(cachePath, mirrorPath, fs_1.default.constants.COPYFILE_FICLONE);
await fslib_2.xfs.movePromise(cachePathTemp, cachePath);
if (mirrorPathTemp && mirrorPath)
await fslib_2.xfs.movePromise(mirrorPathTemp, mirrorPath);
return [cachePath, checksum];

@@ -214,3 +221,3 @@ });

let zipFs = null;
const libzip = await libzip_1.getLibzipPromise();
const libzip = await (0, libzip_1.getLibzipPromise)();
const lazyFs = new fslib_1.LazyFS(() => miscUtils.prettifySyncErrors(() => {

@@ -234,3 +241,2 @@ return zipFs = new fslib_1.ZipFS(cachePath, { baseFs, libzip, readOnly: true });

return await generator();
await fslib_2.xfs.mkdirPromise(fslib_2.ppath.dirname(file), { recursive: true });
return await fslib_2.xfs.lockPromise(file, async () => {

@@ -237,0 +243,0 @@ return await generator();

@@ -30,3 +30,25 @@ /// <reference types="node" />

export declare type FormatType = formatUtils.Type;
export declare const FormatType: typeof formatUtils.Type;
export declare const FormatType: {
readonly NO_HINT: "NO_HINT";
readonly NULL: "NULL";
readonly SCOPE: "SCOPE";
readonly NAME: "NAME";
readonly RANGE: "RANGE";
readonly REFERENCE: "REFERENCE";
readonly NUMBER: "NUMBER";
readonly PATH: "PATH";
readonly URL: "URL";
readonly ADDED: "ADDED";
readonly REMOVED: "REMOVED";
readonly CODE: "CODE";
readonly DURATION: "DURATION";
readonly SIZE: "SIZE";
readonly IDENT: "IDENT";
readonly DESCRIPTOR: "DESCRIPTOR";
readonly LOCATOR: "LOCATOR";
readonly RESOLUTION: "RESOLUTION";
readonly DEPENDENT: "DEPENDENT";
readonly PACKAGE_EXTENSION: "PACKAGE_EXTENSION";
readonly SETTING: "SETTING";
};
export declare type BaseSettingsDefinition<T extends SettingsType = SettingsType> = {

@@ -79,3 +101,2 @@ description: string;

virtualFolder: PortablePath;
bstatePath: PortablePath;
lockfileFilename: Filename;

@@ -86,3 +107,2 @@ installStatePath: PortablePath;

enableGlobalCache: boolean;
enableAbsoluteVirtuals: boolean;
enableColors: boolean;

@@ -253,6 +273,3 @@ enableHyperlinks: boolean;

get<K extends keyof ConfigurationValueMap>(key: K): ConfigurationValueMap[K];
/** @deprecated pass in a known configuration key instead */
get<T>(key: string): T;
/** @note Type will change to unknown in a future major version */
get(key: string): any;
get(key: string): unknown;
getSpecial<T = any>(key: string, { hideSecrets, getNativePaths }: Partial<SettingTransforms>): T;

@@ -272,3 +289,3 @@ getSubprocessStreams(logFile: PortablePath, { header, prefix, report }: {

normalizePackage(original: Package): Package;
getLimit(key: string): Limit;
getLimit<K extends miscUtils.FilterKeys<ConfigurationValueMap, number>>(key: K): Limit;
triggerHook<U extends Array<any>, V, HooksDefinition = Hooks>(get: (hooks: HooksDefinition) => ((...args: U) => V) | undefined, ...args: U): Promise<void>;

@@ -278,7 +295,3 @@ triggerMultipleHooks<U extends Array<any>, V, HooksDefinition = Hooks>(get: (hooks: HooksDefinition) => ((...args: U) => V) | undefined, argsList: Array<U>): Promise<void>;

firstHook<U extends Array<any>, V, HooksDefinition = Hooks>(get: (hooks: HooksDefinition) => ((...args: U) => Promise<V>) | undefined, ...args: U): Promise<Exclude<V, void> | null>;
/**
* @deprecated Prefer using formatUtils.pretty instead, which is type-safe
*/
format(value: string, formatType: formatUtils.Type | string): string;
}
export {};

@@ -8,7 +8,7 @@ "use strict";

const parsers_1 = require("@yarnpkg/parsers");
const camelcase_1 = tslib_1.__importDefault(require("camelcase"));
const camelcase_1 = (0, tslib_1.__importDefault)(require("camelcase"));
const ci_info_1 = require("ci-info");
const clipanion_1 = require("clipanion");
const p_limit_1 = tslib_1.__importDefault(require("p-limit"));
const semver_1 = tslib_1.__importDefault(require("semver"));
const p_limit_1 = (0, tslib_1.__importDefault)(require("p-limit"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const stream_1 = require("stream");

@@ -24,8 +24,8 @@ const CorePlugin_1 = require("./CorePlugin");

const WorkspaceResolver_1 = require("./WorkspaceResolver");
const folderUtils = tslib_1.__importStar(require("./folderUtils"));
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const nodeUtils = tslib_1.__importStar(require("./nodeUtils"));
const semverUtils = tslib_1.__importStar(require("./semverUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const folderUtils = (0, tslib_1.__importStar)(require("./folderUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const nodeUtils = (0, tslib_1.__importStar)(require("./nodeUtils"));
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const types_1 = require("./types");

@@ -133,11 +133,6 @@ const IGNORED_ENV_VARIABLES = new Set([

virtualFolder: {
description: `Folder where the virtual packages (cf doc) will be mapped on the disk (must be named $$virtual)`,
description: `Folder where the virtual packages (cf doc) will be mapped on the disk (must be named __virtual__)`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/$$virtual`,
default: `./.yarn/__virtual__`,
},
bstatePath: {
description: `Path of the file where the current state of the built packages must be stored`,
type: SettingsType.ABSOLUTE_PATH,
default: `./.yarn/build-state.yml`,
},
lockfileFilename: {

@@ -169,7 +164,2 @@ description: `Name of the files where the Yarn dependency tree entries must be stored`,

},
enableAbsoluteVirtuals: {
description: `If true, the virtual symlinks will use absolute paths if required [non portable!!]`,
type: SettingsType.BOOLEAN,
default: false,
},
// Settings related to the output style

@@ -194,2 +184,7 @@ enableColors: {

},
enableMessageNames: {
description: `If true, the CLI will prefix most messages with codes suitable for search engines`,
type: SettingsType.BOOLEAN,
default: true,
},
enableProgressBars: {

@@ -426,3 +421,3 @@ description: `If true, the CLI is allowed to show a progress bar for long-running events`,

function parseValue(configuration, path, value, definition, folder) {
if (definition.isArray) {
if (definition.isArray || (definition.type === SettingsType.ANY && Array.isArray(value))) {
if (!Array.isArray(value)) {

@@ -461,3 +456,3 @@ return String(value).split(/,/).map(segment => {

const interpretValue = () => {
if (definition.type === SettingsType.BOOLEAN)
if (definition.type === SettingsType.BOOLEAN && typeof value !== `string`)
return miscUtils.parseBoolean(value);

@@ -478,2 +473,4 @@ if (typeof value !== `string`)

return structUtils.parseLocator(valueWithReplacedVariables);
case SettingsType.BOOLEAN:
return miscUtils.parseBoolean(valueWithReplacedVariables);
default:

@@ -607,3 +604,3 @@ return valueWithReplacedVariables;

continue;
key = camelcase_1.default(key.slice(exports.ENVIRONMENT_PREFIX.length));
key = (0, camelcase_1.default)(key.slice(exports.ENVIRONMENT_PREFIX.length));
environmentSettings[key] = value;

@@ -683,2 +680,13 @@ }

const homeRcFile = await Configuration.findHomeRcFile();
if (homeRcFile) {
const rcFile = rcFiles.find(rcFile => rcFile.path === homeRcFile.path);
// The home configuration is never strict because it improves support for
// multiple projects using different Yarn versions on the same machine
if (rcFile) {
rcFile.strict = false;
}
else {
rcFiles.push({ ...homeRcFile, strict: false });
}
}
const pickCoreFields = ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename }) => ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename });

@@ -691,4 +699,2 @@ const excludeCoreFields = ({ ignoreCwd, yarnPath, ignorePath, lockfileFilename, ...rest }) => rest;

configuration.useWithSource(path, pickCoreFields(data), cwd, { strict: false });
if (homeRcFile)
configuration.useWithSource(homeRcFile.path, pickCoreFields(homeRcFile.data), homeRcFile.cwd, { strict: false });
if (usePath) {

@@ -745,3 +751,3 @@ const yarnPath = configuration.get(`yarnPath`);

for (const request of nodeUtils.builtinModules())
requireEntries.set(request, () => nodeUtils.dynamicRequire(request));
requireEntries.set(request, () => miscUtils.dynamicRequire(request));
for (const [request, embedModule] of pluginConfiguration.modules)

@@ -754,3 +760,3 @@ requireEntries.set(request, () => embedModule);

const importPlugin = (pluginPath, source) => {
const { factory, name } = nodeUtils.dynamicRequire(fslib_1.npath.fromPortablePath(pluginPath));
const { factory, name } = miscUtils.dynamicRequire(fslib_1.npath.fromPortablePath(pluginPath));
// Prevent plugin redefinition so that the ones declared deeper in the

@@ -801,8 +807,4 @@ // filesystem always have precedence over the ones below.

configuration.useWithSource(`<environment>`, excludeCoreFields(environmentSettings), startingCwd, { strict });
for (const { path, cwd, data } of rcFiles)
configuration.useWithSource(path, excludeCoreFields(data), cwd, { strict });
// The home configuration is never strict because it improves support for
// multiple projects using different Yarn versions on the same machine
if (homeRcFile)
configuration.useWithSource(homeRcFile.path, excludeCoreFields(homeRcFile.data), homeRcFile.cwd, { strict: false });
for (const { path, cwd, data, strict: isStrict } of rcFiles)
configuration.useWithSource(path, excludeCoreFields(data), cwd, { strict: isStrict !== null && isStrict !== void 0 ? isStrict : strict });
if (configuration.get(`enableGlobalCache`)) {

@@ -827,3 +829,3 @@ configuration.values.set(`cacheFolder`, `${configuration.get(`globalFolder`)}/cache`);

try {
data = parsers_1.parseSyml(content);
data = (0, parsers_1.parseSyml)(content);
}

@@ -848,3 +850,3 @@ catch (error) {

const content = await fslib_1.xfs.readFilePromise(homeRcFilePath, `utf8`);
const data = parsers_1.parseSyml(content);
const data = (0, parsers_1.parseSyml)(content);
return { path: homeRcFilePath, cwd: homeFolder, data };

@@ -881,3 +883,3 @@ }

const current = fslib_1.xfs.existsSync(configurationPath)
? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(configurationPath, `utf8`))
? (0, parsers_1.parseSyml)(await fslib_1.xfs.readFilePromise(configurationPath, `utf8`))
: {};

@@ -923,3 +925,3 @@ let patched = false;

}
await fslib_1.xfs.changeFilePromise(configurationPath, parsers_1.stringifySyml(replacement), {
await fslib_1.xfs.changeFilePromise(configurationPath, (0, parsers_1.stringifySyml)(replacement), {
automaticNewlines: true,

@@ -1096,8 +1098,8 @@ });

for (const dependency of extension.dependencies.values())
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.Dependency, descriptor: dependency, description: `${structUtils.stringifyIdent(descriptor)} > ${structUtils.stringifyIdent(dependency)}` });
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.Dependency, descriptor: dependency });
for (const peerDependency of extension.peerDependencies.values())
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependency, descriptor: peerDependency, description: `${structUtils.stringifyIdent(descriptor)} >> ${structUtils.stringifyIdent(peerDependency)}` });
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependency, descriptor: peerDependency });
for (const [selector, meta] of extension.peerDependenciesMeta) {
for (const [key, value] of Object.entries(meta)) {
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependencyMeta, selector, key: key, value, description: `${structUtils.stringifyIdent(descriptor)} >> ${selector} / ${key}` });
extensionsPerRange.push({ ...baseExtension, type: types_1.PackageExtensionType.PeerDependencyMeta, selector, key: key, value });
}

@@ -1179,9 +1181,10 @@ }

for (const descriptor of pkg.peerDependencies.values()) {
if (descriptor.scope === `@types`)
if (descriptor.scope === `types`)
continue;
const typesName = getTypesName(descriptor);
const typesIdent = structUtils.makeIdent(`types`, typesName);
if (pkg.peerDependencies.has(typesIdent.identHash) || pkg.peerDependenciesMeta.has(typesIdent.identHash))
const stringifiedTypesIdent = structUtils.stringifyIdent(typesIdent);
if (pkg.peerDependencies.has(typesIdent.identHash) || pkg.peerDependenciesMeta.has(stringifiedTypesIdent))
continue;
pkg.peerDependenciesMeta.set(structUtils.stringifyIdent(typesIdent), {
pkg.peerDependenciesMeta.set(stringifiedTypesIdent, {
optional: true,

@@ -1207,3 +1210,3 @@ });

return miscUtils.getFactoryWithDefault(this.limits, key, () => {
return p_limit_1.default(this.get(key));
return (0, p_limit_1.default)(this.get(key));
});

@@ -1256,10 +1259,4 @@ }

}
/**
* @deprecated Prefer using formatUtils.pretty instead, which is type-safe
*/
format(value, formatType) {
return formatUtils.pretty(this, value, formatType);
}
}
exports.Configuration = Configuration;
Configuration.telemetry = null;

@@ -6,3 +6,3 @@ "use strict";

const MessageName_1 = require("./MessageName");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
exports.CorePlugin = {

@@ -12,7 +12,7 @@ hooks: {

for (const { pattern, reference } of project.topLevelWorkspace.manifest.resolutions) {
if (pattern.from && pattern.from.fullName !== structUtils.requirableIdent(locator))
if (pattern.from && pattern.from.fullName !== structUtils.stringifyIdent(locator))
continue;
if (pattern.from && pattern.from.description && pattern.from.description !== locator.reference)
continue;
if (pattern.descriptor.fullName !== structUtils.requirableIdent(dependency))
if (pattern.descriptor.fullName !== structUtils.stringifyIdent(dependency))
continue;

@@ -19,0 +19,0 @@ if (pattern.descriptor.description && pattern.descriptor.description !== dependency.range)

@@ -6,3 +6,3 @@ "use strict";

const fslib_1 = require("@yarnpkg/fslib");
const cross_spawn_1 = tslib_1.__importDefault(require("cross-spawn"));
const cross_spawn_1 = (0, tslib_1.__importDefault)(require("cross-spawn"));
var EndStrategy;

@@ -18,2 +18,3 @@ (function (EndStrategy) {

}
const activeChildren = new Set();
function sigintHandler() {

@@ -23,6 +24,7 @@ // We don't want SIGINT to kill our process; we want it to kill the

}
// Rather than attaching one SIGINT handler for each process, we
// attach a single one and use a refcount to detect once it's no
// longer needed.
let sigintRefCount = 0;
function sigtermHandler() {
for (const child of activeChildren) {
child.kill();
}
}
async function pipevp(fileName, args, { cwd, env = process.env, strict = false, stdin = null, stdout, stderr, end = EndStrategy.Always }) {

@@ -38,5 +40,3 @@ const stdio = [`pipe`, `pipe`, `pipe`];

stdio[2] = stderr;
if (sigintRefCount++ === 0)
process.on(`SIGINT`, sigintHandler);
const child = cross_spawn_1.default(fileName, args, {
const child = (0, cross_spawn_1.default)(fileName, args, {
cwd: fslib_1.npath.fromPortablePath(cwd),

@@ -49,2 +49,7 @@ env: {

});
activeChildren.add(child);
if (activeChildren.size === 1) {
process.on(`SIGINT`, sigintHandler);
process.on(`SIGTERM`, sigtermHandler);
}
if (!hasFd(stdin) && stdin !== null)

@@ -65,4 +70,7 @@ stdin.pipe(child.stdin);

child.on(`error`, error => {
if (--sigintRefCount === 0)
activeChildren.delete(child);
if (activeChildren.size === 0) {
process.off(`SIGINT`, sigintHandler);
process.off(`SIGTERM`, sigtermHandler);
}
if (end === EndStrategy.Always || end === EndStrategy.ErrorCode)

@@ -73,4 +81,7 @@ closeStreams();

child.on(`close`, (code, sig) => {
if (--sigintRefCount === 0)
activeChildren.delete(child);
if (activeChildren.size === 0) {
process.off(`SIGINT`, sigintHandler);
process.off(`SIGTERM`, sigtermHandler);
}
if (end === EndStrategy.Always || (end === EndStrategy.ErrorCode && code > 0))

@@ -98,3 +109,3 @@ closeStreams();

env = { ...env, PWD: nativeCwd };
const subprocess = cross_spawn_1.default(fileName, args, {
const subprocess = (0, cross_spawn_1.default)(fileName, args, {
cwd: nativeCwd,

@@ -111,3 +122,5 @@ env,

return await new Promise((resolve, reject) => {
subprocess.on(`error`, reject);
subprocess.on(`error`, () => {
reject();
});
subprocess.on(`close`, (code, signal) => {

@@ -138,3 +151,3 @@ const stdout = encoding === `buffer`

[`SIGKILL`, 9],
[`SIGTERM`, 15],
[`SIGTERM`, 15], // default signal for kill
]);

@@ -141,0 +154,0 @@ function getExitCode(code, signal) {

@@ -8,3 +8,3 @@ "use strict";

if (process.platform === `win32`) {
const base = fslib_1.npath.toPortablePath(process.env.LOCALAPPDATA || fslib_1.npath.join(os_1.homedir(), `AppData`, `Local`));
const base = fslib_1.npath.toPortablePath(process.env.LOCALAPPDATA || fslib_1.npath.join((0, os_1.homedir)(), `AppData`, `Local`));
return fslib_1.ppath.resolve(base, `Yarn/Berry`);

@@ -20,3 +20,3 @@ }

function getHomeFolder() {
return fslib_1.npath.toPortablePath(os_1.homedir() || `/usr/local/share`);
return fslib_1.npath.toPortablePath((0, os_1.homedir)() || `/usr/local/share`);
}

@@ -23,0 +23,0 @@ exports.getHomeFolder = getHomeFolder;

@@ -1,26 +0,28 @@

import { Configuration } from './Configuration';
import { Configuration, ConfigurationValueMap } from './Configuration';
import { Report } from './Report';
import { Descriptor, Locator, Ident, PackageExtension } from './types';
export declare enum Type {
NO_HINT = "NO_HINT",
NULL = "NULL",
SCOPE = "SCOPE",
NAME = "NAME",
RANGE = "RANGE",
REFERENCE = "REFERENCE",
NUMBER = "NUMBER",
PATH = "PATH",
URL = "URL",
ADDED = "ADDED",
REMOVED = "REMOVED",
CODE = "CODE",
DURATION = "DURATION",
SIZE = "SIZE",
IDENT = "IDENT",
DESCRIPTOR = "DESCRIPTOR",
LOCATOR = "LOCATOR",
RESOLUTION = "RESOLUTION",
DEPENDENT = "DEPENDENT",
PACKAGE_EXTENSION = "PACKAGE_EXTENSION"
}
export declare const Type: {
readonly NO_HINT: "NO_HINT";
readonly NULL: "NULL";
readonly SCOPE: "SCOPE";
readonly NAME: "NAME";
readonly RANGE: "RANGE";
readonly REFERENCE: "REFERENCE";
readonly NUMBER: "NUMBER";
readonly PATH: "PATH";
readonly URL: "URL";
readonly ADDED: "ADDED";
readonly REMOVED: "REMOVED";
readonly CODE: "CODE";
readonly DURATION: "DURATION";
readonly SIZE: "SIZE";
readonly IDENT: "IDENT";
readonly DESCRIPTOR: "DESCRIPTOR";
readonly LOCATOR: "LOCATOR";
readonly RESOLUTION: "RESOLUTION";
readonly DEPENDENT: "DEPENDENT";
readonly PACKAGE_EXTENSION: "PACKAGE_EXTENSION";
readonly SETTING: "SETTING";
};
export declare type Type = keyof typeof Type;
export declare enum Style {

@@ -72,2 +74,6 @@ BOLD = 2

};
SETTING: {
pretty: (configuration: any, val: keyof ConfigurationValueMap) => string;
json: (val: keyof ConfigurationValueMap) => any;
};
DURATION: {

@@ -89,5 +95,10 @@ pretty: (configuration: any, val: number) => string;

export declare type Tuple<T extends Type = Type> = readonly [Source<T>, T];
export declare type Field = {
label: string;
value: Tuple<any>;
};
export declare function tuple<T extends Type>(formatType: T, value: Source<T>): Tuple<T>;
export declare function applyStyle(configuration: Configuration, text: string, flags: Style): string;
export declare function applyColor(configuration: Configuration, value: string, formatType: Type | string): string;
export declare function applyHyperlink(configuration: Configuration, text: string, href: string): string;
export declare function pretty<T extends Type>(configuration: Configuration, value: Source<T>, formatType: T | string): string;

@@ -103,2 +114,3 @@ export declare function prettyList<T extends Type>(configuration: Configuration, values: Iterable<Source<T>>, formatType: T | string, { separator }?: {

};
export declare function prettyField(configuration: Configuration, { label, value: [value, formatType] }: Field): string;
export declare enum LogLevel {

@@ -105,0 +117,0 @@ Error = "error",

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.addLogFilterSupport = exports.LogLevel = exports.mark = exports.json = exports.prettyList = exports.pretty = exports.applyColor = exports.applyStyle = exports.tuple = exports.supportsHyperlinks = exports.supportsColor = exports.Style = exports.Type = void 0;
exports.addLogFilterSupport = exports.LogLevel = exports.prettyField = exports.mark = exports.json = exports.prettyList = exports.pretty = exports.applyHyperlink = exports.applyColor = exports.applyStyle = exports.tuple = exports.supportsHyperlinks = exports.supportsColor = exports.Style = exports.Type = void 0;
const tslib_1 = require("tslib");
const fslib_1 = require("@yarnpkg/fslib");
const chalk_1 = tslib_1.__importDefault(require("chalk"));
const chalk_1 = (0, tslib_1.__importDefault)(require("chalk"));
const strip_ansi_1 = (0, tslib_1.__importDefault)(require("strip-ansi"));
const MessageName_1 = require("./MessageName");
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const types_1 = require("./types");
var Type;
(function (Type) {
Type["NO_HINT"] = "NO_HINT";
Type["NULL"] = "NULL";
Type["SCOPE"] = "SCOPE";
Type["NAME"] = "NAME";
Type["RANGE"] = "RANGE";
Type["REFERENCE"] = "REFERENCE";
Type["NUMBER"] = "NUMBER";
Type["PATH"] = "PATH";
Type["URL"] = "URL";
Type["ADDED"] = "ADDED";
Type["REMOVED"] = "REMOVED";
Type["CODE"] = "CODE";
Type["DURATION"] = "DURATION";
Type["SIZE"] = "SIZE";
Type["IDENT"] = "IDENT";
Type["DESCRIPTOR"] = "DESCRIPTOR";
Type["LOCATOR"] = "LOCATOR";
Type["RESOLUTION"] = "RESOLUTION";
Type["DEPENDENT"] = "DEPENDENT";
Type["PACKAGE_EXTENSION"] = "PACKAGE_EXTENSION";
})(Type = exports.Type || (exports.Type = {}));
// We have to workaround a TS bug:
// https://github.com/microsoft/TypeScript/issues/35329
//
// We also can't use const enum because Babel doesn't support them:
// https://github.com/babel/babel/issues/8741
//
exports.Type = {
NO_HINT: `NO_HINT`,
NULL: `NULL`,
SCOPE: `SCOPE`,
NAME: `NAME`,
RANGE: `RANGE`,
REFERENCE: `REFERENCE`,
NUMBER: `NUMBER`,
PATH: `PATH`,
URL: `URL`,
ADDED: `ADDED`,
REMOVED: `REMOVED`,
CODE: `CODE`,
DURATION: `DURATION`,
SIZE: `SIZE`,
IDENT: `IDENT`,
DESCRIPTOR: `DESCRIPTOR`,
LOCATOR: `LOCATOR`,
RESOLUTION: `RESOLUTION`,
DEPENDENT: `DEPENDENT`,
PACKAGE_EXTENSION: `PACKAGE_EXTENSION`,
SETTING: `SETTING`,
};
var Style;

@@ -47,15 +54,15 @@ (function (Style) {

const colors = new Map([
[Type.NO_HINT, null],
[Type.NULL, [`#a853b5`, 129]],
[Type.SCOPE, [`#d75f00`, 166]],
[Type.NAME, [`#d7875f`, 173]],
[Type.RANGE, [`#00afaf`, 37]],
[Type.REFERENCE, [`#87afff`, 111]],
[Type.NUMBER, [`#ffd700`, 220]],
[Type.PATH, [`#d75fd7`, 170]],
[Type.URL, [`#d75fd7`, 170]],
[Type.ADDED, [`#5faf00`, 70]],
[Type.REMOVED, [`#d70000`, 160]],
[Type.CODE, [`#87afff`, 111]],
[Type.SIZE, [`#ffd700`, 220]],
[exports.Type.NO_HINT, null],
[exports.Type.NULL, [`#a853b5`, 129]],
[exports.Type.SCOPE, [`#d75f00`, 166]],
[exports.Type.NAME, [`#d7875f`, 173]],
[exports.Type.RANGE, [`#00afaf`, 37]],
[exports.Type.REFERENCE, [`#87afff`, 111]],
[exports.Type.NUMBER, [`#ffd700`, 220]],
[exports.Type.PATH, [`#d75fd7`, 170]],
[exports.Type.URL, [`#d75fd7`, 170]],
[exports.Type.ADDED, [`#5faf00`, 70]],
[exports.Type.REMOVED, [`#d70000`, 160]],
[exports.Type.CODE, [`#87afff`, 111]],
[exports.Type.SIZE, [`#ffd700`, 220]],
]);

@@ -71,3 +78,3 @@ // Just to make sure that the individual fields of the transform map have

const transforms = {
[Type.NUMBER]: validateTransform({
[exports.Type.NUMBER]: validateTransform({
pretty: (configuration, value) => {

@@ -80,3 +87,3 @@ return `${value}`;

}),
[Type.IDENT]: validateTransform({
[exports.Type.IDENT]: validateTransform({
pretty: (configuration, ident) => {

@@ -89,3 +96,3 @@ return structUtils.prettyIdent(configuration, ident);

}),
[Type.LOCATOR]: validateTransform({
[exports.Type.LOCATOR]: validateTransform({
pretty: (configuration, locator) => {

@@ -98,3 +105,3 @@ return structUtils.prettyLocator(configuration, locator);

}),
[Type.DESCRIPTOR]: validateTransform({
[exports.Type.DESCRIPTOR]: validateTransform({
pretty: (configuration, descriptor) => {

@@ -107,3 +114,3 @@ return structUtils.prettyDescriptor(configuration, descriptor);

}),
[Type.RESOLUTION]: validateTransform({
[exports.Type.RESOLUTION]: validateTransform({
pretty: (configuration, { descriptor, locator }) => {

@@ -121,3 +128,3 @@ return structUtils.prettyResolution(configuration, descriptor, locator);

}),
[Type.DEPENDENT]: validateTransform({
[exports.Type.DEPENDENT]: validateTransform({
pretty: (configuration, { locator, descriptor }) => {

@@ -133,11 +140,11 @@ return structUtils.prettyDependent(configuration, locator, descriptor);

}),
[Type.PACKAGE_EXTENSION]: validateTransform({
[exports.Type.PACKAGE_EXTENSION]: validateTransform({
pretty: (configuration, packageExtension) => {
switch (packageExtension.type) {
case types_1.PackageExtensionType.Dependency:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `dependencies`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `dependencies`, exports.Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependency:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependencies`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependencies`, exports.Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, packageExtension.descriptor)}`;
case types_1.PackageExtensionType.PeerDependencyMeta:
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependenciesMeta`, Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, structUtils.parseIdent(packageExtension.selector))} ➤ ${applyColor(configuration, packageExtension.key, Type.CODE)}`;
return `${structUtils.prettyIdent(configuration, packageExtension.parentDescriptor)} ➤ ${applyColor(configuration, `peerDependenciesMeta`, exports.Type.CODE)} ➤ ${structUtils.prettyIdent(configuration, structUtils.parseIdent(packageExtension.selector))} ➤ ${applyColor(configuration, packageExtension.key, exports.Type.CODE)}`;
default:

@@ -160,3 +167,13 @@ throw new Error(`Assertion failed: Unsupported package extension type: ${packageExtension.type}`);

}),
[Type.DURATION]: validateTransform({
[exports.Type.SETTING]: validateTransform({
pretty: (configuration, settingName) => {
// Asserts that the setting is valid
configuration.get(settingName);
return applyHyperlink(configuration, applyColor(configuration, settingName, exports.Type.CODE), `https://yarnpkg.com/configuration/yarnrc#${settingName}`);
},
json: (settingName) => {
return settingName;
},
}),
[exports.Type.DURATION]: validateTransform({
pretty: (configuration, duration) => {

@@ -178,3 +195,3 @@ if (duration > 1000 * 60) {

}),
[Type.SIZE]: validateTransform({
[exports.Type.SIZE]: validateTransform({
pretty: (configuration, size) => {

@@ -187,3 +204,3 @@ const thresholds = [`KB`, `MB`, `GB`, `TB`];

const value = Math.floor(size * 100 / factor) / 100;
return applyColor(configuration, `${value} ${thresholds[power - 1]}`, Type.NUMBER);
return applyColor(configuration, `${value} ${thresholds[power - 1]}`, exports.Type.NUMBER);
},

@@ -194,5 +211,5 @@ json: (size) => {

}),
[Type.PATH]: validateTransform({
[exports.Type.PATH]: validateTransform({
pretty: (configuration, filePath) => {
return applyColor(configuration, fslib_1.npath.fromPortablePath(filePath), Type.PATH);
return applyColor(configuration, fslib_1.npath.fromPortablePath(filePath), exports.Type.PATH);
},

@@ -237,5 +254,15 @@ json: (filePath) => {

exports.applyColor = applyColor;
function applyHyperlink(configuration, text, href) {
// Only print hyperlinks if allowed per configuration
if (!configuration.get(`enableHyperlinks`))
return text;
// We use BELL as ST because it seems that iTerm doesn't properly support
// the \x1b\\ sequence described in the reference document
// https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda#the-escape-sequence
return `\u001b]8;;${href}\u0007${text}\u001b]8;;\u0007`;
}
exports.applyHyperlink = applyHyperlink;
function pretty(configuration, value, formatType) {
if (value === null)
return applyColor(configuration, `null`, Type.NULL);
return applyColor(configuration, `null`, exports.Type.NULL);
if (Object.prototype.hasOwnProperty.call(transforms, formatType)) {

@@ -275,2 +302,6 @@ const transform = transforms[formatType];

exports.mark = mark;
function prettyField(configuration, { label, value: [value, formatType] }) {
return `${pretty(configuration, label, exports.Type.CODE)}: ${pretty(configuration, value, formatType)}`;
}
exports.prettyField = prettyField;
var LogLevel;

@@ -307,3 +338,3 @@ (function (LogLevel) {

if (logFiltersByText.size > 0) {
const level = logFiltersByText.get(chalk_1.default.reset(text));
const level = logFiltersByText.get((0, strip_ansi_1.default)(text));
if (typeof level !== `undefined`) {

@@ -314,3 +345,3 @@ return level !== null && level !== void 0 ? level : defaultLevel;

if (logFiltersByCode.size > 0) {
const level = logFiltersByCode.get(MessageName_1.stringifyMessageName(name));
const level = logFiltersByCode.get((0, MessageName_1.stringifyMessageName)(name));
if (typeof level !== `undefined`) {

@@ -317,0 +348,0 @@ return level !== null && level !== void 0 ? level : defaultLevel;

@@ -7,5 +7,5 @@ "use strict";

const crypto_1 = require("crypto");
const globby_1 = tslib_1.__importDefault(require("globby"));
const globby_1 = (0, tslib_1.__importDefault)(require("globby"));
function makeHash(...args) {
const hash = crypto_1.createHash(`sha512`);
const hash = (0, crypto_1.createHash)(`sha512`);
for (const arg of args)

@@ -18,3 +18,3 @@ hash.update(arg ? arg : ``);

return new Promise((resolve, reject) => {
const hash = crypto_1.createHash(`sha512`);
const hash = (0, crypto_1.createHash)(`sha512`);
const stream = fslib_1.xfs.createReadStream(path);

@@ -38,3 +38,3 @@ stream.on(`data`, chunk => {

// Ref: https://github.com/sindresorhus/globby/issues/147
const dirListing = await globby_1.default(pattern, {
const dirListing = await (0, globby_1.default)(pattern, {
cwd: fslib_1.npath.fromPortablePath(cwd),

@@ -48,3 +48,3 @@ expandDirectories: false,

});
const listing = await globby_1.default([pattern, ...dirPatterns], {
const listing = await (0, globby_1.default)([pattern, ...dirPatterns], {
cwd: fslib_1.npath.fromPortablePath(cwd),

@@ -66,3 +66,3 @@ expandDirectories: false,

}));
const hash = crypto_1.createHash(`sha512`);
const hash = (0, crypto_1.createHash)(`sha512`);
for (const sub of hashes)

@@ -69,0 +69,0 @@ hash.update(sub);

/// <reference types="node" />
import { PortablePath } from '@yarnpkg/fslib';
import { Response } from 'got';
import { RequestError, Response } from 'got';
import { Configuration } from './Configuration';
export { RequestError } from 'got';
/**

@@ -27,2 +28,3 @@ * Searches through networkSettings and returns the most specific match

configuration: Configuration;
customErrorMessage?: (err: RequestError) => string | null;
headers?: {

@@ -33,10 +35,8 @@ [headerName: string]: string;

jsonResponse?: boolean;
/** @deprecated use jsonRequest and jsonResponse instead */
json?: boolean;
method?: Method;
};
export declare function request(target: string, body: Body, { configuration, headers, json, jsonRequest, jsonResponse, method }: Options): Promise<Response<any>>;
export declare function get(target: string, { configuration, json, jsonResponse, ...rest }: Options): Promise<any>;
export declare function put(target: string, body: Body, options: Options): Promise<Buffer>;
export declare function post(target: string, body: Body, options: Options): Promise<Buffer>;
export declare function del(target: string, options: Options): Promise<Buffer>;
export declare function request(target: string, body: Body, { configuration, headers, jsonRequest, jsonResponse, method }: Omit<Options, 'customErrorMessage'>): Promise<Response<any>>;
export declare function get(target: string, { configuration, jsonResponse, ...rest }: Options): Promise<any>;
export declare function put(target: string, body: Body, { customErrorMessage, ...options }: Options): Promise<Buffer>;
export declare function post(target: string, body: Body, { customErrorMessage, ...options }: Options): Promise<Buffer>;
export declare function del(target: string, { customErrorMessage, ...options }: Options): Promise<Buffer>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.del = exports.post = exports.put = exports.get = exports.request = exports.Method = exports.getNetworkSettings = void 0;
exports.del = exports.post = exports.put = exports.get = exports.request = exports.Method = exports.getNetworkSettings = exports.RequestError = void 0;
const tslib_1 = require("tslib");
const fslib_1 = require("@yarnpkg/fslib");
const got_1 = require("got");
const https_1 = require("https");
const http_1 = require("http");
const micromatch_1 = tslib_1.__importDefault(require("micromatch"));
const tunnel_1 = tslib_1.__importDefault(require("tunnel"));
const micromatch_1 = (0, tslib_1.__importDefault)(require("micromatch"));
const tunnel_1 = (0, tslib_1.__importDefault)(require("tunnel"));
const url_1 = require("url");
const MessageName_1 = require("./MessageName");
const Report_1 = require("./Report");
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
var got_2 = require("got");
Object.defineProperty(exports, "RequestError", { enumerable: true, get: function () { return got_2.RequestError; } });
const cache = new Map();

@@ -23,11 +30,66 @@ const certCache = new Map();

async function getCachedCertificate(caFilePath) {
let certificate = certCache.get(caFilePath);
if (!certificate) {
certificate = fslib_1.xfs.readFilePromise(caFilePath).then(cert => {
return miscUtils.getFactoryWithDefault(certCache, caFilePath, () => {
return fslib_1.xfs.readFilePromise(caFilePath).then(cert => {
certCache.set(caFilePath, cert);
return cert;
});
certCache.set(caFilePath, certificate);
});
}
function prettyResponseCode({ statusCode, statusMessage }, configuration) {
const prettyStatusCode = formatUtils.pretty(configuration, statusCode, formatUtils.Type.NUMBER);
const href = `https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/${statusCode}`;
return formatUtils.applyHyperlink(configuration, `${prettyStatusCode}${statusMessage ? ` (${statusMessage})` : ``}`, href);
}
async function prettyNetworkError(response, { configuration, customErrorMessage }) {
var _a, _b;
try {
return await response;
}
return certificate;
catch (err) {
if (err.name !== `HTTPError`)
throw err;
let message = (_a = customErrorMessage === null || customErrorMessage === void 0 ? void 0 : customErrorMessage(err)) !== null && _a !== void 0 ? _a : (_b = err.response.body) === null || _b === void 0 ? void 0 : _b.error;
if (message == null) {
if (err.message.startsWith(`Response code`)) {
message = `The remote server failed to provide the requested resource`;
}
else {
message = err.message;
}
}
if (err instanceof got_1.TimeoutError && err.event === `socket`)
message += `(can be increased via ${formatUtils.pretty(configuration, `httpTimeout`, formatUtils.Type.SETTING)})`;
const networkError = new Report_1.ReportError(MessageName_1.MessageName.NETWORK_ERROR, message, report => {
if (err.response) {
report.reportError(MessageName_1.MessageName.NETWORK_ERROR, ` ${formatUtils.prettyField(configuration, {
label: `Response Code`,
value: formatUtils.tuple(formatUtils.Type.NO_HINT, prettyResponseCode(err.response, configuration)),
})}`);
}
if (err.request) {
report.reportError(MessageName_1.MessageName.NETWORK_ERROR, ` ${formatUtils.prettyField(configuration, {
label: `Request Method`,
value: formatUtils.tuple(formatUtils.Type.NO_HINT, err.request.options.method),
})}`);
report.reportError(MessageName_1.MessageName.NETWORK_ERROR, ` ${formatUtils.prettyField(configuration, {
label: `Request URL`,
value: formatUtils.tuple(formatUtils.Type.URL, err.request.requestUrl),
})}`);
}
if (err.request.redirects.length > 0) {
report.reportError(MessageName_1.MessageName.NETWORK_ERROR, ` ${formatUtils.prettyField(configuration, {
label: `Request Redirects`,
value: formatUtils.tuple(formatUtils.Type.NO_HINT, formatUtils.prettyList(configuration, err.request.redirects, formatUtils.Type.URL)),
})}`);
}
if (err.request.retryCount === err.request.options.retry.limit) {
report.reportError(MessageName_1.MessageName.NETWORK_ERROR, ` ${formatUtils.prettyField(configuration, {
label: `Request Retry Count`,
value: formatUtils.tuple(formatUtils.Type.NO_HINT, `${formatUtils.pretty(configuration, err.request.retryCount, formatUtils.Type.NUMBER)} (can be increased via ${formatUtils.pretty(configuration, `httpRetry`, formatUtils.Type.SETTING)})`),
})}`);
}
});
networkError.originalError = err;
throw networkError;
}
}

@@ -61,7 +123,5 @@ /**

// Apply defaults
for (const key of mergableKeys) {
if (typeof mergedNetworkSettings[key] === `undefined`) {
for (const key of mergableKeys)
if (typeof mergedNetworkSettings[key] === `undefined`)
mergedNetworkSettings[key] = opts.configuration.get(key);
}
}
return mergedNetworkSettings;

@@ -77,3 +137,3 @@ }

})(Method = exports.Method || (exports.Method = {}));
async function request(target, body, { configuration, headers, json, jsonRequest = json, jsonResponse = json, method = Method.GET }) {
async function request(target, body, { configuration, headers, jsonRequest, jsonResponse, method = Method.GET }) {
const networkConfig = getNetworkSettings(target, { configuration });

@@ -110,3 +170,3 @@ if (networkConfig.enableNetwork === false)

const caFilePath = networkConfig.caFilePath;
const { default: got } = await Promise.resolve().then(() => tslib_1.__importStar(require(`got`)));
const { default: got } = await Promise.resolve().then(() => (0, tslib_1.__importStar)(require(`got`)));
const certificateAuthority = caFilePath

@@ -131,11 +191,9 @@ ? await getCachedCertificate(caFilePath)

exports.request = request;
async function get(target, { configuration, json, jsonResponse = json, ...rest }) {
let entry = cache.get(target);
if (!entry) {
entry = request(target, null, { configuration, ...rest }).then(response => {
async function get(target, { configuration, jsonResponse, ...rest }) {
let entry = miscUtils.getFactoryWithDefault(cache, target, () => {
return prettyNetworkError(request(target, null, { configuration, ...rest }), { configuration }).then(response => {
cache.set(target, response.body);
return response.body;
});
cache.set(target, entry);
}
});
if (Buffer.isBuffer(entry) === false)

@@ -151,16 +209,16 @@ entry = await entry;

exports.get = get;
async function put(target, body, options) {
const response = await request(target, body, { ...options, method: Method.PUT });
async function put(target, body, { customErrorMessage, ...options }) {
const response = await prettyNetworkError(request(target, body, { ...options, method: Method.PUT }), options);
return response.body;
}
exports.put = put;
async function post(target, body, options) {
const response = await request(target, body, { ...options, method: Method.POST });
async function post(target, body, { customErrorMessage, ...options }) {
const response = await prettyNetworkError(request(target, body, { ...options, method: Method.POST }), options);
return response.body;
}
exports.post = post;
async function del(target, options) {
const response = await request(target, null, { ...options, method: Method.DELETE });
async function del(target, { customErrorMessage, ...options }) {
const response = await prettyNetworkError(request(target, null, { ...options, method: Method.DELETE }), options);
return response.body;
}
exports.del = del;

@@ -5,23 +5,23 @@ "use strict";

const tslib_1 = require("tslib");
const execUtils = tslib_1.__importStar(require("./execUtils"));
const execUtils = (0, tslib_1.__importStar)(require("./execUtils"));
exports.execUtils = execUtils;
const folderUtils = tslib_1.__importStar(require("./folderUtils"));
const folderUtils = (0, tslib_1.__importStar)(require("./folderUtils"));
exports.folderUtils = folderUtils;
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
exports.formatUtils = formatUtils;
const hashUtils = tslib_1.__importStar(require("./hashUtils"));
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils"));
exports.hashUtils = hashUtils;
const httpUtils = tslib_1.__importStar(require("./httpUtils"));
const httpUtils = (0, tslib_1.__importStar)(require("./httpUtils"));
exports.httpUtils = httpUtils;
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
exports.miscUtils = miscUtils;
const scriptUtils = tslib_1.__importStar(require("./scriptUtils"));
const scriptUtils = (0, tslib_1.__importStar)(require("./scriptUtils"));
exports.scriptUtils = scriptUtils;
const semverUtils = tslib_1.__importStar(require("./semverUtils"));
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils"));
exports.semverUtils = semverUtils;
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
exports.structUtils = structUtils;
const tgzUtils = tslib_1.__importStar(require("./tgzUtils"));
const tgzUtils = (0, tslib_1.__importStar)(require("./tgzUtils"));
exports.tgzUtils = tgzUtils;
const treeUtils = tslib_1.__importStar(require("./treeUtils"));
const treeUtils = (0, tslib_1.__importStar)(require("./treeUtils"));
exports.treeUtils = treeUtils;

@@ -28,0 +28,0 @@ var Cache_1 = require("./Cache");

@@ -7,5 +7,5 @@ "use strict";

const parsers_1 = require("@yarnpkg/parsers");
const semver_1 = tslib_1.__importDefault(require("semver"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const MessageName_1 = require("./MessageName");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const IMPORTED_PATTERNS = [

@@ -19,3 +19,3 @@ // These ones come from Git urls

// Note: /download/ is used by custom registries like Taobao
[/^https?:\/\/[^/]+\/(?:[^/]+\/)*(?:@[^/]+\/)?([^/]+)\/(?:-|download)\/\1-[^/]+\.tgz(?:#|$)/, version => `npm:${version}`],
[/^https?:\/\/[^/]+\/(?:[^/]+\/)*(?:@.+(?:\/|(?:%2f)))?([^/]+)\/(?:-|download)\/\1-[^/]+\.tgz(?:#|$)/, version => `npm:${version}`],
// The GitHub package registry uses a different style of URLs

@@ -25,2 +25,4 @@ [/^https:\/\/npm\.pkg\.github\.com\/download\/(?:@[^/]+)\/(?:[^/]+)\/(?:[^/]+)\/(?:[0-9a-f]+)$/, version => `npm:${version}`],

[/^https:\/\/npm\.fontawesome\.com\/(?:@[^/]+)\/([^/]+)\/-\/([^/]+)\/\1-\2.tgz(?:#|$)/, version => `npm:${version}`],
// JFrog
[/^https?:\/\/(?:[^\\.]+)\.jfrog\.io\/.*\/(@[^/]+)\/([^/]+)\/-\/\1\/\2-(?:[.\d\w-]+)\.tgz(?:#|$)/, (version, $0) => structUtils.makeRange({ protocol: `npm:`, source: null, selector: version, params: { __archiveUrl: $0 } })],
// These ones come from the old Yarn offline mirror - we assume they came from npm

@@ -39,3 +41,3 @@ [/^[^/]+\.tgz#[0-9a-f]+$/, version => `npm:${version}`],

const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`);
const parsed = parsers_1.parseSyml(content);
const parsed = (0, parsers_1.parseSyml)(content);
// No need to enable it either if the lockfile is modern

@@ -70,4 +72,14 @@ if (Object.prototype.hasOwnProperty.call(parsed, `__metadata`))

}
const resolution = structUtils.makeLocator(descriptor, reference);
resolutions.set(descriptor.descriptorHash, resolution);
// If the range is a valid descriptor we're dealing with an alias ("foo": "npm:lodash@*")
// and need to make the locator from that instead of the original descriptor
let actualDescriptor = descriptor;
try {
const parsedRange = structUtils.parseRange(descriptor.range);
const potentialDescriptor = structUtils.tryParseDescriptor(parsedRange.selector, true);
if (potentialDescriptor) {
actualDescriptor = potentialDescriptor;
}
}
catch (_a) { }
resolutions.set(descriptor.descriptorHash, structUtils.makeLocator(actualDescriptor, reference));
}

@@ -74,0 +86,0 @@ }

@@ -39,4 +39,4 @@ /// <reference types="node" />

catch<TResult = never>(onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null | undefined): Promise<void | TResult>;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
[Symbol.toStringTag]: string;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
};

@@ -43,0 +43,0 @@ reportJson(data: any): void;

@@ -7,3 +7,3 @@ "use strict";

const StreamReport_1 = require("./StreamReport");
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
class LightReport extends Report_1.Report {

@@ -79,2 +79,3 @@ constructor({ configuration, stdout, suggestInstall = true }) {

if (this.errorCount > 0) {
this.stdout.write(`\n`);
this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} Errors happened when preparing the environment required to run this command.\n`);

@@ -87,3 +88,3 @@ if (this.suggestInstall) {

formatNameWithHyperlink(name) {
return StreamReport_1.formatNameWithHyperlink(name, {
return (0, StreamReport_1.formatNameWithHyperlink)(name, {
configuration: this.configuration,

@@ -90,0 +91,0 @@ json: false,

@@ -16,3 +16,3 @@ import { PortablePath } from '@yarnpkg/fslib';

* and put them on the filesystem in a way that their target environment will
* understand (for example, in Node's case, it will be to generate a .pnp.js
* understand (for example, in Node's case, it will be to generate a .pnp.cjs
* file).

@@ -19,0 +19,0 @@ *

@@ -5,3 +5,3 @@ "use strict";

const tslib_1 = require("tslib");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class LockfileResolver {

@@ -8,0 +8,0 @@ supportsDescriptor(descriptor, opts) {

@@ -7,6 +7,6 @@ "use strict";

const parsers_1 = require("@yarnpkg/parsers");
const semver_1 = tslib_1.__importDefault(require("semver"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const semverUtils = tslib_1.__importStar(require("./semverUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class Manifest {

@@ -119,2 +119,3 @@ constructor() {

const errors = [];
this.name = null;
if (typeof data.name === `string`) {

@@ -130,2 +131,4 @@ try {

this.version = data.version;
else
this.version = null;
if (Array.isArray(data.os)) {

@@ -143,2 +146,5 @@ const os = [];

}
else {
this.os = null;
}
if (Array.isArray(data.cpu)) {

@@ -156,14 +162,29 @@ const cpu = [];

}
else {
this.cpu = null;
}
if (typeof data.type === `string`)
this.type = data.type;
else
this.type = null;
if (typeof data.private === `boolean`)
this.private = data.private;
else
this.private = false;
if (typeof data.license === `string`)
this.license = data.license;
else
this.license = null;
if (typeof data.languageName === `string`)
this.languageName = data.languageName;
else
this.languageName = null;
if (typeof data.main === `string`)
this.main = normalizeSlashes(data.main);
else
this.main = null;
if (typeof data.module === `string`)
this.module = normalizeSlashes(data.module);
else
this.module = null;
if (data.browser != null) {

@@ -180,5 +201,9 @@ if (typeof data.browser === `string`) {

}
else {
this.browser = null;
}
this.bin = new Map();
if (typeof data.bin === `string`) {
if (this.name !== null) {
this.bin = new Map([[this.name.name, normalizeSlashes(data.bin)]]);
this.bin.set(this.name.name, normalizeSlashes(data.bin));
}

@@ -198,2 +223,3 @@ else {

}
this.scripts = new Map();
if (typeof data.scripts === `object` && data.scripts !== null) {

@@ -208,2 +234,3 @@ for (const [key, value] of Object.entries(data.scripts)) {

}
this.dependencies = new Map();
if (typeof data.dependencies === `object` && data.dependencies !== null) {

@@ -227,2 +254,3 @@ for (const [name, range] of Object.entries(data.dependencies)) {

}
this.devDependencies = new Map();
if (typeof data.devDependencies === `object` && data.devDependencies !== null) {

@@ -246,2 +274,3 @@ for (const [name, range] of Object.entries(data.devDependencies)) {

}
this.peerDependencies = new Map();
if (typeof data.peerDependencies === `object` && data.peerDependencies !== null) {

@@ -272,2 +301,3 @@ for (let [name, range] of Object.entries(data.peerDependencies)) {

: [];
this.workspaceDefinitions = [];
for (const entry of workspaces) {

@@ -282,2 +312,3 @@ if (typeof entry !== `string`) {

}
this.dependenciesMeta = new Map();
if (typeof data.dependenciesMeta === `object` && data.dependenciesMeta !== null) {

@@ -309,2 +340,3 @@ for (const [pattern, meta] of Object.entries(data.dependenciesMeta)) {

}
this.peerDependenciesMeta = new Map();
if (typeof data.peerDependenciesMeta === `object` && data.peerDependenciesMeta !== null) {

@@ -326,2 +358,3 @@ for (const [pattern, meta] of Object.entries(data.peerDependenciesMeta)) {

}
this.resolutions = [];
if (typeof data.resolutions === `object` && data.resolutions !== null) {

@@ -334,3 +367,3 @@ for (const [pattern, reference] of Object.entries(data.resolutions)) {

try {
this.resolutions.push({ pattern: parsers_1.parseResolution(pattern), reference });
this.resolutions.push({ pattern: (0, parsers_1.parseResolution)(pattern), reference });
}

@@ -353,2 +386,5 @@ catch (error) {

}
else {
this.files = null;
}
if (typeof data.publishConfig === `object` && data.publishConfig !== null) {

@@ -404,2 +440,5 @@ this.publishConfig = {};

}
else {
this.publishConfig = null;
}
if (typeof data.installConfig === `object` && data.installConfig !== null) {

@@ -421,2 +460,5 @@ this.installConfig = {};

}
else {
this.installConfig = null;
}
// We treat optional dependencies after both the regular dependency field

@@ -452,2 +494,4 @@ // and the dependenciesMeta field have been generated (because we will

this.preferUnplugged = data.preferUnplugged;
else
this.preferUnplugged = null;
this.errors = errors;

@@ -706,3 +750,3 @@ }

data.resolutions = Object.assign({}, ...this.resolutions.map(({ pattern, reference }) => {
return { [parsers_1.stringifyResolution(pattern)]: reference };
return { [(0, parsers_1.stringifyResolution)(pattern)]: reference };
}));

@@ -709,0 +753,0 @@ }

@@ -71,4 +71,7 @@ export declare enum MessageName {

UNUSED_PACKAGE_EXTENSION = 68,
REDUNDANT_PACKAGE_EXTENSION = 69
REDUNDANT_PACKAGE_EXTENSION = 69,
AUTO_NM_SUCCESS = 70,
NM_CANT_INSTALL_PORTAL = 71,
NM_PRESERVE_SYMLINKS_REQUIRED = 72
}
export declare function stringifyMessageName(name: MessageName | number): string;

@@ -79,2 +79,5 @@ "use strict";

MessageName[MessageName["REDUNDANT_PACKAGE_EXTENSION"] = 69] = "REDUNDANT_PACKAGE_EXTENSION";
MessageName[MessageName["AUTO_NM_SUCCESS"] = 70] = "AUTO_NM_SUCCESS";
MessageName[MessageName["NM_CANT_INSTALL_PORTAL"] = 71] = "NM_CANT_INSTALL_PORTAL";
MessageName[MessageName["NM_PRESERVE_SYMLINKS_REQUIRED"] = 72] = "NM_PRESERVE_SYMLINKS_REQUIRED";
})(MessageName = exports.MessageName || (exports.MessageName = {}));

@@ -81,0 +84,0 @@ function stringifyMessageName(name) {

/// <reference types="node" />
/// <reference types="jest" />
import { PortablePath } from '@yarnpkg/fslib';

@@ -54,3 +55,3 @@ import { Readable, Transform } from 'stream';

}
export declare function dynamicRequire(path: string): any;
export declare const dynamicRequire: NodeRequire;
export declare function dynamicRequireNoCache(path: PortablePath): any;

@@ -74,2 +75,5 @@ export declare function sortMap<T>(values: Iterable<T>, mappers: ((value: T) => string) | Array<(value: T) => string>): T[];

export declare function tryParseOptionalBoolean(value: unknown): boolean | undefined | null;
export declare type FilterKeys<T extends {}, Filter> = {
[K in keyof T]: T[K] extends Filter ? K : never;
}[keyof T];
export {};

@@ -7,3 +7,3 @@ "use strict";

const clipanion_1 = require("clipanion");
const micromatch_1 = tslib_1.__importDefault(require("micromatch"));
const micromatch_1 = (0, tslib_1.__importDefault)(require("micromatch"));
const stream_1 = require("stream");

@@ -188,2 +188,5 @@ function escapeRegExp(str) {

}
else {
cb(null);
}
}

@@ -195,28 +198,19 @@ }

// of a web application, but is quite annoying when working with Node projects!
function dynamicRequire(path) {
// @ts-expect-error
if (typeof __non_webpack_require__ !== `undefined`) {
// @ts-expect-error
return __non_webpack_require__(path);
}
else {
return require(path);
}
}
exports.dynamicRequire = dynamicRequire;
exports.dynamicRequire = eval(`require`);
function dynamicRequireNoCache(path) {
const physicalPath = fslib_1.npath.fromPortablePath(path);
const currentCacheEntry = require.cache[physicalPath];
delete require.cache[physicalPath];
const currentCacheEntry = exports.dynamicRequire.cache[physicalPath];
delete exports.dynamicRequire.cache[physicalPath];
let result;
try {
result = dynamicRequire(physicalPath);
const freshCacheEntry = require.cache[physicalPath];
const freshCacheIndex = module.children.indexOf(freshCacheEntry);
result = (0, exports.dynamicRequire)(physicalPath);
const freshCacheEntry = exports.dynamicRequire.cache[physicalPath];
const dynamicModule = eval(`module`);
const freshCacheIndex = dynamicModule.children.indexOf(freshCacheEntry);
if (freshCacheIndex !== -1) {
module.children.splice(freshCacheIndex, 1);
dynamicModule.children.splice(freshCacheIndex, 1);
}
}
finally {
require.cache[physicalPath] = currentCacheEntry;
exports.dynamicRequire.cache[physicalPath] = currentCacheEntry;
}

@@ -269,2 +263,3 @@ return result;

windows: false,
dot: true,
}).source})`;

@@ -271,0 +266,0 @@ }).join(`|`);

@@ -7,3 +7,3 @@ "use strict";

const Report_1 = require("./Report");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class MultiFetcher {

@@ -10,0 +10,0 @@ constructor(fetchers) {

@@ -5,3 +5,3 @@ "use strict";

const tslib_1 = require("tslib");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class MultiResolver {

@@ -8,0 +8,0 @@ constructor(resolvers) {

@@ -1,2 +0,1 @@

export declare function dynamicRequire(request: string): any;
export declare function builtinModules(): Set<string>;
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.builtinModules = exports.dynamicRequire = void 0;
exports.builtinModules = void 0;
const tslib_1 = require("tslib");
const module_1 = tslib_1.__importDefault(require("module"));
function dynamicRequire(request) {
const req = typeof __non_webpack_require__ !== `undefined`
? __non_webpack_require__
: require;
return req(request);
}
exports.dynamicRequire = dynamicRequire;
const module_1 = (0, tslib_1.__importDefault)(require("module"));
function builtinModules() {

@@ -14,0 +7,0 @@ // @ts-expect-error

@@ -34,4 +34,23 @@ /// <reference types="node" />

export declare type Hooks = {
/**
* Called when the package extensions are setup. Can be used to inject new
* ones (for example, that's what the compat plugin uses to workaround
* metadata problems).
*/
registerPackageExtensions?: (configuration: Configuration, registerPackageExtension: (descriptor: Descriptor, extensionData: PackageExtensionData) => void) => Promise<void>;
/**
* Called before a script is executed. The hooks are allowed to modify the
* `env` object as they see fit, and any call to `makePathWrapper` will cause
* a binary of the given name to be injected somewhere within the PATH (we
* recommend you don't alter the PATH yourself unless required).
*
* The keys you get in the env are guaranteed to be uppercase. We strongly
* suggest you adopt this convention for any new key added to the env (we
* might enforce it later on).
*/
setupScriptEnvironment?: (project: Project, env: ProcessEnvironment, makePathWrapper: (name: string, argv0: string, args: Array<string>) => Promise<void>) => Promise<void>;
/**
* When a script is getting executed. You must call the executor, or the
* script won't be called at all.
*/
wrapScriptExecution?: (executor: () => Promise<number>, project: Project, locator: Locator, scriptName: string, extra: {

@@ -46,3 +65,12 @@ script: string;

}) => Promise<() => Promise<number>>;
/**
* Called before the build, to compute a global hash key that we will use
* to detect whether packages must be rebuilt (typically when the Node
* version changes)
*/
globalHashGeneration?: (project: Project, contributeHash: (data: string | Buffer) => void) => Promise<void>;
/**
* Before the resolution runs; should be use to setup new aliases that won't
* persist on the project instance itself.
*/
reduceDependency?: (dependency: Descriptor, project: Project, locator: Locator, initialDependency: Descriptor, extra: {

@@ -52,3 +80,11 @@ resolver: Resolver;

}) => Promise<Descriptor>;
/**
* Called after the `install` method from the `Project` class successfully
* completed.
*/
afterAllInstalled?: (project: Project, options: InstallOptions) => void;
/**
* Called during the `Validation step` of the `install` method from the `Project`
* class.
*/
validateProject?: (project: Project, report: {

@@ -58,2 +94,6 @@ reportWarning: (name: MessageName, text: string) => void;

}) => void;
/**
* Called during the `Validation step` of the `install` method from the `Project`
* class by the `validateProject` hook.
*/
validateWorkspace?: (workspace: Workspace, report: {

@@ -63,2 +103,5 @@ reportWarning: (name: MessageName, text: string) => void;

}) => void;
/**
* Used to notify the core of all the potential artifacts of the available linkers.
*/
populateYarnPaths?: (project: Project, definePath: (path: PortablePath | null) => void) => Promise<void>;

@@ -65,0 +108,0 @@ };

@@ -63,2 +63,3 @@ import { PortablePath } from '@yarnpkg/fslib';

restoreResolutions: readonly ["accessibleLocators", "optionalBuilds", "storedDescriptors", "storedResolutions", "storedPackages", "lockFileChecksum"];
restoreBuildState: readonly ["storedBuildState"];
};

@@ -95,2 +96,3 @@ declare type RestoreInstallStateOpts = {

storedChecksums: Map<LocatorHash, string>;
storedBuildState: Map<LocatorHash, string>;
accessibleLocators: Set<LocatorHash>;

@@ -113,3 +115,2 @@ originalPackages: Map<LocatorHash, Package>;

}>;
static generateBuildStateFile(buildState: Map<LocatorHash, string>, locatorStore: Map<LocatorHash, Locator>): string;
constructor(projectCwd: PortablePath, { configuration }: {

@@ -161,3 +162,3 @@ configuration: Configuration;

persistInstallStateFile(): Promise<void>;
restoreInstallState({ restoreInstallersCustomData, restoreResolutions }?: RestoreInstallStateOpts): Promise<void>;
restoreInstallState({ restoreInstallersCustomData, restoreResolutions, restoreBuildState }?: RestoreInstallStateOpts): Promise<void>;
applyLightResolution(): Promise<void>;

@@ -164,0 +165,0 @@ persist(): Promise<void>;

@@ -5,6 +5,6 @@ "use strict";

const tslib_1 = require("tslib");
const semver_1 = tslib_1.__importDefault(require("semver"));
const semverUtils = tslib_1.__importStar(require("./semverUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
exports.TAG_REGEXP = /^(?!v)[a-z0-9-.]+$/i;
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
exports.TAG_REGEXP = /^(?!v)[a-z0-9._-]+$/i;
class ProtocolResolver {

@@ -11,0 +11,0 @@ supportsDescriptor(descriptor, opts) {

@@ -8,2 +8,3 @@ /// <reference types="node" />

reportCode: MessageName;
originalError?: Error;
constructor(code: MessageName, message: string, reportExtra?: ((report: Report) => void) | undefined);

@@ -48,5 +49,7 @@ }

key?: any;
reportExtra?: (report: Report) => void;
}): void;
reportWarningOnce(name: MessageName, text: string, opts?: {
key?: any;
reportExtra?: (report: Report) => void;
}): void;

@@ -53,0 +56,0 @@ reportErrorOnce(name: MessageName, text: string, opts?: {

@@ -59,2 +59,3 @@ "use strict";

reportInfoOnce(name, text, opts) {
var _a;
const key = opts && opts.key ? opts.key : text;

@@ -64,5 +65,7 @@ if (!this.reportedInfos.has(key)) {

this.reportInfo(name, text);
(_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
}
}
reportWarningOnce(name, text, opts) {
var _a;
const key = opts && opts.key ? opts.key : text;

@@ -72,2 +75,3 @@ if (!this.reportedWarnings.has(key)) {

this.reportWarning(name, text);
(_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
}

@@ -74,0 +78,0 @@ }

@@ -26,11 +26,11 @@ "use strict";

async getCandidates(descriptor, dependencies, opts) {
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; try to make an install to update your resolutions`);
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; run "yarn install" to update the lockfile`);
}
async getSatisfying(descriptor, references, opts) {
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; try to make an install to update your resolutions`);
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; run "yarn install" to update the lockfile`);
}
async resolve(locator, opts) {
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; try to make an install to update your resolutions`);
throw new Report_1.ReportError(MessageName_1.MessageName.MISSING_LOCKFILE_ENTRY, `This package doesn't seem to be present in your lockfile; run "yarn install" to update the lockfile`);
}
}
exports.RunInstallPleaseResolver = RunInstallPleaseResolver;

@@ -9,4 +9,5 @@ /// <reference types="node" />

import { Locator } from './types';
export declare function makeScriptEnv({ project, binFolder, lifecycleScript }: {
export declare function makeScriptEnv({ project, locator, binFolder, lifecycleScript }: {
project?: Project;
locator?: Locator;
binFolder: PortablePath;

@@ -19,6 +20,7 @@ lifecycleScript?: string;

}>;
export declare function prepareExternalProject(cwd: PortablePath, outputPath: PortablePath, { configuration, report, workspace }: {
export declare function prepareExternalProject(cwd: PortablePath, outputPath: PortablePath, { configuration, report, workspace, locator }: {
configuration: Configuration;
report: Report;
workspace?: string | null;
locator?: Locator | null;
}): Promise<void>;

@@ -55,2 +57,4 @@ declare type HasPackageScriptOption = {

};
declare type Binary = [Locator, NativePath];
declare type PackageAccessibleBinaries = Map<string, Binary>;
/**

@@ -62,3 +66,3 @@ * Return the binaries that can be accessed by the specified package

*/
export declare function getPackageAccessibleBinaries(locator: Locator, { project }: GetPackageAccessibleBinariesOptions): Promise<Map<string, [Locator, NativePath]>>;
export declare function getPackageAccessibleBinaries(locator: Locator, { project }: GetPackageAccessibleBinariesOptions): Promise<PackageAccessibleBinaries>;
/**

@@ -69,3 +73,3 @@ * Return the binaries that can be accessed by the specified workspace

*/
export declare function getWorkspaceAccessibleBinaries(workspace: Workspace): Promise<Map<string, [Locator, NativePath]>>;
export declare function getWorkspaceAccessibleBinaries(workspace: Workspace): Promise<PackageAccessibleBinaries>;
declare type ExecutePackageAccessibleBinaryOptions = {

@@ -78,2 +82,4 @@ cwd: PortablePath;

stderr: Writable;
/** @internal */
packageAccessibleBinaries?: PackageAccessibleBinaries;
};

@@ -91,3 +97,3 @@ /**

*/
export declare function executePackageAccessibleBinary(locator: Locator, binaryName: string, args: Array<string>, { cwd, project, stdin, stdout, stderr, nodeArgs }: ExecutePackageAccessibleBinaryOptions): Promise<number>;
export declare function executePackageAccessibleBinary(locator: Locator, binaryName: string, args: Array<string>, { cwd, project, stdin, stdout, stderr, nodeArgs, packageAccessibleBinaries }: ExecutePackageAccessibleBinaryOptions): Promise<number>;
declare type ExecuteWorkspaceAccessibleBinaryOptions = {

@@ -98,2 +104,4 @@ cwd: PortablePath;

stderr: Writable;
/** @internal */
packageAccessibleBinaries?: PackageAccessibleBinaries;
};

@@ -107,3 +115,3 @@ /**

*/
export declare function executeWorkspaceAccessibleBinary(workspace: Workspace, binaryName: string, args: Array<string>, { cwd, stdin, stdout, stderr }: ExecuteWorkspaceAccessibleBinaryOptions): Promise<number>;
export declare function executeWorkspaceAccessibleBinary(workspace: Workspace, binaryName: string, args: Array<string>, { cwd, stdin, stdout, stderr, packageAccessibleBinaries }: ExecuteWorkspaceAccessibleBinaryOptions): Promise<number>;
export {};

@@ -9,5 +9,4 @@ "use strict";

const shell_1 = require("@yarnpkg/shell");
const binjumper_1 = require("binjumper");
const capitalize_1 = tslib_1.__importDefault(require("lodash/capitalize"));
const p_limit_1 = tslib_1.__importDefault(require("p-limit"));
const capitalize_1 = (0, tslib_1.__importDefault)(require("lodash/capitalize"));
const p_limit_1 = (0, tslib_1.__importDefault)(require("p-limit"));
const stream_1 = require("stream");

@@ -19,6 +18,6 @@ const Manifest_1 = require("./Manifest");

const YarnVersion_1 = require("./YarnVersion");
const execUtils = tslib_1.__importStar(require("./execUtils"));
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const execUtils = (0, tslib_1.__importStar)(require("./execUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
var PackageManager;

@@ -33,7 +32,5 @@ (function (PackageManager) {

if (process.platform === `win32`) {
await Promise.all([
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.exe` }), binjumper_1.getBinjumper()),
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.exe.info` }), [argv0, ...args].join(`\n`)),
fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.cmd` }), `@"${argv0}" ${args.map(arg => `"${arg.replace(`"`, `""`)}"`).join(` `)} %*\n`),
]);
// https://github.com/microsoft/terminal/issues/217#issuecomment-737594785
const cmdScript = `@goto #_undefined_# 2>NUL || @title %COMSPEC% & @setlocal & @"${argv0}" ${args.map(arg => `"${arg.replace(`"`, `""`)}"`).join(` `)} %*`;
await fslib_2.xfs.writeFilePromise(fslib_2.ppath.format({ dir: location, name, ext: `.cmd` }), cmdScript);
}

@@ -51,15 +48,19 @@ await fslib_2.xfs.writeFilePromise(fslib_2.ppath.join(location, name), `#!/bin/sh\nexec "${argv0}" ${args.map(arg => `'${arg.replace(/'/g, `'"'"'`)}'`).join(` `)} "$@"\n`);

if (yarnLock.match(/^__metadata:$/m)) {
return PackageManager.Yarn2;
return { packageManager: PackageManager.Yarn2, reason: `"__metadata" key found in yarn.lock` };
}
else {
return PackageManager.Yarn1;
return {
packageManager: PackageManager.Yarn1,
reason: `"__metadata" key not found in yarn.lock, must be a Yarn classic lockfile`,
};
}
}
if (fslib_2.xfs.existsSync(fslib_2.ppath.join(location, `package-lock.json`)))
return PackageManager.Npm;
return { packageManager: PackageManager.Npm, reason: `found npm's "package-lock.json" lockfile` };
if (fslib_2.xfs.existsSync(fslib_2.ppath.join(location, `pnpm-lock.yaml`)))
return PackageManager.Pnpm;
return { packageManager: PackageManager.Pnpm, reason: `found pnpm's "pnpm-lock.yaml" lockfile` };
return null;
}
async function makeScriptEnv({ project, binFolder, lifecycleScript }) {
async function makeScriptEnv({ project, locator, binFolder, lifecycleScript }) {
var _a, _b;
const scriptEnv = {};

@@ -75,11 +76,15 @@ for (const [key, value] of Object.entries(process.env))

// spawned by Yarn (we thus ensure that they always use the right version)
await makePathWrapper(binFolder, `node`, process.execPath);
if (YarnVersion_1.YarnVersion !== null) {
await makePathWrapper(binFolder, `run`, process.execPath, [process.argv[1], `run`]);
await makePathWrapper(binFolder, `yarn`, process.execPath, [process.argv[1]]);
await makePathWrapper(binFolder, `yarnpkg`, process.execPath, [process.argv[1]]);
await makePathWrapper(binFolder, `node-gyp`, process.execPath, [process.argv[1], `run`, `--top-level`, `node-gyp`]);
await Promise.all([
makePathWrapper(binFolder, `node`, process.execPath),
...YarnVersion_1.YarnVersion !== null ? [
makePathWrapper(binFolder, `run`, process.execPath, [process.argv[1], `run`]),
makePathWrapper(binFolder, `yarn`, process.execPath, [process.argv[1]]),
makePathWrapper(binFolder, `yarnpkg`, process.execPath, [process.argv[1]]),
makePathWrapper(binFolder, `node-gyp`, process.execPath, [process.argv[1], `run`, `--top-level`, `node-gyp`]),
] : [],
]);
if (project) {
scriptEnv.INIT_CWD = fslib_2.npath.fromPortablePath(project.configuration.startingCwd);
scriptEnv.PROJECT_CWD = fslib_2.npath.fromPortablePath(project.cwd);
}
if (project)
scriptEnv.INIT_CWD = fslib_2.npath.fromPortablePath(project.configuration.startingCwd);
scriptEnv.PATH = scriptEnv.PATH

@@ -90,2 +95,14 @@ ? `${nBinFolder}${fslib_2.npath.delimiter}${scriptEnv.PATH}`

scriptEnv.npm_node_execpath = `${nBinFolder}${fslib_2.npath.sep}node`;
if (locator) {
if (!project)
throw new Error(`Assertion failed: Missing project`);
// Workspaces have 0.0.0-use.local in their "pkg" registrations, so we
// need to access the actual workspace to get its real version.
const workspace = project.tryWorkspaceByLocator(locator);
const version = workspace
? (_a = workspace.manifest.version) !== null && _a !== void 0 ? _a : ``
: (_b = project.storedPackages.get(locator.locatorHash).version) !== null && _b !== void 0 ? _b : ``;
scriptEnv.npm_package_name = structUtils.stringifyIdent(locator);
scriptEnv.npm_package_version = version;
}
const version = YarnVersion_1.YarnVersion !== null

@@ -99,3 +116,3 @@ ? `yarn/${YarnVersion_1.YarnVersion}`

await project.configuration.triggerHook(hook => hook.setupScriptEnvironment, project, scriptEnv, async (name, argv0, args) => {
return await makePathWrapper(binFolder, fslib_2.toFilename(name), argv0, args);
return await makePathWrapper(binFolder, (0, fslib_2.toFilename)(name), argv0, args);
});

@@ -111,4 +128,4 @@ }

const MAX_PREPARE_CONCURRENCY = 2;
const prepareLimit = p_limit_1.default(MAX_PREPARE_CONCURRENCY);
async function prepareExternalProject(cwd, outputPath, { configuration, report, workspace = null }) {
const prepareLimit = (0, p_limit_1.default)(MAX_PREPARE_CONCURRENCY);
async function prepareExternalProject(cwd, outputPath, { configuration, report, workspace = null, locator = null }) {
await prepareLimit(async () => {

@@ -118,11 +135,18 @@ await fslib_2.xfs.mktempPromise(async (logDir) => {

const stdin = null;
const { stdout, stderr } = configuration.getSubprocessStreams(logFile, { prefix: cwd, report });
const packageManager = await detectPackageManager(cwd);
const { stdout, stderr } = configuration.getSubprocessStreams(logFile, { prefix: fslib_2.npath.fromPortablePath(cwd), report });
const devirtualizedLocator = locator && structUtils.isVirtualLocator(locator)
? structUtils.devirtualizeLocator(locator)
: locator;
const name = devirtualizedLocator
? structUtils.stringifyLocator(devirtualizedLocator)
: `an external project`;
stdout.write(`Packing ${name} from sources\n`);
const packageManagerSelection = await detectPackageManager(cwd);
let effectivePackageManager;
if (packageManager !== null) {
stdout.write(`Installing the project using ${packageManager}\n\n`);
effectivePackageManager = packageManager;
if (packageManagerSelection !== null) {
stdout.write(`Using ${packageManagerSelection.packageManager} for bootstrap. Reason: ${packageManagerSelection.reason}\n\n`);
effectivePackageManager = packageManagerSelection.packageManager;
}
else {
stdout.write(`No package manager detected; defaulting to Yarn\n\n`);
stdout.write(`No package manager configuration detected; defaulting to Yarn\n\n`);
effectivePackageManager = PackageManager.Yarn2;

@@ -220,2 +244,6 @@ }

async function hasPackageScript(locator, scriptName, { project }) {
// We can avoid using the linkers if the locator is a workspace
const workspace = project.tryWorkspaceByLocator(locator);
if (workspace !== null)
return hasWorkspaceScript(workspace, scriptName);
const pkg = project.storedPackages.get(locator.locatorHash);

@@ -236,3 +264,3 @@ if (!pkg)

}, {
libzip: await libzip_1.getLibzipPromise(),
libzip: await (0, libzip_1.getLibzipPromise)(),
});

@@ -248,3 +276,3 @@ }

const realExecutor = async () => {
return await shell_1.execute(script, args, { cwd: realCwd, env, stdin, stdout, stderr });
return await (0, shell_1.execute)(script, args, { cwd: realCwd, env, stdin, stdout, stderr });
};

@@ -263,7 +291,16 @@ const executor = await project.configuration.reduceHook(hooks => {

const { env, cwd: realCwd } = await initializePackageEnvironment(locator, { project, binFolder, cwd });
return await shell_1.execute(command, args, { cwd: realCwd, env, stdin, stdout, stderr });
return await (0, shell_1.execute)(command, args, { cwd: realCwd, env, stdin, stdout, stderr });
});
}
exports.executePackageShellcode = executePackageShellcode;
async function initializeWorkspaceEnvironment(workspace, { binFolder, cwd, lifecycleScript }) {
const env = await makeScriptEnv({ project: workspace.project, locator: workspace.anchoredLocator, binFolder, lifecycleScript });
await Promise.all(Array.from(await getWorkspaceAccessibleBinaries(workspace), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath])));
return { manifest: workspace.manifest, binFolder, env, cwd: cwd !== null && cwd !== void 0 ? cwd : workspace.cwd };
}
async function initializePackageEnvironment(locator, { project, binFolder, cwd, lifecycleScript }) {
// We can avoid using the linkers if the locator is a workspace
const workspace = project.tryWorkspaceByLocator(locator);
if (workspace !== null)
return initializeWorkspaceEnvironment(workspace, { binFolder, cwd, lifecycleScript });
const pkg = project.storedPackages.get(locator.locatorHash);

@@ -279,4 +316,4 @@ if (!pkg)

throw new Error(`The package ${structUtils.prettyLocator(project.configuration, pkg)} isn't supported by any of the available linkers`);
const env = await makeScriptEnv({ project, binFolder, lifecycleScript });
await Promise.all(Array.from(await getPackageAccessibleBinaries(locator, { project }), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, fslib_2.toFilename(binaryName), process.execPath, [binaryPath])));
const env = await makeScriptEnv({ project, locator, binFolder, lifecycleScript });
await Promise.all(Array.from(await getPackageAccessibleBinaries(locator, { project }), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath])));
const packageLocation = await linker.findPackageLocation(pkg, linkerOptions);

@@ -289,3 +326,3 @@ const packageFs = new fslib_1.CwdFS(packageLocation, { baseFs: zipOpenFs });

}, {
libzip: await libzip_1.getLibzipPromise(),
libzip: await (0, libzip_1.getLibzipPromise)(),
});

@@ -318,3 +355,3 @@ }

fslib_2.xfs.detachTemp(logDir);
throw new Report_1.ReportError(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `${capitalize_1.default(lifecycleScriptName)} script failed (exit code ${formatUtils.pretty(configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(configuration, logFile, formatUtils.Type.PATH)}); run ${formatUtils.pretty(configuration, `yarn ${lifecycleScriptName}`, formatUtils.Type.CODE)} to investigate`);
throw new Report_1.ReportError(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `${(0, capitalize_1.default)(lifecycleScriptName)} script failed (exit code ${formatUtils.pretty(configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(configuration, logFile, formatUtils.Type.PATH)}); run ${formatUtils.pretty(configuration, `yarn ${lifecycleScriptName}`, formatUtils.Type.CODE)} to investigate`);
}

@@ -352,3 +389,3 @@ });

}
for (const locatorHash of visibleLocators) {
const dependenciesWithBinaries = await Promise.all(Array.from(visibleLocators, async (locatorHash) => {
const dependency = project.storedPackages.get(locatorHash);

@@ -358,6 +395,6 @@ if (!dependency)

if (dependency.bin.size === 0)
continue;
return miscUtils.mapAndFilter.skip;
const linker = linkers.find(linker => linker.supportsPackage(dependency, linkerOptions));
if (!linker)
continue;
return miscUtils.mapAndFilter.skip;
let packageLocation = null;

@@ -371,3 +408,3 @@ try {

if (err.code === `LOCATOR_NOT_INSTALLED`) {
continue;
return miscUtils.mapAndFilter.skip;
}

@@ -378,2 +415,9 @@ else {

}
return { dependency, packageLocation };
}));
// The order in which binaries overwrite each other must be stable
for (const candidate of dependenciesWithBinaries) {
if (candidate === miscUtils.mapAndFilter.skip)
continue;
const { dependency, packageLocation } = candidate;
for (const [name, target] of dependency.bin) {

@@ -406,4 +450,4 @@ binaries.set(name, [dependency, fslib_2.npath.fromPortablePath(fslib_2.ppath.resolve(packageLocation, target))]);

*/
async function executePackageAccessibleBinary(locator, binaryName, args, { cwd, project, stdin, stdout, stderr, nodeArgs = [] }) {
const packageAccessibleBinaries = await getPackageAccessibleBinaries(locator, { project });
async function executePackageAccessibleBinary(locator, binaryName, args, { cwd, project, stdin, stdout, stderr, nodeArgs = [], packageAccessibleBinaries }) {
packageAccessibleBinaries !== null && packageAccessibleBinaries !== void 0 ? packageAccessibleBinaries : (packageAccessibleBinaries = await getPackageAccessibleBinaries(locator, { project }));
const binary = packageAccessibleBinaries.get(binaryName);

@@ -414,4 +458,4 @@ if (!binary)

const [, binaryPath] = binary;
const env = await makeScriptEnv({ project, binFolder });
await Promise.all(Array.from(packageAccessibleBinaries, ([binaryName, [, binaryPath]]) => makePathWrapper(env.BERRY_BIN_FOLDER, fslib_2.toFilename(binaryName), process.execPath, [binaryPath])));
const env = await makeScriptEnv({ project, locator, binFolder });
await Promise.all(Array.from(packageAccessibleBinaries, ([binaryName, [, binaryPath]]) => makePathWrapper(env.BERRY_BIN_FOLDER, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath])));
let result;

@@ -435,5 +479,5 @@ try {

*/
async function executeWorkspaceAccessibleBinary(workspace, binaryName, args, { cwd, stdin, stdout, stderr }) {
return await executePackageAccessibleBinary(workspace.anchoredLocator, binaryName, args, { project: workspace.project, cwd, stdin, stdout, stderr });
async function executeWorkspaceAccessibleBinary(workspace, binaryName, args, { cwd, stdin, stdout, stderr, packageAccessibleBinaries }) {
return await executePackageAccessibleBinary(workspace.anchoredLocator, binaryName, args, { project: workspace.project, cwd, stdin, stdout, stderr, packageAccessibleBinaries });
}
exports.executeWorkspaceAccessibleBinary = executeWorkspaceAccessibleBinary;
import semver from 'semver';
export { SemVer } from 'semver';
/**

@@ -3,0 +4,0 @@ * Returns whether the given semver version satisfies the given range. Notably

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.validRange = exports.satisfiesWithPrereleases = void 0;
exports.validRange = exports.satisfiesWithPrereleases = exports.SemVer = void 0;
const tslib_1 = require("tslib");
const semver_1 = tslib_1.__importDefault(require("semver"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
var semver_2 = require("semver");
Object.defineProperty(exports, "SemVer", { enumerable: true, get: function () { return semver_2.SemVer; } });
/**

@@ -7,0 +9,0 @@ * Returns whether the given semver version satisfies the given range. Notably

@@ -37,2 +37,3 @@ /// <reference types="node" />

private cacheMissCount;
private lastCacheMiss;
private warningCount;

@@ -72,4 +73,4 @@ private errorCount;

catch<TResult = never>(onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null | undefined): Promise<void | TResult>;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
[Symbol.toStringTag]: string;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
};

@@ -76,0 +77,0 @@ reportJson(data: any): void;

@@ -5,6 +5,7 @@ "use strict";

const tslib_1 = require("tslib");
const slice_ansi_1 = tslib_1.__importDefault(require("@arcanis/slice-ansi"));
const slice_ansi_1 = (0, tslib_1.__importDefault)(require("@arcanis/slice-ansi"));
const MessageName_1 = require("./MessageName");
const Report_1 = require("./Report");
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const PROGRESS_FRAMES = [`⠋`, `⠙`, `⠹`, `⠸`, `⠼`, `⠴`, `⠦`, `⠧`, `⠇`, `⠏`];

@@ -60,4 +61,6 @@ const PROGRESS_INTERVAL = 80;

function formatName(name, { configuration, json }) {
if (!configuration.get(`enableMessageNames`))
return ``;
const num = name === null ? 0 : name;
const label = MessageName_1.stringifyMessageName(num);
const label = (0, MessageName_1.stringifyMessageName)(num);
if (!json && name === null) {

@@ -73,4 +76,3 @@ return formatUtils.pretty(configuration, label, `grey`);

const code = formatName(name, { configuration, json });
// Only print hyperlinks if allowed per configuration
if (!configuration.get(`enableHyperlinks`))
if (!code)
return code;

@@ -82,6 +84,3 @@ // Don't print hyperlinks for the generic messages

const href = `https://yarnpkg.com/advanced/error-codes#${code}---${desc}`.toLowerCase();
// We use BELL as ST because it seems that iTerm doesn't properly support
// the \x1b\\ sequence described in the reference document
// https://gist.github.com/egmontkob/eb114294efbcd5adb1944c9f3cb5feda#the-escape-sequence
return `\u001b]8;;${href}\u0007${code}\u001b]8;;\u0007`;
return formatUtils.applyHyperlink(configuration, code, href);
}

@@ -95,2 +94,3 @@ exports.formatNameWithHyperlink = formatNameWithHyperlink;

this.cacheMissCount = 0;
this.lastCacheMiss = null;
this.warningCount = 0;

@@ -158,2 +158,3 @@ this.errorCount = 0;

reportCacheMiss(locator, message) {
this.lastCacheMiss = locator;
this.cacheMissCount += 1;

@@ -275,3 +276,5 @@ if (typeof message !== `undefined` && !this.configuration.get(`preferAggregateCacheInfo`)) {

this.commit();
const message = `${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`;
const formattedName = this.formatNameWithHyperlink(name);
const prefix = formattedName ? `${formattedName}: ` : ``;
const message = `${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${prefix}${this.formatIndent()}${text}`;
if (!this.json) {

@@ -302,4 +305,6 @@ if (this.forgettableNames.has(name)) {

this.commit();
const formattedName = this.formatNameWithHyperlink(name);
const prefix = formattedName ? `${formattedName}: ` : ``;
if (!this.json) {
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `yellowBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`);
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `yellowBright`)} ${prefix}${this.formatIndent()}${text}`);
}

@@ -313,4 +318,6 @@ else {

this.commit();
const formattedName = this.formatNameWithHyperlink(name);
const prefix = formattedName ? `${formattedName}: ` : ``;
if (!this.json) {
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} ${this.formatNameWithHyperlink(name)}: ${this.formatIndent()}${text}`, { truncate: false });
this.writeLineWithForgettableReset(`${formatUtils.pretty(this.configuration, `➤`, `redBright`)} ${prefix}${this.formatIndent()}${text}`, { truncate: false });
}

@@ -414,3 +421,3 @@ else {

else if (this.cacheMissCount === 1) {
fetchStatus += `, one had to be fetched`;
fetchStatus += `, one had to be fetched (${structUtils.prettyLocator(this.configuration, this.lastCacheMiss)})`;
}

@@ -423,3 +430,3 @@ }

else if (this.cacheMissCount === 1) {
fetchStatus += ` - one package had to be fetched`;
fetchStatus += ` - one package had to be fetched (${structUtils.prettyLocator(this.configuration, this.lastCacheMiss)})`;
}

@@ -464,3 +471,5 @@ }

const ko = this.progressStyle.chars[1].repeat(this.progressMaxScaledSize - progress.lastScaledSize);
this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${this.formatName(null)}: ${spinner} ${ok}${ko}\n`);
const formattedName = this.formatName(null);
const prefix = formattedName ? `${formattedName}: ` : ``;
this.stdout.write(`${formatUtils.pretty(this.configuration, `➤`, `blueBright`)} ${prefix}${spinner} ${ok}${ko}\n`);
}

@@ -500,3 +509,3 @@ this.progressTimeout = setTimeout(() => {

if (truncate)
str = slice_ansi_1.default(str, 0, process.stdout.columns - 1);
str = (0, slice_ansi_1.default)(str, 0, process.stdout.columns - 1);
return str;

@@ -503,0 +512,0 @@ }

@@ -286,6 +286,2 @@ /// <reference types="node" />

/**
* @deprecated Prefer using `stringifyIdent`
*/
export declare function requirableIdent(ident: Ident): string;
/**
* Returns a string from an ident (eg. `@types/lodash`).

@@ -292,0 +288,0 @@ */

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getIdentVendorPath = exports.prettyDependent = exports.prettyResolution = exports.prettyWorkspace = exports.sortDescriptors = exports.prettyLocatorNoColors = exports.prettyLocator = exports.prettyReference = exports.prettyDescriptor = exports.prettyRange = exports.prettyIdent = exports.slugifyLocator = exports.slugifyIdent = exports.stringifyLocator = exports.stringifyDescriptor = exports.stringifyIdent = exports.requirableIdent = exports.convertToManifestRange = exports.makeRange = exports.parseFileStyleRange = exports.parseRange = exports.tryParseLocator = exports.parseLocator = exports.tryParseDescriptor = exports.parseDescriptor = exports.tryParseIdent = exports.parseIdent = exports.areVirtualPackagesEquivalent = exports.areLocatorsEqual = exports.areDescriptorsEqual = exports.areIdentsEqual = exports.bindLocator = exports.bindDescriptor = exports.devirtualizeLocator = exports.devirtualizeDescriptor = exports.isVirtualLocator = exports.isVirtualDescriptor = exports.virtualizePackage = exports.virtualizeDescriptor = exports.copyPackage = exports.renamePackage = exports.convertPackageToLocator = exports.convertLocatorToDescriptor = exports.convertDescriptorToLocator = exports.convertToIdent = exports.makeLocator = exports.makeDescriptor = exports.makeIdent = void 0;
exports.getIdentVendorPath = exports.prettyDependent = exports.prettyResolution = exports.prettyWorkspace = exports.sortDescriptors = exports.prettyLocatorNoColors = exports.prettyLocator = exports.prettyReference = exports.prettyDescriptor = exports.prettyRange = exports.prettyIdent = exports.slugifyLocator = exports.slugifyIdent = exports.stringifyLocator = exports.stringifyDescriptor = exports.stringifyIdent = exports.convertToManifestRange = exports.makeRange = exports.parseFileStyleRange = exports.parseRange = exports.tryParseLocator = exports.parseLocator = exports.tryParseDescriptor = exports.parseDescriptor = exports.tryParseIdent = exports.parseIdent = exports.areVirtualPackagesEquivalent = exports.areLocatorsEqual = exports.areDescriptorsEqual = exports.areIdentsEqual = exports.bindLocator = exports.bindDescriptor = exports.devirtualizeLocator = exports.devirtualizeDescriptor = exports.isVirtualLocator = exports.isVirtualDescriptor = exports.virtualizePackage = exports.virtualizeDescriptor = exports.copyPackage = exports.renamePackage = exports.convertPackageToLocator = exports.convertLocatorToDescriptor = exports.convertDescriptorToLocator = exports.convertToIdent = exports.makeLocator = exports.makeDescriptor = exports.makeIdent = void 0;
const tslib_1 = require("tslib");
const fslib_1 = require("@yarnpkg/fslib");
const querystring_1 = tslib_1.__importDefault(require("querystring"));
const semver_1 = tslib_1.__importDefault(require("semver"));
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const hashUtils = tslib_1.__importStar(require("./hashUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const querystring_1 = (0, tslib_1.__importDefault)(require("querystring"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
const VIRTUAL_PROTOCOL = `virtual:`;

@@ -497,14 +497,2 @@ const VIRTUAL_ABBREVIATE = 5;

/**
* @deprecated Prefer using `stringifyIdent`
*/
function requirableIdent(ident) {
if (ident.scope) {
return `@${ident.scope}/${ident.name}`;
}
else {
return `${ident.name}`;
}
}
exports.requirableIdent = requirableIdent;
/**
* Returns a string from an ident (eg. `@types/lodash`).

@@ -581,3 +569,3 @@ */

: `${slugifyIdent(locator)}-${humanReference}-${locator.locatorHash.slice(0, hashTruncate)}`;
return fslib_1.toFilename(slug);
return (0, fslib_1.toFilename)(slug);
}

@@ -740,4 +728,4 @@ exports.slugifyLocator = slugifyLocator;

function getIdentVendorPath(ident) {
return `node_modules/${requirableIdent(ident)}`;
return `node_modules/${stringifyIdent(ident)}`;
}
exports.getIdentVendorPath = getIdentVendorPath;

@@ -6,4 +6,5 @@ "use strict";

const fslib_1 = require("@yarnpkg/fslib");
const httpUtils = tslib_1.__importStar(require("./httpUtils"));
const miscUtils = tslib_1.__importStar(require("./miscUtils"));
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils"));
const httpUtils = (0, tslib_1.__importStar)(require("./httpUtils"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
var MetricName;

@@ -32,3 +33,4 @@ (function (MetricName) {

reportVersion(value) {
this.reportValue(MetricName.VERSION, value);
// We don't really care about the exact commit they're using
this.reportValue(MetricName.VERSION, value.replace(/-git\..*/, `-git`));
}

@@ -60,3 +62,3 @@ reportCommandName(value) {

reportEnumerator(metric, value) {
miscUtils.getSetWithDefault(this.enumerators, metric).add(value);
miscUtils.getSetWithDefault(this.enumerators, metric).add(hashUtils.makeHash(value));
}

@@ -100,2 +102,8 @@ reportHit(metric, extra = `*`) {

return;
const rawUrl = `https://browser-http-intake.logs.datadoghq.eu/v1/input/${accountId}?ddsource=yarn`;
const sendPayload = (payload) => httpUtils.post(rawUrl, payload, {
configuration: this.configuration,
}).catch(() => {
// Nothing we can do
});
for (const [userId, block] of Object.entries((_b = content.blocks) !== null && _b !== void 0 ? _b : {})) {

@@ -106,10 +114,28 @@ if (Object.keys(block).length === 0)

upload.userId = userId;
upload.reportType = `primary`;
for (const key of Object.keys((_c = upload.enumerators) !== null && _c !== void 0 ? _c : {}))
upload.enumerators[key] = upload.enumerators[key].length;
const rawUrl = `https://browser-http-intake.logs.datadoghq.eu/v1/input/${accountId}?ddsource=yarn`;
httpUtils.post(rawUrl, upload, {
configuration: this.configuration,
}).catch(() => {
// Nothing we can do
});
sendPayload(upload);
// Datadog doesn't support well sending multiple tags in a single
// payload, so we instead send them separately, at most one value
// per query (we still aggregate different tags together).
const toSend = new Map();
// Also the max amount of queries (at worst once a week, remember)
const maxValues = 20;
for (const [metricName, values] of Object.entries(upload.values))
if (values.length > 0)
toSend.set(metricName, values.slice(0, maxValues));
while (toSend.size > 0) {
const upload = {};
upload.userId = userId;
upload.reportType = `secondary`;
upload.metrics = {};
for (const [metricName, values] of toSend) {
upload.metrics[metricName] = values.shift();
if (values.length === 0) {
toSend.delete(metricName);
}
}
sendPayload(upload);
}
}

@@ -116,0 +142,0 @@ }

@@ -9,2 +9,3 @@ /// <reference types="node" />

}
export declare const safeTime = 456789000;
export declare function makeArchiveFromDirectory(source: PortablePath, { baseFs, prefixPath, compressionLevel, inMemory }?: MakeArchiveFromDirectoryOptions): Promise<ZipFS>;

@@ -11,0 +12,0 @@ interface ExtractBufferOptions {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.extractArchiveTo = exports.convertToZip = exports.makeArchiveFromDirectory = void 0;
exports.extractArchiveTo = exports.convertToZip = exports.makeArchiveFromDirectory = exports.safeTime = void 0;
const tslib_1 = require("tslib");
const fslib_1 = require("@yarnpkg/fslib");
const libzip_1 = require("@yarnpkg/libzip");
const tar_stream_1 = tslib_1.__importDefault(require("tar-stream"));
const util_1 = require("util");
const zlib_1 = tslib_1.__importDefault(require("zlib"));
const gunzip = util_1.promisify(zlib_1.default.gunzip);
const stream_1 = require("stream");
const tar_1 = (0, tslib_1.__importDefault)(require("tar"));
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils"));
// 1984-06-22T21:50:00.000Z
//
// It needs to be after 1980-01-01 because that's what Zip supports, and it
// needs to have a slight offset to account for different timezones (because
// zip assumes that all times are local to whoever writes the file, which is
// really silly).
//
exports.safeTime = 456789000;
async function makeArchiveFromDirectory(source, { baseFs = new fslib_1.NodeFS(), prefixPath = fslib_1.PortablePath.root, compressionLevel, inMemory = false } = {}) {
const libzip = await libzip_1.getLibzipPromise();
const libzip = await (0, libzip_1.getLibzipPromise)();
let zipFs;

@@ -31,14 +38,32 @@ if (inMemory) {

const { compressionLevel, ...bufferOpts } = opts;
return await extractArchiveTo(tgz, new fslib_1.ZipFS(tmpFile, { create: true, libzip: await libzip_1.getLibzipPromise(), level: compressionLevel }), bufferOpts);
return await extractArchiveTo(tgz, new fslib_1.ZipFS(tmpFile, { create: true, libzip: await (0, libzip_1.getLibzipPromise)(), level: compressionLevel }), bufferOpts);
}
exports.convertToZip = convertToZip;
/**
 * Bridges node-tar's event-based `Parse` stream into an async iterator:
 * each tar entry is forwarded through an object-mode PassThrough stream,
 * which the `for await` loop below consumes one entry at a time.
 *
 * Note(review): assumes `tgz` is a Buffer node-tar can decompress —
 * confirm against callers.
 */
async function* parseTar(tgz) {
    // @ts-expect-error - Types are wrong about what this function returns
    const parser = new tar_1.default.Parse();
    const passthrough = new stream_1.PassThrough({ objectMode: true, autoDestroy: true, emitClose: true });
    parser.on(`entry`, (entry) => {
        // Forward each entry object (not its bytes) to the iterator
        passthrough.write(entry);
    });
    parser.on(`error`, error => {
        // Propagate parse failures to the consumer of the iterator
        passthrough.destroy(error);
    });
    parser.on(`close`, () => {
        // End of archive: terminate the passthrough so the loop exits
        passthrough.destroy();
    });
    parser.end(tgz);
    for await (const entry of passthrough) {
        const it = entry;
        yield it;
        // Drain any entry data the consumer didn't read, otherwise the
        // parser would stall on backpressure and never emit `close`
        it.resume();
    }
}
async function extractArchiveTo(tgz, targetFs, { stripComponents = 0, prefixPath = fslib_1.PortablePath.dot } = {}) {
// 1980-01-01, like Fedora
const defaultTime = 315532800;
const parser = tar_stream_1.default.extract();
var _a, _b;
function ignore(entry) {
// Disallow absolute paths; might be malicious (ex: /etc/passwd)
if (entry.name[0] === `/`)
if (entry.path[0] === `/`)
return true;
const parts = entry.name.split(/\//g);
const parts = entry.path.split(/\//g);
// We also ignore paths that could lead to escaping outside the archive

@@ -51,14 +76,8 @@ if (parts.some((part) => part === `..`))

}
parser.on(`entry`, (header, stream, next) => {
var _a, _b;
if (ignore(header)) {
next();
return;
}
const parts = fslib_1.ppath.normalize(fslib_1.npath.toPortablePath(header.name)).replace(/\/$/, ``).split(/\//g);
if (parts.length <= stripComponents) {
stream.resume();
next();
return;
}
for await (const entry of parseTar(tgz)) {
if (ignore(entry))
continue;
const parts = fslib_1.ppath.normalize(fslib_1.npath.toPortablePath(entry.path)).replace(/\/$/, ``).split(/\//g);
if (parts.length <= stripComponents)
continue;
const slicePath = parts.slice(stripComponents).join(`/`);

@@ -68,52 +87,33 @@ const mappedPath = fslib_1.ppath.join(prefixPath, slicePath);

// If a single executable bit is set, normalize so that all are
if (header.type === `directory` || (((_a = header.mode) !== null && _a !== void 0 ? _a : 0) & 0o111) !== 0)
if (entry.type === `Directory` || (((_a = entry.mode) !== null && _a !== void 0 ? _a : 0) & 0o111) !== 0)
mode |= 0o111;
switch (header.type) {
case `directory`:
switch (entry.type) {
case `Directory`:
{
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [defaultTime, defaultTime] });
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [exports.safeTime, exports.safeTime] });
targetFs.mkdirSync(mappedPath);
targetFs.chmodSync(mappedPath, mode);
targetFs.utimesSync(mappedPath, defaultTime, defaultTime);
next();
targetFs.utimesSync(mappedPath, exports.safeTime, exports.safeTime);
}
break;
case `file`:
case `OldFile`:
case `File`:
{
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [defaultTime, defaultTime] });
const chunks = [];
stream.on(`data`, (chunk) => chunks.push(chunk));
stream.on(`end`, () => {
targetFs.writeFileSync(mappedPath, Buffer.concat(chunks));
targetFs.chmodSync(mappedPath, mode);
targetFs.utimesSync(mappedPath, defaultTime, defaultTime);
next();
});
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [exports.safeTime, exports.safeTime] });
targetFs.writeFileSync(mappedPath, await miscUtils.bufferStream(entry));
targetFs.chmodSync(mappedPath, mode);
targetFs.utimesSync(mappedPath, exports.safeTime, exports.safeTime);
}
break;
case `symlink`:
case `SymbolicLink`:
{
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [defaultTime, defaultTime] });
targetFs.symlinkSync(header.linkname, mappedPath);
(_b = targetFs.lutimesSync) === null || _b === void 0 ? void 0 : _b.call(targetFs, mappedPath, defaultTime, defaultTime);
next();
targetFs.mkdirpSync(fslib_1.ppath.dirname(mappedPath), { chmod: 0o755, utimes: [exports.safeTime, exports.safeTime] });
targetFs.symlinkSync(entry.linkpath, mappedPath);
(_b = targetFs.lutimesSync) === null || _b === void 0 ? void 0 : _b.call(targetFs, mappedPath, exports.safeTime, exports.safeTime);
}
break;
default: {
stream.resume();
next();
}
}
});
const gunzipped = await gunzip(tgz);
return await new Promise((resolve, reject) => {
parser.on(`error`, (error) => {
reject(error);
});
parser.on(`finish`, () => {
resolve(targetFs);
});
parser.end(gunzipped);
});
}
return targetFs;
}
exports.extractArchiveTo = extractArchiveTo;

@@ -23,4 +23,4 @@ import { MessageName } from './MessageName';

catch<TResult = never>(onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | null | undefined): Promise<void | TResult>;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
[Symbol.toStringTag]: string;
finally(onfinally?: (() => void) | null | undefined): Promise<void>;
};

@@ -27,0 +27,0 @@ reportJson(data: any): void;

@@ -6,3 +6,3 @@ "use strict";

const treeify_1 = require("treeify");
const formatUtils = tslib_1.__importStar(require("./formatUtils"));
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils"));
function treeNodeToTreeify(printTree, { configuration }) {

@@ -76,3 +76,3 @@ const target = {};

}
let treeOutput = treeify_1.asTree(treeNodeToTreeify(tree, { configuration }), false, false);
let treeOutput = (0, treeify_1.asTree)(treeNodeToTreeify(tree, { configuration }), false, false);
// A slight hack to add line returns between two top-level entries

@@ -79,0 +79,0 @@ if (separators >= 1)

@@ -181,6 +181,2 @@ import { PortablePath } from '@yarnpkg/fslib';

parentDescriptor: Descriptor;
/**
* @deprecated Use `formatUtils.json(packageExtension, formatUtils.Type.PACKAGE_EXTENSION)` instead
*/
description: string;
};

@@ -6,3 +6,3 @@ "use strict";

const fslib_1 = require("@yarnpkg/fslib");
const structUtils = tslib_1.__importStar(require("./structUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class VirtualFetcher {

@@ -9,0 +9,0 @@ supports(locator) {

import { PortablePath } from '@yarnpkg/fslib';
import { Manifest } from './Manifest';
import { HardDependencies, Manifest } from './Manifest';
import { Project } from './Project';

@@ -22,3 +22,23 @@ import { IdentHash } from './types';

computeCandidateName(): string;
/**
* Find workspaces marked as dependencies/devDependencies of the current workspace recursively.
*
* @param rootWorkspace root workspace
* @param project project
*
* @returns all the workspaces marked as dependencies
*/
getRecursiveWorkspaceDependencies({ dependencies }?: {
dependencies?: Array<HardDependencies>;
}): Set<Workspace>;
/**
* Retrieves all the child workspaces of a given root workspace recursively
*
* @param rootWorkspace root workspace
* @param project project
*
* @returns all the child workspaces
*/
getRecursiveWorkspaceChildren(): Workspace[];
persistManifest(): Promise<void>;
}

@@ -6,8 +6,8 @@ "use strict";

const fslib_1 = require("@yarnpkg/fslib");
const globby_1 = tslib_1.__importDefault(require("globby"));
const semver_1 = tslib_1.__importDefault(require("semver"));
const globby_1 = (0, tslib_1.__importDefault)(require("globby"));
const semver_1 = (0, tslib_1.__importDefault)(require("semver"));
const Manifest_1 = require("./Manifest");
const WorkspaceResolver_1 = require("./WorkspaceResolver");
const hashUtils = tslib_1.__importStar(require("./hashUtils"));
const structUtils = tslib_1.__importStar(require("./structUtils"));
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils"));
const structUtils = (0, tslib_1.__importStar)(require("./structUtils"));
class Workspace {

@@ -38,3 +38,3 @@ constructor(workspaceCwd, { project }) {

const patterns = this.manifest.workspaceDefinitions.map(({ pattern }) => pattern);
const relativeCwds = await globby_1.default(patterns, {
const relativeCwds = await (0, globby_1.default)(patterns, {
absolute: true,

@@ -86,2 +86,49 @@ cwd: fslib_1.npath.fromPortablePath(this.cwd),

}
/**
* Find workspaces marked as dependencies/devDependencies of the current workspace recursively.
*
* @param rootWorkspace root workspace
* @param project project
*
* @returns all the workspaces marked as dependencies
*/
getRecursiveWorkspaceDependencies({ dependencies = Manifest_1.Manifest.hardDependencies } = {}) {
const workspaceList = new Set();
const visitWorkspace = (workspace) => {
for (const dependencyType of dependencies) {
// Quick note: it means that if we have, say, a workspace in
// dev dependencies but not in dependencies, this workspace will be
// traversed (even if dependencies traditionally override dev
// dependencies). It's not clearly which behaviour is better, but
// at least it's consistent.
for (const descriptor of workspace.manifest[dependencyType].values()) {
const foundWorkspace = this.project.tryWorkspaceByDescriptor(descriptor);
if (foundWorkspace === null || workspaceList.has(foundWorkspace))
continue;
workspaceList.add(foundWorkspace);
visitWorkspace(foundWorkspace);
}
}
};
visitWorkspace(this);
return workspaceList;
}
/**
* Retrieves all the child workspaces of a given root workspace recursively
*
* @param rootWorkspace root workspace
* @param project project
*
* @returns all the child workspaces
*/
getRecursiveWorkspaceChildren() {
const workspaceList = [];
for (const childWorkspaceCwd of this.workspacesCwds) {
const childWorkspace = this.project.workspacesByCwd.get(childWorkspaceCwd);
if (childWorkspace) {
workspaceList.push(childWorkspace, ...childWorkspace.getRecursiveWorkspaceChildren());
}
}
return workspaceList;
}
async persistManifest() {

@@ -95,4 +142,5 @@ const data = {};

});
this.manifest.raw = data;
}
}
exports.Workspace = Workspace;
{
"name": "@yarnpkg/core",
"version": "2.4.0",
"version": "3.0.0-rc.1",
"license": "BSD-2-Clause",

@@ -11,13 +11,12 @@ "main": "./lib/index.js",

"@types/treeify": "^1.0.0",
"@yarnpkg/fslib": "^2.4.0",
"@yarnpkg/fslib": "^2.5.0-rc.1",
"@yarnpkg/json-proxy": "^2.1.0",
"@yarnpkg/libzip": "^2.2.1",
"@yarnpkg/parsers": "^2.3.0",
"@yarnpkg/pnp": "^2.3.2",
"@yarnpkg/shell": "^2.4.1",
"binjumper": "^0.1.4",
"@yarnpkg/parsers": "^2.3.1-rc.1",
"@yarnpkg/pnp": "^3.0.0-rc.1",
"@yarnpkg/shell": "^3.0.0-rc.1",
"camelcase": "^5.3.1",
"chalk": "^3.0.0",
"ci-info": "^2.0.0",
"clipanion": "^2.6.2",
"clipanion": "^3.0.0-rc.10",
"cross-spawn": "7.0.3",

@@ -36,3 +35,4 @@ "diff": "^4.0.1",

"stream-to-promise": "^2.2.0",
"tar-stream": "^2.0.1",
"strip-ansi": "^6.0.0",
"tar": "^6.0.5",
"treeify": "^1.1.0",

@@ -49,8 +49,8 @@ "tslib": "^1.13.0",

"@types/node": "^13.7.0",
"@types/tar-stream": "1.6.0",
"@types/tar": "^4.0.4",
"@types/tunnel": "^0.0.0",
"@yarnpkg/cli": "^2.4.0",
"@yarnpkg/plugin-link": "^2.1.1",
"@yarnpkg/plugin-npm": "^2.4.0",
"@yarnpkg/plugin-pnp": "^2.4.0"
"@yarnpkg/cli": "^3.0.0-rc.1",
"@yarnpkg/plugin-link": "^2.1.2-rc.1",
"@yarnpkg/plugin-npm": "^2.5.0-rc.1",
"@yarnpkg/plugin-pnp": "^3.0.0-rc.1"
},

@@ -70,3 +70,4 @@ "scripts": {

"type": "git",
"url": "ssh://git@github.com/yarnpkg/berry.git"
"url": "ssh://git@github.com/yarnpkg/berry.git",
"directory": "packages/yarnpkg-core"
},

@@ -76,3 +77,4 @@ "engines": {

},
"stableVersion": "2.4.0",
"types": "./lib/index.d.ts"
}

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc