Huge News! Announcing our $40M Series B, led by Abstract Ventures. Learn more.
Socket
Sign in · Demo · Install
Socket

@yarnpkg/core

Package Overview
Dependencies
Maintainers
6
Versions
158
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@yarnpkg/core - npm Package Compare versions

Comparing version 4.0.0-rc.48 to 4.0.0-rc.49

19

lib/Cache.d.ts

@@ -5,2 +5,16 @@ import { FakeFS, PortablePath, Filename } from '@yarnpkg/fslib';

import { LocatorHash, Locator } from './types';
/**
* If value defines the minimal cache version we can read files from. We need
* to bump this value every time we fix a bug in the cache implementation that
* causes the archived content to change.
*/
export declare const CACHE_CHECKPOINT: number;
/**
* The cache version, on the other hand, is meant to be bumped every time we
* change the archives in any way (for example when upgrading the libzip or zlib
* implementations in ways that would change the exact bytes). This way we can
* avoid refetching the archives when their content hasn't actually changed in
* a significant way.
*/
export declare const CACHE_VERSION: number;
export type CacheOptions = {

@@ -19,2 +33,3 @@ mockedPackages?: Set<LocatorHash>;

readonly cacheKey: string;
readonly cacheSpec: string;
private mutexes;

@@ -30,2 +45,6 @@ /**

}): Promise<Cache>;
static getCacheKey(configuration: Configuration): {
cacheKey: string;
cacheSpec: string;
};
constructor(cacheCwd: PortablePath, { configuration, immutable, check }: {

@@ -32,0 +51,0 @@ configuration: Configuration;

171

lib/Cache.js
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Cache = void 0;
exports.Cache = exports.CACHE_VERSION = exports.CACHE_CHECKPOINT = void 0;
const tslib_1 = require("tslib");

@@ -15,3 +15,19 @@ const fslib_1 = require("@yarnpkg/fslib");

const structUtils = tslib_1.__importStar(require("./structUtils"));
const CACHE_VERSION = 10;
/**
* If value defines the minimal cache version we can read files from. We need
* to bump this value every time we fix a bug in the cache implementation that
* causes the archived content to change.
*/
exports.CACHE_CHECKPOINT = miscUtils.parseInt(process.env.YARN_CACHE_CHECKPOINT_OVERRIDE ??
process.env.YARN_CACHE_VERSION_OVERRIDE ??
9);
/**
* The cache version, on the other hand, is meant to be bumped every time we
* change the archives in any way (for example when upgrading the libzip or zlib
* implementations in ways that would change the exact bytes). This way we can
* avoid refetching the archives when their content hasn't actually changed in
* a significant way.
*/
exports.CACHE_VERSION = miscUtils.parseInt(process.env.YARN_CACHE_VERSION_OVERRIDE ??
10);
class Cache {

@@ -23,2 +39,16 @@ static async find(configuration, { immutable, check } = {}) {

}
static getCacheKey(configuration) {
const compressionLevel = configuration.get(`compressionLevel`);
const cacheSpec = compressionLevel !== `mixed`
? `c${compressionLevel}`
: ``;
const cacheKey = [
exports.CACHE_VERSION,
cacheSpec,
].join(``);
return {
cacheKey,
cacheSpec,
};
}
constructor(cacheCwd, { configuration, immutable = configuration.get(`enableImmutableCache`), check = false }) {

@@ -39,15 +69,5 @@ // Contains the list of cache files that got accessed since the last time

this.check = check;
const cacheKeyOverride = configuration.get(`cacheKeyOverride`);
if (cacheKeyOverride !== null) {
this.cacheKey = `${cacheKeyOverride}`;
}
else {
const compressionLevel = configuration.get(`compressionLevel`);
const compressionKey = compressionLevel !== `mixed`
? `c${compressionLevel}` : ``;
this.cacheKey = [
CACHE_VERSION,
compressionKey,
].join(``);
}
const { cacheSpec, cacheKey, } = Cache.getCacheKey(configuration);
this.cacheSpec = cacheSpec;
this.cacheKey = cacheKey;
}

@@ -66,3 +86,3 @@ get mirrorCwd() {

// point is to avoid changing the filenames when the cache version changes)
const contentChecksum = getHashComponent(checksum);
const contentChecksum = splitChecksumComponents(checksum).hash;
// We only care about the first few characters. It doesn't matter if that

@@ -75,3 +95,2 @@ // makes the hash easier to collide with, because we check the file hashes

getLocatorPath(locator, expectedChecksum, opts = {}) {
var _a;
// If there is no mirror, then the local cache *is* the mirror, in which

@@ -81,3 +100,3 @@ // case we use the versioned filename pattern. Same if the package is

// so we can't rely on its checksum to get a stable location.
if (this.mirrorCwd === null || ((_a = opts.unstablePackages) === null || _a === void 0 ? void 0 : _a.has(locator.locatorHash)))
if (this.mirrorCwd === null || opts.unstablePackages?.has(locator.locatorHash))
return fslib_2.ppath.resolve(this.cwd, this.getVersionFilename(locator));

@@ -88,7 +107,16 @@ // If we don't yet know the checksum, discard the path resolution for now

return null;
// If the cache key changed then we assume that the content probably got
// altered as well and thus the existing path won't be good enough anymore.
const cacheKey = getCacheKeyComponent(expectedChecksum);
if (cacheKey !== this.cacheKey)
const { cacheVersion, cacheSpec, } = splitChecksumComponents(expectedChecksum);
if (cacheVersion === null)
return null;
// The cache keys must always be at least as old as the last checkpoint.
if (cacheVersion < exports.CACHE_CHECKPOINT)
return null;
const migrationMode = this.configuration.get(`cacheMigrationMode`);
// If the global cache is used, then the lockfile must always be up-to-date,
// so the archives must be regenerated each time the version changes.
if (cacheVersion < exports.CACHE_VERSION && migrationMode === `always`)
return null;
// If the cache spec changed, we may need to regenerate the archive
if (cacheSpec !== this.cacheSpec && migrationMode !== `required-only`)
return null;
return fslib_2.ppath.resolve(this.cwd, this.getChecksumFilename(locator, expectedChecksum));

@@ -120,3 +148,2 @@ }

async fetchPackageFromCache(locator, expectedChecksum, { onHit, onMiss, loader, ...opts }) {
var _a;
const mirrorPath = this.getLocatorMirrorPath(locator);

@@ -142,14 +169,16 @@ const baseFs = new fslib_1.NodeFS();

};
const validateFile = async (path, refetchPath = null) => {
var _a;
const validateFile = async (path, { isColdHit, controlPath = null }) => {
// We hide the checksum if the package presence is conditional, because it becomes unreliable
// so there is no point in computing it unless we're checking the cache
if (refetchPath === null && ((_a = opts.unstablePackages) === null || _a === void 0 ? void 0 : _a.has(locator.locatorHash)))
if (controlPath === null && opts.unstablePackages?.has(locator.locatorHash))
return { isValid: true, hash: null };
const actualCacheKey = expectedChecksum && !isColdHit
? splitChecksumComponents(expectedChecksum).cacheKey
: this.cacheKey;
const actualChecksum = (!opts.skipIntegrityCheck || !expectedChecksum)
? `${this.cacheKey}/${await hashUtils.checksumFile(path)}`
? `${actualCacheKey}/${await hashUtils.checksumFile(path)}`
: expectedChecksum;
if (refetchPath !== null) {
if (controlPath !== null) {
const previousChecksum = (!opts.skipIntegrityCheck || !expectedChecksum)
? `${this.cacheKey}/${await hashUtils.checksumFile(refetchPath)}`
? `${this.cacheKey}/${await hashUtils.checksumFile(controlPath)}`
: expectedChecksum;

@@ -160,26 +189,29 @@ if (actualChecksum !== previousChecksum) {

}
let checksumBehavior = null;
if (expectedChecksum !== null && actualChecksum !== expectedChecksum) {
let checksumBehavior;
// Using --check-cache overrides any preconfigured checksum behavior
if (this.check)
if (this.check) {
checksumBehavior = `throw`;
// If the lockfile references an old cache format, we tolerate different checksums
else if (getCacheKeyComponent(expectedChecksum) !== getCacheKeyComponent(actualChecksum))
// If the lockfile references an old cache format, we tolerate different checksums
}
else if (splitChecksumComponents(expectedChecksum).cacheKey !== splitChecksumComponents(actualChecksum).cacheKey) {
checksumBehavior = `update`;
else
}
else {
checksumBehavior = this.configuration.get(`checksumBehavior`);
switch (checksumBehavior) {
case `ignore`:
return { isValid: true, hash: expectedChecksum };
case `update`:
return { isValid: true, hash: actualChecksum };
case `reset`:
return { isValid: false, hash: expectedChecksum };
default:
case `throw`: {
throw new Report_1.ReportError(MessageName_1.MessageName.CACHE_CHECKSUM_MISMATCH, `The remote archive doesn't match the expected checksum`);
}
}
}
return { isValid: true, hash: actualChecksum };
switch (checksumBehavior) {
case null:
case `update`:
return { isValid: true, hash: actualChecksum };
case `ignore`:
return { isValid: true, hash: expectedChecksum };
case `reset`:
return { isValid: false, hash: expectedChecksum };
default:
case `throw`: {
throw new Report_1.ReportError(MessageName_1.MessageName.CACHE_CHECKSUM_MISMATCH, `The remote archive doesn't match the expected checksum`);
}
}
};

@@ -190,6 +222,9 @@ const validateFileAgainstRemote = async (cachePath) => {

const zipFs = await loader();
const refetchPath = zipFs.getRealPath();
const controlPath = zipFs.getRealPath();
zipFs.saveAndClose();
await fslib_2.xfs.chmodPromise(refetchPath, 0o644);
const result = await validateFile(cachePath, refetchPath);
await fslib_2.xfs.chmodPromise(controlPath, 0o644);
const result = await validateFile(cachePath, {
controlPath,
isColdHit: false,
});
if (!result.isValid)

@@ -215,3 +250,5 @@ throw new Error(`Assertion failed: Expected a valid checksum`);

// Do this before moving the file so that we don't pollute the cache with corrupted archives
const checksum = (await validateFile(packagePath)).hash;
const checksum = (await validateFile(packagePath, {
isColdHit: true,
})).hash;
const cachePath = this.getLocatorPath(locator, checksum, opts);

@@ -242,3 +279,3 @@ if (!cachePath)

const finalPath = opts.mirrorWriteOnly
? mirrorPath !== null && mirrorPath !== void 0 ? mirrorPath : cachePath
? mirrorPath ?? cachePath
: cachePath;

@@ -250,3 +287,2 @@ await Promise.all(copyProcess.map(copy => copy()));

const mutexedLoad = async () => {
var _a;
// We don't yet know whether the cache path can be computed yet, since that

@@ -259,3 +295,3 @@ // depends on whether the cache is actually the mirror or not, and whether

: false;
const shouldMock = !!((_a = opts.mockedPackages) === null || _a === void 0 ? void 0 : _a.has(locator.locatorHash)) && (!this.check || !cacheFileExists);
const shouldMock = !!opts.mockedPackages?.has(locator.locatorHash) && (!this.check || !cacheFileExists);
const isCacheHit = shouldMock || cacheFileExists;

@@ -278,3 +314,5 @@ const action = isCacheHit

else {
const maybeChecksum = await validateFile(cachePath);
const maybeChecksum = await validateFile(cachePath, {
isColdHit: false,
});
if (maybeChecksum.isValid) {

@@ -318,6 +356,6 @@ checksum = maybeChecksum.hash;

const releaseFs = () => {
zipFs === null || zipFs === void 0 ? void 0 : zipFs.discardAndClose();
zipFs?.discardAndClose();
};
// We hide the checksum if the package presence is conditional, because it becomes unreliable
const exposedChecksum = !((_a = opts.unstablePackages) === null || _a === void 0 ? void 0 : _a.has(locator.locatorHash))
const exposedChecksum = !opts.unstablePackages?.has(locator.locatorHash)
? checksum

@@ -329,9 +367,16 @@ : null;

exports.Cache = Cache;
function getCacheKeyComponent(checksum) {
const split = checksum.indexOf(`/`);
return split !== -1 ? checksum.slice(0, split) : null;
// Checksums are stored as `<cacheVersion><cacheSpec>/<hash>`; the cache-key
// prefix (everything before the `/`) is optional for legacy entries.
const CHECKSUM_REGEX = /^(?:(?<cacheKey>(?<cacheVersion>[0-9]+)(?<cacheSpec>.*))\/)?(?<hash>.*)$/;
/**
 * Splits a cache checksum into its components.
 *
 * @param checksum A `<cacheKey>/<hash>` string, where the key part is optional.
 * @returns An object with `cacheKey`, `cacheVersion` (parsed as a base-10
 *   integer), `cacheSpec`, and `hash`. The first three are `null` when the
 *   checksum has no cache-key prefix.
 * @throws If the checksum doesn't match the expected pattern.
 */
function splitChecksumComponents(checksum) {
    const match = checksum.match(CHECKSUM_REGEX);
    if (!match?.groups)
        throw new Error(`Assertion failed: Expected the checksum to match the requested pattern`);
    // Explicit radix: never rely on the global parseInt default behavior.
    const cacheVersion = match.groups.cacheVersion
        ? Number.parseInt(match.groups.cacheVersion, 10)
        : null;
    return {
        cacheKey: match.groups.cacheKey ?? null,
        cacheVersion,
        cacheSpec: match.groups.cacheSpec ?? null,
        hash: match.groups.hash,
    };
}
// Returns the hash part of a `<cacheKey>/<hash>` checksum. Checksums without
// a cache-key prefix are returned unchanged.
function getHashComponent(checksum) {
    const separatorIndex = checksum.indexOf(`/`);
    if (separatorIndex === -1)
        return checksum;
    return checksum.slice(separatorIndex + 1);
}

@@ -18,3 +18,2 @@ /// <reference types="node" />

export declare const DEFAULT_RC_FILENAME: Filename;
export declare const DEFAULT_LOCK_FILENAME: Filename;
export declare const SECRET = "********";

@@ -114,4 +113,2 @@ export declare enum SettingsType {

ignorePath: boolean;
ignoreCwd: boolean;
cacheKeyOverride: string | null;
globalFolder: PortablePath;

@@ -121,3 +118,2 @@ cacheFolder: PortablePath;

virtualFolder: PortablePath;
lockfileFilename: Filename;
installStatePath: PortablePath;

@@ -127,2 +123,3 @@ immutablePatterns: Array<string>;

enableGlobalCache: boolean;
cacheMigrationMode: `always` | `match-spec` | `required-only`;
enableColors: boolean;

@@ -132,5 +129,5 @@ enableHyperlinks: boolean;

enableMessageNames: boolean;
enableMotd: boolean;
enableProgressBars: boolean;
enableTimers: boolean;
enableTips: boolean;
preferInteractive: boolean;

@@ -215,3 +212,3 @@ preferTruncatedLines: boolean;

strict?: boolean;
usePath?: boolean;
usePathCheck?: PortablePath | null;
useRc?: boolean;

@@ -266,3 +263,3 @@ };

*/
static find(startingCwd: PortablePath, pluginConfiguration: PluginConfiguration | null, { lookup, strict, usePath, useRc }?: FindProjectOptions): Promise<Configuration>;
static find(startingCwd: PortablePath, pluginConfiguration: PluginConfiguration | null, { lookup, strict, usePathCheck, useRc }?: FindProjectOptions): Promise<Configuration>;
static findRcFiles(startingCwd: PortablePath): Promise<{

@@ -269,0 +266,0 @@ path: PortablePath;

@@ -30,3 +30,3 @@ "use strict";

function hasProperty(data, key) {
return Object.prototype.hasOwnProperty.call(data, key);
return Object.hasOwn(data, key);
}

@@ -84,3 +84,3 @@ function isConflictMarker(data) {

continue;
expectedValueType !== null && expectedValueType !== void 0 ? expectedValueType : (expectedValueType = valueType);
expectedValueType ??= valueType;
if (valueType !== expectedValueType || onConflict === `hardReset`) {

@@ -87,0 +87,0 @@ currentResetPosition = lastRelevantPosition;

@@ -10,3 +10,2 @@ "use strict";

reduceDependency: (dependency, project, locator, initialDependency, { resolver, resolveOptions }) => {
var _a, _b;
for (const { pattern, reference } of project.topLevelWorkspace.manifest.resolutions) {

@@ -16,3 +15,3 @@ if (pattern.from) {

continue;
const normalizedFrom = project.configuration.normalizeLocator(structUtils.makeLocator(structUtils.parseIdent(pattern.from.fullName), (_a = pattern.from.description) !== null && _a !== void 0 ? _a : locator.reference));
const normalizedFrom = project.configuration.normalizeLocator(structUtils.makeLocator(structUtils.parseIdent(pattern.from.fullName), pattern.from.description ?? locator.reference));
if (normalizedFrom.locatorHash !== locator.locatorHash) {

@@ -25,3 +24,3 @@ continue;

continue;
const normalizedDescriptor = project.configuration.normalizeDependency(structUtils.makeDescriptor(structUtils.parseLocator(pattern.descriptor.fullName), (_b = pattern.descriptor.description) !== null && _b !== void 0 ? _b : dependency.range));
const normalizedDescriptor = project.configuration.normalizeDependency(structUtils.makeDescriptor(structUtils.parseLocator(pattern.descriptor.fullName), pattern.descriptor.description ?? dependency.range));
if (normalizedDescriptor.descriptorHash !== dependency.descriptorHash) {

@@ -28,0 +27,0 @@ continue;

@@ -178,3 +178,3 @@ "use strict";

else {
return code !== null && code !== void 0 ? code : 1;
return code ?? 1;
}

@@ -181,0 +181,0 @@ }

@@ -330,3 +330,3 @@ "use strict";

return applyColor(configuration, `null`, exports.Type.NULL);
if (Object.prototype.hasOwnProperty.call(transforms, formatType)) {
if (Object.hasOwn(transforms, formatType)) {
const transform = transforms[formatType];

@@ -348,3 +348,3 @@ const typedTransform = transform;

return null;
if (Object.prototype.hasOwnProperty.call(transforms, formatType)) {
if (Object.hasOwn(transforms, formatType)) {
miscUtils.overrideType(formatType);

@@ -451,3 +451,3 @@ return transforms[formatType].json(value);

if (typeof level !== `undefined`) {
return level !== null && level !== void 0 ? level : defaultLevel;
return level ?? defaultLevel;
}

@@ -458,3 +458,3 @@ }

if (filterMatcher(strippedText)) {
return filterLevel !== null && filterLevel !== void 0 ? filterLevel : defaultLevel;
return filterLevel ?? defaultLevel;
}

@@ -466,3 +466,3 @@ }

if (typeof level !== `undefined`) {
return level !== null && level !== void 0 ? level : defaultLevel;
return level ?? defaultLevel;
}

@@ -484,3 +484,3 @@ }

{
reportWarning.call(report, name !== null && name !== void 0 ? name : MessageName_1.MessageName.UNNAMED, text);
reportWarning.call(report, name ?? MessageName_1.MessageName.UNNAMED, text);
}

@@ -490,3 +490,3 @@ break;

{
reportError.call(report, name !== null && name !== void 0 ? name : MessageName_1.MessageName.UNNAMED, text);
reportError.call(report, name ?? MessageName_1.MessageName.UNNAMED, text);
}

@@ -493,0 +493,0 @@ break;

@@ -7,3 +7,3 @@ "use strict";

const crypto_1 = require("crypto");
const globby_1 = tslib_1.__importDefault(require("globby"));
const fast_glob_1 = tslib_1.__importDefault(require("fast-glob"));
function makeHash(...args) {

@@ -46,11 +46,9 @@ const hash = (0, crypto_1.createHash)(`sha512`);

async function checksumPattern(pattern, { cwd }) {
// Note: We use a two-pass glob instead of using the expandDirectories option
// from globby, because the native implementation is broken.
// Note: We use a two-pass glob instead of using globby with the expandDirectories
// option, because the native implementation is broken.
//
// Ref: https://github.com/sindresorhus/globby/issues/147
const dirListing = await (0, globby_1.default)(pattern, {
const dirListing = await (0, fast_glob_1.default)(pattern, {
cwd: fslib_1.npath.fromPortablePath(cwd),
expandDirectories: false,
onlyDirectories: true,
unique: true,
});

@@ -60,8 +58,7 @@ const dirPatterns = dirListing.map(entry => {

});
const listing = await (0, globby_1.default)([pattern, ...dirPatterns], {
const listing = await (0, fast_glob_1.default)([pattern, ...dirPatterns], {
cwd: fslib_1.npath.fromPortablePath(cwd),
expandDirectories: false,
onlyFiles: false,
unique: true,
});
// fast-glob returns results in arbitrary order
listing.sort();

@@ -68,0 +65,0 @@ const hashes = await Promise.all(listing.map(async (entry) => {

@@ -42,3 +42,2 @@ "use strict";

async function prettyNetworkError(response, { configuration, customErrorMessage }) {
var _a, _b;
try {

@@ -50,3 +49,3 @@ return await response;

throw err;
let message = (_a = customErrorMessage === null || customErrorMessage === void 0 ? void 0 : customErrorMessage(err, configuration)) !== null && _a !== void 0 ? _a : (_b = err.response.body) === null || _b === void 0 ? void 0 : _b.error;
let message = customErrorMessage?.(err, configuration) ?? err.response.body?.error;
if (message == null) {

@@ -53,0 +52,0 @@ if (err.message.startsWith(`Response code`)) {

@@ -13,4 +13,4 @@ import * as execUtils from './execUtils';

import * as treeUtils from './treeUtils';
export { Cache } from './Cache';
export { DEFAULT_RC_FILENAME, DEFAULT_LOCK_FILENAME, LEGACY_PLUGINS, TAG_REGEXP } from './Configuration';
export { CACHE_VERSION, CACHE_CHECKPOINT, Cache } from './Cache';
export { DEFAULT_RC_FILENAME, LEGACY_PLUGINS, TAG_REGEXP } from './Configuration';
export { Configuration, FormatType, ProjectLookup, SettingsType, WindowsLinkType } from './Configuration';

@@ -32,3 +32,3 @@ export type { PluginConfiguration, SettingsDefinition, PackageExtensionData } from './Configuration';

export type { PeerRequirement } from './Project';
export { Project, InstallMode } from './Project';
export { LOCKFILE_VERSION, Project, InstallMode } from './Project';
export { ReportError, Report } from './Report';

@@ -35,0 +35,0 @@ export type { Resolver, ResolveOptions, MinimalResolveOptions } from './Resolver';

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.treeUtils = exports.tgzUtils = exports.structUtils = exports.semverUtils = exports.scriptUtils = exports.nodeUtils = exports.miscUtils = exports.formatUtils = exports.folderUtils = exports.execUtils = exports.httpUtils = exports.hashUtils = exports.PackageExtensionStatus = exports.PackageExtensionType = exports.LinkType = exports.YarnVersion = exports.Workspace = exports.WorkspaceResolver = exports.WorkspaceFetcher = exports.VirtualFetcher = exports.ThrowReport = exports.TelemetryManager = exports.StreamReport = exports.Report = exports.ReportError = exports.InstallMode = exports.Project = exports.MultiFetcher = exports.stringifyMessageName = exports.parseMessageName = exports.MessageName = exports.Manifest = exports.LockfileResolver = exports.LightReport = exports.LegacyMigrationResolver = exports.BuildDirectiveType = exports.WindowsLinkType = exports.SettingsType = exports.ProjectLookup = exports.FormatType = exports.Configuration = exports.TAG_REGEXP = exports.LEGACY_PLUGINS = exports.DEFAULT_LOCK_FILENAME = exports.DEFAULT_RC_FILENAME = exports.Cache = void 0;
exports.treeUtils = exports.tgzUtils = exports.structUtils = exports.semverUtils = exports.scriptUtils = exports.nodeUtils = exports.miscUtils = exports.formatUtils = exports.folderUtils = exports.execUtils = exports.httpUtils = exports.hashUtils = exports.PackageExtensionStatus = exports.PackageExtensionType = exports.LinkType = exports.YarnVersion = exports.Workspace = exports.WorkspaceResolver = exports.WorkspaceFetcher = exports.VirtualFetcher = exports.ThrowReport = exports.TelemetryManager = exports.StreamReport = exports.Report = exports.ReportError = exports.InstallMode = exports.Project = exports.LOCKFILE_VERSION = exports.MultiFetcher = exports.stringifyMessageName = exports.parseMessageName = exports.MessageName = exports.Manifest = exports.LockfileResolver = exports.LightReport = exports.LegacyMigrationResolver = exports.BuildDirectiveType = exports.WindowsLinkType = exports.SettingsType = exports.ProjectLookup = exports.FormatType = exports.Configuration = exports.TAG_REGEXP = exports.LEGACY_PLUGINS = exports.DEFAULT_RC_FILENAME = exports.Cache = exports.CACHE_CHECKPOINT = exports.CACHE_VERSION = void 0;
const tslib_1 = require("tslib");

@@ -30,6 +30,7 @@ const execUtils = tslib_1.__importStar(require("./execUtils"));

var Cache_1 = require("./Cache");
Object.defineProperty(exports, "CACHE_VERSION", { enumerable: true, get: function () { return Cache_1.CACHE_VERSION; } });
Object.defineProperty(exports, "CACHE_CHECKPOINT", { enumerable: true, get: function () { return Cache_1.CACHE_CHECKPOINT; } });
Object.defineProperty(exports, "Cache", { enumerable: true, get: function () { return Cache_1.Cache; } });
var Configuration_1 = require("./Configuration");
Object.defineProperty(exports, "DEFAULT_RC_FILENAME", { enumerable: true, get: function () { return Configuration_1.DEFAULT_RC_FILENAME; } });
Object.defineProperty(exports, "DEFAULT_LOCK_FILENAME", { enumerable: true, get: function () { return Configuration_1.DEFAULT_LOCK_FILENAME; } });
Object.defineProperty(exports, "LEGACY_PLUGINS", { enumerable: true, get: function () { return Configuration_1.LEGACY_PLUGINS; } });

@@ -60,2 +61,3 @@ Object.defineProperty(exports, "TAG_REGEXP", { enumerable: true, get: function () { return Configuration_1.TAG_REGEXP; } });

var Project_1 = require("./Project");
Object.defineProperty(exports, "LOCKFILE_VERSION", { enumerable: true, get: function () { return Project_1.LOCKFILE_VERSION; } });
Object.defineProperty(exports, "Project", { enumerable: true, get: function () { return Project_1.Project; } });

@@ -62,0 +64,0 @@ Object.defineProperty(exports, "InstallMode", { enumerable: true, get: function () { return Project_1.InstallMode; } });

@@ -34,3 +34,3 @@ "use strict";

async setup(project, { report }) {
const lockfilePath = fslib_1.ppath.join(project.cwd, project.configuration.get(`lockfileFilename`));
const lockfilePath = fslib_1.ppath.join(project.cwd, fslib_1.Filename.lockfile);
// No need to enable it if the lockfile doesn't exist

@@ -42,3 +42,3 @@ if (!fslib_1.xfs.existsSync(lockfilePath))

// No need to enable it either if the lockfile is modern
if (Object.prototype.hasOwnProperty.call(parsed, `__metadata`))
if (Object.hasOwn(parsed, `__metadata`))
return;

@@ -45,0 +45,0 @@ const resolutions = this.resolutions = new Map();

@@ -581,5 +581,5 @@ "use strict";

const afterSet = new Set(after.filter(key => {
return Object.prototype.hasOwnProperty.call(this.raw, key);
return Object.hasOwn(this.raw, key);
}));
if (afterSet.size === 0 || Object.prototype.hasOwnProperty.call(this.raw, name)) {
if (afterSet.size === 0 || Object.hasOwn(this.raw, name)) {
this.raw[name] = value;

@@ -604,3 +604,2 @@ }

exportTo(data, { compatibilityMode = true } = {}) {
var _a;
// Note that we even set the fields that we re-set later; it

@@ -785,3 +784,3 @@ // allows us to preserve the key ordering

if (this.scripts !== null && this.scripts.size > 0) {
(_a = data.scripts) !== null && _a !== void 0 ? _a : (data.scripts = {});
data.scripts ??= {};
for (const existingScriptName of Object.keys(data.scripts))

@@ -788,0 +787,0 @@ if (!this.scripts.has(existingScriptName))

@@ -100,5 +100,5 @@ export declare enum MessageName {

VERSION_NOTICE = 88,
MOTD_NOTICE = 89
TIPS_NOTICE = 89
}
export declare function stringifyMessageName(name: MessageName | number): string;
export declare function parseMessageName(messageName: string): MessageName;

@@ -107,3 +107,3 @@ "use strict";

MessageName[MessageName["VERSION_NOTICE"] = 88] = "VERSION_NOTICE";
MessageName[MessageName["MOTD_NOTICE"] = 89] = "MOTD_NOTICE";
MessageName[MessageName["TIPS_NOTICE"] = 89] = "TIPS_NOTICE";
})(MessageName || (exports.MessageName = MessageName = {}));

@@ -110,0 +110,0 @@ function stringifyMessageName(name) {

@@ -135,2 +135,3 @@ /// <reference types="node" />

};
export declare function parseInt(val: string | number): number;
export {};
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.groupBy = exports.toMerged = exports.mergeIntoTarget = exports.isPathLike = exports.tryParseOptionalBoolean = exports.parseOptionalBoolean = exports.parseBoolean = exports.replaceEnvVariables = exports.buildIgnorePattern = exports.sortMap = exports.dynamicRequire = exports.CachingStrategy = exports.DefaultStream = exports.AsyncActions = exports.makeDeferred = exports.BufferStream = exports.bufferStream = exports.prettifySyncErrors = exports.prettifyAsyncErrors = exports.releaseAfterUseAsync = exports.getMapWithDefault = exports.getSetWithDefault = exports.getArrayWithDefault = exports.getFactoryWithDefault = exports.convertMapsToIndexableObjects = exports.allSettledSafe = exports.isIndexableObject = exports.mapAndFind = exports.mapAndFilter = exports.validateEnum = exports.assertNever = exports.overrideType = exports.escapeRegExp = exports.plural = exports.isTaggedYarnVersion = void 0;
exports.parseInt = exports.groupBy = exports.toMerged = exports.mergeIntoTarget = exports.isPathLike = exports.tryParseOptionalBoolean = exports.parseOptionalBoolean = exports.parseBoolean = exports.replaceEnvVariables = exports.buildIgnorePattern = exports.sortMap = exports.dynamicRequire = exports.CachingStrategy = exports.DefaultStream = exports.AsyncActions = exports.makeDeferred = exports.BufferStream = exports.bufferStream = exports.prettifySyncErrors = exports.prettifyAsyncErrors = exports.releaseAfterUseAsync = exports.getMapWithDefault = exports.getSetWithDefault = exports.getArrayWithDefault = exports.getFactoryWithDefault = exports.convertMapsToIndexableObjects = exports.allSettledSafe = exports.isIndexableObject = exports.mapAndFind = exports.mapAndFilter = exports.validateEnum = exports.assertNever = exports.overrideType = exports.escapeRegExp = exports.plural = exports.isTaggedYarnVersion = void 0;
const tslib_1 = require("tslib");

@@ -234,4 +234,3 @@ const fslib_1 = require("@yarnpkg/fslib");

reduce(key, factory) {
var _a;
const promise = (_a = this.promises.get(key)) !== null && _a !== void 0 ? _a : Promise.resolve();
const promise = this.promises.get(key) ?? Promise.resolve();
this.set(key, () => factory(promise));

@@ -303,3 +302,3 @@ }

const stat = fslib_1.xfs.statSync(path);
if ((cachedInstance === null || cachedInstance === void 0 ? void 0 : cachedInstance.mtime) === stat.mtimeMs)
if (cachedInstance?.mtime === stat.mtimeMs)
return cachedInstance.instance;

@@ -382,3 +381,3 @@ const instance = dynamicRequireNoCache(path);

const { variableName, colon, fallback } = args[args.length - 1];
const variableExist = Object.prototype.hasOwnProperty.call(env, variableName);
const variableExist = Object.hasOwn(env, variableName);
const variableValue = env[variableName];

@@ -476,7 +475,6 @@ if (variableValue)

function groupBy(items, key) {
var _a;
const groups = Object.create(null);
for (const item of items) {
const groupKey = item[key];
(_a = groups[groupKey]) !== null && _a !== void 0 ? _a : (groups[groupKey] = []);
groups[groupKey] ??= [];
groups[groupKey].push(item);

@@ -487,1 +485,5 @@ }

exports.groupBy = groupBy;
// Coerces a string-or-number setting to a number: strings are parsed as
// base-10 integers, numbers pass through untouched.
function parseInt(val) {
    if (typeof val === `string`)
        return Number.parseInt(val, 10);
    return val;
}
exports.parseInt = parseInt;

@@ -32,3 +32,2 @@ "use strict";

function getLibc() {
var _a, _b, _c, _d;
// It seems that Node randomly crashes with no output under some circumstances when running a getReport() on Windows.

@@ -38,7 +37,7 @@ // Since Windows has no libc anyway, shortcut this path.

return null;
const report = (_b = (_a = process.report) === null || _a === void 0 ? void 0 : _a.getReport()) !== null && _b !== void 0 ? _b : {};
const sharedObjects = (_c = report.sharedObjects) !== null && _c !== void 0 ? _c : [];
const report = process.report?.getReport() ?? {};
const sharedObjects = report.sharedObjects ?? [];
// Matches the first group if libc, second group if musl
const libcRegExp = /\/(?:(ld-linux-|[^/]+-linux-gnu\/)|(libc.musl-|ld-musl-))/;
return (_d = miscUtils.mapAndFind(sharedObjects, entry => {
return miscUtils.mapAndFind(sharedObjects, entry => {
const match = entry.match(libcRegExp);

@@ -52,3 +51,3 @@ if (!match)

throw new Error(`Assertion failed: Expected the libc variant to have been detected`);
})) !== null && _d !== void 0 ? _d : null;
}) ?? null;
}

@@ -58,3 +57,3 @@ let architecture;

function getArchitecture() {
return architecture = architecture !== null && architecture !== void 0 ? architecture : {
return architecture = architecture ?? {
os: process.platform,

@@ -77,3 +76,3 @@ cpu: process.arch,

const architecture = getArchitecture();
return architectureSet = architectureSet !== null && architectureSet !== void 0 ? architectureSet : {
return architectureSet = architectureSet ?? {
os: [architecture.os],

@@ -80,0 +79,0 @@ cpu: [architecture.cpu],

@@ -13,2 +13,3 @@ /// <reference types="node" />

import { IdentHash, DescriptorHash, LocatorHash } from './types';
export declare const LOCKFILE_VERSION: number;
export declare enum InstallMode {

@@ -15,0 +16,0 @@ /**

@@ -29,3 +29,2 @@ /// <reference types="node" />

cacheMisses: Set<LocatorHash>;
protected buffered: Array<() => void>;
private reportedInfos;

@@ -32,0 +31,0 @@ private reportedWarnings;

@@ -26,3 +26,2 @@ "use strict";

this.cacheMisses = new Set();
this.buffered = [];
this.reportedInfos = new Set();

@@ -126,3 +125,2 @@ this.reportedWarnings = new Set();

reportInfoOnce(name, text, opts) {
var _a;
const key = opts && opts.key ? opts.key : text;

@@ -132,7 +130,6 @@ if (!this.reportedInfos.has(key)) {

this.reportInfo(name, text);
(_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
opts?.reportExtra?.(this);
}
}
reportWarningOnce(name, text, opts) {
var _a;
const key = opts && opts.key ? opts.key : text;

@@ -142,7 +139,6 @@ if (!this.reportedWarnings.has(key)) {

this.reportWarning(name, text);
(_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
opts?.reportExtra?.(this);
}
}
reportErrorOnce(name, text, opts) {
var _a;
const key = opts && opts.key ? opts.key : text;

@@ -152,3 +148,3 @@ if (!this.reportedErrors.has(key)) {

this.reportError(name, text);
(_a = opts === null || opts === void 0 ? void 0 : opts.reportExtra) === null || _a === void 0 ? void 0 : _a.call(opts, this);
opts?.reportExtra?.(this);
}

@@ -155,0 +151,0 @@ }

@@ -47,5 +47,5 @@ "use strict";

const manifest = await Manifest_1.Manifest.tryFind(location);
if (manifest === null || manifest === void 0 ? void 0 : manifest.packageManager) {
if (manifest?.packageManager) {
const locator = structUtils.tryParseLocator(manifest.packageManager);
if (locator === null || locator === void 0 ? void 0 : locator.name) {
if (locator?.name) {
const reason = `found ${JSON.stringify({ packageManager: manifest.packageManager })} in manifest`;

@@ -99,5 +99,3 @@ const [major] = locator.reference.split(`.`);

exports.detectPackageManager = detectPackageManager;
async function makeScriptEnv(_a) {
var _b, _c, _d;
var { project, locator, binFolder, ignoreCorepack, lifecycleScript, baseEnv = (_b = project === null || project === void 0 ? void 0 : project.configuration.env) !== null && _b !== void 0 ? _b : process.env } = _a;
async function makeScriptEnv({ project, locator, binFolder, ignoreCorepack, lifecycleScript, baseEnv = project?.configuration.env ?? process.env }) {
const scriptEnv = {};

@@ -144,4 +142,4 @@ // Ensure that the PATH environment variable is properly capitalized (Windows)

const version = workspace
? (_c = workspace.manifest.version) !== null && _c !== void 0 ? _c : ``
: (_d = project.storedPackages.get(locator.locatorHash).version) !== null && _d !== void 0 ? _d : ``;
? workspace.manifest.version ?? ``
: project.storedPackages.get(locator.locatorHash).version ?? ``;
scriptEnv.npm_package_name = structUtils.stringifyIdent(locator);

@@ -215,3 +213,3 @@ scriptEnv.npm_package_version = version;

const ignoreCorepack = effectivePackageManager === PackageManager.Yarn2 &&
!(packageManagerSelection === null || packageManagerSelection === void 0 ? void 0 : packageManagerSelection.packageManagerField);
!packageManagerSelection?.packageManagerField;
await fslib_2.xfs.mktempPromise(async (binFolder) => {

@@ -593,3 +591,3 @@ const env = await makeScriptEnv({ binFolder, ignoreCorepack });

async function executePackageAccessibleBinary(locator, binaryName, args, { cwd, project, stdin, stdout, stderr, nodeArgs = [], packageAccessibleBinaries }) {
packageAccessibleBinaries !== null && packageAccessibleBinaries !== void 0 ? packageAccessibleBinaries : (packageAccessibleBinaries = await getPackageAccessibleBinaries(locator, { project }));
packageAccessibleBinaries ??= await getPackageAccessibleBinaries(locator, { project });
const binary = packageAccessibleBinaries.get(binaryName);

@@ -596,0 +594,0 @@ if (!binary)

@@ -93,3 +93,3 @@ "use strict";

message = error.message;
name = name !== null && name !== void 0 ? name : error.name;
name = name ?? error.name;
}

@@ -143,3 +143,3 @@ const fullMessage = typeof name !== `undefined`

const styleName = configuration.get(`progressBarStyle`) || defaultStyle;
if (!Object.prototype.hasOwnProperty.call(PROGRESS_STYLES, styleName))
if (!Object.hasOwn(PROGRESS_STYLES, styleName))
throw new Error(`Assertion failed: Invalid progress bar style`);

@@ -168,3 +168,3 @@ this.progressStyle = PROGRESS_STYLES[styleName];

const mark = { committed: false, action: () => {
reportHeader === null || reportHeader === void 0 ? void 0 : reportHeader();
reportHeader?.();
} };

@@ -190,3 +190,3 @@ if (skipIfEmpty) {

if (mark.committed) {
reportFooter === null || reportFooter === void 0 ? void 0 : reportFooter(after - before);
reportFooter?.(after - before);
}

@@ -197,3 +197,3 @@ }

const mark = { committed: false, action: () => {
reportHeader === null || reportHeader === void 0 ? void 0 : reportHeader();
reportHeader?.();
} };

@@ -219,3 +219,3 @@ if (skipIfEmpty) {

if (mark.committed) {
reportFooter === null || reportFooter === void 0 ? void 0 : reportFooter(after - before);
reportFooter?.(after - before);
}

@@ -222,0 +222,0 @@ }

@@ -25,3 +25,3 @@ "use strict";

function makeIdent(scope, name) {
if (scope === null || scope === void 0 ? void 0 : scope.startsWith(`@`))
if (scope?.startsWith(`@`))
throw new Error(`Invalid scope: don't prefix it with '@'`);

@@ -432,5 +432,5 @@ return { identHash: hashUtils.makeHash(scope, name), scope, name };

: null;
if (typeof (opts === null || opts === void 0 ? void 0 : opts.requireProtocol) === `string` && protocol !== opts.requireProtocol)
if (typeof opts?.requireProtocol === `string` && protocol !== opts.requireProtocol)
throw new Error(`Invalid protocol (${protocol})`);
else if ((opts === null || opts === void 0 ? void 0 : opts.requireProtocol) && protocol === null)
else if (opts?.requireProtocol && protocol === null)
throw new Error(`Missing protocol (${protocol})`);

@@ -440,3 +440,3 @@ const source = typeof match[3] !== `undefined`

: null;
if ((opts === null || opts === void 0 ? void 0 : opts.requireSource) && source === null)
if (opts?.requireSource && source === null)
throw new Error(`Missing source (${range})`);

@@ -446,3 +446,3 @@ const rawSelector = typeof match[3] !== `undefined`

: decodeURIComponent(match[2]);
const selector = (opts === null || opts === void 0 ? void 0 : opts.parseSelector)
const selector = (opts?.parseSelector)
? querystring_1.default.parse(rawSelector)

@@ -719,3 +719,3 @@ : rawSelector;

function prettyWorkspace(configuration, workspace) {
return prettyIdent(configuration, workspace.locator);
return prettyIdent(configuration, workspace.anchoredLocator);
}

@@ -722,0 +722,0 @@ exports.prettyWorkspace = prettyWorkspace;

@@ -27,8 +27,8 @@ import { PortablePath } from '@yarnpkg/fslib';

export type RegistryFile = {
lastMotd?: number;
lastTips?: number;
lastUpdate?: number;
blocks?: Record<string, RegistryBlock>;
displayedMotd?: Array<number>;
displayedTips?: Array<number>;
};
export type Motd = {
export type Tip = {
selector?: string;

@@ -48,4 +48,4 @@ message: string;

triggerUpdate: boolean;
triggerMotd: boolean;
nextMotd: number;
triggerTips: boolean;
nextTips: number;
};

@@ -57,14 +57,14 @@ export declare class TelemetryManager {

private enumerators;
private nextMotd;
private displayedMotd;
private shouldCommitMotd;
private nextTips;
private displayedTips;
private shouldCommitTips;
isNew: boolean;
isMotd: boolean;
shouldShowTips: boolean;
constructor(configuration: Configuration, accountId: string);
/**
* Prevent the motd to be displayed today, but doesn't actually display it.
* We use it when we replaced the motd by something else (like an upgrade prompt).
* Prevents the tip from being displayed today, but doesn't actually display it.
* We use it when replacing the tip by something else (like an upgrade prompt).
*/
commitMotd(): void;
selectMotd(allMotds: Array<Motd | null>): Motd | null;
commitTips(): void;
selectTip(allTips: Array<Tip | null>): Tip | null;
reportVersion(value: string): void;

@@ -71,0 +71,0 @@ reportCommandName(value: string): void;

@@ -23,3 +23,2 @@ "use strict";

function derive(params) {
var _a, _b;
const hour = 60 * 60 * 1000;

@@ -29,18 +28,18 @@ const day = 24 * hour;

const updateIntervalMs = params.updateInterval * day;
const lastUpdate = (_a = params.state.lastUpdate) !== null && _a !== void 0 ? _a : params.timeNow + updateIntervalMs + Math.floor(updateIntervalMs * params.randomInitialInterval);
const lastUpdate = params.state.lastUpdate ?? params.timeNow + updateIntervalMs + Math.floor(updateIntervalMs * params.randomInitialInterval);
const nextUpdate = lastUpdate + updateIntervalMs;
// We reset the motd each day at 8am
const lastMotd = (_b = params.state.lastMotd) !== null && _b !== void 0 ? _b : nowDay * day;
const nextMotd = lastMotd + day + 8 * hour - params.timeZone;
// We reset the tips each day at 8am
const lastTips = params.state.lastTips ?? nowDay * day;
const nextTips = lastTips + day + 8 * hour - params.timeZone;
const triggerUpdate = nextUpdate <= params.timeNow;
const triggerMotd = nextMotd <= params.timeNow;
const triggerTips = nextTips <= params.timeNow;
let nextState = null;
if (triggerUpdate || triggerMotd || !params.state.lastUpdate || !params.state.lastMotd) {
if (triggerUpdate || triggerTips || !params.state.lastUpdate || !params.state.lastTips) {
nextState = {};
nextState.lastUpdate = triggerUpdate ? params.timeNow : lastUpdate;
nextState.lastMotd = lastMotd;
nextState.lastTips = lastTips;
nextState.blocks = triggerUpdate ? {} : params.state.blocks;
nextState.displayedMotd = params.state.displayedMotd;
nextState.displayedTips = params.state.displayedTips;
}
return { nextState, triggerUpdate, triggerMotd, nextMotd: triggerMotd ? nowDay * day : lastMotd };
return { nextState, triggerUpdate, triggerTips, nextTips: triggerTips ? nowDay * day : lastTips };
}

@@ -53,9 +52,9 @@ exports.derive = derive;

this.enumerators = new Map();
this.nextMotd = 0;
this.displayedMotd = [];
this.shouldCommitMotd = false;
this.nextTips = 0;
this.displayedTips = [];
this.shouldCommitTips = false;
this.configuration = configuration;
const registryFile = this.getRegistryPath();
this.isNew = !fslib_1.xfs.existsSync(registryFile);
this.isMotd = false;
this.shouldShowTips = false;
this.sendReport(accountId);

@@ -65,12 +64,12 @@ this.startBuffer();

/**
* Prevent the motd to be displayed today, but doesn't actually display it.
* We use it when we replaced the motd by something else (like an upgrade prompt).
* Prevents the tip from being displayed today, but doesn't actually display it.
* We use it when replacing the tip by something else (like an upgrade prompt).
*/
commitMotd() {
if (this.isMotd) {
this.shouldCommitMotd = true;
commitTips() {
if (this.shouldShowTips) {
this.shouldCommitTips = true;
}
}
selectMotd(allMotds) {
const displayedMotd = new Set(this.displayedMotd);
selectTip(allTips) {
const displayedTips = new Set(this.displayedTips);
const checkVersion = (selector) => {

@@ -85,25 +84,25 @@ if (selector && YarnVersion_1.YarnVersion) {

// Get all possible non-null messages
const activeMotds = allMotds
const activeTips = allTips
.map((_, index) => index)
.filter(index => { var _a; return allMotds[index] && checkVersion((_a = allMotds[index]) === null || _a === void 0 ? void 0 : _a.selector); });
if (activeMotds.length === 0)
.filter(index => allTips[index] && checkVersion(allTips[index]?.selector));
if (activeTips.length === 0)
return null;
// Filter out the ones that have already been displayed
let availableMotds = activeMotds
.filter(index => !displayedMotd.has(index));
// If we've seen all motd, we can reset the list. We still
let availableTips = activeTips
.filter(index => !displayedTips.has(index));
// If we've seen all tips, we can reset the list. We still
// keep the last few items there, just to make sure we don't
// immediately re-display the same motd as the last past days.
if (availableMotds.length === 0) {
const sliceLength = Math.floor(activeMotds.length * .2);
this.displayedMotd = sliceLength > 0
? this.displayedMotd.slice(-sliceLength)
// immediately re-display the same tip we showed over the last few days.
if (availableTips.length === 0) {
const sliceLength = Math.floor(activeTips.length * .2);
this.displayedTips = sliceLength > 0
? this.displayedTips.slice(-sliceLength)
: [];
availableMotds = activeMotds
.filter(index => !displayedMotd.has(index));
availableTips = activeTips
.filter(index => !displayedTips.has(index));
}
const selectedMotd = availableMotds[Math.floor(Math.random() * availableMotds.length)];
this.displayedMotd.push(selectedMotd);
this.commitMotd();
return allMotds[selectedMotd];
const selectedTip = availableTips[Math.floor(Math.random() * availableTips.length)];
this.displayedTips.push(selectedTip);
this.commitTips();
return allTips[selectedTip];
}

@@ -151,3 +150,2 @@ reportVersion(value) {

sendReport(accountId) {
var _a, _b, _c, _d;
const registryFile = this.getRegistryPath();

@@ -161,3 +159,3 @@ let state;

}
const { nextState, triggerUpdate, triggerMotd, nextMotd, } = derive({
const { nextState, triggerUpdate, triggerTips, nextTips, } = derive({
state,

@@ -169,4 +167,4 @@ timeNow: Date.now(),

});
this.nextMotd = nextMotd;
this.displayedMotd = (_a = state.displayedMotd) !== null && _a !== void 0 ? _a : [];
this.nextTips = nextTips;
this.displayedTips = state.displayedTips ?? [];
if (nextState !== null) {

@@ -182,6 +180,6 @@ try {

}
if (triggerMotd && this.configuration.get(`enableMotd`))
this.isMotd = true;
if (triggerTips && this.configuration.get(`enableTips`))
this.shouldShowTips = true;
if (triggerUpdate) {
const blocks = (_b = state.blocks) !== null && _b !== void 0 ? _b : {};
const blocks = state.blocks ?? {};
if (Object.keys(blocks).length === 0) {

@@ -194,3 +192,3 @@ const rawUrl = `https://browser-http-intake.logs.datadoghq.eu/v1/input/${accountId}?ddsource=yarn`;

});
for (const [userId, block] of Object.entries((_c = state.blocks) !== null && _c !== void 0 ? _c : {})) {
for (const [userId, block] of Object.entries(state.blocks ?? {})) {
if (Object.keys(block).length === 0)

@@ -201,3 +199,3 @@ continue;

upload.reportType = `primary`;
for (const key of Object.keys((_d = upload.enumerators) !== null && _d !== void 0 ? _d : {}))
for (const key of Object.keys(upload.enumerators ?? {}))
upload.enumerators[key] = upload.enumerators[key].length;

@@ -233,3 +231,2 @@ sendPayload(upload);

applyChanges() {
var _a, _b, _c, _d, _e, _f, _g, _h, _j;
const registryFile = this.getRegistryPath();

@@ -243,10 +240,10 @@ let state;

}
const userId = (_a = this.configuration.get(`telemetryUserId`)) !== null && _a !== void 0 ? _a : `*`;
const blocks = state.blocks = (_b = state.blocks) !== null && _b !== void 0 ? _b : {};
const block = blocks[userId] = (_c = blocks[userId]) !== null && _c !== void 0 ? _c : {};
const userId = this.configuration.get(`telemetryUserId`) ?? `*`;
const blocks = state.blocks = state.blocks ?? {};
const block = blocks[userId] = blocks[userId] ?? {};
for (const key of this.hits.keys()) {
const store = block.hits = (_d = block.hits) !== null && _d !== void 0 ? _d : {};
const ns = store[key] = (_e = store[key]) !== null && _e !== void 0 ? _e : {};
const store = block.hits = block.hits ?? {};
const ns = store[key] = store[key] ?? {};
for (const [extra, value] of this.hits.get(key)) {
ns[extra] = ((_f = ns[extra]) !== null && _f !== void 0 ? _f : 0) + value;
ns[extra] = (ns[extra] ?? 0) + value;
}

@@ -256,12 +253,12 @@ }

for (const key of this[field].keys()) {
const store = block[field] = (_g = block[field]) !== null && _g !== void 0 ? _g : {};
const store = block[field] = block[field] ?? {};
store[key] = [...new Set([
...(_h = store[key]) !== null && _h !== void 0 ? _h : [],
...(_j = this[field].get(key)) !== null && _j !== void 0 ? _j : [],
...store[key] ?? [],
...this[field].get(key) ?? [],
])];
}
}
if (this.shouldCommitMotd) {
state.lastMotd = this.nextMotd;
state.displayedMotd = this.displayedMotd;
if (this.shouldCommitTips) {
state.lastTips = this.nextTips;
state.displayedTips = this.displayedTips;
}

@@ -268,0 +265,0 @@ fslib_1.xfs.mkdirSync(fslib_1.ppath.dirname(registryFile), { recursive: true });

@@ -31,3 +31,3 @@ "use strict";

const tmpFile = fslib_1.ppath.join(tmpFolder, `archive.zip`);
workerPool || (workerPool = new WorkerPool_1.WorkerPool((0, worker_zip_1.getContent)()));
workerPool ||= new WorkerPool_1.WorkerPool((0, worker_zip_1.getContent)());
await workerPool.run({ tmpFile, tgz, opts });

@@ -60,3 +60,2 @@ return new libzip_1.ZipFS(tmpFile, { level: opts.compressionLevel });

async function extractArchiveTo(tgz, targetFs, { stripComponents = 0, prefixPath = fslib_1.PortablePath.dot } = {}) {
var _a;
function ignore(entry) {

@@ -84,3 +83,3 @@ // Disallow absolute paths; might be malicious (ex: /etc/passwd)

// If a single executable bit is set, normalize so that all are
if (entry.type === `Directory` || (((_a = entry.mode) !== null && _a !== void 0 ? _a : 0) & 0o111) !== 0)
if (entry.type === `Directory` || ((entry.mode ?? 0) & 0o111) !== 0)
mode |= 0o111;

@@ -87,0 +86,0 @@ switch (entry.type) {

@@ -44,3 +44,2 @@ "use strict";

const copyTree = (printNode) => {
var _a;
if (typeof printNode.children === `undefined`) {

@@ -53,3 +52,3 @@ if (typeof printNode.value === `undefined`)

? printNode.children.entries()
: Object.entries((_a = printNode.children) !== null && _a !== void 0 ? _a : {});
: Object.entries(printNode.children ?? {});
const targetChildren = Array.isArray(printNode.children)

@@ -77,7 +76,6 @@ ? []

function emitTree(tree, { configuration, stdout, json, separators = 0 }) {
var _a;
if (json) {
const iterator = Array.isArray(tree.children)
? tree.children.values()
: Object.values((_a = tree.children) !== null && _a !== void 0 ? _a : {});
: Object.values(tree.children ?? {});
for (const child of iterator)

@@ -84,0 +82,0 @@ if (child)

@@ -44,10 +44,8 @@ "use strict";

worker.on(`error`, err => {
var _a;
(_a = worker[kTaskInfo]) === null || _a === void 0 ? void 0 : _a.reject(err);
worker[kTaskInfo]?.reject(err);
worker[kTaskInfo] = null;
});
worker.on(`exit`, code => {
var _a;
if (code !== 0)
(_a = worker[kTaskInfo]) === null || _a === void 0 ? void 0 : _a.reject(new Error(`Worker exited with code ${code}`));
worker[kTaskInfo]?.reject(new Error(`Worker exited with code ${code}`));
worker[kTaskInfo] = null;

@@ -59,4 +57,3 @@ });

return this.limit(() => {
var _a;
const worker = (_a = this.workers.pop()) !== null && _a !== void 0 ? _a : this.createWorker();
const worker = this.workers.pop() ?? this.createWorker();
worker.ref();

@@ -63,0 +60,0 @@ return new Promise((resolve, reject) => {

@@ -11,3 +11,2 @@ import { PortablePath } from '@yarnpkg/fslib';

readonly anchoredLocator: Locator;
readonly locator: Locator;
readonly workspacesCwds: Set<PortablePath>;

@@ -14,0 +13,0 @@ manifest: Manifest;

@@ -6,3 +6,3 @@ "use strict";

const fslib_1 = require("@yarnpkg/fslib");
const globby_1 = tslib_1.__importDefault(require("globby"));
const fast_glob_1 = tslib_1.__importDefault(require("fast-glob"));
const Manifest_1 = require("./Manifest");

@@ -21,4 +21,3 @@ const WorkspaceResolver_1 = require("./WorkspaceResolver");

async setup() {
var _a;
this.manifest = (_a = await Manifest_1.Manifest.tryFind(this.cwd)) !== null && _a !== void 0 ? _a : new Manifest_1.Manifest();
this.manifest = await Manifest_1.Manifest.tryFind(this.cwd) ?? new Manifest_1.Manifest();
// We use ppath.relative to guarantee that the default hash will be consistent even if the project is installed on different OS / path

@@ -28,20 +27,15 @@ // @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)

const ident = this.manifest.name ? this.manifest.name : structUtils.makeIdent(null, `${this.computeCandidateName()}-${hashUtils.makeHash(this.relativeCwd).substring(0, 6)}`);
const reference = this.manifest.version ? this.manifest.version : `0.0.0`;
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.locator = structUtils.makeLocator(ident, reference);
this.anchoredDescriptor = structUtils.makeDescriptor(ident, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.anchoredDescriptor = structUtils.makeDescriptor(this.locator, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
// @ts-expect-error: It's ok to initialize it now, even if it's readonly (setup is called right after construction)
this.anchoredLocator = structUtils.makeLocator(this.locator, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
this.anchoredLocator = structUtils.makeLocator(ident, `${WorkspaceResolver_1.WorkspaceResolver.protocol}${this.relativeCwd}`);
const patterns = this.manifest.workspaceDefinitions.map(({ pattern }) => pattern);
if (patterns.length === 0)
return;
const relativeCwds = await (0, globby_1.default)(patterns, {
const relativeCwds = await (0, fast_glob_1.default)(patterns, {
cwd: fslib_1.npath.fromPortablePath(this.cwd),
expandDirectories: false,
onlyDirectories: true,
onlyFiles: false,
ignore: [`**/node_modules`, `**/.git`, `**/.yarn`],
});
// It seems that the return value of globby isn't in any guaranteed order - not even the directory listing order
// fast-glob returns results in arbitrary order
relativeCwds.sort();

@@ -65,3 +59,2 @@ await relativeCwds.reduce(async (previousTask, relativeCwd) => {

accepts(range) {
var _a;
const protocolIndex = range.indexOf(`:`);

@@ -82,3 +75,3 @@ const protocol = protocolIndex !== -1

if (protocol === WorkspaceResolver_1.WorkspaceResolver.protocol)
return semverRange.test((_a = this.manifest.version) !== null && _a !== void 0 ? _a : `0.0.0`);
return semverRange.test(this.manifest.version ?? `0.0.0`);
if (!this.project.configuration.get(`enableTransparentWorkspaces`))

@@ -85,0 +78,0 @@ return false;

{
"name": "@yarnpkg/core",
"version": "4.0.0-rc.48",
"version": "4.0.0-rc.49",
"stableVersion": "3.5.2",

@@ -16,14 +16,14 @@ "license": "BSD-2-Clause",

"@types/treeify": "^1.0.0",
"@yarnpkg/fslib": "^3.0.0-rc.48",
"@yarnpkg/libzip": "^3.0.0-rc.48",
"@yarnpkg/parsers": "^3.0.0-rc.48",
"@yarnpkg/shell": "^4.0.0-rc.48",
"@yarnpkg/fslib": "^3.0.0-rc.49",
"@yarnpkg/libzip": "^3.0.0-rc.49",
"@yarnpkg/parsers": "^3.0.0-rc.49",
"@yarnpkg/shell": "^4.0.0-rc.49",
"camelcase": "^5.3.1",
"chalk": "^3.0.0",
"ci-info": "^3.2.0",
"clipanion": "^3.2.1",
"clipanion": "^4.0.0-rc.2",
"cross-spawn": "7.0.3",
"diff": "^5.1.0",
"dotenv": "^16.3.1",
"globby": "^11.0.1",
"fast-glob": "^3.2.2",
"got": "^11.7.0",

@@ -53,6 +53,6 @@ "lodash": "^4.17.15",

"@types/tunnel": "^0.0.0",
"@yarnpkg/cli": "^4.0.0-rc.48",
"@yarnpkg/plugin-link": "^3.0.0-rc.48",
"@yarnpkg/plugin-npm": "^3.0.0-rc.48",
"@yarnpkg/plugin-pnp": "^4.0.0-rc.48",
"@yarnpkg/cli": "^4.0.0-rc.49",
"@yarnpkg/plugin-link": "^3.0.0-rc.49",
"@yarnpkg/plugin-npm": "^3.0.0-rc.49",
"@yarnpkg/plugin-pnp": "^4.0.0-rc.49",
"comment-json": "^2.2.0",

@@ -59,0 +59,0 @@ "esbuild": "npm:esbuild-wasm@^0.15.15",

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc