@yarnpkg/core
Comparing version 3.0.0-rc.2 to 3.0.0-rc.3
@@ -8,8 +8,8 @@ "use strict"; | ||
const libzip_1 = require("@yarnpkg/libzip"); | ||
const fs_1 = (0, tslib_1.__importDefault)(require("fs")); | ||
const fs_1 = tslib_1.__importDefault(require("fs")); | ||
const MessageName_1 = require("./MessageName"); | ||
const Report_1 = require("./Report"); | ||
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const hashUtils = tslib_1.__importStar(require("./hashUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const CACHE_VERSION = 8; | ||
@@ -86,8 +86,19 @@ class Cache { | ||
async setup() { | ||
// mkdir may cause write operations even when directories exist. To ensure that the cache can be successfully used | ||
// on read-only filesystems, only run mkdir when not running in immutable mode. | ||
if (!this.configuration.get(`enableGlobalCache`)) { | ||
await fslib_2.xfs.mkdirPromise(this.cwd, { recursive: true }); | ||
const gitignorePath = fslib_2.ppath.resolve(this.cwd, `.gitignore`); | ||
await fslib_2.xfs.changeFilePromise(gitignorePath, `/.gitignore\n*.flock\n`); | ||
if (this.immutable) { | ||
if (!await fslib_2.xfs.existsPromise(this.cwd)) { | ||
throw new Report_1.ReportError(MessageName_1.MessageName.IMMUTABLE_CACHE, `Cache path does not exist.`); | ||
} | ||
} | ||
else { | ||
await fslib_2.xfs.mkdirPromise(this.cwd, { recursive: true }); | ||
const gitignorePath = fslib_2.ppath.resolve(this.cwd, `.gitignore`); | ||
await fslib_2.xfs.changeFilePromise(gitignorePath, `/.gitignore\n*.flock\n`); | ||
} | ||
} | ||
await fslib_2.xfs.mkdirPromise(this.mirrorCwd || this.cwd, { recursive: true }); | ||
if (this.mirrorCwd || !this.immutable) { | ||
await fslib_2.xfs.mkdirPromise(this.mirrorCwd || this.cwd, { recursive: true }); | ||
} | ||
} | ||
@@ -218,3 +229,3 @@ async fetchPackageFromCache(locator, expectedChecksum, { onHit, onMiss, loader, skipIntegrityCheck }) { | ||
let zipFs = null; | ||
const libzip = await (0, libzip_1.getLibzipPromise)(); | ||
const libzip = await libzip_1.getLibzipPromise(); | ||
const lazyFs = new fslib_1.LazyFS(() => miscUtils.prettifySyncErrors(() => { | ||
@@ -221,0 +232,0 @@ return zipFs = new fslib_1.ZipFS(cachePath, { baseFs, libzip, readOnly: true }); |
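The Cache.setup() hunk above now branches on immutable mode: read-only installs fail fast when the cache folder is missing (reported as IMMUTABLE_CACHE), while regular installs keep creating the folder and its .gitignore. A minimal sketch of that control flow, using only the xfs/ppath helpers that appear in the diff and with the ReportError plumbing elided:

const { xfs, ppath } = require(`@yarnpkg/fslib`);

// Sketch only: mirrors the new setup() branching, not the actual Cache class.
async function setupCacheFolder(cacheCwd, { immutable }) {
  if (immutable) {
    // Never attempt a write (not even mkdir) on read-only filesystems
    if (!await xfs.existsPromise(cacheCwd))
      throw new Error(`Cache path does not exist.`);
    return;
  }

  await xfs.mkdirPromise(cacheCwd, { recursive: true });

  // Keep the cache folder self-ignoring for Git users
  const gitignorePath = ppath.resolve(cacheCwd, `.gitignore`);
  await xfs.changeFilePromise(gitignorePath, `/.gitignore\n*.flock\n`);
}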
@@ -8,7 +8,6 @@ "use strict"; | ||
const parsers_1 = require("@yarnpkg/parsers"); | ||
const camelcase_1 = (0, tslib_1.__importDefault)(require("camelcase")); | ||
const camelcase_1 = tslib_1.__importDefault(require("camelcase")); | ||
const ci_info_1 = require("ci-info"); | ||
const clipanion_1 = require("clipanion"); | ||
const p_limit_1 = (0, tslib_1.__importDefault)(require("p-limit")); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const p_limit_1 = tslib_1.__importDefault(require("p-limit")); | ||
const stream_1 = require("stream"); | ||
@@ -24,8 +23,8 @@ const CorePlugin_1 = require("./CorePlugin"); | ||
const WorkspaceResolver_1 = require("./WorkspaceResolver"); | ||
const folderUtils = (0, tslib_1.__importStar)(require("./folderUtils")); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const nodeUtils = (0, tslib_1.__importStar)(require("./nodeUtils")); | ||
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const folderUtils = tslib_1.__importStar(require("./folderUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const nodeUtils = tslib_1.__importStar(require("./nodeUtils")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const types_1 = require("./types"); | ||
@@ -598,3 +597,3 @@ const IGNORED_ENV_VARIABLES = new Set([ | ||
continue; | ||
key = (0, camelcase_1.default)(key.slice(exports.ENVIRONMENT_PREFIX.length)); | ||
key = camelcase_1.default(key.slice(exports.ENVIRONMENT_PREFIX.length)); | ||
environmentSettings[key] = value; | ||
@@ -735,8 +734,8 @@ } | ||
]); | ||
const interop = (obj) => obj.__esModule | ||
? obj.default | ||
: obj; | ||
const getDefault = (object) => { | ||
return `default` in object ? object.default : object; | ||
}; | ||
if (pluginConfiguration !== null) { | ||
for (const request of pluginConfiguration.plugins.keys()) | ||
plugins.set(request, interop(pluginConfiguration.modules.get(request))); | ||
plugins.set(request, getDefault(pluginConfiguration.modules.get(request))); | ||
const requireEntries = new Map(); | ||
@@ -748,5 +747,2 @@ for (const request of nodeUtils.builtinModules()) | ||
const dynamicPlugins = new Set(); | ||
const getDefault = (object) => { | ||
return object.default || object; | ||
}; | ||
const importPlugin = (pluginPath, source) => { | ||
@@ -820,3 +816,3 @@ const { factory, name } = miscUtils.dynamicRequire(fslib_1.npath.fromPortablePath(pluginPath)); | ||
try { | ||
data = (0, parsers_1.parseSyml)(content); | ||
data = parsers_1.parseSyml(content); | ||
} | ||
@@ -841,3 +837,3 @@ catch (error) { | ||
const content = await fslib_1.xfs.readFilePromise(homeRcFilePath, `utf8`); | ||
const data = (0, parsers_1.parseSyml)(content); | ||
const data = parsers_1.parseSyml(content); | ||
return { path: homeRcFilePath, cwd: homeFolder, data }; | ||
@@ -874,3 +870,3 @@ } | ||
const current = fslib_1.xfs.existsSync(configurationPath) | ||
? (0, parsers_1.parseSyml)(await fslib_1.xfs.readFilePromise(configurationPath, `utf8`)) | ||
? parsers_1.parseSyml(await fslib_1.xfs.readFilePromise(configurationPath, `utf8`)) | ||
: {}; | ||
@@ -916,3 +912,3 @@ let patched = false; | ||
} | ||
await fslib_1.xfs.changeFilePromise(configurationPath, (0, parsers_1.stringifySyml)(replacement), { | ||
await fslib_1.xfs.changeFilePromise(configurationPath, parsers_1.stringifySyml(replacement), { | ||
automaticNewlines: true, | ||
@@ -1076,3 +1072,3 @@ }); | ||
const registerPackageExtension = (descriptor, extensionData, { userProvided = false } = {}) => { | ||
if (!semver_1.default.validRange(descriptor.range)) | ||
if (!semverUtils.validRange(descriptor.range)) | ||
throw new Error(`Only semver ranges are allowed as keys for the lockfileExtensions setting`); | ||
@@ -1200,3 +1196,3 @@ const extension = new Manifest_1.Manifest(); | ||
return miscUtils.getFactoryWithDefault(this.limits, key, () => { | ||
return (0, p_limit_1.default)(this.get(key)); | ||
return p_limit_1.default(this.get(key)); | ||
}); | ||
@@ -1203,0 +1199,0 @@ } |
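Two smaller Configuration.js changes are visible above: extension range validation now goes through semverUtils.validRange, and the two module-interop helpers collapse into a single getDefault that checks for the presence of a default key rather than its truthiness. The latter matters when a module's default export is falsy; a quick illustration with hypothetical values:

// Sketch of the unified default-export resolution used when loading plugins.
const getDefault = (object) => {
  return `default` in object ? object.default : object;
};

getDefault({ default: 0 });           // -> 0 (an `||` fallback would return the wrapper object)
getDefault({ factory: () => ({}) });  // -> the object itself, since it has no `default` key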
@@ -6,3 +6,3 @@ "use strict"; | ||
const MessageName_1 = require("./MessageName"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
exports.CorePlugin = { | ||
@@ -9,0 +9,0 @@ hooks: { |
@@ -6,3 +6,3 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const cross_spawn_1 = (0, tslib_1.__importDefault)(require("cross-spawn")); | ||
const cross_spawn_1 = tslib_1.__importDefault(require("cross-spawn")); | ||
var EndStrategy; | ||
@@ -38,3 +38,3 @@ (function (EndStrategy) { | ||
stdio[2] = stderr; | ||
const child = (0, cross_spawn_1.default)(fileName, args, { | ||
const child = cross_spawn_1.default(fileName, args, { | ||
cwd: fslib_1.npath.fromPortablePath(cwd), | ||
@@ -104,3 +104,3 @@ env: { | ||
env = { ...env, PWD: nativeCwd }; | ||
const subprocess = (0, cross_spawn_1.default)(fileName, args, { | ||
const subprocess = cross_spawn_1.default(fileName, args, { | ||
cwd: nativeCwd, | ||
@@ -107,0 +107,0 @@ env, |
@@ -8,3 +8,3 @@ "use strict"; | ||
if (process.platform === `win32`) { | ||
const base = fslib_1.npath.toPortablePath(process.env.LOCALAPPDATA || fslib_1.npath.join((0, os_1.homedir)(), `AppData`, `Local`)); | ||
const base = fslib_1.npath.toPortablePath(process.env.LOCALAPPDATA || fslib_1.npath.join(os_1.homedir(), `AppData`, `Local`)); | ||
return fslib_1.ppath.resolve(base, `Yarn/Berry`); | ||
@@ -20,3 +20,3 @@ } | ||
function getHomeFolder() { | ||
return fslib_1.npath.toPortablePath((0, os_1.homedir)() || `/usr/local/share`); | ||
return fslib_1.npath.toPortablePath(os_1.homedir() || `/usr/local/share`); | ||
} | ||
@@ -23,0 +23,0 @@ exports.getHomeFolder = getHomeFolder; |
@@ -6,7 +6,8 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const chalk_1 = (0, tslib_1.__importDefault)(require("chalk")); | ||
const strip_ansi_1 = (0, tslib_1.__importDefault)(require("strip-ansi")); | ||
const chalk_1 = tslib_1.__importDefault(require("chalk")); | ||
const ci_info_1 = require("ci-info"); | ||
const strip_ansi_1 = tslib_1.__importDefault(require("strip-ansi")); | ||
const MessageName_1 = require("./MessageName"); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const types_1 = require("./types"); | ||
@@ -52,3 +53,3 @@ // We have to workaround a TS bug: | ||
exports.supportsColor = chalkOptions.level !== 0; | ||
exports.supportsHyperlinks = exports.supportsColor && !process.env.GITHUB_ACTIONS; | ||
exports.supportsHyperlinks = exports.supportsColor && !process.env.GITHUB_ACTIONS && !ci_info_1.CIRCLE; | ||
const chalkInstance = new chalk_1.default.Instance(chalkOptions); | ||
@@ -244,2 +245,3 @@ const colors = new Map([ | ||
exports.applyColor = applyColor; | ||
const isKonsole = !!process.env.KONSOLE_VERSION; | ||
function applyHyperlink(configuration, text, href) { | ||
@@ -249,2 +251,6 @@ // Only print hyperlinks if allowed per configuration | ||
return text; | ||
// We use ESC as ST for Konsole because it doesn't support | ||
// the non-standard BEL character for hyperlinks | ||
if (isKonsole) | ||
return `\u001b]8;;${href}\u001b\\${text}\u001b]8;;\u001b\\`; | ||
// We use BELL as ST because it seems that iTerm doesn't properly support | ||
@@ -328,3 +334,3 @@ // the \x1b\\ sequence described in the reference document | ||
if (logFiltersByText.size > 0) { | ||
const level = logFiltersByText.get((0, strip_ansi_1.default)(text)); | ||
const level = logFiltersByText.get(strip_ansi_1.default(text)); | ||
if (typeof level !== `undefined`) { | ||
@@ -335,3 +341,3 @@ return level !== null && level !== void 0 ? level : defaultLevel; | ||
if (logFiltersByCode.size > 0) { | ||
const level = logFiltersByCode.get((0, MessageName_1.stringifyMessageName)(name)); | ||
const level = logFiltersByCode.get(MessageName_1.stringifyMessageName(name)); | ||
if (typeof level !== `undefined`) { | ||
@@ -338,0 +344,0 @@ return level !== null && level !== void 0 ? level : defaultLevel; |
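formatUtils gains terminal-aware hyperlinks: they are now disabled on CircleCI as well as GitHub Actions, and Konsole gets the standards-compliant ESC \ string terminator instead of BEL. A small sketch of the two OSC 8 encodings involved (helper name is illustrative):

// OSC 8 hyperlink, terminated either by BEL (what iTerm-style terminals expect)
// or by the standard ST sequence ESC \ (what Konsole requires).
function hyperlink(text, href, { useBell = true } = {}) {
  const terminator = useBell ? `\u0007` : `\u001b\\`;
  return `\u001b]8;;${href}${terminator}${text}\u001b]8;;${terminator}`;
}

process.stdout.write(hyperlink(`yarnpkg.com`, `https://yarnpkg.com`) + `\n`);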
/// <reference types="node" /> | ||
import { PortablePath } from '@yarnpkg/fslib'; | ||
import { PortablePath, FakeFS } from '@yarnpkg/fslib'; | ||
import { BinaryLike } from 'crypto'; | ||
export declare function makeHash<T extends string = string>(...args: Array<BinaryLike | null>): T; | ||
export declare function checksumFile(path: PortablePath): Promise<string>; | ||
export declare function checksumFile(path: PortablePath, { baseFs, algorithm }?: { | ||
baseFs: FakeFS<PortablePath>; | ||
algorithm: string; | ||
}): Promise<string>; | ||
export declare function checksumPattern(pattern: string, { cwd }: { | ||
cwd: PortablePath; | ||
}): Promise<string>; |
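checksumFile now takes an optional options bag so callers can hash through any FakeFS (for example a mounted zip) and pick the digest algorithm; the implementation below also switches from a read stream to fixed-size fd reads. Note that, at least in the compiled output shown here, the defaults apply to the options object as a whole, so pass both fields when overriding either one. A usage sketch:

const { hashUtils } = require(`@yarnpkg/core`);
const { npath, xfs } = require(`@yarnpkg/fslib`);

async function demo() {
  const file = npath.toPortablePath(`/tmp/example.tgz`);

  // Same behaviour as before: sha512 over the host filesystem.
  const sha512 = await hashUtils.checksumFile(file);

  // New in this release: choose the backing filesystem and the algorithm.
  const sha1 = await hashUtils.checksumFile(file, { baseFs: xfs, algorithm: `sha1` });

  return { sha512, sha1 };
}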
@@ -7,24 +7,37 @@ "use strict"; | ||
const crypto_1 = require("crypto"); | ||
const globby_1 = (0, tslib_1.__importDefault)(require("globby")); | ||
const globby_1 = tslib_1.__importDefault(require("globby")); | ||
function makeHash(...args) { | ||
const hash = (0, crypto_1.createHash)(`sha512`); | ||
for (const arg of args) | ||
hash.update(arg ? arg : ``); | ||
const hash = crypto_1.createHash(`sha512`); | ||
let acc = ``; | ||
for (const arg of args) { | ||
if (typeof arg === `string`) { | ||
acc += arg; | ||
} | ||
else if (arg) { | ||
if (acc) { | ||
hash.update(acc); | ||
acc = ``; | ||
} | ||
hash.update(arg); | ||
} | ||
} | ||
if (acc) | ||
hash.update(acc); | ||
return hash.digest(`hex`); | ||
} | ||
exports.makeHash = makeHash; | ||
function checksumFile(path) { | ||
return new Promise((resolve, reject) => { | ||
const hash = (0, crypto_1.createHash)(`sha512`); | ||
const stream = fslib_1.xfs.createReadStream(path); | ||
stream.on(`data`, chunk => { | ||
hash.update(chunk); | ||
}); | ||
stream.on(`error`, error => { | ||
reject(error); | ||
}); | ||
stream.on(`end`, () => { | ||
resolve(hash.digest(`hex`)); | ||
}); | ||
}); | ||
async function checksumFile(path, { baseFs, algorithm } = { baseFs: fslib_1.xfs, algorithm: `sha512` }) { | ||
const fd = await baseFs.openPromise(path, `r`); | ||
try { | ||
const CHUNK_SIZE = 65536; | ||
const chunk = Buffer.allocUnsafeSlow(CHUNK_SIZE); | ||
const hash = crypto_1.createHash(algorithm); | ||
let bytesRead = 0; | ||
while ((bytesRead = await baseFs.readPromise(fd, chunk, 0, CHUNK_SIZE)) !== 0) | ||
hash.update(bytesRead === CHUNK_SIZE ? chunk : chunk.slice(0, bytesRead)); | ||
return hash.digest(`hex`); | ||
} | ||
finally { | ||
await baseFs.closePromise(fd); | ||
} | ||
} | ||
@@ -37,3 +50,3 @@ exports.checksumFile = checksumFile; | ||
// Ref: https://github.com/sindresorhus/globby/issues/147 | ||
const dirListing = await (0, globby_1.default)(pattern, { | ||
const dirListing = await globby_1.default(pattern, { | ||
cwd: fslib_1.npath.fromPortablePath(cwd), | ||
@@ -47,3 +60,3 @@ expandDirectories: false, | ||
}); | ||
const listing = await (0, globby_1.default)([pattern, ...dirPatterns], { | ||
const listing = await globby_1.default([pattern, ...dirPatterns], { | ||
cwd: fslib_1.npath.fromPortablePath(cwd), | ||
@@ -65,3 +78,3 @@ expandDirectories: false, | ||
})); | ||
const hash = (0, crypto_1.createHash)(`sha512`); | ||
const hash = crypto_1.createHash(`sha512`); | ||
for (const sub of hashes) | ||
@@ -68,0 +81,0 @@ hash.update(sub); |
@@ -9,9 +9,9 @@ "use strict"; | ||
const http_1 = require("http"); | ||
const micromatch_1 = (0, tslib_1.__importDefault)(require("micromatch")); | ||
const tunnel_1 = (0, tslib_1.__importDefault)(require("tunnel")); | ||
const micromatch_1 = tslib_1.__importDefault(require("micromatch")); | ||
const tunnel_1 = tslib_1.__importDefault(require("tunnel")); | ||
const url_1 = require("url"); | ||
const MessageName_1 = require("./MessageName"); | ||
const Report_1 = require("./Report"); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
var got_2 = require("got"); | ||
@@ -168,3 +168,3 @@ Object.defineProperty(exports, "RequestError", { enumerable: true, get: function () { return got_2.RequestError; } }); | ||
const caFilePath = networkConfig.caFilePath; | ||
const { default: got } = await Promise.resolve().then(() => (0, tslib_1.__importStar)(require(`got`))); | ||
const { default: got } = await Promise.resolve().then(() => tslib_1.__importStar(require(`got`))); | ||
const certificateAuthority = caFilePath | ||
@@ -171,0 +171,0 @@ ? await getCachedCertificate(caFilePath) |
@@ -5,23 +5,23 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const execUtils = (0, tslib_1.__importStar)(require("./execUtils")); | ||
const execUtils = tslib_1.__importStar(require("./execUtils")); | ||
exports.execUtils = execUtils; | ||
const folderUtils = (0, tslib_1.__importStar)(require("./folderUtils")); | ||
const folderUtils = tslib_1.__importStar(require("./folderUtils")); | ||
exports.folderUtils = folderUtils; | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
exports.formatUtils = formatUtils; | ||
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils")); | ||
const hashUtils = tslib_1.__importStar(require("./hashUtils")); | ||
exports.hashUtils = hashUtils; | ||
const httpUtils = (0, tslib_1.__importStar)(require("./httpUtils")); | ||
const httpUtils = tslib_1.__importStar(require("./httpUtils")); | ||
exports.httpUtils = httpUtils; | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
exports.miscUtils = miscUtils; | ||
const scriptUtils = (0, tslib_1.__importStar)(require("./scriptUtils")); | ||
const scriptUtils = tslib_1.__importStar(require("./scriptUtils")); | ||
exports.scriptUtils = scriptUtils; | ||
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
exports.semverUtils = semverUtils; | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
exports.structUtils = structUtils; | ||
const tgzUtils = (0, tslib_1.__importStar)(require("./tgzUtils")); | ||
const tgzUtils = tslib_1.__importStar(require("./tgzUtils")); | ||
exports.tgzUtils = tgzUtils; | ||
const treeUtils = (0, tslib_1.__importStar)(require("./treeUtils")); | ||
const treeUtils = tslib_1.__importStar(require("./treeUtils")); | ||
exports.treeUtils = treeUtils; | ||
@@ -28,0 +28,0 @@ var Cache_1 = require("./Cache"); |
@@ -7,5 +7,5 @@ "use strict"; | ||
const parsers_1 = require("@yarnpkg/parsers"); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const MessageName_1 = require("./MessageName"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const IMPORTED_PATTERNS = [ | ||
@@ -39,3 +39,3 @@ // These ones come from Git urls | ||
const content = await fslib_1.xfs.readFilePromise(lockfilePath, `utf8`); | ||
const parsed = (0, parsers_1.parseSyml)(content); | ||
const parsed = parsers_1.parseSyml(content); | ||
// No need to enable it either if the lockfile is modern | ||
@@ -51,3 +51,3 @@ if (Object.prototype.hasOwnProperty.call(parsed, `__metadata`)) | ||
} | ||
if (semver_1.default.validRange(descriptor.range)) | ||
if (semverUtils.validRange(descriptor.range)) | ||
descriptor = structUtils.makeDescriptor(descriptor, `npm:${descriptor.range}`); | ||
@@ -54,0 +54,0 @@ const { version, resolved } = parsed[key]; |
@@ -23,6 +23,6 @@ /// <reference types="node" /> | ||
reportCacheMiss(locator: Locator): void; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): void; | ||
startTimerSync<T>(what: string, cb: () => T): void; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<void>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<void>; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): T; | ||
startTimerSync<T>(what: string, cb: () => T): T; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<T>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<T>; | ||
startCacheReport<T>(cb: () => Promise<T>): Promise<T>; | ||
@@ -29,0 +29,0 @@ reportSeparator(): void; |
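The Report timer helpers are also retyped across the Report implementations in this release: startTimerSync and startTimerPromise now return the callback's value (T / Promise<T>) instead of void, so results can be captured directly. Illustrative usage, where report is any Report instance:

// The callback's return value now flows through the timer wrapper.
async function timedStep(report, step) {
  const value = await report.startTimerPromise(`Running step`, async () => {
    return await step(); // previously the wrapper resolved to void
  });
  return value;
}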
@@ -7,3 +7,3 @@ "use strict"; | ||
const StreamReport_1 = require("./StreamReport"); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
class LightReport extends Report_1.Report { | ||
@@ -87,3 +87,3 @@ constructor({ configuration, stdout, suggestInstall = true }) { | ||
formatNameWithHyperlink(name) { | ||
return (0, StreamReport_1.formatNameWithHyperlink)(name, { | ||
return StreamReport_1.formatNameWithHyperlink(name, { | ||
configuration: this.configuration, | ||
@@ -90,0 +90,0 @@ json: false, |
@@ -5,3 +5,3 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class LockfileResolver { | ||
@@ -8,0 +8,0 @@ supportsDescriptor(descriptor, opts) { |
@@ -7,6 +7,6 @@ "use strict"; | ||
const parsers_1 = require("@yarnpkg/parsers"); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const semver_1 = tslib_1.__importDefault(require("semver")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class Manifest { | ||
@@ -354,3 +354,3 @@ constructor() { | ||
try { | ||
this.resolutions.push({ pattern: (0, parsers_1.parseResolution)(pattern), reference }); | ||
this.resolutions.push({ pattern: parsers_1.parseResolution(pattern), reference }); | ||
} | ||
@@ -733,3 +733,3 @@ catch (error) { | ||
data.resolutions = Object.assign({}, ...this.resolutions.map(({ pattern, reference }) => { | ||
return { [(0, parsers_1.stringifyResolution)(pattern)]: reference }; | ||
return { [parsers_1.stringifyResolution(pattern)]: reference }; | ||
})); | ||
@@ -736,0 +736,0 @@ } |
@@ -73,5 +73,5 @@ export declare enum MessageName { | ||
AUTO_NM_SUCCESS = 70, | ||
NM_CANT_INSTALL_PORTAL = 71, | ||
NM_CANT_INSTALL_EXTERNAL_SOFT_LINK = 71, | ||
NM_PRESERVE_SYMLINKS_REQUIRED = 72 | ||
} | ||
export declare function stringifyMessageName(name: MessageName | number): string; |
@@ -80,3 +80,3 @@ "use strict"; | ||
MessageName[MessageName["AUTO_NM_SUCCESS"] = 70] = "AUTO_NM_SUCCESS"; | ||
MessageName[MessageName["NM_CANT_INSTALL_PORTAL"] = 71] = "NM_CANT_INSTALL_PORTAL"; | ||
MessageName[MessageName["NM_CANT_INSTALL_EXTERNAL_SOFT_LINK"] = 71] = "NM_CANT_INSTALL_EXTERNAL_SOFT_LINK"; | ||
MessageName[MessageName["NM_PRESERVE_SYMLINKS_REQUIRED"] = 72] = "NM_PRESERVE_SYMLINKS_REQUIRED"; | ||
@@ -83,0 +83,0 @@ })(MessageName = exports.MessageName || (exports.MessageName = {})); |
@@ -77,2 +77,3 @@ /// <reference types="node" /> | ||
}[keyof T]; | ||
export declare function isPathLike(value: string): boolean; | ||
export {}; |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.tryParseOptionalBoolean = exports.parseOptionalBoolean = exports.parseBoolean = exports.replaceEnvVariables = exports.buildIgnorePattern = exports.sortMap = exports.dynamicRequireNoCache = exports.dynamicRequire = exports.DefaultStream = exports.BufferStream = exports.bufferStream = exports.prettifySyncErrors = exports.prettifyAsyncErrors = exports.releaseAfterUseAsync = exports.getMapWithDefault = exports.getSetWithDefault = exports.getArrayWithDefault = exports.getFactoryWithDefault = exports.convertMapsToIndexableObjects = exports.isIndexableObject = exports.mapAndFind = exports.mapAndFilter = exports.validateEnum = exports.assertNever = exports.overrideType = exports.escapeRegExp = void 0; | ||
exports.isPathLike = exports.tryParseOptionalBoolean = exports.parseOptionalBoolean = exports.parseBoolean = exports.replaceEnvVariables = exports.buildIgnorePattern = exports.sortMap = exports.dynamicRequireNoCache = exports.dynamicRequire = exports.DefaultStream = exports.BufferStream = exports.bufferStream = exports.prettifySyncErrors = exports.prettifyAsyncErrors = exports.releaseAfterUseAsync = exports.getMapWithDefault = exports.getSetWithDefault = exports.getArrayWithDefault = exports.getFactoryWithDefault = exports.convertMapsToIndexableObjects = exports.isIndexableObject = exports.mapAndFind = exports.mapAndFilter = exports.validateEnum = exports.assertNever = exports.overrideType = exports.escapeRegExp = void 0; | ||
const tslib_1 = require("tslib"); | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const clipanion_1 = require("clipanion"); | ||
const micromatch_1 = (0, tslib_1.__importDefault)(require("micromatch")); | ||
const micromatch_1 = tslib_1.__importDefault(require("micromatch")); | ||
const stream_1 = require("stream"); | ||
@@ -203,3 +203,3 @@ function escapeRegExp(str) { | ||
try { | ||
result = (0, exports.dynamicRequire)(physicalPath); | ||
result = exports.dynamicRequire(physicalPath); | ||
const freshCacheEntry = exports.dynamicRequire.cache[physicalPath]; | ||
@@ -317,1 +317,7 @@ const dynamicModule = eval(`module`); | ||
exports.tryParseOptionalBoolean = tryParseOptionalBoolean; | ||
function isPathLike(value) { | ||
if (fslib_1.npath.isAbsolute(value) || value.match(/^(\.{1,2}|~)\//)) | ||
return true; | ||
return false; | ||
} | ||
exports.isPathLike = isPathLike; |
@@ -7,3 +7,3 @@ "use strict"; | ||
const Report_1 = require("./Report"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class MultiFetcher { | ||
@@ -10,0 +10,0 @@ constructor(fetchers) { |
@@ -5,3 +5,3 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class MultiResolver { | ||
@@ -8,0 +8,0 @@ constructor(resolvers) { |
@@ -5,3 +5,3 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const module_1 = (0, tslib_1.__importDefault)(require("module")); | ||
const module_1 = tslib_1.__importDefault(require("module")); | ||
function builtinModules() { | ||
@@ -8,0 +8,0 @@ // @ts-expect-error |
@@ -103,2 +103,6 @@ /// <reference types="node" /> | ||
populateYarnPaths?: (project: Project, definePath: (path: PortablePath | null) => void) => Promise<void>; | ||
/** | ||
* Called when user requests to clean global cache | ||
*/ | ||
cleanGlobalArtifacts?: (configuration: Configuration) => Promise<void>; | ||
}; | ||
@@ -105,0 +109,0 @@ export declare type Plugin<PluginHooks = any> = { |
@@ -106,4 +106,13 @@ import { PortablePath } from '@yarnpkg/fslib'; | ||
peerRequirements: Map<string, PeerRequirement>; | ||
/** | ||
* Contains whatever data the installers (cf `Linker.ts`) want to persist | ||
* from an install to another. | ||
*/ | ||
installersCustomData: Map<string, unknown>; | ||
/** | ||
* Those checksums are used to detect whether the relevant files actually | ||
* changed since we last read them (to skip part of their generation). | ||
*/ | ||
lockFileChecksum: string | null; | ||
installStateChecksum: string | null; | ||
static find(configuration: Configuration, startingCwd: PortablePath): Promise<{ | ||
@@ -110,0 +119,0 @@ project: Project; |
@@ -5,5 +5,5 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const semverUtils = (0, tslib_1.__importStar)(require("./semverUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const semver_1 = tslib_1.__importDefault(require("semver")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
exports.TAG_REGEXP = /^(?!v)[a-z0-9._-]+$/i; | ||
@@ -10,0 +10,0 @@ class ProtocolResolver { |
@@ -9,4 +9,4 @@ "use strict"; | ||
const shell_1 = require("@yarnpkg/shell"); | ||
const capitalize_1 = (0, tslib_1.__importDefault)(require("lodash/capitalize")); | ||
const p_limit_1 = (0, tslib_1.__importDefault)(require("p-limit")); | ||
const capitalize_1 = tslib_1.__importDefault(require("lodash/capitalize")); | ||
const p_limit_1 = tslib_1.__importDefault(require("p-limit")); | ||
const stream_1 = require("stream"); | ||
@@ -18,6 +18,6 @@ const Manifest_1 = require("./Manifest"); | ||
const YarnVersion_1 = require("./YarnVersion"); | ||
const execUtils = (0, tslib_1.__importStar)(require("./execUtils")); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const execUtils = tslib_1.__importStar(require("./execUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
var PackageManager; | ||
@@ -112,3 +112,3 @@ (function (PackageManager) { | ||
await project.configuration.triggerHook(hook => hook.setupScriptEnvironment, project, scriptEnv, async (name, argv0, args) => { | ||
return await makePathWrapper(binFolder, (0, fslib_2.toFilename)(name), argv0, args); | ||
return await makePathWrapper(binFolder, fslib_2.toFilename(name), argv0, args); | ||
}); | ||
@@ -124,3 +124,3 @@ } | ||
const MAX_PREPARE_CONCURRENCY = 2; | ||
const prepareLimit = (0, p_limit_1.default)(MAX_PREPARE_CONCURRENCY); | ||
const prepareLimit = p_limit_1.default(MAX_PREPARE_CONCURRENCY); | ||
async function prepareExternalProject(cwd, outputPath, { configuration, report, workspace = null, locator = null }) { | ||
@@ -258,3 +258,3 @@ await prepareLimit(async () => { | ||
}, { | ||
libzip: await (0, libzip_1.getLibzipPromise)(), | ||
libzip: await libzip_1.getLibzipPromise(), | ||
}); | ||
@@ -270,3 +270,3 @@ } | ||
const realExecutor = async () => { | ||
return await (0, shell_1.execute)(script, args, { cwd: realCwd, env, stdin, stdout, stderr }); | ||
return await shell_1.execute(script, args, { cwd: realCwd, env, stdin, stdout, stderr }); | ||
}; | ||
@@ -285,3 +285,3 @@ const executor = await project.configuration.reduceHook(hooks => { | ||
const { env, cwd: realCwd } = await initializePackageEnvironment(locator, { project, binFolder, cwd }); | ||
return await (0, shell_1.execute)(command, args, { cwd: realCwd, env, stdin, stdout, stderr }); | ||
return await shell_1.execute(command, args, { cwd: realCwd, env, stdin, stdout, stderr }); | ||
}); | ||
@@ -292,4 +292,25 @@ } | ||
const env = await makeScriptEnv({ project: workspace.project, locator: workspace.anchoredLocator, binFolder, lifecycleScript }); | ||
await Promise.all(Array.from(await getWorkspaceAccessibleBinaries(workspace), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath]))); | ||
return { manifest: workspace.manifest, binFolder, env, cwd: cwd !== null && cwd !== void 0 ? cwd : workspace.cwd }; | ||
await Promise.all(Array.from(await getWorkspaceAccessibleBinaries(workspace), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, fslib_2.toFilename(binaryName), process.execPath, [binaryPath]))); | ||
// When operating under PnP, `initializePackageEnvironment` | ||
// yields package location to the linker, which goes into | ||
// the PnP hook, which resolves paths relative to dirname, | ||
// which is realpath'd (because of Node). The realpath that | ||
// follows ensures that workspaces are realpath'd in a | ||
// similar way. | ||
// | ||
// I'm not entirely comfortable with this, especially because | ||
// there are no tests pertaining to this behaviour and the use | ||
// case is still a bit fuzzy to me (something about Flow not | ||
// handling well the case where a project was 1:1 symlinked | ||
// into another place, I think?). I also don't like the idea | ||
// of realpathing thing in general, since it means losing | ||
// information... | ||
// | ||
// It's fine for now because it preserves a behaviour in 3.x | ||
// that was already there in 2.x, but it should be considered | ||
// for removal or standardization if it ever becomes a problem. | ||
// | ||
if (typeof cwd === `undefined`) | ||
cwd = fslib_2.ppath.dirname(await fslib_2.xfs.realpathPromise(fslib_2.ppath.join(workspace.cwd, `package.json`))); | ||
return { manifest: workspace.manifest, binFolder, env, cwd }; | ||
} | ||
@@ -312,3 +333,3 @@ async function initializePackageEnvironment(locator, { project, binFolder, cwd, lifecycleScript }) { | ||
const env = await makeScriptEnv({ project, locator, binFolder, lifecycleScript }); | ||
await Promise.all(Array.from(await getPackageAccessibleBinaries(locator, { project }), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath]))); | ||
await Promise.all(Array.from(await getPackageAccessibleBinaries(locator, { project }), ([binaryName, [, binaryPath]]) => makePathWrapper(binFolder, fslib_2.toFilename(binaryName), process.execPath, [binaryPath]))); | ||
const packageLocation = await linker.findPackageLocation(pkg, linkerOptions); | ||
@@ -321,3 +342,3 @@ const packageFs = new fslib_1.CwdFS(packageLocation, { baseFs: zipOpenFs }); | ||
}, { | ||
libzip: await (0, libzip_1.getLibzipPromise)(), | ||
libzip: await libzip_1.getLibzipPromise(), | ||
}); | ||
@@ -350,3 +371,3 @@ } | ||
fslib_2.xfs.detachTemp(logDir); | ||
throw new Report_1.ReportError(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `${(0, capitalize_1.default)(lifecycleScriptName)} script failed (exit code ${formatUtils.pretty(configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(configuration, logFile, formatUtils.Type.PATH)}); run ${formatUtils.pretty(configuration, `yarn ${lifecycleScriptName}`, formatUtils.Type.CODE)} to investigate`); | ||
throw new Report_1.ReportError(MessageName_1.MessageName.LIFECYCLE_SCRIPT, `${capitalize_1.default(lifecycleScriptName)} script failed (exit code ${formatUtils.pretty(configuration, exitCode, formatUtils.Type.NUMBER)}, logs can be found here: ${formatUtils.pretty(configuration, logFile, formatUtils.Type.PATH)}); run ${formatUtils.pretty(configuration, `yarn ${lifecycleScriptName}`, formatUtils.Type.CODE)} to investigate`); | ||
} | ||
@@ -449,3 +470,3 @@ }); | ||
const env = await makeScriptEnv({ project, locator, binFolder }); | ||
await Promise.all(Array.from(packageAccessibleBinaries, ([binaryName, [, binaryPath]]) => makePathWrapper(env.BERRY_BIN_FOLDER, (0, fslib_2.toFilename)(binaryName), process.execPath, [binaryPath]))); | ||
await Promise.all(Array.from(packageAccessibleBinaries, ([binaryName, [, binaryPath]]) => makePathWrapper(env.BERRY_BIN_FOLDER, fslib_2.toFilename(binaryName), process.execPath, [binaryPath]))); | ||
let result; | ||
@@ -452,0 +473,0 @@ try { |
@@ -5,5 +5,6 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const semver_1 = tslib_1.__importDefault(require("semver")); | ||
var semver_2 = require("semver"); | ||
Object.defineProperty(exports, "SemVer", { enumerable: true, get: function () { return semver_2.SemVer; } }); | ||
const satisfiesWithPrereleasesCache = new Map(); | ||
/** | ||
@@ -23,11 +24,20 @@ * Returns whether the given semver version satisfies the given range. Notably | ||
function satisfiesWithPrereleases(version, range, loose = false) { | ||
let semverRange; | ||
try { | ||
semverRange = new semver_1.default.Range(range, { includePrerelease: true, loose }); | ||
if (!version) | ||
return false; | ||
const key = `${range}${loose}`; | ||
let semverRange = satisfiesWithPrereleasesCache.get(key); | ||
if (typeof semverRange === `undefined`) { | ||
try { | ||
semverRange = new semver_1.default.Range(range, { includePrerelease: true, loose }); | ||
} | ||
catch (_a) { | ||
return false; | ||
} | ||
finally { | ||
satisfiesWithPrereleasesCache.set(key, semverRange || null); | ||
} | ||
} | ||
catch (err) { | ||
else if (semverRange === null) { | ||
return false; | ||
} | ||
if (!version) | ||
return false; | ||
let semverVersion; | ||
@@ -34,0 +44,0 @@ try { |
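satisfiesWithPrereleases now memoizes parsed ranges (caching null for unparseable ones), avoiding the construction of a new semver.Range for every version checked against the same range. A condensed sketch of the pattern — the real function additionally parses the version through new SemVer:

const semver = require(`semver`);

const rangeCache = new Map();

function getCachedRange(range, loose = false) {
  const key = `${range}${loose}`;
  if (rangeCache.has(key))
    return rangeCache.get(key);

  let parsed = null;
  try {
    parsed = new semver.Range(range, { includePrerelease: true, loose });
  } catch {}

  // Failures are cached as null so invalid ranges stay cheap on repeat calls
  rangeCache.set(key, parsed);
  return parsed;
}

function satisfiesWithPrereleases(version, range, loose = false) {
  if (!version)
    return false;
  const semverRange = getCachedRange(range, loose);
  return semverRange !== null && semverRange.test(version);
}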
@@ -56,6 +56,6 @@ /// <reference types="node" /> | ||
reportCacheMiss(locator: Locator, message?: string): void; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): void; | ||
startTimerSync<T>(what: string, cb: () => T): void; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<void>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<void>; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): T; | ||
startTimerSync<T>(what: string, cb: () => T): T; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<T>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<T>; | ||
startCacheReport<T>(cb: () => Promise<T>): Promise<T>; | ||
@@ -62,0 +62,0 @@ reportSeparator(): void; |
@@ -5,7 +5,7 @@ "use strict"; | ||
const tslib_1 = require("tslib"); | ||
const slice_ansi_1 = (0, tslib_1.__importDefault)(require("@arcanis/slice-ansi")); | ||
const slice_ansi_1 = tslib_1.__importDefault(require("@arcanis/slice-ansi")); | ||
const MessageName_1 = require("./MessageName"); | ||
const Report_1 = require("./Report"); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const PROGRESS_FRAMES = [`⠋`, `⠙`, `⠹`, `⠸`, `⠼`, `⠴`, `⠦`, `⠧`, `⠇`, `⠏`]; | ||
@@ -64,3 +64,3 @@ const PROGRESS_INTERVAL = 80; | ||
const num = name === null ? 0 : name; | ||
const label = (0, MessageName_1.stringifyMessageName)(num); | ||
const label = MessageName_1.stringifyMessageName(num); | ||
if (!json && name === null) { | ||
@@ -167,3 +167,3 @@ return formatUtils.pretty(configuration, label, `grey`); | ||
this.indent += 1; | ||
if (GROUP !== null) { | ||
if (GROUP !== null && !this.json) { | ||
this.stdout.write(GROUP.start(what)); | ||
@@ -192,3 +192,3 @@ } | ||
this.indent -= 1; | ||
if (GROUP !== null) | ||
if (GROUP !== null && !this.json) | ||
this.stdout.write(GROUP.end(what)); | ||
@@ -210,3 +210,3 @@ if (this.configuration.get(`enableTimers`) && after - before > 200) { | ||
this.indent += 1; | ||
if (GROUP !== null) { | ||
if (GROUP !== null && !this.json) { | ||
this.stdout.write(GROUP.start(what)); | ||
@@ -235,3 +235,3 @@ } | ||
this.indent -= 1; | ||
if (GROUP !== null) | ||
if (GROUP !== null && !this.json) | ||
this.stdout.write(GROUP.end(what)); | ||
@@ -503,3 +503,3 @@ if (this.configuration.get(`enableTimers`) && after - before > 200) { | ||
if (truncate) | ||
str = (0, slice_ansi_1.default)(str, 0, process.stdout.columns - 1); | ||
str = slice_ansi_1.default(str, 0, process.stdout.columns - 1); | ||
return str; | ||
@@ -506,0 +506,0 @@ } |
@@ -6,8 +6,8 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const querystring_1 = (0, tslib_1.__importDefault)(require("querystring")); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const querystring_1 = tslib_1.__importDefault(require("querystring")); | ||
const semver_1 = tslib_1.__importDefault(require("semver")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
const hashUtils = tslib_1.__importStar(require("./hashUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
const VIRTUAL_PROTOCOL = `virtual:`; | ||
@@ -569,3 +569,3 @@ const VIRTUAL_ABBREVIATE = 5; | ||
: `${slugifyIdent(locator)}-${humanReference}-${locator.locatorHash.slice(0, hashTruncate)}`; | ||
return (0, fslib_1.toFilename)(slug); | ||
return fslib_1.toFilename(slug); | ||
} | ||
@@ -572,0 +572,0 @@ exports.slugifyLocator = slugifyLocator; |
@@ -6,5 +6,5 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils")); | ||
const httpUtils = (0, tslib_1.__importStar)(require("./httpUtils")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const hashUtils = tslib_1.__importStar(require("./hashUtils")); | ||
const httpUtils = tslib_1.__importStar(require("./httpUtils")); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
var MetricName; | ||
@@ -11,0 +11,0 @@ (function (MetricName) { |
@@ -11,3 +11,3 @@ /// <reference types="node" /> | ||
export declare function makeArchiveFromDirectory(source: PortablePath, { baseFs, prefixPath, compressionLevel, inMemory }?: MakeArchiveFromDirectoryOptions): Promise<ZipFS>; | ||
interface ExtractBufferOptions { | ||
export interface ExtractBufferOptions { | ||
compressionLevel?: ZipCompression; | ||
@@ -14,0 +14,0 @@ prefixPath?: PortablePath; |
@@ -8,4 +8,6 @@ "use strict"; | ||
const stream_1 = require("stream"); | ||
const tar_1 = (0, tslib_1.__importDefault)(require("tar")); | ||
const miscUtils = (0, tslib_1.__importStar)(require("./miscUtils")); | ||
const tar_1 = tslib_1.__importDefault(require("tar")); | ||
const WorkerPool_1 = require("./WorkerPool"); | ||
const miscUtils = tslib_1.__importStar(require("./miscUtils")); | ||
const worker_zip_1 = require("./worker-zip"); | ||
// 1984-06-22T21:50:00.000Z | ||
@@ -20,3 +22,3 @@ // | ||
async function makeArchiveFromDirectory(source, { baseFs = new fslib_1.NodeFS(), prefixPath = fslib_1.PortablePath.root, compressionLevel, inMemory = false } = {}) { | ||
const libzip = await (0, libzip_1.getLibzipPromise)(); | ||
const libzip = await libzip_1.getLibzipPromise(); | ||
let zipFs; | ||
@@ -36,7 +38,9 @@ if (inMemory) { | ||
exports.makeArchiveFromDirectory = makeArchiveFromDirectory; | ||
let workerPool; | ||
async function convertToZip(tgz, opts) { | ||
const tmpFolder = await fslib_1.xfs.mktempPromise(); | ||
const tmpFile = fslib_1.ppath.join(tmpFolder, `archive.zip`); | ||
const { compressionLevel, ...bufferOpts } = opts; | ||
return await extractArchiveTo(tgz, new fslib_1.ZipFS(tmpFile, { create: true, libzip: await (0, libzip_1.getLibzipPromise)(), level: compressionLevel }), bufferOpts); | ||
workerPool || (workerPool = new WorkerPool_1.WorkerPool(worker_zip_1.getContent())); | ||
await workerPool.run({ tmpFile, tgz, opts }); | ||
return new fslib_1.ZipFS(tmpFile, { libzip: await libzip_1.getLibzipPromise(), level: opts.compressionLevel }); | ||
} | ||
@@ -43,0 +47,0 @@ exports.convertToZip = convertToZip; |
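convertToZip no longer builds the zip inline: it lazily creates a worker pool (the worker bundle comes from the new rollup build scripts further down) and only reopens the finished archive on the main thread. The WorkerPool and worker-zip sources are not part of this diff, so the snippet below is merely a generic node worker_threads illustration of the same offloading idea, with every name hypothetical:

const { Worker } = require(`worker_threads`);

// Hypothetical, simplified: one worker per job instead of a persistent pool.
const workerSource = `
  const { parentPort } = require('worker_threads');
  parentPort.on('message', payload => {
    // ...the expensive tgz -> zip conversion would happen here...
    parentPort.postMessage({ done: true, payload });
  });
`;

function runInWorker(payload) {
  return new Promise((resolve, reject) => {
    const worker = new Worker(workerSource, { eval: true });
    worker.once(`message`, result => {
      worker.terminate();
      resolve(result);
    });
    worker.once(`error`, reject);
    worker.postMessage(payload);
  });
}

// A real pool keeps workers alive and queues jobs rather than spawning one per call.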
@@ -7,6 +7,6 @@ import { MessageName } from './MessageName'; | ||
reportCacheMiss(locator: Locator): void; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): void; | ||
startTimerSync<T>(what: string, cb: () => T): void; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<void>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<void>; | ||
startTimerSync<T>(what: string, opts: TimerOptions, cb: () => T): T; | ||
startTimerSync<T>(what: string, cb: () => T): T; | ||
startTimerPromise<T>(what: string, opts: TimerOptions, cb: () => Promise<T>): Promise<T>; | ||
startTimerPromise<T>(what: string, cb: () => Promise<T>): Promise<T>; | ||
startCacheReport<T>(cb: () => Promise<T>): Promise<T>; | ||
@@ -13,0 +13,0 @@ reportSeparator(): void; |
@@ -6,3 +6,3 @@ "use strict"; | ||
const treeify_1 = require("treeify"); | ||
const formatUtils = (0, tslib_1.__importStar)(require("./formatUtils")); | ||
const formatUtils = tslib_1.__importStar(require("./formatUtils")); | ||
function treeNodeToTreeify(printTree, { configuration }) { | ||
@@ -76,3 +76,3 @@ const target = {}; | ||
} | ||
let treeOutput = (0, treeify_1.asTree)(treeNodeToTreeify(tree, { configuration }), false, false); | ||
let treeOutput = treeify_1.asTree(treeNodeToTreeify(tree, { configuration }), false, false); | ||
// A slight hack to add line returns between two top-level entries | ||
@@ -79,0 +79,0 @@ if (separators >= 1) |
@@ -6,3 +6,3 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class VirtualFetcher { | ||
@@ -9,0 +9,0 @@ supports(locator) { |
@@ -6,8 +6,8 @@ "use strict"; | ||
const fslib_1 = require("@yarnpkg/fslib"); | ||
const globby_1 = (0, tslib_1.__importDefault)(require("globby")); | ||
const semver_1 = (0, tslib_1.__importDefault)(require("semver")); | ||
const globby_1 = tslib_1.__importDefault(require("globby")); | ||
const Manifest_1 = require("./Manifest"); | ||
const WorkspaceResolver_1 = require("./WorkspaceResolver"); | ||
const hashUtils = (0, tslib_1.__importStar)(require("./hashUtils")); | ||
const structUtils = (0, tslib_1.__importStar)(require("./structUtils")); | ||
const hashUtils = tslib_1.__importStar(require("./hashUtils")); | ||
const semverUtils = tslib_1.__importStar(require("./semverUtils")); | ||
const structUtils = tslib_1.__importStar(require("./structUtils")); | ||
class Workspace { | ||
@@ -38,3 +38,3 @@ constructor(workspaceCwd, { project }) { | ||
const patterns = this.manifest.workspaceDefinitions.map(({ pattern }) => pattern); | ||
const relativeCwds = await (0, globby_1.default)(patterns, { | ||
const relativeCwds = await globby_1.default(patterns, { | ||
absolute: true, | ||
@@ -57,2 +57,3 @@ cwd: fslib_1.npath.fromPortablePath(this.cwd), | ||
accepts(range) { | ||
var _a; | ||
const protocolIndex = range.indexOf(`:`); | ||
@@ -69,10 +70,11 @@ const protocol = protocolIndex !== -1 | ||
return true; | ||
if (!semver_1.default.validRange(pathname)) | ||
const semverRange = semverUtils.validRange(pathname); | ||
if (!semverRange) | ||
return false; | ||
if (protocol === WorkspaceResolver_1.WorkspaceResolver.protocol) | ||
return semver_1.default.satisfies(this.manifest.version !== null ? this.manifest.version : `0.0.0`, pathname); | ||
return semverRange.test((_a = this.manifest.version) !== null && _a !== void 0 ? _a : `0.0.0`); | ||
if (!this.project.configuration.get(`enableTransparentWorkspaces`)) | ||
return false; | ||
if (this.manifest.version !== null) | ||
return semver_1.default.satisfies(this.manifest.version, pathname); | ||
return semverRange.test(this.manifest.version); | ||
return false; | ||
@@ -79,0 +81,0 @@ } |
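Workspace.accepts() switches from semver.validRange plus semver.satisfies to semverUtils.validRange, which (per the compiled output above) returns a reusable parsed range that is then tested directly. A minimal sketch of that shape, with an illustrative helper name:

const { semverUtils } = require(`@yarnpkg/core`);

function acceptsVersion(rangePathname, manifestVersion) {
  const semverRange = semverUtils.validRange(rangePathname);
  if (!semverRange)
    return false; // not a semver range at all
  return semverRange.test(manifestVersion !== null ? manifestVersion : `0.0.0`);
}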
{ | ||
"name": "@yarnpkg/core", | ||
"version": "3.0.0-rc.2", | ||
"version": "3.0.0-rc.3", | ||
"license": "BSD-2-Clause", | ||
@@ -11,8 +11,8 @@ "main": "./lib/index.js", | ||
"@types/treeify": "^1.0.0", | ||
"@yarnpkg/fslib": "^2.5.0-rc.2", | ||
"@yarnpkg/json-proxy": "^2.1.0", | ||
"@yarnpkg/libzip": "^2.2.1", | ||
"@yarnpkg/parsers": "^2.3.1-rc.2", | ||
"@yarnpkg/pnp": "^3.0.0-rc.2", | ||
"@yarnpkg/shell": "^3.0.0-rc.2", | ||
"@yarnpkg/fslib": "^2.5.0-rc.3", | ||
"@yarnpkg/json-proxy": "^2.1.1-rc.1", | ||
"@yarnpkg/libzip": "^2.2.2-rc.1", | ||
"@yarnpkg/parsers": "^2.4.0-rc.1", | ||
"@yarnpkg/pnp": "^3.0.0-rc.3", | ||
"@yarnpkg/shell": "^3.0.0-rc.3", | ||
"camelcase": "^5.3.1", | ||
@@ -31,2 +31,3 @@ "chalk": "^3.0.0", | ||
"p-limit": "^2.2.0", | ||
"p-queue": "^6.0.0", | ||
"pluralize": "^7.0.0", | ||
@@ -43,2 +44,4 @@ "pretty-bytes": "^5.1.0", | ||
"devDependencies": { | ||
"@rollup/plugin-commonjs": "^18.0.0", | ||
"@rollup/plugin-node-resolve": "^11.0.1", | ||
"@types/ci-info": "^2", | ||
@@ -52,8 +55,13 @@ "@types/cross-spawn": "6.0.0", | ||
"@types/tunnel": "^0.0.0", | ||
"@yarnpkg/cli": "^3.0.0-rc.2", | ||
"@yarnpkg/plugin-link": "^2.1.2-rc.2", | ||
"@yarnpkg/plugin-npm": "^2.5.0-rc.2", | ||
"@yarnpkg/plugin-pnp": "^3.0.0-rc.2" | ||
"@yarnpkg/cli": "3.0.0-rc.3", | ||
"@yarnpkg/plugin-link": "2.1.2-rc.3", | ||
"@yarnpkg/plugin-npm": "2.5.0-rc.3", | ||
"@yarnpkg/plugin-pnp": "3.0.0-rc.3", | ||
"esbuild": "npm:esbuild-wasm@^0.11.20", | ||
"rollup": "^2.43.0", | ||
"rollup-plugin-esbuild": "^3.0.2" | ||
}, | ||
"scripts": { | ||
"build:zip:worker": "rollup -c", | ||
"update:zip:worker": "run build:zip:worker", | ||
"postpack": "rm -rf lib", | ||
@@ -75,3 +83,3 @@ "prepack": "run build:compile \"$(pwd)\"" | ||
"engines": { | ||
"node": ">=10.19.0" | ||
"node": ">=12 <14 || 14.2 - 14.9 || >14.10.0" | ||
}, | ||
@@ -78,0 +86,0 @@ "stableVersion": "2.4.0", |
Sorry, the diff of this file is too big to display
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
Environment variable access
Supply chain risk: Package accesses environment variables, which may be a sign of credential stuffing or data theft.
Found 1 instance in 1 package
Long strings
Supply chain risk: Contains long string literals, which may be a sign of obfuscated or packed code.
Found 1 instance in 1 package
Minified code
Quality: This package contains minified code. This may be harmless in some cases where minified code is included in packaged libraries, however packages on npm should not minify code.
Found 1 instance in 1 package
+ Added p-queue@^6.0.0
+ Added eventemitter3@4.0.7 (transitive)
+ Added p-finally@1.0.0 (transitive)
+ Added p-queue@6.6.2 (transitive)
+ Added p-timeout@3.2.0 (transitive)
Updated @yarnpkg/fslib@^2.5.0-rc.3
Updated @yarnpkg/libzip@^2.2.2-rc.1
Updated @yarnpkg/parsers@^2.4.0-rc.1
Updated @yarnpkg/pnp@^3.0.0-rc.3
Updated @yarnpkg/shell@^3.0.0-rc.3