New Research: Supply Chain Attack on Axios Pulls Malicious Dependency from npm. Details →
Socket
Book a Demo · Sign in
Socket

@changesets/cli

Package Overview
Dependencies
Maintainers
4
Versions
112
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

@changesets/cli - npm Package Compare versions

Comparing version
2.29.8
to
2.30.0
+2
-2
default-files/README.md

@@ -5,5 +5,5 @@ # Changesets

with multi-package repos, or single-package repos to help you version and publish your code. You can
find the full documentation for it [in our repository](https://github.com/changesets/changesets)
find the full documentation for it [in our repository](https://github.com/changesets/changesets).
We have a quick list of common questions to get you started engaging with this project in
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md).

@@ -25,6 +25,4 @@ 'use strict';

var semverParse = require('semver/functions/parse');
var pLimit = require('p-limit');
var packageManagerDetector = require('package-manager-detector');
var spawn = require('spawndamnit');
var ciInfo = require('ci-info');
var getReleasePlan = require('@changesets/get-release-plan');

@@ -66,3 +64,2 @@ var applyReleasePlan = require('@changesets/apply-release-plan');

var semverParse__default = /*#__PURE__*/_interopDefault(semverParse);
var pLimit__default = /*#__PURE__*/_interopDefault(pLimit);
var spawn__default = /*#__PURE__*/_interopDefault(spawn);

@@ -121,3 +118,3 @@ var getReleasePlan__default = /*#__PURE__*/_interopDefault(getReleasePlan);

function getCommitFunctions(commit, cwd) {
async function getCommitFunctions(commit, cwd) {
let commitFunctions = {};

@@ -130,5 +127,10 @@ if (!commit) {

let commitPath = resolveFrom__default["default"](changesetPath, commit[0]);
let possibleCommitFunc = require(commitPath);
let possibleCommitFunc = await (function (t) { return Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespace(require(t)); }); })(commitPath);
if (possibleCommitFunc.default) {
possibleCommitFunc = possibleCommitFunc.default;
// Check nested default again in case it's CJS with `__esModule` interop
if (possibleCommitFunc.default) {
possibleCommitFunc = possibleCommitFunc.default;
}
}

@@ -157,3 +159,6 @@ if (typeof possibleCommitFunc.getAddMessage === "function" || typeof possibleCommitFunc.getVersionMessage === "function") {

}();
const limit = Math.max(termSize__default["default"]().rows - 5, 10);
// this can exec tput so we make it compute lazily to avoid such side effects at init time
let limit;
const getLimit = () => limit !== null && limit !== void 0 ? limit : limit = Math.max(termSize__default["default"]().rows - 5, 10);
let cancelFlow = () => {

@@ -173,3 +178,3 @@ logger.success("Cancelled... 👋 ");

format,
limit,
limit: getLimit(),
onCancel: cancelFlow,

@@ -313,3 +318,3 @@ symbols: {

}
async function createChangeset(changedPackages, allPackages) {
async function createChangeset(changedPackages, allPackages, messageFromCli) {
const releases = [];

@@ -401,2 +406,9 @@ if (allPackages.length > 1) {

}
if (messageFromCli !== undefined) {
return {
confirmed: false,
summary: messageFromCli,
releases
};
}
logger.log("Please enter a summary for this change (this will be in the changelogs).");

@@ -418,5 +430,5 @@ logger.log(gray(" (submit empty line to open external editor)"));

}
summary = await askQuestion("");
summary = await askQuestion("Summary");
while (summary.length === 0) {
summary = await askQuestion("\n\n# A summary is required for the changelog! 😪");
summary = await askQuestion("A summary is required! Please enter a summary");
}

@@ -451,3 +463,5 @@ }

empty,
open
open,
since,
message
}, config) {

@@ -475,3 +489,3 @@ const packages = await getPackages.getPackages(cwd);

releases: [],
summary: ``
summary: message !== null && message !== void 0 ? message : ""
};

@@ -482,3 +496,4 @@ } else {

changedPackagesNames = (await getVersionableChangedPackages(config, {
cwd
cwd,
ref: since
})).map(pkg => pkg.packageJson.name);

@@ -488,6 +503,7 @@ } catch (e) {

// in the CLI. So if any error happens while we try to do so, we only log a warning and continue
logger.warn(`Failed to find changed packages from the "${config.baseBranch}" base branch due to error below`);
const branch = since !== null && since !== void 0 ? since : config.baseBranch;
logger.warn(`Failed to find changed packages from the "${branch}" ${since ? "ref" : "base branch"} due to error below`);
logger.warn(e);
}
newChangeset = await createChangeset(changedPackagesNames, versionablePackages);
newChangeset = await createChangeset(changedPackagesNames, versionablePackages, message);
printConfirmationMessage(newChangeset, versionablePackages.length > 1);

@@ -504,3 +520,3 @@ if (!newChangeset.confirmed) {

getAddMessage
}, commitOpts] = getCommitFunctions(config.commit, cwd);
}, commitOpts] = await getCommitFunctions(config.commit, cwd);
if (getAddMessage) {

@@ -563,3 +579,3 @@ await git__namespace.add(path__default["default"].resolve(changesetBase, `${changesetID}.md`), cwd);

await fs__default["default"].writeFile(path__default["default"].resolve(changesetBase, "config.json"), defaultConfig);
logger.log(`Thanks for choosing ${pc__default["default"].green("changesets")} to help manage your versioning and publishing\n`);
logger.log(`Thanks for choosing ${pc__default["default"].green("changesets")} to help manage your versioning and publishing.\n`);
logger.log("You should be set up to start using changesets now!\n");

@@ -572,6 +588,6 @@ logger.info("We have added a `.changeset` folder, and a couple of files to help you out:");

async function pre(cwd, options) {
async function pre(rootDir, options) {
if (options.command === "enter") {
try {
await pre$1.enterPre(cwd, options.tag);
await pre$1.enterPre(rootDir, options.tag);
logger__namespace.success(`Entered pre mode with tag ${pc__default["default"].cyan(options.tag)}`);

@@ -589,3 +605,3 @@ logger__namespace.info("Run `changeset version` to version packages with prerelease versions");

try {
await pre$1.exitPre(cwd);
await pre$1.exitPre(rootDir);
logger__namespace.success(`Exited pre mode`);

@@ -604,2 +620,57 @@ logger__namespace.info("Run `changeset version` to version packages with normal versions");

/**
 * Ponyfill for `Promise.withResolvers()`: creates a promise together with
 * its `resolve`/`reject` functions so callers can settle it externally.
 *
 * @returns {{promise: Promise, resolve: Function, reject: Function}}
 */
function withResolvers() {
  let resolve;
  let reject;
  // The executor runs synchronously, so both callbacks are assigned
  // before the object below is built.
  const promise = new Promise((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}
/**
 * Ponyfill for `Promise.try()`: invokes `fn` synchronously and wraps its
 * outcome in a promise — a returned value (or thenable) fulfills it, a
 * synchronous throw rejects it.
 *
 * @param {Function} fn - Zero-argument job to run.
 * @returns {Promise} Settles with `fn`'s result or thrown error.
 */
function promiseTry(fn) {
  try {
    return Promise.resolve(fn());
  } catch (error) {
    return Promise.reject(error);
  }
}
/**
 * Creates a FIFO job queue that runs at most `concurrency` jobs at once.
 *
 * @param {number} concurrency - Maximum number of jobs running concurrently.
 * @returns {{add: Function, setConcurrency: Function}}
 *   `add(fn)` enqueues a job and returns a promise for its outcome;
 *   `setConcurrency(n)` changes the limit and immediately starts queued jobs.
 */
function createPromiseQueue(concurrency) {
  const jobs = [];
  let active = 0;
  // Starts queued jobs until the concurrency limit is reached. Looping
  // (instead of starting a single job per call) ensures a setConcurrency()
  // bump fills every newly available slot immediately, rather than ramping
  // up by only one job per subsequent completion.
  function run() {
    while (active < concurrency && jobs.length > 0) {
      const job = jobs.shift();
      active++;
      // Invoke inside a promise executor so a synchronously thrown error
      // becomes a rejection instead of escaping the queue.
      new Promise(resolve => resolve(job.fn())).then(result => {
        active--;
        job.resolve(result);
        run();
      }, error => {
        active--;
        job.reject(error);
        run();
      });
    }
  }
  return {
    // Enqueue `fn`; the returned promise settles with the job's outcome.
    add: fn => {
      return new Promise((resolve, reject) => {
        jobs.push({
          fn,
          resolve,
          reject
        });
        run();
      });
    },
    // Raising the limit starts queued jobs right away; lowering it only
    // affects jobs that have not started yet (running jobs finish normally).
    setConcurrency: newConcurrency => {
      concurrency = newConcurrency;
      run();
    }
  };
}
const getLastJsonObjectFromString = str => {

@@ -619,4 +690,6 @@ str = str.replace(/[^}]*$/, "");

const npmRequestLimit = pLimit__default["default"](40);
const npmPublishLimit = pLimit__default["default"](10);
const NPM_REQUEST_CONCURRENCY_LIMIT = 40;
const NPM_PUBLISH_CONCURRENCY_LIMIT = 10;
const npmRequestQueue = createPromiseQueue(NPM_REQUEST_CONCURRENCY_LIMIT);
const npmPublishQueue = createPromiseQueue(NPM_PUBLISH_CONCURRENCY_LIMIT);
function jsonParse(input) {

@@ -632,2 +705,9 @@ try {

}
/**
 * Returns true when `registry` points anywhere other than the public npm
 * registry (or its yarnpkg mirror). Falsy input yields false.
 */
const isCustomRegistry = registry => {
  const normalized = normalizeRegistry(registry);
  if (!normalized) {
    return false;
  }
  return normalized !== "https://registry.npmjs.org" && normalized !== "https://registry.yarnpkg.com";
};
/**
 * Strips any trailing slashes from a registry URL so equivalent URLs compare
 * equal. Falsy input is returned unchanged.
 */
function normalizeRegistry(registry) {
  if (!registry) {
    return registry;
  }
  return registry.replace(/\/+$/, "");
}
function getCorrectRegistry(packageJson) {

@@ -639,3 +719,3 @@ var _packageJson$publishC;

const scope = packageName.split("/")[0];
const scopedRegistry = ((_publishConfig = packageJson.publishConfig) === null || _publishConfig === void 0 ? void 0 : _publishConfig[`${scope}:registry`]) || process.env[`npm_config_${scope}:registry`];
const scopedRegistry = normalizeRegistry(((_publishConfig = packageJson.publishConfig) === null || _publishConfig === void 0 ? void 0 : _publishConfig[`${scope}:registry`]) || process.env[`npm_config_${scope}:registry`]);
if (scopedRegistry) {

@@ -648,3 +728,3 @@ return {

}
const registry = (packageJson === null || packageJson === void 0 || (_packageJson$publishC = packageJson.publishConfig) === null || _packageJson$publishC === void 0 ? void 0 : _packageJson$publishC.registry) || process.env.npm_config_registry;
const registry = normalizeRegistry((packageJson === null || packageJson === void 0 || (_packageJson$publishC = packageJson.publishConfig) === null || _packageJson$publishC === void 0 ? void 0 : _packageJson$publishC.registry) || process.env.npm_config_registry);
return {

@@ -703,3 +783,3 @@ scope: undefined,

function getPackageInfo(packageJson) {
return npmRequestLimit(async () => {
return npmRequestQueue.add(async () => {
logger.info(`npm info ${packageJson.name}`);

@@ -752,16 +832,2 @@ const {

}
let otpAskLimit = pLimit__default["default"](1);
let askForOtpCode = twoFactorState => otpAskLimit(async () => {
if (twoFactorState.token !== null) return twoFactorState.token;
logger.info("This operation requires a one-time password from your authenticator.");
let val = await askQuestion("Enter one-time password:");
twoFactorState.token = val;
return val;
});
let getOtpCode = async twoFactorState => {
if (twoFactorState.token !== null) {
return twoFactorState.token;
}
return askForOtpCode(twoFactorState);
};

@@ -771,9 +837,5 @@ // we have this so that we can do try a publish again after a publish without

async function internalPublish(packageJson, opts, twoFactorState) {
let publishTool = await getPublishTool(opts.cwd);
let publishFlags = opts.access ? ["--access", opts.access] : [];
const publishTool = await getPublishTool(opts.cwd);
const publishFlags = opts.access ? ["--access", opts.access] : [];
publishFlags.push("--tag", opts.tag);
if ((await twoFactorState.isRequired) && !ciInfo.isCI) {
let otpCode = await getOtpCode(twoFactorState);
publishFlags.push("--otp", otpCode);
}
if (publishTool.name === "pnpm" && publishTool.shouldAddNoGitChecks) {

@@ -792,2 +854,30 @@ publishFlags.push("--no-git-checks");

};
if (requiresDelegatedAuth(twoFactorState)) {
const result = publishTool.name === "pnpm" ? child_process.spawnSync("pnpm", ["publish", ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
cwd: opts.cwd,
stdio: "inherit"
}) : child_process.spawnSync(publishTool.name, ["publish", opts.publishDir, ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
stdio: "inherit"
});
if (result.status === 0) {
twoFactorState.allowConcurrency = true;
// bump for remaining packages
npmPublishQueue.setConcurrency(NPM_PUBLISH_CONCURRENCY_LIMIT);
return {
published: true
};
}
return {
published: false
};
}
// in the delegated mode we don't need the json output
// as we won't be handling the auth errors
publishFlags.push("--json");
if (twoFactorState.token) {
publishFlags.push("--otp", twoFactorState.token);
}
let {

@@ -797,6 +887,6 @@ code,

stderr
} = publishTool.name === "pnpm" ? await spawn__default["default"]("pnpm", ["publish", "--json", ...publishFlags], {
} = publishTool.name === "pnpm" ? await spawn__default["default"]("pnpm", ["publish", ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
cwd: opts.cwd
}) : await spawn__default["default"](publishTool.name, ["publish", opts.publishDir, "--json", ...publishFlags], {
}) : await spawn__default["default"](publishTool.name, ["publish", opts.publishDir, ...publishFlags], {
env: Object.assign({}, process.env, envOverride)

@@ -812,11 +902,18 @@ });

if (json !== null && json !== void 0 && json.error) {
var _json$error$detail;
// The first case is no 2fa provided, the second is when the 2fa is wrong (timeout or wrong words)
if ((json.error.code === "EOTP" || json.error.code === "E401" && json.error.detail.includes("--otp=<code>")) && !ciInfo.isCI) {
if (twoFactorState.token !== null) {
// the current otp code must be invalid since it errored
twoFactorState.token = null;
}
if ((json.error.code === "EOTP" || json.error.code === "E401" && (_json$error$detail = json.error.detail) !== null && _json$error$detail !== void 0 && _json$error$detail.includes("--otp=<code>")) && process.stdin.isTTY) {
// the current otp code must be invalid since it errored
twoFactorState.token = undefined;
// just in case this isn't already true
twoFactorState.isRequired = Promise.resolve(true);
return internalPublish(packageJson, opts, twoFactorState);
twoFactorState.isRequired = true;
twoFactorState.allowConcurrency = false;
npmPublishQueue.setConcurrency(1);
return {
published: false,
// given we have just adjusted the concurrency, we need to handle the retries in the layer that requeues the publish
// calling internalPublish again would allow concurrent failures to run again concurrently
// but only one retried publish should get delegated to the npm cli and other ones should "await" its successful result before being retried
allowRetry: true
};
}

@@ -835,5 +932,11 @@ logger.error(`an error occurred while publishing ${packageJson.name}: ${json.error.code}`, json.error.summary, json.error.detail ? "\n" + json.error.detail : "");

function publish$1(packageJson, opts, twoFactorState) {
// If there are many packages to be published, it's better to limit the
// concurrency to avoid unwanted errors, for example from npm.
return npmRequestLimit(() => npmPublishLimit(() => internalPublish(packageJson, opts, twoFactorState)));
return npmRequestQueue.add(async () => {
let result;
do {
result = await npmPublishQueue.add(() => internalPublish(packageJson, opts, twoFactorState));
} while (result.allowRetry);
return {
published: result.published
};
});
}

@@ -848,4 +951,3 @@

}
const isCustomRegistry = registry => !!registry && registry !== "https://registry.npmjs.org" && registry !== "https://registry.yarnpkg.com";
const getTwoFactorState = ({
const getTwoFactorState = async ({
otp,

@@ -857,17 +959,19 @@ publicPackages

token: otp,
isRequired: Promise.resolve(true)
isRequired: true
};
}
if (ciInfo.isCI || publicPackages.some(pkg => isCustomRegistry(getCorrectRegistry(pkg.packageJson).registry)) || isCustomRegistry(process.env.npm_config_registry)) {
if (!process.stdin.isTTY || publicPackages.some(pkg => isCustomRegistry(getCorrectRegistry(pkg.packageJson).registry)) || isCustomRegistry(process.env.npm_config_registry)) {
return {
token: null,
isRequired: Promise.resolve(false)
token: undefined,
isRequired: false
};
}
return {
token: null,
// note: we're not awaiting this here, we want this request to happen in parallel with getUnpublishedPackages
isRequired: getTokenIsRequired()
token: undefined,
isRequired: await getTokenIsRequired()
};
};
/**
 * Whether publishing should be delegated to the package manager's own CLI
 * (inherited stdio) so it can prompt for auth interactively: requires a TTY,
 * no OTP token already captured, concurrency not re-enabled, and 2FA required.
 */
const requiresDelegatedAuth = twoFactorState => {
  const { token, allowConcurrency, isRequired } = twoFactorState;
  return process.stdin.isTTY && !token && !allowConcurrency && isRequired;
};
async function publishPackages({

@@ -886,6 +990,9 @@ packages,

}
const twoFactorState = getTwoFactorState({
const twoFactorState = await getTwoFactorState({
otp,
publicPackages
});
if (requiresDelegatedAuth(twoFactorState)) {
npmPublishQueue.setConcurrency(1);
}
return Promise.all(unpublishedPackagesInfo.map(pkgInfo => {

@@ -1093,3 +1200,3 @@ let pkg = packagesByName.get(pkgInfo.name);

if (output) {
await fs__default["default"].writeFile(path__default["default"].join(cwd, output), JSON.stringify(releasePlan, undefined, 2));
await fs__default["default"].writeFile(path__default["default"].resolve(cwd, output), JSON.stringify(releasePlan, undefined, 2));
return;

@@ -1216,3 +1323,3 @@ }

getVersionMessage
}, commitOpts] = getCommitFunctions(releaseConfig.commit, cwd);
}, commitOpts] = await getCommitFunctions(releaseConfig.commit, cwd);
if (getVersionMessage) {

@@ -1237,7 +1344,9 @@ let touchedFile;

async function run(input, flags, cwd) {
const packages = await getPackages.getPackages(cwd);
const rootDir = packages.root.dir;
if (input[0] === "init") {
await init(cwd);
await init(rootDir);
return;
}
if (!fs__default["default"].existsSync(path__default["default"].resolve(cwd, ".changeset"))) {
if (!fs__default["default"].existsSync(path__default["default"].resolve(rootDir, ".changeset"))) {
logger.error("There is no .changeset folder. ");

@@ -1248,8 +1357,7 @@ logger.error("If this is the first time `changesets` have been used in this project, run `yarn changeset init` to get set up.");

}
const packages = await getPackages.getPackages(cwd);
let config$1;
try {
config$1 = await config.read(cwd, packages);
config$1 = await config.read(rootDir, packages);
} catch (e) {
let oldConfigExists = await fs__default["default"].pathExists(path__default["default"].resolve(cwd, ".changeset/config.js"));
let oldConfigExists = await fs__default["default"].pathExists(path__default["default"].resolve(rootDir, ".changeset/config.js"));
if (oldConfigExists) {

@@ -1268,8 +1376,12 @@ logger.error("It looks like you're using the version 1 `.changeset/config.js` file");

empty,
open
open,
since,
message
} = flags;
// @ts-ignore if this is undefined, we have already exited
await add(cwd, {
await add(rootDir, {
empty,
open
open,
since,
message
}, config$1);

@@ -1291,3 +1403,4 @@ } else if (input[0] !== "pre" && input.length > 1) {

open,
gitTag
gitTag,
message
} = flags;

@@ -1312,5 +1425,7 @@ const deadFlags = ["updateChangelog", "isPublic", "skipCI", "commit"];

{
await add(cwd, {
await add(rootDir, {
empty,
open
open,
since,
message
}, config$1);

@@ -1347,3 +1462,7 @@ return;

// validate that all dependents of skipped packages are also skipped
// Validate that all dependents of skipped packages are also skipped.
// devDependencies are excluded because they don't affect published consumers —
// a stale devDep range on a skipped package is harmless.
// Note: assemble-release-plan uses a graph WITH devDeps because it needs to
// update devDep ranges in package.json even though they don't cause version bumps.
const dependentsGraph = getDependentsGraph.getDependentsGraph(packages, {

@@ -1364,2 +1483,9 @@ ignoreDevDependencies: true,

const dependentPkg = packagesByName.get(dependent);
if (dependentPkg.packageJson.private) {
// Private packages don't publish to npm,
// so they can safely depend on skipped packages.
// This also holds for private packages with other publish targets (like a VS Code extension)
// as those typically have to prebundle dependencies.
continue;
}
if (!shouldSkipPackage.shouldSkipPackage(dependentPkg, {

@@ -1380,3 +1506,3 @@ ignore: config$1.ignore,

}
await version(cwd, {
await version(rootDir, {
snapshot

@@ -1388,3 +1514,3 @@ }, config$1);

{
await publish(cwd, {
await publish(rootDir, {
otp,

@@ -1398,3 +1524,3 @@ tag: tag$1,

{
await status(cwd, {
await status(rootDir, {
sinceMaster,

@@ -1409,3 +1535,3 @@ since,

{
await tag(cwd, config$1);
await tag(rootDir, config$1);
return;

@@ -1425,4 +1551,3 @@ }

}
// @ts-ignore
await pre(cwd, {
await pre(rootDir, {
command,

@@ -1457,3 +1582,3 @@ tag

boolean: ["sinceMaster", "verbose", "empty", "open", "gitTag", "snapshot"],
string: ["output", "otp", "since", "ignore", "tag", "snapshot", "snapshotPrereleaseTemplate"],
string: ["output", "otp", "since", "ignore", "message", "tag", "snapshot", "snapshotPrereleaseTemplate"],
alias: {

@@ -1463,2 +1588,3 @@ // Short flags

o: "output",
m: "message",
// Support kebab-case flags

@@ -1494,3 +1620,3 @@ "since-master": "sinceMaster",

init
add [--empty] [--open]
add [--empty] [--open] [--since <branch>] [--message <text>]
version [--ignore] [--snapshot <?name>] [--snapshot-prerelease-template <template>]

@@ -1497,0 +1623,0 @@ publish [--tag <name>] [--otp <code>] [--no-git-tag]

@@ -13,3 +13,3 @@ import mri from 'mri';

import pc from 'picocolors';
import { spawn } from 'child_process';
import { spawn, spawnSync } from 'child_process';
import * as git from '@changesets/git';

@@ -26,6 +26,4 @@ import { getChangedPackagesSinceRef, getCurrentCommitId } from '@changesets/git';

import semverParse from 'semver/functions/parse';
import pLimit from 'p-limit';
import { detect } from 'package-manager-detector';
import spawn$1 from 'spawndamnit';
import { isCI } from 'ci-info';
import getReleasePlan from '@changesets/get-release-plan';

@@ -83,3 +81,3 @@ import applyReleasePlan from '@changesets/apply-release-plan';

function getCommitFunctions(commit, cwd) {
async function getCommitFunctions(commit, cwd) {
let commitFunctions = {};

@@ -92,5 +90,10 @@ if (!commit) {

let commitPath = resolveFrom(changesetPath, commit[0]);
let possibleCommitFunc = require(commitPath);
let possibleCommitFunc = await import(commitPath);
if (possibleCommitFunc.default) {
possibleCommitFunc = possibleCommitFunc.default;
// Check nested default again in case it's CJS with `__esModule` interop
if (possibleCommitFunc.default) {
possibleCommitFunc = possibleCommitFunc.default;
}
}

@@ -119,3 +122,6 @@ if (typeof possibleCommitFunc.getAddMessage === "function" || typeof possibleCommitFunc.getVersionMessage === "function") {

}();
const limit = Math.max(termSize().rows - 5, 10);
// this can exec tput so we make it compute lazily to avoid such side effects at init time
let limit;
const getLimit = () => limit !== null && limit !== void 0 ? limit : limit = Math.max(termSize().rows - 5, 10);
let cancelFlow = () => {

@@ -135,3 +141,3 @@ success("Cancelled... 👋 ");

format,
limit,
limit: getLimit(),
onCancel: cancelFlow,

@@ -275,3 +281,3 @@ symbols: {

}
async function createChangeset(changedPackages, allPackages) {
async function createChangeset(changedPackages, allPackages, messageFromCli) {
const releases = [];

@@ -363,2 +369,9 @@ if (allPackages.length > 1) {

}
if (messageFromCli !== undefined) {
return {
confirmed: false,
summary: messageFromCli,
releases
};
}
log("Please enter a summary for this change (this will be in the changelogs).");

@@ -380,5 +393,5 @@ log(gray(" (submit empty line to open external editor)"));

}
summary = await askQuestion("");
summary = await askQuestion("Summary");
while (summary.length === 0) {
summary = await askQuestion("\n\n# A summary is required for the changelog! 😪");
summary = await askQuestion("A summary is required! Please enter a summary");
}

@@ -413,3 +426,5 @@ }

empty,
open
open,
since,
message
}, config) {

@@ -437,3 +452,3 @@ const packages = await getPackages(cwd);

releases: [],
summary: ``
summary: message !== null && message !== void 0 ? message : ""
};

@@ -444,3 +459,4 @@ } else {

changedPackagesNames = (await getVersionableChangedPackages(config, {
cwd
cwd,
ref: since
})).map(pkg => pkg.packageJson.name);

@@ -450,6 +466,7 @@ } catch (e) {

// in the CLI. So if any error happens while we try to do so, we only log a warning and continue
warn(`Failed to find changed packages from the "${config.baseBranch}" base branch due to error below`);
const branch = since !== null && since !== void 0 ? since : config.baseBranch;
warn(`Failed to find changed packages from the "${branch}" ${since ? "ref" : "base branch"} due to error below`);
warn(e);
}
newChangeset = await createChangeset(changedPackagesNames, versionablePackages);
newChangeset = await createChangeset(changedPackagesNames, versionablePackages, message);
printConfirmationMessage(newChangeset, versionablePackages.length > 1);

@@ -466,3 +483,3 @@ if (!newChangeset.confirmed) {

getAddMessage
}, commitOpts] = getCommitFunctions(config.commit, cwd);
}, commitOpts] = await getCommitFunctions(config.commit, cwd);
if (getAddMessage) {

@@ -525,3 +542,3 @@ await git.add(path.resolve(changesetBase, `${changesetID}.md`), cwd);

await fs.writeFile(path.resolve(changesetBase, "config.json"), defaultConfig);
log(`Thanks for choosing ${pc.green("changesets")} to help manage your versioning and publishing\n`);
log(`Thanks for choosing ${pc.green("changesets")} to help manage your versioning and publishing.\n`);
log("You should be set up to start using changesets now!\n");

@@ -534,6 +551,6 @@ info("We have added a `.changeset` folder, and a couple of files to help you out:");

async function pre(cwd, options) {
async function pre(rootDir, options) {
if (options.command === "enter") {
try {
await enterPre(cwd, options.tag);
await enterPre(rootDir, options.tag);
logger.success(`Entered pre mode with tag ${pc.cyan(options.tag)}`);

@@ -551,3 +568,3 @@ logger.info("Run `changeset version` to version packages with prerelease versions");

try {
await exitPre(cwd);
await exitPre(rootDir);
logger.success(`Exited pre mode`);

@@ -566,2 +583,57 @@ logger.info("Run `changeset version` to version packages with normal versions");

/**
 * Ponyfill for `Promise.withResolvers()`: creates a promise together with
 * its `resolve`/`reject` functions so callers can settle it externally.
 *
 * @returns {{promise: Promise, resolve: Function, reject: Function}}
 */
function withResolvers() {
  let resolve;
  let reject;
  // The executor runs synchronously, so both callbacks are assigned
  // before the object below is built.
  const promise = new Promise((res, rej) => {
    resolve = res;
    reject = rej;
  });
  return { promise, resolve, reject };
}
/**
 * Ponyfill for `Promise.try()`: invokes `fn` synchronously and wraps its
 * outcome in a promise — a returned value (or thenable) fulfills it, a
 * synchronous throw rejects it.
 *
 * @param {Function} fn - Zero-argument job to run.
 * @returns {Promise} Settles with `fn`'s result or thrown error.
 */
function promiseTry(fn) {
  try {
    return Promise.resolve(fn());
  } catch (error) {
    return Promise.reject(error);
  }
}
/**
 * Creates a FIFO job queue that runs at most `concurrency` jobs at once.
 *
 * @param {number} concurrency - Maximum number of jobs running concurrently.
 * @returns {{add: Function, setConcurrency: Function}}
 *   `add(fn)` enqueues a job and returns a promise for its outcome;
 *   `setConcurrency(n)` changes the limit and immediately starts queued jobs.
 */
function createPromiseQueue(concurrency) {
  const jobs = [];
  let active = 0;
  // Starts queued jobs until the concurrency limit is reached. Looping
  // (instead of starting a single job per call) ensures a setConcurrency()
  // bump fills every newly available slot immediately, rather than ramping
  // up by only one job per subsequent completion.
  function run() {
    while (active < concurrency && jobs.length > 0) {
      const job = jobs.shift();
      active++;
      // Invoke inside a promise executor so a synchronously thrown error
      // becomes a rejection instead of escaping the queue.
      new Promise(resolve => resolve(job.fn())).then(result => {
        active--;
        job.resolve(result);
        run();
      }, error => {
        active--;
        job.reject(error);
        run();
      });
    }
  }
  return {
    // Enqueue `fn`; the returned promise settles with the job's outcome.
    add: fn => {
      return new Promise((resolve, reject) => {
        jobs.push({
          fn,
          resolve,
          reject
        });
        run();
      });
    },
    // Raising the limit starts queued jobs right away; lowering it only
    // affects jobs that have not started yet (running jobs finish normally).
    setConcurrency: newConcurrency => {
      concurrency = newConcurrency;
      run();
    }
  };
}
const getLastJsonObjectFromString = str => {

@@ -581,4 +653,6 @@ str = str.replace(/[^}]*$/, "");

const npmRequestLimit = pLimit(40);
const npmPublishLimit = pLimit(10);
const NPM_REQUEST_CONCURRENCY_LIMIT = 40;
const NPM_PUBLISH_CONCURRENCY_LIMIT = 10;
const npmRequestQueue = createPromiseQueue(NPM_REQUEST_CONCURRENCY_LIMIT);
const npmPublishQueue = createPromiseQueue(NPM_PUBLISH_CONCURRENCY_LIMIT);
function jsonParse(input) {

@@ -594,2 +668,9 @@ try {

}
/**
 * Returns true when `registry` points anywhere other than the public npm
 * registry (or its yarnpkg mirror). Falsy input yields false.
 */
const isCustomRegistry = registry => {
  const normalized = normalizeRegistry(registry);
  if (!normalized) {
    return false;
  }
  return normalized !== "https://registry.npmjs.org" && normalized !== "https://registry.yarnpkg.com";
};
/**
 * Strips any trailing slashes from a registry URL so equivalent URLs compare
 * equal. Falsy input is returned unchanged.
 */
function normalizeRegistry(registry) {
  if (!registry) {
    return registry;
  }
  return registry.replace(/\/+$/, "");
}
function getCorrectRegistry(packageJson) {

@@ -601,3 +682,3 @@ var _packageJson$publishC;

const scope = packageName.split("/")[0];
const scopedRegistry = ((_publishConfig = packageJson.publishConfig) === null || _publishConfig === void 0 ? void 0 : _publishConfig[`${scope}:registry`]) || process.env[`npm_config_${scope}:registry`];
const scopedRegistry = normalizeRegistry(((_publishConfig = packageJson.publishConfig) === null || _publishConfig === void 0 ? void 0 : _publishConfig[`${scope}:registry`]) || process.env[`npm_config_${scope}:registry`]);
if (scopedRegistry) {

@@ -610,3 +691,3 @@ return {

}
const registry = (packageJson === null || packageJson === void 0 || (_packageJson$publishC = packageJson.publishConfig) === null || _packageJson$publishC === void 0 ? void 0 : _packageJson$publishC.registry) || process.env.npm_config_registry;
const registry = normalizeRegistry((packageJson === null || packageJson === void 0 || (_packageJson$publishC = packageJson.publishConfig) === null || _packageJson$publishC === void 0 ? void 0 : _packageJson$publishC.registry) || process.env.npm_config_registry);
return {

@@ -665,3 +746,3 @@ scope: undefined,

function getPackageInfo(packageJson) {
return npmRequestLimit(async () => {
return npmRequestQueue.add(async () => {
info(`npm info ${packageJson.name}`);

@@ -714,16 +795,2 @@ const {

}
let otpAskLimit = pLimit(1);
let askForOtpCode = twoFactorState => otpAskLimit(async () => {
if (twoFactorState.token !== null) return twoFactorState.token;
info("This operation requires a one-time password from your authenticator.");
let val = await askQuestion("Enter one-time password:");
twoFactorState.token = val;
return val;
});
let getOtpCode = async twoFactorState => {
if (twoFactorState.token !== null) {
return twoFactorState.token;
}
return askForOtpCode(twoFactorState);
};

@@ -733,9 +800,5 @@ // we have this so that we can do try a publish again after a publish without

async function internalPublish(packageJson, opts, twoFactorState) {
let publishTool = await getPublishTool(opts.cwd);
let publishFlags = opts.access ? ["--access", opts.access] : [];
const publishTool = await getPublishTool(opts.cwd);
const publishFlags = opts.access ? ["--access", opts.access] : [];
publishFlags.push("--tag", opts.tag);
if ((await twoFactorState.isRequired) && !isCI) {
let otpCode = await getOtpCode(twoFactorState);
publishFlags.push("--otp", otpCode);
}
if (publishTool.name === "pnpm" && publishTool.shouldAddNoGitChecks) {

@@ -754,2 +817,30 @@ publishFlags.push("--no-git-checks");

};
if (requiresDelegatedAuth(twoFactorState)) {
const result = publishTool.name === "pnpm" ? spawnSync("pnpm", ["publish", ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
cwd: opts.cwd,
stdio: "inherit"
}) : spawnSync(publishTool.name, ["publish", opts.publishDir, ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
stdio: "inherit"
});
if (result.status === 0) {
twoFactorState.allowConcurrency = true;
// bump for remaining packages
npmPublishQueue.setConcurrency(NPM_PUBLISH_CONCURRENCY_LIMIT);
return {
published: true
};
}
return {
published: false
};
}
// in the delegated mode we don't need the json output
// as we won't be handling the auth errors
publishFlags.push("--json");
if (twoFactorState.token) {
publishFlags.push("--otp", twoFactorState.token);
}
let {

@@ -759,6 +850,6 @@ code,

stderr
} = publishTool.name === "pnpm" ? await spawn$1("pnpm", ["publish", "--json", ...publishFlags], {
} = publishTool.name === "pnpm" ? await spawn$1("pnpm", ["publish", ...publishFlags], {
env: Object.assign({}, process.env, envOverride),
cwd: opts.cwd
}) : await spawn$1(publishTool.name, ["publish", opts.publishDir, "--json", ...publishFlags], {
}) : await spawn$1(publishTool.name, ["publish", opts.publishDir, ...publishFlags], {
env: Object.assign({}, process.env, envOverride)

@@ -774,11 +865,18 @@ });

if (json !== null && json !== void 0 && json.error) {
var _json$error$detail;
// The first case is no 2fa provided, the second is when the 2fa is wrong (timeout or wrong words)
if ((json.error.code === "EOTP" || json.error.code === "E401" && json.error.detail.includes("--otp=<code>")) && !isCI) {
if (twoFactorState.token !== null) {
// the current otp code must be invalid since it errored
twoFactorState.token = null;
}
if ((json.error.code === "EOTP" || json.error.code === "E401" && (_json$error$detail = json.error.detail) !== null && _json$error$detail !== void 0 && _json$error$detail.includes("--otp=<code>")) && process.stdin.isTTY) {
// the current otp code must be invalid since it errored
twoFactorState.token = undefined;
// just in case this isn't already true
twoFactorState.isRequired = Promise.resolve(true);
return internalPublish(packageJson, opts, twoFactorState);
twoFactorState.isRequired = true;
twoFactorState.allowConcurrency = false;
npmPublishQueue.setConcurrency(1);
return {
published: false,
// given we have just adjusted the concurrency, we need to handle the retries in the layer that requeues the publish
// calling internalPublish again would allow concurrent failures to run again concurrently
// but only one retried publish should get delegated to the npm cli and other ones should "await" its successful result before being retried
allowRetry: true
};
}

@@ -797,5 +895,11 @@ error(`an error occurred while publishing ${packageJson.name}: ${json.error.code}`, json.error.summary, json.error.detail ? "\n" + json.error.detail : "");

/**
 * Entry point for publishing a single package to npm.
 *
 * NOTE(review): this diff span shows the old one-shot implementation
 * (the `npmRequestLimit`/`npmPublishLimit` line) immediately followed by
 * the new queue-based implementation; as merged text the first `return`
 * makes the second unreachable. Only one of the two belongs in the real
 * file — confirm against the published 2.30.0 bundle.
 */
function publish$1(packageJson, opts, twoFactorState) {
  // If there are many packages to be published, it's better to limit the
  // concurrency to avoid unwanted errors, for example from npm.
  return npmRequestLimit(() => npmPublishLimit(() => internalPublish(packageJson, opts, twoFactorState)));
  return npmRequestQueue.add(async () => {
    let result;
    // Retry here, in the layer that re-queues the publish, rather than
    // recursing inside internalPublish: once the publish queue's concurrency
    // has been dropped to 1 (after an OTP failure), retried publishes must
    // run one at a time instead of failing concurrently again.
    do {
      result = await npmPublishQueue.add(() => internalPublish(packageJson, opts, twoFactorState));
    } while (result.allowRetry);
    // Strip the internal `allowRetry` field from what callers see.
    return {
      published: result.published
    };
  });
}

@@ -810,4 +914,3 @@

}
const isCustomRegistry = registry => !!registry && registry !== "https://registry.npmjs.org" && registry !== "https://registry.yarnpkg.com";
const getTwoFactorState = ({
const getTwoFactorState = async ({
otp,

@@ -819,17 +922,19 @@ publicPackages

token: otp,
isRequired: Promise.resolve(true)
isRequired: true
};
}
if (isCI || publicPackages.some(pkg => isCustomRegistry(getCorrectRegistry(pkg.packageJson).registry)) || isCustomRegistry(process.env.npm_config_registry)) {
if (!process.stdin.isTTY || publicPackages.some(pkg => isCustomRegistry(getCorrectRegistry(pkg.packageJson).registry)) || isCustomRegistry(process.env.npm_config_registry)) {
return {
token: null,
isRequired: Promise.resolve(false)
token: undefined,
isRequired: false
};
}
return {
token: null,
// note: we're not awaiting this here, we want this request to happen in parallel with getUnpublishedPackages
isRequired: getTokenIsRequired()
token: undefined,
isRequired: await getTokenIsRequired()
};
};
// Delegated (interactive) auth is needed only when we can actually prompt
// (stdin is a TTY), no OTP token is currently cached, concurrent publishes
// are not allowed, and 2FA is known to be required.
const requiresDelegatedAuth = twoFactorState => {
  const { token, allowConcurrency, isRequired } = twoFactorState;
  return process.stdin.isTTY && !token && !allowConcurrency && isRequired;
};
async function publishPackages({

@@ -848,6 +953,9 @@ packages,

}
const twoFactorState = getTwoFactorState({
const twoFactorState = await getTwoFactorState({
otp,
publicPackages
});
if (requiresDelegatedAuth(twoFactorState)) {
npmPublishQueue.setConcurrency(1);
}
return Promise.all(unpublishedPackagesInfo.map(pkgInfo => {

@@ -1055,3 +1163,3 @@ let pkg = packagesByName.get(pkgInfo.name);

if (output) {
await fs.writeFile(path.join(cwd, output), JSON.stringify(releasePlan, undefined, 2));
await fs.writeFile(path.resolve(cwd, output), JSON.stringify(releasePlan, undefined, 2));
return;

@@ -1178,3 +1286,3 @@ }

getVersionMessage
}, commitOpts] = getCommitFunctions(releaseConfig.commit, cwd);
}, commitOpts] = await getCommitFunctions(releaseConfig.commit, cwd);
if (getVersionMessage) {

@@ -1199,7 +1307,9 @@ let touchedFile;

async function run(input, flags, cwd) {
const packages = await getPackages(cwd);
const rootDir = packages.root.dir;
if (input[0] === "init") {
await init(cwd);
await init(rootDir);
return;
}
if (!fs.existsSync(path.resolve(cwd, ".changeset"))) {
if (!fs.existsSync(path.resolve(rootDir, ".changeset"))) {
error("There is no .changeset folder. ");

@@ -1210,8 +1320,7 @@ error("If this is the first time `changesets` have been used in this project, run `yarn changeset init` to get set up.");

}
const packages = await getPackages(cwd);
let config;
try {
config = await read(cwd, packages);
config = await read(rootDir, packages);
} catch (e) {
let oldConfigExists = await fs.pathExists(path.resolve(cwd, ".changeset/config.js"));
let oldConfigExists = await fs.pathExists(path.resolve(rootDir, ".changeset/config.js"));
if (oldConfigExists) {

@@ -1230,8 +1339,12 @@ error("It looks like you're using the version 1 `.changeset/config.js` file");

empty,
open
open,
since,
message
} = flags;
// @ts-ignore if this is undefined, we have already exited
await add(cwd, {
await add(rootDir, {
empty,
open
open,
since,
message
}, config);

@@ -1253,3 +1366,4 @@ } else if (input[0] !== "pre" && input.length > 1) {

open,
gitTag
gitTag,
message
} = flags;

@@ -1274,5 +1388,7 @@ const deadFlags = ["updateChangelog", "isPublic", "skipCI", "commit"];

{
await add(cwd, {
await add(rootDir, {
empty,
open
open,
since,
message
}, config);

@@ -1309,3 +1425,7 @@ return;

// validate that all dependents of skipped packages are also skipped
// Validate that all dependents of skipped packages are also skipped.
// devDependencies are excluded because they don't affect published consumers —
// a stale devDep range on a skipped package is harmless.
// Note: assemble-release-plan uses a graph WITH devDeps because it needs to
// update devDep ranges in package.json even though they don't cause version bumps.
const dependentsGraph = getDependentsGraph(packages, {

@@ -1326,2 +1446,9 @@ ignoreDevDependencies: true,

const dependentPkg = packagesByName.get(dependent);
if (dependentPkg.packageJson.private) {
// Private packages don't publish to npm,
// so they can safely depend on skipped packages.
// This also holds for private packages with other publish targets (like a VS Code extension)
// as those typically have to prebundle dependencies.
continue;
}
if (!shouldSkipPackage(dependentPkg, {

@@ -1342,3 +1469,3 @@ ignore: config.ignore,

}
await version(cwd, {
await version(rootDir, {
snapshot

@@ -1350,3 +1477,3 @@ }, config);

{
await publish(cwd, {
await publish(rootDir, {
otp,

@@ -1360,3 +1487,3 @@ tag: tag$1,

{
await status(cwd, {
await status(rootDir, {
sinceMaster,

@@ -1371,3 +1498,3 @@ since,

{
await tag(cwd, config);
await tag(rootDir, config);
return;

@@ -1387,4 +1514,3 @@ }

}
// @ts-ignore
await pre(cwd, {
await pre(rootDir, {
command,

@@ -1419,3 +1545,3 @@ tag

boolean: ["sinceMaster", "verbose", "empty", "open", "gitTag", "snapshot"],
string: ["output", "otp", "since", "ignore", "tag", "snapshot", "snapshotPrereleaseTemplate"],
string: ["output", "otp", "since", "ignore", "message", "tag", "snapshot", "snapshotPrereleaseTemplate"],
alias: {

@@ -1425,2 +1551,3 @@ // Short flags

o: "output",
m: "message",
// Support kebab-case flags

@@ -1456,3 +1583,3 @@ "since-master": "sinceMaster",

init
add [--empty] [--open]
add [--empty] [--open] [--since <branch>] [--message <text>]
version [--ignore] [--snapshot <?name>] [--snapshot-prerelease-template <template>]

@@ -1459,0 +1586,0 @@ publish [--tag <name>] [--otp <code>] [--no-git-tag]

{
"name": "@changesets/cli",
"version": "2.29.8",
"version": "2.30.0",
"description": "Organise your package versioning and publishing to make both contributors and maintainers happy",

@@ -69,13 +69,13 @@ "bin": {

"dependencies": {
"@changesets/apply-release-plan": "^7.0.14",
"@changesets/apply-release-plan": "^7.1.0",
"@changesets/assemble-release-plan": "^6.0.9",
"@changesets/changelog-git": "^0.2.1",
"@changesets/config": "^3.1.2",
"@changesets/config": "^3.1.3",
"@changesets/errors": "^0.2.0",
"@changesets/get-dependents-graph": "^2.1.3",
"@changesets/get-release-plan": "^4.0.14",
"@changesets/get-release-plan": "^4.0.15",
"@changesets/git": "^3.0.4",
"@changesets/logger": "^0.1.1",
"@changesets/pre": "^2.0.2",
"@changesets/read": "^0.6.6",
"@changesets/read": "^0.6.7",
"@changesets/should-skip-package": "^0.1.2",

@@ -87,7 +87,5 @@ "@changesets/types": "^6.1.0",

"ansi-colors": "^4.1.3",
"ci-info": "^3.7.0",
"enquirer": "^2.4.1",
"fs-extra": "^7.0.1",
"mri": "^1.2.0",
"p-limit": "^2.2.0",
"package-manager-detector": "^0.2.0",

@@ -94,0 +92,0 @@ "picocolors": "^1.1.0",

@@ -103,3 +103,3 @@ ## @changesets/cli 🦋

```shell
changeset [--empty] [--open]
changeset [--empty] [--open] [--message <text>]
```

@@ -110,3 +110,3 @@

```shell
changeset add [--empty] [--open]
changeset add [--empty] [--open] [--message <text>]
```

@@ -142,2 +142,3 @@

- `--open` - opens the created changeset in an external editor
- `--message` (or `-m`) - provides the changeset summary from the command line instead of prompting for it.

@@ -144,0 +145,0 @@ ### version