@changesets/cli - npm Package Compare versions

Comparing version 1.3.0 to 1.3.1


CHANGELOG.md
# @changesets/cli
## 1.3.1
### Patch Changes
- [c46e9ee7](https://github.com/changesets/changesets/commit/c46e9ee7) - Use 'spawndamnit' package for all new process spawning
- [5b28c527](https://github.com/changesets/changesets/commit/5b28c527) - Fix 2FA check on release
- [6f8eb05a](https://github.com/changesets/changesets/commit/6f8eb05a) - Updated readme
- [6d119893](https://github.com/changesets/changesets/commit/6d119893) - Move `git` module to be its own external module
## 1.3.0

@@ -4,0 +13,0 @@


dist/cli.cjs.js

@@ -1,7 +0,7 @@

'use strict';
"use strict";
if (process.env.NODE_ENV === "production") {
module.exports = require("./cli.cjs.prod.js");
} else {
module.exports = require("./cli.cjs.dev.js");
}
let unregister = require("/Users/bconolly/Development/changesets/node_modules/@preconstruct/hook/dist/hook.cjs.js").___internalHook("/Users/bconolly/Development/changesets");
module.exports = require("/Users/bconolly/Development/changesets/packages/cli/src/index.js");
unregister();

@@ -1,1761 +0,15 @@

import meow from 'meow';
import chalk, { bold, red, yellow, green, blue, cyan } from 'chalk';
import util from 'util';
import path from 'path';
import fs from 'fs-extra';
import pkgDir from 'pkg-dir';
import _objectSpread from '@babel/runtime/helpers/esm/objectSpread';
import uuid from 'uuid/v1';
import termSize from 'term-size';
import { prompt } from 'enquirer';
import spawn from 'projector-spawn';
import getWorkspaces$1 from 'get-workspaces';
import semver from 'semver';
import _objectWithoutProperties from '@babel/runtime/helpers/esm/objectWithoutProperties';
import prettier from 'prettier';
import humanId from 'human-id';
import boxen from 'boxen';
import outdent from 'outdent';
import detectIndent from 'detect-indent';
import startCase from 'lodash.startcase';
import pLimit from 'p-limit';
import spawn$1 from 'spawndamnit';
import isCI from 'is-ci';
import table from 'tty-table';
// 👋 hey!!
// you might be reading this and seeing .esm in the filename
// and being confused why there is commonjs below this filename
// DON'T WORRY!
// this is intentional
// it's only commonjs with `preconstruct dev`
// when you run `preconstruct build`, it will be ESM
// why is it commonjs?
// we need to re-export every export from the source file
// but we can't do that with ESM without knowing what the exports are (because default exports aren't included in export/import *)
// and they could change after running `preconstruct dev` so we can't look at the file without forcing people to
// run preconstruct dev again which wouldn't be ideal
// this solution could change but for now, it's working
let prefix = "🦋 ";
function format(args, customPrefix) {
let fullPrefix = prefix + (customPrefix === undefined ? "" : " " + customPrefix);
return fullPrefix + util.format("", ...args).split("\n").join("\n" + fullPrefix + " ");
}
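// For illustration (a hedged sketch; exact spacing follows util.format's behavior):
// format(["hello\nworld"], "info") returns roughly
//   "🦋  info hello\n🦋  info world"
// i.e. every line of multi-line output is re-prefixed so it stays visually grouped.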
function log(...args) {
console.log(format(args));
}
function info(...args) {
console.error(format(args, chalk.cyan("info")));
}
function warn(...args) {
console.error(format(args, chalk.yellow("warn")));
}
function error(...args) {
console.error(format(args, chalk.red("error")));
}
function success(...args) {
console.log(format(args, chalk.green("success")));
}
var logger = {
log,
info,
warn,
error,
success,
format
};
async function getProjectDirectory(cwd) {
const projectDir = await pkgDir(cwd);
if (!projectDir) {
throw new Error("Could not find project directory");
}
return projectDir;
}
async function getChangesetBase(cwd) {
const dir = await getProjectDirectory(cwd);
return path.resolve(dir, ".changeset");
}
const pkgPath = path.dirname(require.resolve("@changesets/cli/package.json"));
const defaultConfig = require(path.join(pkgPath, "default-files/config"));
const DEPENDENCY_TYPES = ["dependencies", "devDependencies", "peerDependencies", "optionalDependencies"];
async function init({
cwd
}) {
const changesetBase = await getChangesetBase(cwd);
if (fs.existsSync(changesetBase)) {
logger.warn("It looks like you already have changesets initialized. You should be able to run changeset commands no problems.");
} else {
await fs.copy(path.resolve(pkgPath, "./default-files"), changesetBase);
logger.log(chalk`Thanks for choosing {green changesets} to help manage your versioning and publishing\n`);
logger.log("You should be set up to start using changesets now!\n");
logger.info("We have added a `.changeset` folder, and a couple of files to help you out:");
logger.info(chalk`- {blue .changeset/README.md} contains information about using changesets`);
logger.info(chalk`- {blue .changeset/config.js} is our default config, with a lot of comments about each option.`);
}
}
/* Notes on using inquirer:
* Each question needs a key, as inquirer is assembling an object behind-the-scenes.
* At each call, the entire responses object is returned, so we need a unique
* identifier for the name every time. This is why we are using UUIDs.
*/
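// For illustration (a hedged sketch of enquirer's response shape as used below):
// prompt([{ type: "input", name: "Question-<uuid>", message: "Summary" }])
// resolves to { "Question-<uuid>": "<what the user typed>" }, which is why each
// helper below generates a fresh UUID key and then returns responses[name].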
const limit = Math.max(termSize().rows - 5, 10);
let cancelFlow = () => {
logger.success("Cancelled... 👋 ");
process.exit();
};
async function askCheckboxPlus(message, choices, format) {
const name = `CheckboxPlus-${uuid()}`;
return prompt({
type: "autocomplete",
name,
message,
// @ts-ignore
prefix,
multiple: true,
choices,
format,
limit,
onCancel: cancelFlow
}).then(responses => responses[name]).catch(err => {
logger.error(err);
});
}
async function askQuestion(message) {
const name = `Question-${uuid()}`;
return prompt([{
type: "input",
message,
name,
// @ts-ignore
prefix,
onCancel: cancelFlow
}]).then(responses => responses[name]).catch(err => {
logger.error(err);
});
}
async function askConfirm(message) {
const name = `Confirm-${uuid()}`;
return prompt([{
message,
name,
// @ts-ignore
prefix,
type: "confirm",
initial: true,
onCancel: cancelFlow
}]).then(responses => responses[name]).catch(err => {
logger.error(err);
});
}
async function getWorkspaces (opts) {
let workspaces = await getWorkspaces$1(_objectSpread({
tools: ["yarn", "bolt", "root"]
}, opts));
if (workspaces === null) {
return [];
}
return workspaces;
}
// This is a modified version of the graph-getting in bolt
const getAllDependencies = config => {
const allDependencies = new Map();
for (const type of DEPENDENCY_TYPES) {
const deps = config[type];
if (!deps) continue;
for (const name of Object.keys(deps)) {
allDependencies.set(name, deps[name]);
}
}
return allDependencies;
};
async function getDependencyGraph(packages, cwd) {
const graph = new Map();
let valid = true;
const pkgRoot = await fs.readFile(path.resolve(cwd, "package.json"), "utf8").then(JSON.parse);
const pkgRootConfigged = {
config: pkgRoot,
name: pkgRoot.name,
dir: path.resolve(cwd)
};
const packagesByName = {
[pkgRoot.name]: pkgRootConfigged
};
const queue = [pkgRootConfigged];
for (const pkg of packages) {
queue.push(pkg);
packagesByName[pkg.name] = pkg;
}
for (const pkg of queue) {
const {
name
} = pkg.config;
const dependencies = [];
const allDependencies = getAllDependencies(pkg.config);
for (const [depName, depVersion] of allDependencies) {
const match = packagesByName[depName];
if (!match) continue;
const expected = match.config.version; // Workspace dependencies only need to semver satisfy, not '==='
if (!semver.satisfies(expected, depVersion)) {
valid = false;
console.error(`Package ${chalk.cyan(`"${name}"`)} must depend on the current version of ${chalk.cyan(`"${depName}"`)}: ${chalk.green(`"${expected}"`)} vs ${chalk.red(`"${depVersion}"`)}`);
continue;
}
dependencies.push(depName);
}
graph.set(name, {
pkg,
dependencies
});
}
return {
graph,
valid
};
}
async function getDependentsGraph({
cwd
}) {
const packages = await getWorkspaces({
cwd
});
const graph = new Map();
const {
graph: dependencyGraph
} = await getDependencyGraph(packages, cwd);
const dependentsLookup = {};
packages.forEach(pkg => {
dependentsLookup[pkg.config.name] = {
pkg,
dependents: []
};
});
packages.forEach(pkg => {
const dependent = pkg.config.name;
const valFromDependencyGraph = dependencyGraph.get(dependent);
if (valFromDependencyGraph) {
const dependencies = valFromDependencyGraph.dependencies;
dependencies.forEach(dependency => {
dependentsLookup[dependency].dependents.push(dependent);
});
}
}); // can't use Object.entries here as the flow type for it is Array<[string, mixed]>;
Object.keys(dependentsLookup).forEach(key => {
graph.set(key, dependentsLookup[key]);
});
const simplifiedDependentsGraph = new Map();
graph.forEach((pkgInfo, pkgName) => {
simplifiedDependentsGraph.set(pkgName, pkgInfo.dependents);
});
return simplifiedDependentsGraph;
}
async function getMasterRef(cwd) {
const gitCmd = await spawn("git", ["rev-parse", "master"], {
cwd
});
return gitCmd.stdout.trim().split("\n")[0];
}
async function add(pathToFile, cwd) {
const gitCmd = await spawn("git", ["add", pathToFile], {
cwd
});
return gitCmd.code === 0;
}
async function commit(message, cwd) {
const gitCmd = await spawn("git", ["commit", "-m", message, "--allow-empty"], {
cwd
});
return gitCmd.code === 0;
} // used to create a single tag at a time for the current head only
async function tag(tagStr, cwd) {
// NOTE: it's important we use the -m flag otherwise 'git push --follow-tags' won't actually push
// the tags
const gitCmd = await spawn("git", ["tag", tagStr, "-m", tagStr], {
cwd
});
return gitCmd.code === 0;
}
async function getCommitThatAddsFile(gitPath, cwd) {
const gitCmd = await spawn("git", ["log", "--reverse", "--max-count=1", "--pretty=format:%h", "-p", gitPath], {
cwd
}); // For reasons I do not understand, passing pretty format through this is not working
// The split below is aimed at achieving the same thing.
return gitCmd.stdout.split("\n")[0];
}
async function getChangedFilesSince(ref, cwd, fullPath = false) {
// First we need to find the commit where we diverged from `ref`, using `git merge-base`
let cmd = await spawn("git", ["merge-base", ref, "HEAD"], {
cwd
});
const divergedAt = cmd.stdout.trim(); // Now we can find which files we added
cmd = await spawn("git", ["diff", "--name-only", divergedAt], {
cwd
});
const files = cmd.stdout.trim().split("\n");
if (!fullPath) return files;
return files.map(file => path.resolve(cwd, file));
} // below are less generic functions that we use in combination with other things we are doing
async function getChangedChangesetFilesSinceMaster(cwd, fullPath = false) {
const ref = await getMasterRef(cwd); // First we need to find the commit where we diverged from `ref`, using `git merge-base`
let cmd = await spawn("git", ["merge-base", ref, "HEAD"], {
cwd
}); // Now we can find which files we added
cmd = await spawn("git", ["diff", "--name-only", "--diff-filter=d", "master"], {
cwd
});
const files = cmd.stdout.trim().split("\n").filter(file => file.includes("changes.json"));
if (!fullPath) return files;
return files.map(file => path.resolve(cwd, file));
}
async function getChangedPackagesSinceCommit(commitHash, cwd) {
const changedFiles = await getChangedFilesSince(commitHash, cwd, true);
const projectDir = await getProjectDirectory(cwd);
const workspaces = await getWorkspaces({
cwd
});
const allPackages = workspaces.map(pkg => _objectSpread({}, pkg, {
relativeDir: path.relative(projectDir, pkg.dir)
}));
const fileNameToPackage = fileName => allPackages.find(pkg => fileName.startsWith(pkg.dir + path.sep));
const fileExistsInPackage = fileName => !!fileNameToPackage(fileName);
return changedFiles // ignore deleted files
.filter(fileExistsInPackage).map(fileNameToPackage) // filter, so that we have only unique packages
.filter((pkg, idx, packages) => packages.indexOf(pkg) === idx);
} // Note: This returns the packages that have changed AND been committed since master,
// it won't include staged/unstaged changes
//
// Don't use this function in master branch as it returns nothing in that case.
async function getChangedPackagesSinceMaster(cwd) {
const masterRef = await getMasterRef(cwd);
return getChangedPackagesSinceCommit(masterRef, cwd);
}
// @flow
// folder, and tidy up the subfolders
const removeEmptyFolders = folderPath => {
const dirContents = fs.readdirSync(folderPath);
dirContents.forEach(contentPath => {
const singleChangesetPath = path.resolve(folderPath, contentPath);
if (fs.statSync(singleChangesetPath).isDirectory() && fs.readdirSync(singleChangesetPath).length < 1) {
fs.rmdirSync(singleChangesetPath);
}
});
};
const removeFolders = folderPath => {
if (!fs.existsSync(folderPath)) return;
const dirContents = fs.readdirSync(folderPath);
dirContents.forEach(contentPath => {
const singleChangesetPath = path.resolve(folderPath, contentPath);
if (fs.statSync(singleChangesetPath).isDirectory()) {
fs.emptyDirSync(singleChangesetPath);
fs.rmdirSync(singleChangesetPath);
}
});
};
async function writeChangeset(changesetData, opts) {
const cwd = opts.cwd || process.cwd();
const {
summary
} = changesetData,
jsonData = _objectWithoutProperties(changesetData, ["summary"]);
const dir = await pkgDir(cwd);
const changesetBase = await getChangesetBase(cwd); // Worth understanding that the ID merely needs to be a unique hash to avoid git conflicts
// experimenting with human readable ids to make finding changesets easier
const changesetID = humanId({
separator: "-",
capitalize: false
});
const prettierConfig = await prettier.resolveConfig(dir);
const newFolderPath = path.resolve(changesetBase, changesetID);
if (fs.existsSync(newFolderPath)) {
throw new Error(`A changeset with the unique ID ${changesetID} already exists`);
}
removeEmptyFolders(changesetBase);
fs.mkdirSync(newFolderPath); // the changeset is divided into two parts, a .md and a .json file.
// the .md file represents what will be written into the changelogs for packages
// the .json file includes metadata about the changeset.
fs.writeFileSync(path.resolve(newFolderPath, "changes.md"), summary);
fs.writeFileSync(path.resolve(newFolderPath, "changes.json"), prettier.format(JSON.stringify(jsonData), _objectSpread({}, prettierConfig, {
parser: "json"
})));
return changesetID;
}
/*
type releaseType = {
name: string,
type: string,
}
type dependentType = {
name: string,
type?: string,
dependencies: Array<string>,
}
type changesetDependentType = {
name: string,
dependencies: Array<string>,
type?: string,
}
type changesetType = {
summary: string,
releases: Array<releaseType>,
dependents: Array<changesetDependentType>,
releaseNotes?: any,
}
*/
async function getPackagesToRelease(changedPackages, allPackages) {
function askInitialReleaseQuestion(defaultChoiceList) {
return askCheckboxPlus( // TODO: Make this wording better
// TODO: take objects and be fancy with matching
`Which packages would you like to include?`, defaultChoiceList, x => {
// this removes changed packages and unchanged packages from the list
// of packages shown after selection
if (Array.isArray(x)) {
return x.filter(x => x !== "changed packages" && x !== "unchanged packages").map(x => cyan(x)).join(", ");
}
return x;
});
}
if (allPackages.length > 1) {
const unchangedPackagesNames = allPackages.map(({
name
}) => name).filter(name => !changedPackages.includes(name));
const defaultChoiceList = [{
name: "changed packages",
choices: changedPackages
}, {
name: "unchanged packages",
choices: unchangedPackagesNames
}].filter(({
choices
}) => choices.length !== 0);
let packagesToRelease = await askInitialReleaseQuestion(defaultChoiceList);
if (packagesToRelease.length === 0) {
do {
logger.error("You must select at least one package to release");
logger.error("(You most likely hit enter instead of space!)");
packagesToRelease = await askInitialReleaseQuestion(defaultChoiceList);
} while (packagesToRelease.length === 0);
}
return packagesToRelease.filter(pkgName => pkgName !== "changed packages" && pkgName !== "unchanged packages");
}
return [allPackages[0].name];
}
function formatPkgNameAndVersion(pkgName, version) {
return `${bold(pkgName)}@${bold(version)}`;
}
/*
Returns an object in the shape { depTypes: [], versionRange: '' } with a list of different depTypes
matched ('dependencies', 'peerDependencies', etc) and the versionRange itself ('^1.0.0')
*/
function getDependencyVersionRange(dependentPkgJSON, dependencyName) {
const DEPENDENCY_TYPES = ["dependencies", "devDependencies", "peerDependencies", "bundledDependencies", "optionalDependencies"];
const dependencyVersionRange = {
depTypes: [],
versionRange: ""
};
for (const type of DEPENDENCY_TYPES) {
const deps = dependentPkgJSON[type];
if (!deps) continue;
if (deps[dependencyName]) {
dependencyVersionRange.depTypes.push(type); // We'll just override this each time, *hypothetically* it *should* be the same...
dependencyVersionRange.versionRange = deps[dependencyName];
}
}
return dependencyVersionRange;
}
async function createChangeset(changedPackages
/* Array<string> */
, opts
/* { cwd?: string } */
= {}) {
const cwd = opts.cwd || process.cwd();
const allPackages = await getWorkspaces({
cwd
});
const dependencyGraph = await getDependentsGraph({
cwd
});
const packagesToRelease = await getPackagesToRelease(changedPackages, allPackages);
let pkgJsonsByName = new Map(allPackages.map(({
name,
config
}) => [name, config]));
const releases = [];
let pkgsLeftToGetBumpTypeFor = new Set(packagesToRelease);
let pkgsThatShouldBeMajorBumped = await askCheckboxPlus(bold(`Which packages should have a ${red("major")} bump?`), packagesToRelease.map(pkgName => {
return {
name: pkgName,
message: formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version)
};
}));
for (const pkgName of pkgsThatShouldBeMajorBumped) {
// for packages that are under v1, we want to make sure major releases are intended,
// as some repo-wide sweeping changes have mistakenly released first majors
// of packages.
let {
version,
maintainers
} = pkgJsonsByName.get(pkgName);
if (semver.lt(version, "1.0.0")) {
let maintainersString = "";
if (maintainers && Array.isArray(maintainers) && maintainers.length > 0) {
maintainersString = ` (${maintainers.join(", ")})`;
} // prettier-ignore
logger.log(yellow(`WARNING: Releasing a major version for ${green(pkgName)} will be its ${red('first major release')}.`));
logger.log(yellow(`If you are unsure if this is correct, contact the package's maintainers${maintainersString} ${red("before committing this changeset")}.`));
let shouldReleaseFirstMajor = await askConfirm(bold(`Are you sure you still want to release the ${red("first major release")} of ${pkgName}?`));
if (!shouldReleaseFirstMajor) {
continue;
}
}
pkgsLeftToGetBumpTypeFor.delete(pkgName);
releases.push({
name: pkgName,
type: "major"
});
}
if (pkgsLeftToGetBumpTypeFor.size !== 0) {
let pkgsThatShouldBeMinorBumped = await askCheckboxPlus(bold(`Which packages should have a ${green("minor")} bump?`), [...pkgsLeftToGetBumpTypeFor].map(pkgName => {
return {
name: pkgName,
message: formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version)
};
}));
for (const pkgName of pkgsThatShouldBeMinorBumped) {
pkgsLeftToGetBumpTypeFor.delete(pkgName);
releases.push({
name: pkgName,
type: "minor"
});
}
}
if (pkgsLeftToGetBumpTypeFor.size !== 0) {
logger.log(`The following packages will be ${blue("patch")} bumped:`);
pkgsLeftToGetBumpTypeFor.forEach(pkgName => {
logger.log(formatPkgNameAndVersion(pkgName, pkgJsonsByName.get(pkgName).version));
});
for (const pkgName of pkgsLeftToGetBumpTypeFor) {
releases.push({
name: pkgName,
type: "patch"
});
}
}
logger.log("Please enter a summary for this change (this will be in the changelogs)");
let summary = await askQuestion("Summary");
while (summary.length === 0) {
logger.error("A summary is required for the changelog! 😪");
summary = await askQuestion("Summary");
}
const pkgsToSearch = [...releases];
const dependents = [];
while (pkgsToSearch.length > 0) {
// nextRelease is our dependency, think of it as "avatar"
const nextRelease = pkgsToSearch.shift(); // pkgDependents will be a list of packages that depend on nextRelease, i.e. ['avatar-group', 'comment']
const pkgDependents = dependencyGraph.get(nextRelease.name); // For each dependent we are going to see whether it needs to be bumped because its dependency
// is leaving the version range.
pkgDependents.map(dependent => {
let type = "none";
const dependentPkgJSON = pkgJsonsByName.get(dependent);
const {
depTypes,
versionRange
} = getDependencyVersionRange(dependentPkgJSON, nextRelease.name); // Firstly we check if it is a peerDependency because if it is, our dependent bump type needs to be major.
if (depTypes.includes("peerDependencies") && nextRelease.type !== "patch") {
type = "major";
} else {
const nextReleaseVersion = semver.inc(pkgJsonsByName.get(nextRelease.name).version, nextRelease.type);
if (!dependents.some(dep => dep.name === dependent) && !releases.some(dep => dep.name === dependent) && !semver.satisfies(nextReleaseVersion, versionRange)) {
type = "patch";
}
}
return {
name: dependent,
type
};
}).filter(({
type
}) => type !== "none").forEach(dependent => {
const existing = dependents.find(dep => dep.name === dependent.name); // For things that are being given a major bump, we check if we have already
// added them here. If we have, we update the existing item instead of pushing it on to search.
// It is safe to not add it to pkgsToSearch because it should have already been searched at the
// largest possible bump type.
if (existing && dependent.type === "major" && existing.type !== "major") {
existing.type = "major";
} else {
pkgsToSearch.push(dependent);
dependents.push(dependent);
}
});
} // Now we need to fill in the dependencies arrays for each of the dependents. We couldn't accurately
// do it until now because we didn't have the entire list of packages being released yet
dependents.forEach(dependent => {
const dependentPkgJSON = pkgJsonsByName.get(dependent.name);
dependent.dependencies = [...dependents, ...releases].map(pkg => pkg.name).filter(dep => !!getDependencyVersionRange(dependentPkgJSON, dep).versionRange);
});
return {
summary,
releases,
dependents
};
}
async function resolveConfig(cwd) {
const changesetBase = await getChangesetBase(cwd);
const configPath = path.resolve(changesetBase, "config.js");
const hasConfigFile = await fs.pathExists(configPath);
if (hasConfigFile) {
try {
const loadedConfig = require(configPath);
return loadedConfig;
} catch (error) {
logger.error("There was an error reading your changeset config", error);
throw error;
}
} else {
return {};
}
}
function printConfirmationMessage(changeset) {
function getReleasesOfType(type) {
return changeset.releases.filter(release => release.type === type).map(release => release.name);
}
logger.log("=== Releasing the following packages ===");
const majorReleases = getReleasesOfType("major");
const minorReleases = getReleasesOfType("minor");
const patchReleases = getReleasesOfType("patch");
const patchDependents = changeset.dependents.filter(dep => dep.type === "patch").map(dep => dep.name);
const majorDependents = changeset.dependents.filter(dep => dep.type === "major").map(dep => red(dep.name));
if (majorReleases.length > 0) logger.log(`${green("[Major]")}\n ${majorReleases.join(", ")}`);
if (minorReleases.length > 0) logger.log(`${green("[Minor]")}\n ${minorReleases.join(", ")}`);
if (patchReleases.length > 0) logger.log(`${green("[Patch]")}\n ${patchReleases.join(", ")}`);
if (patchDependents.length > 0) logger.log(`${green("[Dependents (patch)]")}\n ${patchDependents.join("\n ")}`);
if (majorDependents.length > 0) logger.log(`${green("[Dependents (major)]")}\n ${majorDependents.join("\n ")}`);
if (changeset.dependents.length > 0) {
const message = outdent`
${red("========= NOTE ========")}
All dependents that are bumped will be ${red("patch bumped")}.
If any of the above need a higher bump than this, you will need to create a ${red("separate changeset")} for this
Please read the above list ${red("carefully")} to make sure you're not missing anything!`;
const prettyMessage = boxen(message, {
borderStyle: "double",
align: "center"
});
logger.log(prettyMessage);
}
}
async function add$1(opts) {
const userConfig = await resolveConfig(opts.cwd);
const userchangesetOptions = userConfig && userConfig.changesetOptions ? userConfig.changesetOptions : {};
const config = _objectSpread({}, defaultConfig.changesetOptions, userchangesetOptions, opts);
const changesetBase = await getChangesetBase(config.cwd);
if (!fs.existsSync(changesetBase)) {
console.warn("There is no .changeset folder. If this is the first time `changesets` have been used in this project, run `yarn changesets init` to get set up. If you expected there to be changesets, you should check git history for when the folder was removed to ensure you do not lose any configuration.");
return;
}
const changedPackages = await getChangedPackagesSinceMaster(config.cwd);
const changePackagesName = changedPackages.map(pkg => pkg.name);
const newChangeset = await createChangeset(changePackagesName, config);
printConfirmationMessage(newChangeset);
const confirmChangeset = await askConfirm("Is this your desired changeset?");
if (confirmChangeset) {
const changesetID = await writeChangeset(newChangeset, config);
if (config.commit) {
await add(path.resolve(changesetBase, changesetID), config.cwd);
await commit(`CHANGESET: ${changesetID}. ${newChangeset.summary}`, config.cwd);
logger.log(green("Changeset added and committed"));
} else {
logger.log(green("Changeset added! - you can now commit it\n"));
}
let hasMajorChange = [...newChangeset.releases, ...newChangeset.dependents].find(c => c.type === "major");
if (hasMajorChange) {
logger.warn("This Changeset includes a major change and we STRONGLY recommend adding more information to the changeset:");
logger.warn("WHAT the breaking change is");
logger.warn("WHY the change was made");
logger.warn("HOW a consumer should update their code");
} else {
logger.log(green("If you want to modify or expand on the changeset summary, you can find it here"));
}
logger.info(blue(path.resolve(changesetBase, changesetID, "changes.md")));
}
}
function maxType(types) {
if (types.includes("major")) return "major";
if (types.includes("minor")) return "minor";
if (types.includes("patch")) return "patch";
return "none";
}
function flattenReleases(changesets, allLinkedPackages) {
const flatChangesets = changesets.map(changeset => [...changeset.releases.map(release => ({
name: release.name,
type: release.type,
commit: changeset.commit,
id: changeset.id
})), ...changeset.dependents.map(dependent => ({
name: dependent.name,
type: dependent.type,
commit: changeset.commit,
id: changeset.id
}))]).reduce((acc, a) => [...acc, ...a], []) // flatten
.reduce((acc, release) => {
if (!acc[release.name]) {
acc[release.name] = [];
}
acc[release.name].push(release);
return acc;
}, {});
const flatReleases = new Map(Object.entries(flatChangesets).map(([name, releases]) => [name, {
name,
type: maxType(releases.map(r => r.type)),
commits: [...new Set(releases.map(r => r.commit))].filter(a => a),
changesets: [...new Set(releases.map(r => r.id))]
}]));
for (const linkedPackages of allLinkedPackages) {
const allBumpTypes = [];
for (let linkedPackage of linkedPackages) {
let release = flatReleases.get(linkedPackage);
if (release) {
allBumpTypes.push(release.type);
}
}
const highestBumpType = maxType(allBumpTypes);
for (let linkedPackage of linkedPackages) {
let release = flatReleases.get(linkedPackage);
if (release) {
release.type = highestBumpType;
}
}
}
return [...flatReleases.values()];
}
/*
This flattens an array of Version objects into one object that can be used to create the changelogs
and the publish commit messages.
Dependents will be calculated and added to releases, then final versions will be calculated.
Its output will look like
{
releases: [{
name: 'package-a',
version: '2.0.0', // actual version being released
commits: ['fc4229d'], // filtered to ones for this pkg
// (used in changelogs)
dependencies: ['package-c'] // list of dependencies that will need to be updated
},
{
name: 'package-b'
version: '1.1.0',
commits: ['fc4229d'], // these would be the commits that caused bumps
dependencies: ['package-a']
},
{
name: 'package-c'
version: '1.0.1',
commits: ['fc4229d'],
dependencies: ['package-b']
}]
changesets: [<Changeset>] // References to all the changesets used to build Release
// to be able to look up summary and release notes
// information when building changelogs
}
*/
function createRelease(changesets, allPackages, allLinkedPackages = []) {
// First, combine all the changeset.releases into one useful array
const flattenedChangesets = flattenReleases(changesets, allLinkedPackages);
let currentVersions = new Map();
for (let pkg of allPackages) {
currentVersions.set(pkg.name, // @ts-ignore
pkg.config.version !== undefined ? pkg.config.version : null);
}
for (let linkedPackages of allLinkedPackages) {
let highestVersion;
for (let linkedPackage of linkedPackages) {
let version = currentVersions.get(linkedPackage);
if (highestVersion === undefined || semver.gt(version, highestVersion)) {
highestVersion = version;
}
}
for (let linkedPackage of linkedPackages) {
currentVersions.set(linkedPackage, highestVersion);
}
}
const allReleases = [];
for (let flattenedChangeset of flattenedChangesets) {
if (flattenedChangeset.type === "none") {
continue;
}
allReleases.push(_objectSpread({}, flattenedChangeset, {
version: semver.inc(currentVersions.get(flattenedChangeset.name), flattenedChangeset.type)
}));
}
return {
releases: allReleases.filter(release => release.version !== null),
deleted: allReleases.filter(release => release.version === null),
changesets
};
}
// I believe it would be safe to deprecate this format
function createReleaseCommit(releaseObj, skipCi) {
const numPackagesReleased = releaseObj.releases.length;
const cleanReleaseObj = {};
cleanReleaseObj.releases = releaseObj.releases;
cleanReleaseObj.changesets = releaseObj.changesets.map(changeset => ({
commit: changeset.commit,
summary: changeset.summary
}));
const releasesLines = releaseObj.releases.map(release => ` ${release.name}@${release.version}`).join("\n");
const dependentsLines = releaseObj.releases.filter(release => release.dependencies && release.dependencies.length > 0).map(release => ` ${release.name}@${release.version}`).join("\n") || "[]";
const deletedLines = releaseObj.deleted.map(deleted => ` ${deleted.name}`).join("\n") || " []";
return outdent`
RELEASING: Releasing ${numPackagesReleased} package(s)
Releases:
${releasesLines}
Dependents:
${dependentsLines}
Deleted:
${deletedLines}
${skipCi ? "\n\n[skip ci]" : ""}
`;
}
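// For illustration (hypothetical package names; indentation approximate), the
// template above renders to something like:
//
//   RELEASING: Releasing 2 package(s)
//   Releases:
//     pkg-a@2.0.0
//     pkg-b@1.0.1
//   Dependents:
//     pkg-b@1.0.1
//   Deleted:
//    []
//
// with "[skip ci]" appended when skipCi is set.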
async function smallHelper(obj, type) {
const releaseLines = obj[type];
if (!releaseLines.length) return "";
const resolvedLines = await Promise.all(releaseLines);
return `### ${startCase(type)} Changes\n\n${resolvedLines.join("")}`;
} // release is the package and version we are releasing
async function generateMarkdownTemplate(release, releaseObject, config) {
// NOTE: The release object we receive here has more information than the ones in release commit
// messages
const {
changesets,
releases
} = releaseObject; // get changesets that "release" this package (not a dependent bump)
const releaseObj = {
major: [],
minor: [],
patch: []
};
changesets.forEach(cs => {
const rls = cs.releases.find(r => r.name === release.name);
if (rls) {
releaseObj[rls.type].push(config.getReleaseLine(cs, rls.type));
}
}); // First, we construct the release lines, summaries of changesets that caused us to be released
const majorReleaseLines = await smallHelper(releaseObj, "major");
const minorReleaseLines = await smallHelper(releaseObj, "minor");
const patchReleaseLines = await smallHelper(releaseObj, "patch"); // get changesets that bump our dependencies
// There is a happy accident that this includes all dependents being bumped, so we
// do not need to enquire into why the dependents are being bumped.
const dependentChangesets = changesets.filter(cs => cs.dependents.find(d => d.name === release.name));
const dependenciesUpdated = new Set( // We use a set so we can dedupe on the fly
dependentChangesets.map(changeset => changeset.dependents.find(d => d.name === release.name).dependencies).reduce((acc, a) => [...acc, ...a], []) // flatten
);
const dependenciesUpdatedArr = [...dependenciesUpdated].map(dependency => releases.find(r => r.name === dependency)) // TODO: Figure out why we need to use the identity function here
// In all cases, it should always be here
.filter(r => r);
const dependencyReleaseLine = await config.getDependencyReleaseLine(dependentChangesets, dependenciesUpdatedArr);
return [`## ${release.version}`, majorReleaseLines, minorReleaseLines, patchReleaseLines, dependencyReleaseLine].filter(line => line).join("\n");
}
function writeFile(filePath, fileContents) {
return util.promisify(cb => fs.writeFile(filePath, fileContents, cb))();
}
async function updateChangelog(releaseObject, opts) {
const cwd = opts.cwd || process.cwd();
const allPackages = await getWorkspaces({
cwd
});
const updatedChangelogs = []; // Updating ChangeLog files for each package
for (const release of releaseObject.releases) {
const pkg = allPackages.find(a => a.name === release.name);
if (!pkg) {
logger.warn(`While writing changelog, could not find workspace ${release.name} in project.`);
}
const changelogPath = path.join(pkg.dir, "CHANGELOG.md");
const markdown = await generateMarkdownTemplate(release, releaseObject, opts);
const templateString = `\n\n${markdown.trim()}\n`;
try {
if (fs.existsSync(changelogPath)) {
await prependFile(changelogPath, templateString, pkg, cwd);
} else {
await writeFile(changelogPath, `# ${pkg.name}${templateString}`);
}
} catch (e) {
logger.warn(e);
return;
}
logger.log(`Updated file ${changelogPath}`);
updatedChangelogs.push(changelogPath);
}
return updatedChangelogs;
}
async function prependFile(filePath, data, pkg, cwd) {
const prettierConfig = await prettier.resolveConfig(cwd);
const fileData = fs.readFileSync(filePath).toString(); // if the file exists but doesn't have the header, we'll add it in
if (!fileData) {
const completelyNewChangelog = `# ${pkg.name}${data}`;
fs.writeFileSync(filePath, prettier.format(completelyNewChangelog, _objectSpread({}, prettierConfig, {
parser: "markdown"
})));
return;
}
const newChangelog = fileData.replace("\n", data);
fs.writeFileSync(filePath, prettier.format(newChangelog, _objectSpread({}, prettierConfig, {
parser: "markdown"
})));
}
// TODO take in cwd, and fetch changesetBase ourselves
async function getChangesets(changesetBase, sinceMasterOnly) {
if (!fs.existsSync(changesetBase)) {
throw new Error("There is no .changeset directory in this project");
}
const dirs = fs.readdirSync(changesetBase); // this needs to support just not dealing with dirs that aren't set up properly
let changesets = dirs.filter(dir => fs.lstatSync(path.join(changesetBase, dir)).isDirectory());
if (sinceMasterOnly) {
const newChangesets = await getChangedChangesetFilesSinceMaster(changesetBase);
const newHashes = newChangesets.map(c => c.split("/")[1]);
changesets = changesets.filter(dir => newHashes.includes(dir));
}
const changesetContents = changesets.map(async changesetDir => {
const summary = fs.readFileSync(path.join(changesetBase, changesetDir, "changes.md"), "utf-8");
const jsonPath = path.join(changesetBase, changesetDir, "changes.json");
const json = require(jsonPath);
const commit = await getCommitThatAddsFile(jsonPath, changesetBase);
return _objectSpread({}, json, {
summary,
commit,
id: changesetDir
});
});
return Promise.all(changesetContents);
}
function getDependencyTypes(depName, config) {
const matchedTypes = [];
for (const depType of DEPENDENCY_TYPES) {
const deps = getDeps(depType, config);
if (deps && deps[depName]) {
matchedTypes.push(depType);
}
}
return matchedTypes;
}
function getDependencyVersionRange$1(depName, config) {
for (const depType of DEPENDENCY_TYPES) {
const deps = getDeps(depType, config);
if (deps && deps[depName]) {
return deps[depName];
}
}
return null;
}
function getDeps(depType, config) {
const deps = config[depType];
if (typeof deps === "undefined") return;
return deps;
}
function versionRangeToRangeType(versionRange) {
if (versionRange.charAt(0) === "^") return "^";
if (versionRange.charAt(0) === "~") return "~";
return "";
}
const importantSeparator = chalk.red("===============================IMPORTANT!===============================");
async function version(opts) {
let userConfig = await resolveConfig(opts.cwd);
userConfig = userConfig && userConfig.versionOptions ? userConfig.versionOptions : {};
const config = _objectSpread({}, defaultConfig.versionOptions, userConfig, opts);
const cwd = config.cwd || process.cwd();
const allPackages = await getWorkspaces({
cwd
});
const changesetBase = await getChangesetBase(cwd);
removeEmptyFolders(changesetBase);
const unreleasedChangesets = await getChangesets(changesetBase);
const releaseObj = createRelease(unreleasedChangesets, allPackages, config.linked);
const publishCommit = createReleaseCommit(releaseObj, config.skipCI);
if (unreleasedChangesets.length === 0) {
logger.warn("No unreleased changesets found, exiting.");
return;
}
logger.log(publishCommit);
await bumpReleasedPackages(releaseObj, allPackages, config);
if (config.updateChangelog) {
logger.log("Updating changelogs..."); // Now update the changelogs
const changelogPaths = await updateChangelog(releaseObj, config);
if (config.commit) {
for (const changelogPath of changelogPaths) {
await add(changelogPath, cwd);
}
}
}
logger.log("Removing changesets..."); // This should then reset the changesets folder to a blank state
removeFolders(changesetBase);
if (config.commit) {
await add(changesetBase, cwd);
logger.log("Committing changes..."); // TODO: Check if there are any unstaged changed before committing and throw
// , as it means something went super-odd.
await commit(publishCommit, cwd);
} else {
logger.log("All files have been updated. Review them and commit at your leisure");
logger.warn("If you alter version changes in package.jsons, make sure to run bolt before publishing to ensure the repo is in a valid state");
}
}
async function bumpReleasedPackages(releaseObj, allPackages, config) {
const versionsToUpdate = releaseObj.releases.reduce((cur, next) => _objectSpread({}, cur, {
[next.name]: next.version
}), {});
const {
graph
} = await getDependencyGraph(allPackages, config.cwd);
const internalDeps = Object.keys(versionsToUpdate).filter(dep => graph.has(dep));
const externalDeps = Object.keys(versionsToUpdate).filter(dep => !graph.has(dep));
if (externalDeps.length !== 0) {
logger.warn(`Attempted to pass external dependencies to updatePackageVersions:\n${externalDeps.join(", ")}`);
} // for each package, even non-released ones we:
// Check if all its things are still in semver
// IF they are not AND it's not being released, collect an error about it
// If the package is being released, modify its
for (const pkg of allPackages) {
const newPkgJSON = _objectSpread({}, pkg.config);
const inUpdatedPackages = internalDeps.includes(pkg.name);
for (const depName of internalDeps) {
const depRange = String(getDependencyVersionRange$1(depName, pkg.config));
const depTypes = getDependencyTypes(depName, pkg.config);
const rangeType = versionRangeToRangeType(depRange);
const newDepRange = rangeType + versionsToUpdate[depName];
if (depTypes.length === 0) continue;
const willLeaveSemverRange = !semver.satisfies(versionsToUpdate[depName], depRange); // This check determines whether the package will be released. If the
// package will not be released, we throw.
if (!inUpdatedPackages && willLeaveSemverRange) {
// TODO: this error message was copied directly from bolt
// it seems wrong, shouldn't this case be covered by dependents stuff
// and this should be something like "this should never happen, please open an issue because there's probably a bug in changesets"
throw new Error(`${importantSeparator}
${pkg.name} has a dependency on ${depName} at ${depRange}, however the new version of ${// TODO: look into this, accessing internalDeps[depName] seems wrong
internalDeps[depName]} leaves this range.
You will need to make a new changeset that includes an update to ${pkg.name}
${importantSeparator}`);
}
for (const depType of depTypes) {
newPkgJSON[depType][depName] = newDepRange;
}
}
if (!inUpdatedPackages) continue;
const pkgDir = pkg.dir;
const pkgJsonPath = path.join(pkgDir, "package.json");
const pkgJsonRaw = await fs.readFile(pkgJsonPath, "utf-8");
const indent = detectIndent(pkgJsonRaw).indent || " ";
newPkgJSON.version = versionsToUpdate[pkg.name];
const pkgJsonStr = JSON.stringify(newPkgJSON, null, indent);
await fs.writeFile(pkgJsonPath, pkgJsonStr);
if (config.commit) {
await add(pkgJsonPath, config.cwd);
}
}
}
class ExitError extends Error {
constructor(code) {
super("the process exited with code: " + code);
this.code = code;
}
}
const npmRequestLimit = pLimit(40);
function getCorrectRegistry() {
let registry = process.env.npm_config_registry === "https://registry.yarnpkg.com" ? undefined : process.env.npm_config_registry;
return registry;
}
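// For context: under yarn, process.env.npm_config_registry is set to
// "https://registry.yarnpkg.com", which this treats as "no custom registry"
// (returning undefined) so the npm CLI falls back to its own default registry.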
async function getTokenIsRequired() {
// Due to a super annoying issue in yarn, we have to manually override this env variable
// See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
const envOverride = {
npm_config_registry: getCorrectRegistry()
};
let result = await spawn$1("npm", ["profile", "get", "--json"], {
env: Object.assign({}, process.env, envOverride)
});
let json = JSON.parse(result.stdout.toString());
if (json.error) {
logger.error(`an error occurred while running \`npm profile get\`: ${json.error.code}`);
logger.error(json.error.summary);
if (json.error.detail) logger.error(json.error.detail);
throw new ExitError(1);
}
return json.tfa.mode === "auth-and-writes";
}
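// (npm 2FA has two modes, "auth-only" and "auth-and-writes"; only the latter
// requires a one-time password when publishing, which is what the check above
// detects so we know whether to prompt for an OTP.)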
function info$1(pkgName) {
return npmRequestLimit(async () => {
logger.info(`npm info ${pkgName}`); // Due to a couple of issues with yarnpkg, we also want to override the npm registry when doing
// npm info.
// Issues: We sometimes get back cached responses, i.e old data about packages which causes
// `publish` to behave incorrectly. It can also cause issues when publishing private packages
// as they will always give a 404, which will tell `publish` to always try to publish.
// See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
const envOverride = {
npm_config_registry: getCorrectRegistry()
};
let result = await spawn$1("npm", ["info", pkgName, "--json"], {
env: Object.assign({}, process.env, envOverride)
});
return JSON.parse(result.stdout.toString());
});
}
async function infoAllow404(pkgName) {
let pkgInfo = await info$1(pkgName);
if (pkgInfo.error && pkgInfo.error.code === "E404") {
logger.warn(`Received 404 for npm info ${chalk.cyan(`"${pkgName}"`)}`);
return {
published: false,
pkgInfo: {}
};
}
if (pkgInfo.error) {
logger.error(`Received an unknown error code: ${pkgInfo.error.code} for npm info ${chalk.cyan(`"${pkgName}"`)}`);
logger.error(pkgInfo.error.summary);
if (pkgInfo.error.detail) logger.error(pkgInfo.error.detail);
throw new ExitError(1);
}
return {
published: true,
pkgInfo
};
}
let otpAskLimit = pLimit(1);
let askForOtpCode = twoFactorState => otpAskLimit(async () => {
if (twoFactorState.token !== null) return twoFactorState.token;
logger.info("This operation requires a one-time password from your authenticator.");
let val = await askQuestion("Enter one-time password:");
twoFactorState.token = val;
return val;
});
let getOtpCode = async twoFactorState => {
if (twoFactorState.token !== null) {
return twoFactorState.token;
}
return askForOtpCode(twoFactorState);
}; // we have this so that we can try a publish again after a failed publish without
// the call being wrapped in the npm request limit and causing the publishes to potentially never run
async function internalPublish(pkgName, opts, twoFactorState) {
let publishFlags = opts.access ? ["--access", opts.access] : [];
if (twoFactorState.isRequired) {
let otpCode = await getOtpCode(twoFactorState);
publishFlags.push("--otp", otpCode);
} // Due to a super annoying issue in yarn, we have to manually override this env variable
// See: https://github.com/yarnpkg/yarn/issues/2935#issuecomment-355292633
const envOverride = {
npm_config_registry: getCorrectRegistry()
};
let {
stdout
} = await spawn$1("npm", ["publish", "--json", ...publishFlags], {
cwd: opts.cwd,
env: Object.assign({}, process.env, envOverride)
});
let json = JSON.parse(stdout.toString());
if (json.error) {
if (json.error.code === "EOTP" && !isCI) {
if (twoFactorState.token !== null) {
// the current otp code must be invalid since it errored
twoFactorState.token = null;
} // just in case this isn't already true
twoFactorState.isRequired = Promise.resolve(true);
return internalPublish(pkgName, opts, twoFactorState);
}
logger.error(`an error occurred while publishing ${pkgName}: ${json.error.code}`, json.error.summary, json.error.detail ? "\n" + json.error.detail : "");
return {
published: false
};
}
return {
published: true
};
}
function publish(pkgName, opts, twoFactorState) {
return npmRequestLimit(() => {
return internalPublish(pkgName, opts, twoFactorState);
});
}
async function publishPackages({
cwd,
access,
otp
}) {
const packages = await getWorkspaces({
cwd
});
const publicPackages = packages.filter(pkg => !pkg.config.private);
let twoFactorState = otp === undefined ? {
token: null,
// note: we're not awaiting this here, we want this request to happen in parallel with getUnpublishedPackages
isRequired: getTokenIsRequired()
} : {
token: otp,
isRequired: Promise.resolve(true)
};
const unpublishedPackagesInfo = await getUnpublishedPackages(publicPackages);
const unpublishedPackages = publicPackages.filter(pkg => {
return unpublishedPackagesInfo.some(p => pkg.name === p.name);
});
if (unpublishedPackagesInfo.length === 0) {
logger.warn("No unpublished packages to publish");
}
const publishedPackages = await Promise.all(unpublishedPackages.map(pkg => publishAPackage(pkg, access, twoFactorState)));
return publishedPackages;
}
async function publishAPackage(pkg, access, twoFactorState) {
const {
name,
version
} = pkg.config;
logger.info(`Publishing ${chalk.cyan(`"${name}"`)} at ${chalk.green(`"${version}"`)}`);
const publishDir = pkg.dir;
const publishConfirmation = await publish(name, {
cwd: publishDir,
access
}, twoFactorState);
return {
name,
newVersion: version,
published: publishConfirmation.published
};
}
async function getUnpublishedPackages(packages) {
const results = await Promise.all(packages.map(async pkg => {
const config = pkg.config;
const response = await infoAllow404(config.name);
return {
name: config.name,
localVersion: config.version,
isPublished: response.published,
publishedVersion: response.pkgInfo.version || ""
};
}));
const packagesToPublish = [];
for (const pkgInfo of results) {
const {
name,
isPublished,
localVersion,
publishedVersion
} = pkgInfo;
if (!isPublished) {
packagesToPublish.push(pkgInfo);
} else if (semver.gt(localVersion, publishedVersion)) {
packagesToPublish.push(pkgInfo);
logger.info(`${name} is being published because our local version (${localVersion}) is ahead of npm's (${publishedVersion})`);
} else if (semver.lt(localVersion, publishedVersion)) {
// If the local version is behind npm, something is wrong, we warn here, and by not getting published later, it will fail
logger.warn(`${name} is not being published because version ${publishedVersion} is already published on npm and we are trying to publish version ${localVersion}`);
}
}
return packagesToPublish;
}
function logReleases(pkgs) {
const mappedPkgs = pkgs.map(p => `${p.name}@${p.newVersion}`).join("\n");
logger.log(mappedPkgs);
}
async function run(opts) {
const userConfig = await resolveConfig(opts.cwd);
const userPublishOptions = userConfig && userConfig.publishOptions ? userConfig.publishOptions : {};
const config = _objectSpread({}, defaultConfig.publishOptions, userPublishOptions, opts);
const response = await publishPackages({
cwd: config.cwd || process.cwd(),
// if not public, we won't pass the access, and it works as normal
access: config.public ? "public" : undefined,
otp: opts.otp
});
const successful = response.filter(p => p.published);
const unsuccessful = response.filter(p => !p.published);
if (successful.length > 0) {
logger.success("packages published successfully:");
logReleases(successful); // We create the tags after the push above so that we know that HEAD won't change and that pushing
// won't suffer from a race condition if another merge happens in the meantime (pushing tags won't
// fail if we are behind master).
logger.log("Creating tags...");
for (const pkg of successful) {
const tag$1 = `${pkg.name}@${pkg.newVersion}`;
logger.log("New tag: ", tag$1);
await tag(tag$1, config.cwd);
}
}
if (unsuccessful.length > 0) {
logger.error("packages failed to publish:");
logReleases(unsuccessful);
throw new ExitError(1);
}
}
async function getStatus(_ref) {
let {
cwd,
sinceMaster,
verbose,
output
} = _ref,
opts = _objectWithoutProperties(_ref, ["cwd", "sinceMaster", "verbose", "output"]);
let userConfig = await resolveConfig(cwd);
userConfig = userConfig && userConfig.versionOptions ? userConfig.versionOptions : {};
const config = _objectSpread({}, defaultConfig.versionOptions, userConfig, opts);
const changesetBase = await getChangesetBase(cwd);
const allPackages = await getWorkspaces({
cwd
}); // TODO: Check if we are on master and give a different error message if we are
const changesets = await getChangesets(changesetBase, sinceMaster);
if (changesets.length < 1) {
logger.error("No changesets present");
process.exit(1);
}
const releaseObject = createRelease(changesets, allPackages, config.linked);
const {
releases
} = releaseObject;
logger.log("---");
if (output) {
await fs.writeFile(path.join(cwd, output), JSON.stringify(releaseObject, undefined, 2));
return;
}
const print = verbose ? verbosePrint : SimplePrint;
print("patch", releases);
logger.log("---");
print("minor", releases);
logger.log("---");
print("major", releases);
return releaseObject;
}
function SimplePrint(type, releases) {
const packages = releases.filter(r => r.type === type);
if (packages.length) {
logger.info(chalk`Packages to be bumped at {green ${type}}:\n`);
const pkgs = packages.map(({
name
}) => `- ${name}`).join("\n");
logger.log(chalk.green(pkgs));
} else {
logger.info(chalk`{red NO} packages to be bumped at {green ${type}}`);
}
}
function verbosePrint(type, releases) {
const packages = releases.filter(r => r.type === type);
if (packages.length) {
logger.info(chalk`Packages to be bumped at {green ${type}}`);
const columns = packages.map(({
name,
version,
changesets
}) => [chalk.green(name), version, changesets.map(c => chalk.blue(` .changeset/${c}/changes.md`)).join(" +")]);
const t1 = table([{
value: "Package Name",
width: 20
}, {
value: "New Version",
width: 20
}, {
value: "Related Changeset Summaries",
width: 70
}], columns, {
paddingLeft: 1,
paddingRight: 0,
headerAlign: "center",
align: "left"
});
logger.log(t1.render() + "\n");
} else {
logger.info(chalk`Running release would release {red NO} packages as a {green ${type}}`);
}
}
const {
input,
flags
} = meow(`
Usage
$ changesets [command]
Commands
init
add [--commit]
bump [--commit --update-changelog --skip-ci]
release [--public --otp=code]
status [--since-master --verbose --output=JSON_FILE.json]
`, {
flags: {
commit: {
type: "boolean",
// Command line options need to be undefined, otherwise their
// default value overrides the user's provided config in their
// config file
default: undefined
},
updateChangelog: {
type: "boolean",
default: undefined
},
skipCI: {
type: "boolean",
default: undefined
},
public: {
type: "boolean",
default: undefined
},
sinceMaster: {
type: "boolean"
},
verbose: {
type: "boolean",
alias: "v"
},
output: {
type: "string",
alias: "o"
},
otp: {
type: "string",
default: undefined
}
}
});
const cwd = process.cwd();
(async () => {
if (input.length < 1) {
await add$1({
cwd
});
} else if (input.length > 1) {
logger.error("Too many arguments passed to changesets - we only accept the command name as an argument");
} else {
const {
commit,
updateChangelog,
skipCI,
public: isPublic,
sinceMaster,
verbose,
output,
otp
} = flags; // Command line options need to be undefined, otherwise their
// default value overrides the user's provided config in their
// config file. For this reason, we only assign them to this
// object as and when they exist.
const config = {
cwd
};
try {
switch (input[0]) {
case "init":
{
await init({
cwd
});
return;
}
case "add":
{
if (commit !== undefined) {
config.commit = commit;
}
await add$1(config);
return;
}
case "bump":
{
// We only assign them to this
// object as and when they exist.
if (updateChangelog !== undefined) {
config.updateChangelog = updateChangelog;
}
if (skipCI !== undefined) {
config.skipCI = skipCI;
}
if (commit !== undefined) {
config.commit = commit;
}
await version(config);
return;
}
case "release":
{
if (isPublic !== undefined) {
// This exists as
config.public = isPublic;
}
config.otp = otp;
await run(config);
return;
}
case "status":
{
await getStatus({
cwd,
sinceMaster,
verbose,
output
});
return;
}
default:
{
logger.error(`Invalid command ${input[0]} was provided`);
}
}
} catch (err) {
if (err instanceof ExitError) {
return process.exit(err.code);
}
throw err;
}
}
})();
module.exports = require("/Users/bconolly/Development/changesets/packages/cli/src/index.js")
package.json
{
"name": "@changesets/cli",
"version": "1.3.0",
"version": "1.3.1",
"description": "Organise your package versioning and publishing to make both contributors and maintainers happy",

@@ -22,2 +22,3 @@ "bin": {

"@types/uuid": "^3.4.4",
"@changesets/git": "^0.1.0",
"boxen": "^1.3.0",

@@ -40,3 +41,2 @@ "chalk": "^2.1.0",

"prettier": "^1.14.3",
"projector-spawn": "^1.0.1",
"semver": "^5.4.1",

@@ -43,0 +43,0 @@ "spawndamnit": "^2.0.0",

README.md
@@ -7,3 +7,5 @@ ## @changeset/cli 🦋

This package is intended as a successor to `@atlaskit/build-releases` with a more general focus.
This package is intended as a successor to `@atlaskit/build-releases` with a more general focus. It works in
[bolt](https://www.npmjs.com/package/bolt) multi-package repositories, [yarn workspaces] multi-package repositories, and
in single-package repositories.

@@ -40,3 +42,3 @@ ## Getting Started

A single `changeset` is an intent to release stored as data, with the information we need to combine multiple changesets and coordinate releases. We also work along bolt's structure guidelines to make sure that packages within a mono-repository will all depend on the latest versions of each other. This approach comes from [bolt](https://www.npmjs.com/package/bolt).
A single `changeset` is an intent to release stored as data, with the information we need to combine multiple changesets and coordinate releases. It will also update internal dependencies within a multi-package repository.

@@ -43,0 +45,0 @@ ## Base workflow
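For illustration, a hedged sketch of what a single changeset looks like on disk (matching the layout the bundled `writeChangeset` above produces; the ID and package name here are hypothetical):

.changeset/
  big-lions-smile/
    changes.md   — the human-written summary that gets prepended to each CHANGELOG
    changes.json — metadata, e.g. { "releases": [{ "name": "pkg-a", "type": "patch" }], "dependents": [] }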
