
commit-and-tag-version

Comparing version 12.0.0 to 12.1.0

.gitattributes


bin/cli.js

@@ -6,10 +6,10 @@ #!/usr/bin/env node

console.error(
'commit-and-tag-version: Node v6 or greater is required. `commit-and-tag-version` did not run.'
)
'commit-and-tag-version: Node v6 or greater is required. `commit-and-tag-version` did not run.',
);
} else {
const standardVersion = require('../index')
const cmdParser = require('../command')
const standardVersion = require('../index');
const cmdParser = require('../command');
standardVersion(cmdParser.argv).catch(() => {
process.exit(1)
})
process.exit(1);
});
}

@@ -5,2 +5,9 @@ # Changelog

## [12.1.0](https://github.com/absolute-version/commit-and-tag-version/compare/v12.0.0...v12.1.0) (2024-01-06)
### Features
* Add signoff option ([#120](https://github.com/absolute-version/commit-and-tag-version/issues/120)) ([d107e38](https://github.com/absolute-version/commit-and-tag-version/commit/d107e38eb906dfb21658d12803b7308f2e3dda7d))
## [12.0.0](https://github.com/absolute-version/commit-and-tag-version/compare/v11.3.0...v12.0.0) (2023-10-31)

@@ -7,0 +14,0 @@

@@ -1,4 +0,4 @@

const spec = require('conventional-changelog-config-spec')
const { getConfiguration } = require('./lib/configuration')
const defaults = require('./defaults')
const spec = require('conventional-changelog-config-spec');
const { getConfiguration } = require('./lib/configuration');
const defaults = require('./defaults');

@@ -9,7 +9,7 @@ const yargs = require('yargs')

default: defaults.packageFiles,
array: true
array: true,
})
.option('bumpFiles', {
default: defaults.bumpFiles,
array: true
array: true,
})

@@ -21,3 +21,3 @@ .option('release-as', {

requiresArg: true,
string: true
string: true,
})

@@ -28,3 +28,3 @@ .option('prerelease', {

'make a pre-release with optional option value to specify a tag id',
string: true
string: true,
})

@@ -34,3 +34,3 @@ .option('infile', {

describe: 'Read the CHANGELOG from this file',
default: defaults.infile
default: defaults.infile,
})

@@ -41,3 +41,3 @@ .option('message', {

'[DEPRECATED] Commit message, replaces %s with new version.\nThis option will be removed in the next major version, please use --releaseCommitMessageFormat.',
type: 'string'
type: 'string',
})

@@ -48,3 +48,3 @@ .option('first-release', {

type: 'boolean',
default: defaults.firstRelease
default: defaults.firstRelease,
})

@@ -55,4 +55,9 @@ .option('sign', {

type: 'boolean',
default: defaults.sign
default: defaults.sign,
})
.option('signoff', {
describe: 'Should the git commit have a "Signed-off-by" trailer',
type: 'boolean',
default: defaults.signoff,
})
.option('no-verify', {

@@ -63,3 +68,3 @@ alias: 'n',

type: 'boolean',
default: defaults.noVerify
default: defaults.noVerify,
})

@@ -71,3 +76,3 @@ .option('commit-all', {

type: 'boolean',
default: defaults.commitAll
default: defaults.commitAll,
})

@@ -77,3 +82,3 @@ .option('silent', {

type: 'boolean',
default: defaults.silent
default: defaults.silent,
})

@@ -84,8 +89,9 @@ .option('tag-prefix', {

type: 'string',
default: defaults.tagPrefix
default: defaults.tagPrefix,
})
.option('release-count', {
describe: 'How many releases of changelog you want to generate. It counts from the upcoming release. Useful when you forgot to generate any previous changelog. Set to 0 to regenerate all.',
describe:
'How many releases of changelog you want to generate. It counts from the upcoming release. Useful when you forgot to generate any previous changelog. Set to 0 to regenerate all.',
type: 'number',
default: defaults.releaseCount
default: defaults.releaseCount,
})

@@ -95,3 +101,3 @@ .option('tag-force', {

type: 'boolean',
default: defaults.tagForce
default: defaults.tagForce,
})

@@ -101,7 +107,7 @@ .option('scripts', {

'Provide scripts to execute for lifecycle events (prebump, precommit, etc.,)',
default: defaults.scripts
default: defaults.scripts,
})
.option('skip', {
describe: 'Map of steps in the release process that should be skipped',
default: defaults.skip
default: defaults.skip,
})

@@ -111,3 +117,3 @@ .option('dry-run', {

default: defaults.dryRun,
describe: 'See the commands that running commit-and-tag-version would run'
describe: 'See the commands that running commit-and-tag-version would run',
})

@@ -118,7 +124,7 @@ .option('git-tag-fallback', {

describe:
'fallback to git tags for version, if no meta-information file is found (e.g., package.json)'
'fallback to git tags for version, if no meta-information file is found (e.g., package.json)',
})
.option('path', {
type: 'string',
describe: 'Only populate commits made under this path'
describe: 'Only populate commits made under this path',
})

@@ -128,3 +134,3 @@ .option('changelogHeader', {

describe:
'[DEPRECATED] Use a custom header when generating and updating changelog.\nThis option will be removed in the next major version, please use --header.'
'[DEPRECATED] Use a custom header when generating and updating changelog.\nThis option will be removed in the next major version, please use --header.',
})

@@ -134,7 +140,7 @@ .option('preset', {

default: defaults.preset,
describe: 'Commit message guideline preset'
describe: 'Commit message guideline preset',
})
.option('lerna-package', {
type: 'string',
describe: 'Name of the package from which the tags will be extracted'
describe: 'Name of the package from which the tags will be extracted',
})

@@ -144,11 +150,11 @@ .option('npmPublishHint', {

default: defaults.npmPublishHint,
describe: 'Customized publishing hint'
describe: 'Customized publishing hint',
})
.check((argv) => {
if (typeof argv.scripts !== 'object' || Array.isArray(argv.scripts)) {
throw Error('scripts must be an object')
throw Error('scripts must be an object');
} else if (typeof argv.skip !== 'object' || Array.isArray(argv.skip)) {
throw Error('skip must be an object')
throw Error('skip must be an object');
} else {
return true
return true;
}

@@ -161,3 +167,3 @@ })

'$0 -m "%s: see changelog for details"',
'Update changelog and tag release with custom commit message'
'Update changelog and tag release with custom commit message',
)

@@ -167,6 +173,6 @@ .pkgConf('standard-version')

.config(getConfiguration())
.wrap(97)
.wrap(97);
Object.keys(spec.properties).forEach((propertyKey) => {
const property = spec.properties[propertyKey]
const property = spec.properties[propertyKey];
yargs.option(propertyKey, {

@@ -176,6 +182,6 @@ type: property.type,

default: defaults[propertyKey] ? defaults[propertyKey] : property.default,
group: 'Preset Configuration:'
})
})
group: 'Preset Configuration:',
});
});
module.exports = yargs
module.exports = yargs;

@@ -1,2 +0,2 @@

const spec = require('conventional-changelog-config-spec')
const spec = require('conventional-changelog-config-spec');

@@ -7,2 +7,3 @@ const defaults = {

sign: false,
signoff: false,
noVerify: false,

@@ -19,4 +20,4 @@ commitAll: false,

preset: require.resolve('conventional-changelog-conventionalcommits'),
npmPublishHint: undefined
}
npmPublishHint: undefined,
};

@@ -27,5 +28,5 @@ /**

Object.keys(spec.properties).forEach((propertyKey) => {
const property = spec.properties[propertyKey]
defaults[propertyKey] = property.default
})
const property = spec.properties[propertyKey];
defaults[propertyKey] = property.default;
});

@@ -37,11 +38,11 @@ /**

defaults.header =
'# Changelog\n\nAll notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.\n'
'# Changelog\n\nAll notable changes to this project will be documented in this file. See [commit-and-tag-version](https://github.com/absolute-version/commit-and-tag-version) for commit guidelines.\n';
defaults.packageFiles = ['package.json', 'bower.json', 'manifest.json']
defaults.packageFiles = ['package.json', 'bower.json', 'manifest.json'];
defaults.bumpFiles = defaults.packageFiles.concat([
'package-lock.json',
'npm-shrinkwrap.json'
])
'npm-shrinkwrap.json',
]);
module.exports = defaults
module.exports = defaults;

@@ -1,17 +0,17 @@

const bump = require('./lib/lifecycles/bump')
const changelog = require('./lib/lifecycles/changelog')
const commit = require('./lib/lifecycles/commit')
const fs = require('fs')
const latestSemverTag = require('./lib/latest-semver-tag')
const path = require('path')
const printError = require('./lib/print-error')
const tag = require('./lib/lifecycles/tag')
const { resolveUpdaterObjectFromArgument } = require('./lib/updaters')
const bump = require('./lib/lifecycles/bump');
const changelog = require('./lib/lifecycles/changelog');
const commit = require('./lib/lifecycles/commit');
const fs = require('fs');
const latestSemverTag = require('./lib/latest-semver-tag');
const path = require('path');
const printError = require('./lib/print-error');
const tag = require('./lib/lifecycles/tag');
const { resolveUpdaterObjectFromArgument } = require('./lib/updaters');
module.exports = async function standardVersion (argv) {
const defaults = require('./defaults')
module.exports = async function standardVersion(argv) {
const defaults = require('./defaults');
/**
* `--message` (`-m`) support will be removed in the next major version.
*/
const message = argv.m || argv.message
const message = argv.m || argv.message;
if (message) {

@@ -23,7 +23,7 @@ /**

*/
argv.releaseCommitMessageFormat = message.replace(/%s/g, '{{currentTag}}')
argv.releaseCommitMessageFormat = message.replace(/%s/g, '{{currentTag}}');
if (!argv.silent) {
console.warn(
'[commit-and-tag-version]: --message (-m) will be removed in the next major release. Use --releaseCommitMessageFormat.'
)
'[commit-and-tag-version]: --message (-m) will be removed in the next major release. Use --releaseCommitMessageFormat.',
);
}

@@ -33,7 +33,7 @@ }

if (argv.changelogHeader) {
argv.header = argv.changelogHeader
argv.header = argv.changelogHeader;
if (!argv.silent) {
console.warn(
'[commit-and-tag-version]: --changelogHeader will be removed in the next major release. Use --header.'
)
'[commit-and-tag-version]: --changelogHeader will be removed in the next major release. Use --header.',
);
}

@@ -47,4 +47,4 @@ }

throw Error(
`custom changelog header must not match ${changelog.START_OF_LAST_RELEASE_PATTERN}`
)
`custom changelog header must not match ${changelog.START_OF_LAST_RELEASE_PATTERN}`,
);
}

@@ -56,13 +56,13 @@

if (argv.packageFiles) {
defaults.bumpFiles = defaults.bumpFiles.concat(argv.packageFiles)
defaults.bumpFiles = defaults.bumpFiles.concat(argv.packageFiles);
}
const args = Object.assign({}, defaults, argv)
let pkg
const args = Object.assign({}, defaults, argv);
let pkg;
for (const packageFile of args.packageFiles) {
const updater = resolveUpdaterObjectFromArgument(packageFile)
if (!updater) return
const pkgPath = path.resolve(process.cwd(), updater.filename)
const updater = resolveUpdaterObjectFromArgument(packageFile);
if (!updater) return;
const pkgPath = path.resolve(process.cwd(), updater.filename);
try {
const contents = fs.readFileSync(pkgPath, 'utf8')
const contents = fs.readFileSync(pkgPath, 'utf8');
pkg = {

@@ -73,25 +73,27 @@ version: updater.updater.readVersion(contents),

? updater.updater.isPrivate(contents)
: false
}
break
} catch (err) {}
: false,
};
break;
} catch (err) {
/* This probably shouldn't be empty? */
}
}
try {
let version
let version;
if (pkg && pkg.version) {
version = pkg.version
version = pkg.version;
} else if (args.gitTagFallback) {
version = await latestSemverTag(args.tagPrefix)
version = await latestSemverTag(args.tagPrefix);
} else {
throw new Error('no package file found')
throw new Error('no package file found');
}
const newVersion = await bump(args, version)
await changelog(args, newVersion)
await commit(args, newVersion)
await tag(newVersion, pkg ? pkg.private : false, args)
const newVersion = await bump(args, version);
await changelog(args, newVersion);
await commit(args, newVersion);
await tag(newVersion, pkg ? pkg.private : false, args);
} catch (err) {
printError(args, err.message)
throw err
printError(args, err.message);
throw err;
}
}
};

@@ -1,12 +0,23 @@

const chalk = require('chalk')
const figures = require('figures')
const util = require('util')
const chalk = require('chalk');
const figures = require('figures');
const util = require('util');
module.exports = function (argv, msg, args, figure) {
const defaultFigure = argv.dryRun ? chalk.yellow(figures.tick) : chalk.green(figures.tick)
const defaultFigure = argv.dryRun
? chalk.yellow(figures.tick)
: chalk.green(figures.tick);
if (!argv.silent) {
console.info((figure || defaultFigure) + ' ' + util.format.apply(util, [msg].concat(args.map(function (arg) {
return chalk.bold(arg)
}))))
console.info(
(figure || defaultFigure) +
' ' +
util.format.apply(
util,
[msg].concat(
args.map(function (arg) {
return chalk.bold(arg);
}),
),
),
);
}
}
};

@@ -1,4 +0,4 @@

const path = require('path')
const findUp = require('find-up')
const { readFileSync } = require('fs')
const path = require('path');
const findUp = require('find-up');
const { readFileSync } = require('fs');

@@ -9,21 +9,21 @@ const CONFIGURATION_FILES = [

'.versionrc.json',
'.versionrc.js'
]
'.versionrc.js',
];
module.exports.getConfiguration = function () {
let config = {}
const configPath = findUp.sync(CONFIGURATION_FILES)
let config = {};
const configPath = findUp.sync(CONFIGURATION_FILES);
if (!configPath) {
return config
return config;
}
const ext = path.extname(configPath)
const ext = path.extname(configPath);
if (ext === '.js' || ext === '.cjs') {
const jsConfiguration = require(configPath)
const jsConfiguration = require(configPath);
if (typeof jsConfiguration === 'function') {
config = jsConfiguration()
config = jsConfiguration();
} else {
config = jsConfiguration
config = jsConfiguration;
}
} else {
config = JSON.parse(readFileSync(configPath))
config = JSON.parse(readFileSync(configPath));
}

@@ -37,7 +37,7 @@

throw Error(
`[commit-and-tag-version] Invalid configuration in ${configPath} provided. Expected an object but found ${typeof config}.`
)
`[commit-and-tag-version] Invalid configuration in ${configPath} provided. Expected an object but found ${typeof config}.`,
);
}
return config
}
return config;
};

@@ -7,4 +7,4 @@ /**

const { promises: fs } = require('fs')
const { resolve } = require('path')
const { promises: fs } = require('fs');
const { resolve } = require('path');

@@ -14,42 +14,42 @@ /**

*/
async function pathExists (p) {
async function pathExists(p) {
try {
await fs.access(p)
return true
await fs.access(p);
return true;
} catch {
return false
return false;
}
}
function getTypeofLockFile (cwd = '.') {
function getTypeofLockFile(cwd = '.') {
return Promise.all([
pathExists(resolve(cwd, 'yarn.lock')),
pathExists(resolve(cwd, 'package-lock.json')),
pathExists(resolve(cwd, 'pnpm-lock.yaml'))
pathExists(resolve(cwd, 'pnpm-lock.yaml')),
]).then(([isYarn, isNpm, isPnpm]) => {
let value = null
let value = null;
if (isYarn) {
value = 'yarn'
value = 'yarn';
} else if (isPnpm) {
value = 'pnpm'
value = 'pnpm';
} else if (isNpm) {
value = 'npm'
value = 'npm';
}
return value
})
return value;
});
}
const detectPMByLockFile = async (cwd) => {
const type = await getTypeofLockFile(cwd)
const type = await getTypeofLockFile(cwd);
if (type) {
return type
return type;
}
return 'npm'
}
return 'npm';
};
module.exports = {
detectPMByLockFile
}
detectPMByLockFile,
};
module.exports = function (rawMsg, newVersion) {
const message = String(rawMsg)
return message.replace(/{{currentTag}}/g, newVersion)
}
const message = String(rawMsg);
return message.replace(/{{currentTag}}/g, newVersion);
};

@@ -1,3 +0,3 @@

const gitSemverTags = require('git-semver-tags')
const semver = require('semver')
const gitSemverTags = require('git-semver-tags');
const semver = require('semver');

@@ -7,12 +7,14 @@ module.exports = function (tagPrefix = undefined) {

gitSemverTags({ tagPrefix }, function (err, tags) {
if (err) return reject(err)
else if (!tags.length) return resolve('1.0.0')
if (err) return reject(err);
else if (!tags.length) return resolve('1.0.0');
// Respect tagPrefix
tags = tags.map(tag => tag.replace(new RegExp('^' + tagPrefix), ''))
tags = tags.map((tag) => tag.replace(new RegExp('^' + tagPrefix), ''));
// ensure that the largest semver tag is at the head.
tags = tags.map(tag => { return semver.clean(tag) })
tags.sort(semver.rcompare)
return resolve(tags[0])
})
})
}
tags = tags.map((tag) => {
return semver.clean(tag);
});
tags.sort(semver.rcompare);
return resolve(tags[0]);
});
});
};

@@ -1,74 +0,101 @@

'use strict'
'use strict';
const chalk = require('chalk')
const checkpoint = require('../checkpoint')
const conventionalRecommendedBump = require('conventional-recommended-bump')
const figures = require('figures')
const fs = require('fs')
const DotGitignore = require('dotgitignore')
const path = require('path')
const presetLoader = require('../preset-loader')
const runLifecycleScript = require('../run-lifecycle-script')
const semver = require('semver')
const writeFile = require('../write-file')
const { resolveUpdaterObjectFromArgument } = require('../updaters')
let configsToUpdate = {}
const chalk = require('chalk');
const checkpoint = require('../checkpoint');
const conventionalRecommendedBump = require('conventional-recommended-bump');
const figures = require('figures');
const fs = require('fs');
const DotGitignore = require('dotgitignore');
const path = require('path');
const presetLoader = require('../preset-loader');
const runLifecycleScript = require('../run-lifecycle-script');
const semver = require('semver');
const writeFile = require('../write-file');
const { resolveUpdaterObjectFromArgument } = require('../updaters');
let configsToUpdate = {};
async function Bump (args, version) {
async function Bump(args, version) {
// reset the cache of updated config files each
// time we perform the version bump step.
configsToUpdate = {}
configsToUpdate = {};
if (args.skip.bump) return version
if (args.skip.bump) return version;
if (args.releaseAs && !(['major', 'minor', 'patch'].includes(args.releaseAs.toLowerCase()) || semver.valid(args.releaseAs))) {
throw new Error("releaseAs must be one of 'major', 'minor' or 'patch', or a valid semvar version.")
if (
args.releaseAs &&
!(
['major', 'minor', 'patch'].includes(args.releaseAs.toLowerCase()) ||
semver.valid(args.releaseAs)
)
) {
throw new Error(
"releaseAs must be one of 'major', 'minor' or 'patch', or a valid semvar version.",
);
}
let newVersion = version
await runLifecycleScript(args, 'prerelease')
const stdout = await runLifecycleScript(args, 'prebump')
let newVersion = version;
await runLifecycleScript(args, 'prerelease');
const stdout = await runLifecycleScript(args, 'prebump');
if (stdout?.trim().length) {
const prebumpString = stdout.trim()
if (semver.valid(prebumpString)) args.releaseAs = prebumpString
const prebumpString = stdout.trim();
if (semver.valid(prebumpString)) args.releaseAs = prebumpString;
}
if (!args.firstRelease) {
if (semver.valid(args.releaseAs)) {
const releaseAs = new semver.SemVer(args.releaseAs)
if (isString(args.prerelease) && releaseAs.prerelease.length && releaseAs.prerelease.slice(0, -1).join('.') !== args.prerelease) {
const releaseAs = new semver.SemVer(args.releaseAs);
if (
isString(args.prerelease) &&
releaseAs.prerelease.length &&
releaseAs.prerelease.slice(0, -1).join('.') !== args.prerelease
) {
// If both releaseAs and the prerelease identifier are supplied, they must match. The behavior
// for a mismatch is undefined, so error out instead.
throw new Error('releaseAs and prerelease have conflicting prerelease identifiers')
throw new Error(
'releaseAs and prerelease have conflicting prerelease identifiers',
);
} else if (isString(args.prerelease) && releaseAs.prerelease.length) {
newVersion = releaseAs.version
newVersion = releaseAs.version;
} else if (isString(args.prerelease)) {
newVersion = `${releaseAs.major}.${releaseAs.minor}.${releaseAs.patch}-${args.prerelease}.0`
newVersion = `${releaseAs.major}.${releaseAs.minor}.${releaseAs.patch}-${args.prerelease}.0`;
} else {
newVersion = releaseAs.version
newVersion = releaseAs.version;
}
// Check if the previous version is the same version and prerelease, and increment if so
if (isString(args.prerelease) && ['prerelease', null].includes(semver.diff(version, newVersion)) && semver.lte(newVersion, version)) {
newVersion = semver.inc(version, 'prerelease', args.prerelease)
if (
isString(args.prerelease) &&
['prerelease', null].includes(semver.diff(version, newVersion)) &&
semver.lte(newVersion, version)
) {
newVersion = semver.inc(version, 'prerelease', args.prerelease);
}
// Append any build info from releaseAs
newVersion = semvarToVersionStr(newVersion, releaseAs.build)
newVersion = semvarToVersionStr(newVersion, releaseAs.build);
} else {
const release = await bumpVersion(args.releaseAs, version, args)
const releaseType = getReleaseType(args.prerelease, release.releaseType, version)
const release = await bumpVersion(args.releaseAs, version, args);
const releaseType = getReleaseType(
args.prerelease,
release.releaseType,
version,
);
newVersion = semver.inc(version, releaseType, args.prerelease)
newVersion = semver.inc(version, releaseType, args.prerelease);
}
updateConfigs(args, newVersion)
updateConfigs(args, newVersion);
} else {
checkpoint(args, 'skip version bump on first release', [], chalk.red(figures.cross))
checkpoint(
args,
'skip version bump on first release',
[],
chalk.red(figures.cross),
);
}
await runLifecycleScript(args, 'postbump')
return newVersion
await runLifecycleScript(args, 'postbump');
return newVersion;
}
Bump.getUpdatedConfigs = function () {
return configsToUpdate
}
return configsToUpdate;
};

@@ -81,24 +108,26 @@ /**

*/
function semvarToVersionStr (semverVersion, semverBuild) {
return [semverVersion, semverBuild.join('.')].filter(Boolean).join('+')
function semvarToVersionStr(semverVersion, semverBuild) {
return [semverVersion, semverBuild.join('.')].filter(Boolean).join('+');
}
function getReleaseType (prerelease, expectedReleaseType, currentVersion) {
function getReleaseType(prerelease, expectedReleaseType, currentVersion) {
if (isString(prerelease)) {
if (isInPrerelease(currentVersion)) {
if (shouldContinuePrerelease(currentVersion, expectedReleaseType) ||
getTypePriority(getCurrentActiveType(currentVersion)) > getTypePriority(expectedReleaseType)
if (
shouldContinuePrerelease(currentVersion, expectedReleaseType) ||
getTypePriority(getCurrentActiveType(currentVersion)) >
getTypePriority(expectedReleaseType)
) {
return 'prerelease'
return 'prerelease';
}
}
return 'pre' + expectedReleaseType
return 'pre' + expectedReleaseType;
} else {
return expectedReleaseType
return expectedReleaseType;
}
}
function isString (val) {
return typeof val === 'string'
function isString(val) {
return typeof val === 'string';
}

@@ -115,11 +144,11 @@

*/
function shouldContinuePrerelease (version, expectType) {
return getCurrentActiveType(version) === expectType
function shouldContinuePrerelease(version, expectType) {
return getCurrentActiveType(version) === expectType;
}
function isInPrerelease (version) {
return Array.isArray(semver.prerelease(version))
function isInPrerelease(version) {
return Array.isArray(semver.prerelease(version));
}
const TypeList = ['major', 'minor', 'patch'].reverse()
const TypeList = ['major', 'minor', 'patch'].reverse();

@@ -132,7 +161,7 @@ /**

*/
function getCurrentActiveType (version) {
const typelist = TypeList
function getCurrentActiveType(version) {
const typelist = TypeList;
for (let i = 0; i < typelist.length; i++) {
if (semver[typelist[i]](version)) {
return typelist[i]
return typelist[i];
}

@@ -149,29 +178,35 @@ }

*/
function getTypePriority (type) {
return TypeList.indexOf(type)
function getTypePriority(type) {
return TypeList.indexOf(type);
}
function bumpVersion (releaseAs, currentVersion, args) {
function bumpVersion(releaseAs, currentVersion, args) {
return new Promise((resolve, reject) => {
if (releaseAs) {
return resolve({
releaseType: releaseAs
})
releaseType: releaseAs,
});
} else {
const presetOptions = presetLoader(args)
const presetOptions = presetLoader(args);
if (typeof presetOptions === 'object') {
if (semver.lt(currentVersion, '1.0.0')) presetOptions.preMajor = true
if (semver.lt(currentVersion, '1.0.0')) presetOptions.preMajor = true;
}
conventionalRecommendedBump({
debug: args.verbose && console.info.bind(console, 'conventional-recommended-bump'),
preset: presetOptions,
path: args.path,
tagPrefix: args.tagPrefix,
lernaPackage: args.lernaPackage
}, args.parserOpts, function (err, release) {
if (err) return reject(err)
else return resolve(release)
})
conventionalRecommendedBump(
{
debug:
args.verbose &&
console.info.bind(console, 'conventional-recommended-bump'),
preset: presetOptions,
path: args.path,
tagPrefix: args.tagPrefix,
lernaPackage: args.lernaPackage,
},
args.parserOpts,
function (err, release) {
if (err) return reject(err);
else return resolve(release);
},
);
}
})
});
}

@@ -185,37 +220,33 @@

*/
function updateConfigs (args, newVersion) {
const dotgit = DotGitignore()
function updateConfigs(args, newVersion) {
const dotgit = DotGitignore();
args.bumpFiles.forEach(function (bumpFile) {
const updater = resolveUpdaterObjectFromArgument(bumpFile)
const updater = resolveUpdaterObjectFromArgument(bumpFile);
if (!updater) {
return
return;
}
const configPath = path.resolve(process.cwd(), updater.filename)
const configPath = path.resolve(process.cwd(), updater.filename);
try {
if (dotgit.ignore(updater.filename)) return
const stat = fs.lstatSync(configPath)
if (dotgit.ignore(updater.filename)) return;
const stat = fs.lstatSync(configPath);
if (!stat.isFile()) return
const contents = fs.readFileSync(configPath, 'utf8')
const newContents = updater.updater.writeVersion(contents, newVersion)
const realNewVersion = updater.updater.readVersion(newContents)
if (!stat.isFile()) return;
const contents = fs.readFileSync(configPath, 'utf8');
const newContents = updater.updater.writeVersion(contents, newVersion);
const realNewVersion = updater.updater.readVersion(newContents);
checkpoint(
args,
'bumping version in ' + updater.filename + ' from %s to %s',
[updater.updater.readVersion(contents), realNewVersion]
)
writeFile(
args,
configPath,
newContents
)
[updater.updater.readVersion(contents), realNewVersion],
);
writeFile(args, configPath, newContents);
// flag any config files that we modify the version # for
// as having been updated.
configsToUpdate[updater.filename] = true
configsToUpdate[updater.filename] = true;
} catch (err) {
if (err.code !== 'ENOENT') console.warn(err.message)
if (err.code !== 'ENOENT') console.warn(err.message);
}
})
});
}
module.exports = Bump
module.exports = Bump;

@@ -1,20 +0,21 @@

const chalk = require('chalk')
const checkpoint = require('../checkpoint')
const conventionalChangelog = require('conventional-changelog')
const fs = require('fs')
const presetLoader = require('../preset-loader')
const runLifecycleScript = require('../run-lifecycle-script')
const writeFile = require('../write-file')
const START_OF_LAST_RELEASE_PATTERN = /(^#+ \[?[0-9]+\.[0-9]+\.[0-9]+|<a name=)/m
const chalk = require('chalk');
const checkpoint = require('../checkpoint');
const conventionalChangelog = require('conventional-changelog');
const fs = require('fs');
const presetLoader = require('../preset-loader');
const runLifecycleScript = require('../run-lifecycle-script');
const writeFile = require('../write-file');
const START_OF_LAST_RELEASE_PATTERN =
/(^#+ \[?[0-9]+\.[0-9]+\.[0-9]+|<a name=)/m;
async function Changelog (args, newVersion) {
if (args.skip.changelog) return
await runLifecycleScript(args, 'prechangelog')
await outputChangelog(args, newVersion)
await runLifecycleScript(args, 'postchangelog')
async function Changelog(args, newVersion) {
if (args.skip.changelog) return;
await runLifecycleScript(args, 'prechangelog');
await outputChangelog(args, newVersion);
await runLifecycleScript(args, 'postchangelog');
}
Changelog.START_OF_LAST_RELEASE_PATTERN = START_OF_LAST_RELEASE_PATTERN
Changelog.START_OF_LAST_RELEASE_PATTERN = START_OF_LAST_RELEASE_PATTERN;
module.exports = Changelog
module.exports = Changelog;

@@ -25,7 +26,7 @@ /**

*/
function extractFrontMatter (oldContent) {
const headerStart = oldContent.indexOf('# Changelog')
function extractFrontMatter(oldContent) {
const headerStart = oldContent.indexOf('# Changelog');
return headerStart !== -1 || headerStart !== 0
? oldContent.substring(0, headerStart)
: ''
: '';
}

@@ -36,55 +37,73 @@

*/
function extractChangelogBody (oldContent) {
const oldContentStart = oldContent.search(START_OF_LAST_RELEASE_PATTERN)
function extractChangelogBody(oldContent) {
const oldContentStart = oldContent.search(START_OF_LAST_RELEASE_PATTERN);
return oldContentStart !== -1
? oldContent.substring(oldContentStart)
: oldContent
: oldContent;
}
function outputChangelog (args, newVersion) {
function outputChangelog(args, newVersion) {
return new Promise((resolve, reject) => {
createIfMissing(args)
const header = args.header
createIfMissing(args);
const header = args.header;
const oldContent = args.dryRun || args.releaseCount === 0 ? '' : fs.readFileSync(args.infile, 'utf-8')
const oldContent =
args.dryRun || args.releaseCount === 0
? ''
: fs.readFileSync(args.infile, 'utf-8');
const oldContentBody = extractChangelogBody(oldContent)
const oldContentBody = extractChangelogBody(oldContent);
const changelogFrontMatter = extractFrontMatter(oldContent)
const changelogFrontMatter = extractFrontMatter(oldContent);
let content = ''
const context = { version: newVersion }
const changelogStream = conventionalChangelog({
debug: args.verbose && console.info.bind(console, 'conventional-changelog'),
preset: presetLoader(args),
tagPrefix: args.tagPrefix,
releaseCount: args.releaseCount
}, context, { merges: null, path: args.path, showSignature: false }, args.parserOpts, args.writerOpts)
.on('error', function (err) {
return reject(err)
})
let content = '';
const context = { version: newVersion };
const changelogStream = conventionalChangelog(
{
debug:
args.verbose && console.info.bind(console, 'conventional-changelog'),
preset: presetLoader(args),
tagPrefix: args.tagPrefix,
releaseCount: args.releaseCount,
},
context,
{ merges: null, path: args.path, showSignature: false },
args.parserOpts,
args.writerOpts,
).on('error', function (err) {
return reject(err);
});
changelogStream.on('data', function (buffer) {
content += buffer.toString()
})
content += buffer.toString();
});
changelogStream.on('end', function () {
checkpoint(args, 'outputting changes to %s', [args.infile])
if (args.dryRun) console.info(`\n---\n${chalk.gray(content.trim())}\n---\n`)
else writeFile(args, args.infile, changelogFrontMatter + header + '\n' + (content + oldContentBody).replace(/\n+$/, '\n'))
return resolve()
})
})
checkpoint(args, 'outputting changes to %s', [args.infile]);
if (args.dryRun)
console.info(`\n---\n${chalk.gray(content.trim())}\n---\n`);
else
writeFile(
args,
args.infile,
changelogFrontMatter +
header +
'\n' +
(content + oldContentBody).replace(/\n+$/, '\n'),
);
return resolve();
});
});
}
function createIfMissing (args) {
function createIfMissing(args) {
try {
fs.accessSync(args.infile, fs.F_OK)
fs.accessSync(args.infile, fs.F_OK);
} catch (err) {
if (err.code === 'ENOENT') {
checkpoint(args, 'created %s', [args.infile])
args.outputUnreleased = true
writeFile(args, args.infile, '\n')
checkpoint(args, 'created %s', [args.infile]);
args.outputUnreleased = true;
writeFile(args, args.infile, '\n');
}
}
}

@@ -1,27 +0,28 @@

const bump = require('../lifecycles/bump')
const checkpoint = require('../checkpoint')
const formatCommitMessage = require('../format-commit-message')
const path = require('path')
const runExecFile = require('../run-execFile')
const runLifecycleScript = require('../run-lifecycle-script')
const bump = require('../lifecycles/bump');
const checkpoint = require('../checkpoint');
const formatCommitMessage = require('../format-commit-message');
const path = require('path');
const runExecFile = require('../run-execFile');
const runLifecycleScript = require('../run-lifecycle-script');
module.exports = async function (args, newVersion) {
if (args.skip.commit) return
const message = await runLifecycleScript(args, 'precommit')
if (message && message.length) args.releaseCommitMessageFormat = message
await execCommit(args, newVersion)
await runLifecycleScript(args, 'postcommit')
}
if (args.skip.commit) return;
const message = await runLifecycleScript(args, 'precommit');
if (message && message.length) args.releaseCommitMessageFormat = message;
await execCommit(args, newVersion);
await runLifecycleScript(args, 'postcommit');
};
async function execCommit (args, newVersion) {
let msg = 'committing %s'
let paths = []
const verify = args.verify === false || args.n ? ['--no-verify'] : []
const sign = args.sign ? ['-S'] : []
const toAdd = []
async function execCommit(args, newVersion) {
let msg = 'committing %s';
let paths = [];
const verify = args.verify === false || args.n ? ['--no-verify'] : [];
const sign = args.sign ? ['-S'] : [];
const signoff = args.signoff ? ['--signoff'] : [];
const toAdd = [];
// only start with a pre-populated paths list when CHANGELOG processing is not skipped
if (!args.skip.changelog) {
paths = [args.infile]
toAdd.push(args.infile)
paths = [args.infile];
toAdd.push(args.infile);
}

@@ -32,39 +33,37 @@

Object.keys(bump.getUpdatedConfigs()).forEach(function (p) {
paths.unshift(p)
toAdd.push(path.relative(process.cwd(), p))
paths.unshift(p);
toAdd.push(path.relative(process.cwd(), p));
// account for multiple files in the output message
if (paths.length > 1) {
msg += ' and %s'
msg += ' and %s';
}
})
});
if (args.commitAll) {
msg += ' and %s'
paths.push('all staged files')
msg += ' and %s';
paths.push('all staged files');
}
checkpoint(args, msg, paths)
checkpoint(args, msg, paths);
// nothing to do, exit without commit anything
if (!args.commitAll && args.skip.changelog && args.skip.bump && toAdd.length === 0) {
return
if (
!args.commitAll &&
args.skip.changelog &&
args.skip.bump &&
toAdd.length === 0
) {
return;
}
await runExecFile(args, 'git', ['add'].concat(toAdd))
await runExecFile(args, 'git', ['add'].concat(toAdd));
await runExecFile(
args,
'git',
[
'commit'
].concat(
verify,
sign,
args.commitAll ? [] : toAdd,
[
'-m',
`${formatCommitMessage(args.releaseCommitMessageFormat, newVersion)}`
]
)
)
['commit'].concat(verify, sign, signoff, args.commitAll ? [] : toAdd, [
'-m',
`${formatCommitMessage(args.releaseCommitMessageFormat, newVersion)}`,
]),
);
}

@@ -1,45 +0,55 @@

const bump = require('../lifecycles/bump')
const chalk = require('chalk')
const checkpoint = require('../checkpoint')
const figures = require('figures')
const formatCommitMessage = require('../format-commit-message')
const runExecFile = require('../run-execFile')
const runLifecycleScript = require('../run-lifecycle-script')
const { detectPMByLockFile } = require('../detect-package-manager')
const bump = require('../lifecycles/bump');
const chalk = require('chalk');
const checkpoint = require('../checkpoint');
const figures = require('figures');
const formatCommitMessage = require('../format-commit-message');
const runExecFile = require('../run-execFile');
const runLifecycleScript = require('../run-lifecycle-script');
const { detectPMByLockFile } = require('../detect-package-manager');
module.exports = async function (newVersion, pkgPrivate, args) {
if (args.skip.tag) return
await runLifecycleScript(args, 'pretag')
await execTag(newVersion, pkgPrivate, args)
await runLifecycleScript(args, 'posttag')
}
if (args.skip.tag) return;
await runLifecycleScript(args, 'pretag');
await execTag(newVersion, pkgPrivate, args);
await runLifecycleScript(args, 'posttag');
};
async function detectPublishHint () {
const npmClientName = await detectPMByLockFile()
const publishCommand = 'publish'
return `${npmClientName} ${publishCommand}`
async function detectPublishHint() {
const npmClientName = await detectPMByLockFile();
const publishCommand = 'publish';
return `${npmClientName} ${publishCommand}`;
}
async function execTag (newVersion, pkgPrivate, args) {
const tagOption = []
async function execTag(newVersion, pkgPrivate, args) {
const tagOption = [];
if (args.sign) {
tagOption.push('-s')
tagOption.push('-s');
} else {
tagOption.push('-a')
tagOption.push('-a');
}
if (args.tagForce) {
tagOption.push('-f')
tagOption.push('-f');
}
checkpoint(args, 'tagging release %s%s', [args.tagPrefix, newVersion])
await runExecFile(args, 'git', ['tag', ...tagOption, args.tagPrefix + newVersion, '-m', `${formatCommitMessage(args.releaseCommitMessageFormat, newVersion)}`])
const currentBranch = await runExecFile('', 'git', ['rev-parse', '--abbrev-ref', 'HEAD'])
let message = 'git push --follow-tags origin ' + currentBranch.trim()
checkpoint(args, 'tagging release %s%s', [args.tagPrefix, newVersion]);
await runExecFile(args, 'git', [
'tag',
...tagOption,
args.tagPrefix + newVersion,
'-m',
`${formatCommitMessage(args.releaseCommitMessageFormat, newVersion)}`,
]);
const currentBranch = await runExecFile('', 'git', [
'rev-parse',
'--abbrev-ref',
'HEAD',
]);
let message = 'git push --follow-tags origin ' + currentBranch.trim();
if (pkgPrivate !== true && bump.getUpdatedConfigs()['package.json']) {
const npmPublishHint = args.npmPublishHint || await detectPublishHint()
message += ` && ${npmPublishHint}`
const npmPublishHint = args.npmPublishHint || (await detectPublishHint());
message += ` && ${npmPublishHint}`;
if (args.prerelease !== undefined) {
if (args.prerelease === '') {
message += ' --tag prerelease'
message += ' --tag prerelease';
} else {
message += ' --tag ' + args.prerelease
message += ' --tag ' + args.prerelease;
}

@@ -49,3 +59,3 @@ }

checkpoint(args, 'Run `%s` to publish', [message], chalk.blue(figures.info))
checkpoint(args, 'Run `%s` to publish', [message], chalk.blue(figures.info));
}
// TODO: this should be replaced with an object we maintain and
// describe in: https://github.com/conventional-changelog/conventional-changelog-config-spec
const spec = require('conventional-changelog-config-spec')
const spec = require('conventional-changelog-config-spec');
module.exports = (args) => {
const defaultPreset = require.resolve('conventional-changelog-conventionalcommits')
let preset = args.preset || defaultPreset
const defaultPreset = require.resolve(
'conventional-changelog-conventionalcommits',
);
let preset = args.preset || defaultPreset;
if (preset === defaultPreset) {
preset = {
name: defaultPreset
}
Object.keys(spec.properties).forEach(key => {
if (args[key] !== undefined) preset[key] = args[key]
})
name: defaultPreset,
};
Object.keys(spec.properties).forEach((key) => {
if (args[key] !== undefined) preset[key] = args[key];
});
}
return preset
}
return preset;
};

@@ -1,12 +0,15 @@

const chalk = require('chalk')
const chalk = require('chalk');
module.exports = function (args, msg, opts) {
if (!args.silent) {
opts = Object.assign({
level: 'error',
color: 'red'
}, opts)
opts = Object.assign(
{
level: 'error',
color: 'red',
},
opts,
);
console[opts.level](chalk[opts.color](msg))
console[opts.level](chalk[opts.color](msg));
}
}
};

@@ -1,18 +0,18 @@

const { promisify } = require('util')
const printError = require('./print-error')
const { promisify } = require('util');
const printError = require('./print-error');
const exec = promisify(require('child_process').exec)
const exec = promisify(require('child_process').exec);
module.exports = async function (args, cmd) {
if (args.dryRun) return
if (args.dryRun) return;
try {
const { stderr, stdout } = await exec(cmd)
const { stderr, stdout } = await exec(cmd);
// If exec returns content in stderr, but no error, print it as a warning
if (stderr) printError(args, stderr, { level: 'warn', color: 'yellow' })
return stdout
if (stderr) printError(args, stderr, { level: 'warn', color: 'yellow' });
return stdout;
} catch (error) {
// If exec returns an error, print it and exit with return code 1
printError(args, error.stderr || error.message)
throw error
printError(args, error.stderr || error.message);
throw error;
}
}
};

@@ -1,18 +0,18 @@

const { promisify } = require('util')
const printError = require('./print-error')
const { promisify } = require('util');
const printError = require('./print-error');
const execFile = promisify(require('child_process').execFile)
const execFile = promisify(require('child_process').execFile);
module.exports = async function (args, cmd, cmdArgs) {
if (args.dryRun) return
if (args.dryRun) return;
try {
const { stderr, stdout } = await execFile(cmd, cmdArgs)
const { stderr, stdout } = await execFile(cmd, cmdArgs);
// If execFile returns content in stderr, but no error, print it as a warning
if (stderr) printError(args, stderr, { level: 'warn', color: 'yellow' })
return stdout
if (stderr) printError(args, stderr, { level: 'warn', color: 'yellow' });
return stdout;
} catch (error) {
// If execFile returns an error, print it and exit with return code 1
printError(args, error.stderr || error.message)
throw error
printError(args, error.stderr || error.message);
throw error;
}
}
};

@@ -1,13 +0,18 @@

const chalk = require('chalk')
const checkpoint = require('./checkpoint')
const figures = require('figures')
const runExec = require('./run-exec')
const chalk = require('chalk');
const checkpoint = require('./checkpoint');
const figures = require('figures');
const runExec = require('./run-exec');
module.exports = function (args, hookName) {
const scripts = args.scripts
if (!scripts || !scripts[hookName]) return Promise.resolve()
const command = scripts[hookName]
checkpoint(args, 'Running lifecycle script "%s"', [hookName])
checkpoint(args, '- execute command: "%s"', [command], chalk.blue(figures.info))
return runExec(args, command)
}
const scripts = args.scripts;
if (!scripts || !scripts[hookName]) return Promise.resolve();
const command = scripts[hookName];
checkpoint(args, 'Running lifecycle script "%s"', [hookName]);
checkpoint(
args,
'- execute command: "%s"',
[command],
chalk.blue(figures.info),
);
return runExec(args, command);
};

@@ -19,19 +19,19 @@ /*

'use strict'
'use strict';
module.exports = stringifyPackage
module.exports = stringifyPackage;
const DEFAULT_INDENT = 2
const CRLF = '\r\n'
const LF = '\n'
const DEFAULT_INDENT = 2;
const CRLF = '\r\n';
const LF = '\n';
function stringifyPackage (data, indent, newline) {
indent = indent || (indent === 0 ? 0 : DEFAULT_INDENT)
const json = JSON.stringify(data, null, indent)
function stringifyPackage(data, indent, newline) {
indent = indent || (indent === 0 ? 0 : DEFAULT_INDENT);
const json = JSON.stringify(data, null, indent);
if (newline === CRLF) {
return json.replace(/\n/g, CRLF) + CRLF
return json.replace(/\n/g, CRLF) + CRLF;
}
return json + LF
return json + LF;
}

@@ -1,3 +0,3 @@

const path = require('path')
const JSON_BUMP_FILES = require('../../defaults').bumpFiles
const path = require('path');
const JSON_BUMP_FILES = require('../../defaults').bumpFiles;
const updatersByType = {

@@ -7,35 +7,35 @@ json: require('./types/json'),

gradle: require('./types/gradle'),
csproj: require('./types/csproj')
}
const PLAIN_TEXT_BUMP_FILES = ['VERSION.txt', 'version.txt']
csproj: require('./types/csproj'),
};
const PLAIN_TEXT_BUMP_FILES = ['VERSION.txt', 'version.txt'];
function getUpdaterByType (type) {
const updater = updatersByType[type]
function getUpdaterByType(type) {
const updater = updatersByType[type];
if (!updater) {
throw Error(`Unable to locate updater for provided type (${type}).`)
throw Error(`Unable to locate updater for provided type (${type}).`);
}
return updater
return updater;
}
function getUpdaterByFilename (filename) {
function getUpdaterByFilename(filename) {
if (JSON_BUMP_FILES.includes(path.basename(filename))) {
return getUpdaterByType('json')
return getUpdaterByType('json');
}
if (PLAIN_TEXT_BUMP_FILES.includes(filename)) {
return getUpdaterByType('plain-text')
return getUpdaterByType('plain-text');
}
if (/build.gradle/.test(filename)) {
return getUpdaterByType('gradle')
return getUpdaterByType('gradle');
}
if (filename.endsWith('.csproj')) {
return getUpdaterByType('csproj')
return getUpdaterByType('csproj');
}
throw Error(
`Unsupported file (${filename}) provided for bumping.\n Please specify the updater \`type\` or use a custom \`updater\`.`
)
`Unsupported file (${filename}) provided for bumping.\n Please specify the updater \`type\` or use a custom \`updater\`.`,
);
}
function getCustomUpdaterFromPath (updater) {
function getCustomUpdaterFromPath(updater) {
if (typeof updater === 'string') {
return require(path.resolve(process.cwd(), updater))
return require(path.resolve(process.cwd(), updater));
}

@@ -46,5 +46,7 @@ if (

) {
return updater
return updater;
}
throw new Error('Updater must be a string path or an object with readVersion and writeVersion methods')
throw new Error(
'Updater must be a string path or an object with readVersion and writeVersion methods',
);
}

@@ -55,3 +57,3 @@

*/
function isValidUpdater (obj) {
function isValidUpdater(obj) {
return (

@@ -61,3 +63,3 @@ obj &&

typeof obj.writeVersion === 'function'
)
);
}

@@ -70,10 +72,10 @@

*/
let updater = arg
let updater = arg;
if (isValidUpdater(updater)) {
return updater
return updater;
}
if (typeof updater !== 'object') {
updater = {
filename: arg
}
filename: arg,
};
}

@@ -84,10 +86,15 @@

if (typeof updater.updater === 'string') {
updater.updater = getCustomUpdaterFromPath(updater.updater)
updater.updater = getCustomUpdaterFromPath(updater.updater);
} else if (updater.type) {
updater.updater = getUpdaterByType(updater.type)
updater.updater = getUpdaterByType(updater.type);
} else {
updater.updater = getUpdaterByFilename(updater.filename)
updater.updater = getUpdaterByFilename(updater.filename);
}
} catch (err) {
if (err.code !== 'ENOENT') console.warn(`Unable to obtain updater for: ${JSON.stringify(arg)}\n - Error: ${err.message}\n - Skipping...`)
if (err.code !== 'ENOENT')
console.warn(
`Unable to obtain updater for: ${JSON.stringify(arg)}\n - Error: ${
err.message
}\n - Skipping...`,
);
}

@@ -99,6 +106,6 @@ }

if (!isValidUpdater(updater.updater)) {
return false
return false;
}
return updater
}
return updater;
};

@@ -1,13 +0,15 @@

const versionRegex = /<Version>(.*)<\/Version>/
const versionRegex = /<Version>(.*)<\/Version>/;
module.exports.readVersion = function (contents) {
const matches = versionRegex.exec(contents)
const matches = versionRegex.exec(contents);
if (matches === null || matches.length !== 2) {
throw new Error('Failed to read the Version field in your csproj file - is it present?')
throw new Error(
'Failed to read the Version field in your csproj file - is it present?',
);
}
return matches[1]
}
return matches[1];
};
module.exports.writeVersion = function (contents, version) {
return contents.replace(versionRegex, `<Version>${version}</Version>`)
}
return contents.replace(versionRegex, `<Version>${version}</Version>`);
};

@@ -1,16 +0,18 @@

const versionRegex = /^version\s+=\s+['"]([\d.]+)['"]/m
const versionRegex = /^version\s+=\s+['"]([\d.]+)['"]/m;
module.exports.readVersion = function (contents) {
const matches = versionRegex.exec(contents)
const matches = versionRegex.exec(contents);
if (matches === null) {
throw new Error('Failed to read the version field in your gradle file - is it present?')
throw new Error(
'Failed to read the version field in your gradle file - is it present?',
);
}
return matches[1]
}
return matches[1];
};
module.exports.writeVersion = function (contents, version) {
return contents.replace(versionRegex, () => {
return `version = "${version}"`
})
}
return `version = "${version}"`;
});
};

@@ -1,25 +0,25 @@

const stringifyPackage = require('../../stringify-package')
const detectIndent = require('detect-indent')
const detectNewline = require('detect-newline')
const stringifyPackage = require('../../stringify-package');
const detectIndent = require('detect-indent');
const detectNewline = require('detect-newline');
module.exports.readVersion = function (contents) {
return JSON.parse(contents).version
}
return JSON.parse(contents).version;
};
module.exports.writeVersion = function (contents, version) {
const json = JSON.parse(contents)
const indent = detectIndent(contents).indent
const newline = detectNewline(contents)
json.version = version
const json = JSON.parse(contents);
const indent = detectIndent(contents).indent;
const newline = detectNewline(contents);
json.version = version;
if (json.packages && json.packages['']) {
// package-lock v2 stores version there too
json.packages[''].version = version
json.packages[''].version = version;
}
return stringifyPackage(json, indent, newline)
}
return stringifyPackage(json, indent, newline);
};
module.exports.isPrivate = function (contents) {
return JSON.parse(contents).private
}
return JSON.parse(contents).private;
};
module.exports.readVersion = function (contents) {
return contents
}
return contents;
};
module.exports.writeVersion = function (_contents, version) {
return version
}
return version;
};

@@ -1,6 +0,6 @@

const fs = require('fs')
const fs = require('fs');
module.exports = function (args, filePath, content) {
if (args.dryRun) return
fs.writeFileSync(filePath, content, 'utf8')
}
if (args.dryRun) return;
fs.writeFileSync(filePath, content, 'utf8');
};
{
"name": "commit-and-tag-version",
"version": "12.0.0",
"version": "12.1.0",
"description": "replacement for `npm version` with automatic CHANGELOG generation",

@@ -8,16 +8,14 @@ "bin": "bin/cli.js",

"fix": "eslint . --fix",
"posttest": "eslint .",
"test": "nyc mocha --timeout=30000",
"test:unit": "mocha --exclude test/git.spec.js",
"coverage": "nyc report --reporter=lcov",
"posttest": "eslint . && npm run format:check",
"format:base": "prettier \"./**/*.{ts,js}\"",
"format:fix": "npm run format:base -- --write",
"format:check": "npm run format:base -- --check",
"test": "jest",
"test:unit": "jest --testPathIgnorePatterns test/git.integration-test.js",
"test:git-integration": "jest --testPathPattern test/git.integration-test.js",
"release": "bin/cli.js"
},
"nyc": {
"exclude": [
"tmp/**"
]
},
"repository": "absolute-version/commit-and-tag-version",
"engines": {
"node": ">=14"
"node": ">=18"
},

@@ -59,19 +57,14 @@ "keywords": [

"devDependencies": {
"@fintechstudios/eslint-plugin-chai-as-promised": "^3.1.0",
"chai": "^4.3.10",
"chai-as-promised": "^7.1.1",
"eslint": "^8.52.0",
"eslint-config-standard": "^17.1.0",
"eslint-config-prettier": "^9.0.0",
"eslint-plugin-import": "^2.29.0",
"eslint-plugin-mocha": "^10.2.0",
"eslint-plugin-jest": "^27.6.0",
"eslint-plugin-n": "^15.2.0",
"eslint-plugin-promise": "^6.1.1",
"mocha": "^10.2.0",
"mock-fs": "^5.2.0",
"mockery": "^2.1.0",
"nyc": "^15.1.0",
"jest": "^29.7.0",
"jest-serial-runner": "^1.2.1",
"prettier": "3.0.3",
"shelljs": "^0.8.5",
"std-mocks": "^1.0.1",
"strip-ansi": "^6.0.0"
}
}

@@ -282,2 +282,6 @@ # Commit and Tag Version

### Signed-off-by trailer
To add the "Signed-off-by" trailer to the commit message, add the `--signoff` flag to your `commit-and-tag-version` command.
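
For illustration, here is a minimal programmatic sketch of the same option. It assumes the package's main export is the `standardVersion` function from `index.js` (as `bin/cli.js` above wires it up) and that option names match the yargs flags defined in `command.js`:

```js
// Sketch only: pass an argv-style options object to the exported function.
const commitAndTagVersion = require('commit-and-tag-version');

commitAndTagVersion({ signoff: true, silent: true })
  .then(() => {
    // the release commit now carries a "Signed-off-by" trailer
  })
  .catch((err) => {
    console.error(`commit-and-tag-version failed: ${err.message}`);
    process.exit(1);
  });
```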
### Lifecycle Scripts

@@ -284,0 +288,0 @@

@@ -1,38 +0,145 @@

/* global describe it afterEach */
'use strict';
'use strict'
const shell = require('shelljs');
const stripAnsi = require('strip-ansi');
const fs = require('fs');
const shell = require('shelljs')
const fs = require('fs')
const { resolve } = require('path')
const { Readable } = require('stream')
const mockFS = require('mock-fs')
const mockery = require('mockery')
const stdMocks = require('std-mocks')
const stripAnsi = require('strip-ansi')
const mockers = require('./mocks/jest-mocks');
const cli = require('../command')
const formatCommitMessage = require('../lib/format-commit-message')
const runExecFile = require('../lib/run-execFile');
const chai = require('chai')
const should = chai.should()
const expect = chai.expect
chai.use(require('chai-as-promised'))
const cli = require('../command');
const formatCommitMessage = require('../lib/format-commit-message');
// set by mock()
let standardVersion
let standardVersion;
let readFileSyncSpy;
let lstatSyncSpy;
function exec (opt = '', git) {
// Rather than trying to re-read something written out during tests, we can spy on writeFileSync
// we can trust fs is capable of writing the file
let writeFileSyncSpy;
const consoleErrorSpy = jest.spyOn(console, 'warn').mockImplementation();
const consoleInfoSpy = jest.spyOn(console, 'info').mockImplementation();
jest.mock('../lib/run-execFile');
const { readFileSync: readFileSyncActual, lstatSync: lstatSyncActual } = fs;
function exec(opt = '', git) {
if (typeof opt === 'string') {
opt = cli.parse(`commit-and-tag-version ${opt}`)
opt = cli.parse(`commit-and-tag-version ${opt}`);
}
if (!git) opt.skip = Object.assign({}, opt.skip, { commit: true, tag: true })
return standardVersion(opt)
if (!git) opt.skip = Object.assign({}, opt.skip, { commit: true, tag: true });
return standardVersion(opt);
}
function getPackageVersion () {
return JSON.parse(fs.readFileSync('package.json', 'utf-8')).version
function attemptingToReadPackageJson(path) {
return path.includes('package.json') || path.includes('package-lock.json');
}
/**
* @param fs - reference to 'fs' - needs to be defined in the root test class so that mocking works correctly
* @param readFileSyncActual - actual implementation of fs.readFileSync - reference should be defined as a variable in root test class so we can unset spy after
* @param existingChangelog ?: string - Existing CHANGELOG.md content
* @param testFiles ?: object[] - with Path and Value fields, for mocking readFileSynch on packageFiles such as package.json, bower.json, manifest.json
* @param realTestFiles ?: object[] - with Filename (e.g. mix.exs) and Path to real file in a directory
* @return Jest spy on readFileSync
*/
const mockReadFilesFromDisk = ({
fs,
readFileSyncActual,
existingChangelog,
testFiles,
realTestFiles,
}) =>
jest.spyOn(fs, 'readFileSync').mockImplementation((path, opts) => {
if (path === 'CHANGELOG.md') {
if (existingChangelog) {
return existingChangelog;
}
return '';
}
// If deliberately set to null when mocking, don't create a fake package.json
if (testFiles === null && attemptingToReadPackageJson(path)) {
return '{}';
}
if (testFiles) {
const file = testFiles.find((otherFile) => {
return path.includes(otherFile.path);
});
if (file) {
if (file.value instanceof String || typeof file.value === 'string') {
return file.value;
}
return JSON.stringify(file.value);
}
// For scenarios where we have defined testFiles such as bower.json
// Do not create a fake package.json file
if (attemptingToReadPackageJson(path)) {
return '{}';
}
}
// If no package files defined and not explicitly set to null, create a fake package json
// otherwise fs will read the real package.json in the root of this project!
if (attemptingToReadPackageJson(path)) {
return JSON.stringify({ version: '1.0.0' });
}
if (realTestFiles) {
const testFile = realTestFiles.find((testFile) => {
return path.includes(testFile.filename);
});
if (testFile) {
return readFileSyncActual(testFile.path, opts);
}
}
return readFileSyncActual(path, opts);
});
/**
* @param fs - reference to 'fs' - needs to be defined in the root test class so that mocking works correctly
* @param lstatSyncActual - actual implementation of fs.lstatSync
* @param testFiles ?: object[] - with Path and Value fields, for mocking lstatSync on packageFiles such as package.json, bower.json, manifest.json
* @param realTestFiles ?: object[] - with Filename (e.g. mix.exs) and Path to real file in a directory
* @return Jest spy on lstatSync
*/
const mockFsLStat = ({ fs, lstatSyncActual, testFiles, realTestFiles }) =>
jest.spyOn(fs, 'lstatSync').mockImplementation((path) => {
if (testFiles) {
const file = testFiles.find((otherFile) => {
return path.includes(otherFile.path);
});
if (file) {
return {
isFile: () => true,
};
}
}
if (realTestFiles) {
const file = realTestFiles.find((otherFile) => {
return path.includes(otherFile.filename);
});
if (file) {
return {
isFile: () => true,
};
}
}
return lstatSyncActual(path);
});
/**
* Mock external conventional-changelog modules

@@ -43,105 +150,107 @@ *

* bump?: 'major' | 'minor' | 'patch' | Error | (opt, parserOpts, cb) => { cb(err) | cb(null, { releaseType }) }
* changelog?: string | Error | Array<string | Error | (opt) => string | null>
* changelog?: string | Error | Array<string | Error | (opt) => string | null> - Changelog to be "generated" by conventional-changelog when reading commit history
* execFile?: ({ dryRun, silent }, cmd, cmdArgs) => Promise<string>
* fs?: { [string]: string | Buffer | any }
* pkg?: { [string]: any }
* tags?: string[] | Error
* existingChangelog?: string - Existing CHANGELOG.md content
 * testFiles?: object[] - with Path and Value fields, for mocking readFileSync on packageFiles such as package.json, bower.json, manifest.json
* realTestFiles?: object[] - with Filename (e.g. mix.exs) and Path to real file in test directory
*/
function mock ({ bump, changelog, execFile, fs, pkg, tags } = {}) {
mockery.enable({ warnOnUnregistered: false, useCleanCache: true })
function mock({
bump,
changelog,
tags,
existingChangelog,
testFiles,
realTestFiles,
} = {}) {
mockers.mockRecommendedBump({ bump });
mockery.registerMock('conventional-recommended-bump', function (opt, parserOpts, cb) {
if (typeof bump === 'function') bump(opt, parserOpts, cb)
else if (bump instanceof Error) cb(bump)
else cb(null, bump ? { releaseType: bump } : {})
})
if (!Array.isArray(changelog)) changelog = [changelog];
if (!Array.isArray(changelog)) changelog = [changelog]
mockery.registerMock(
'conventional-changelog',
(opt) =>
new Readable({
read (_size) {
const next = changelog.shift()
if (next instanceof Error) {
this.destroy(next)
} else if (typeof next === 'function') {
this.push(next(opt))
} else {
this.push(next ? Buffer.from(next, 'utf8') : null)
}
}
})
)
mockers.mockConventionalChangelog({
changelog,
});
mockery.registerMock('git-semver-tags', function (cb) {
if (tags instanceof Error) cb(tags)
else cb(null, tags | [])
})
mockers.mockGitSemverTags({
tags,
});
if (typeof execFile === 'function') {
// called from commit & tag lifecycle methods
mockery.registerMock('../run-execFile', execFile)
}
// needs to be set after mockery, but before mock-fs
standardVersion = require('../index')
standardVersion = require('../index');
fs = Object.assign({}, fs)
if (pkg) {
fs['package.json'] = JSON.stringify(pkg)
} else if (pkg === undefined && !fs['package.json']) {
fs['package.json'] = JSON.stringify({ version: '1.0.0' })
}
mockFS(fs)
// For fake and injected test files, pretend they exist at the root level when fs queries lstat.
// package.json works without this as it'll check the one in this actual repo...
lstatSyncSpy = mockFsLStat({
fs,
lstatSyncActual,
testFiles,
realTestFiles,
});
stdMocks.use()
return () => stdMocks.flush()
readFileSyncSpy = mockReadFilesFromDisk({
fs,
readFileSyncActual,
existingChangelog,
testFiles,
realTestFiles,
});
// Spies on writeFileSync to capture calls and ensure we don't actually try to write anything to disk
writeFileSyncSpy = jest.spyOn(fs, 'writeFileSync').mockImplementation();
}
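// Illustrative only: the typical flow of a spec built on mock(), assuming the
// exec() helper and the verifier functions defined at the bottom of this file:
//
//   mock({ bump: 'minor', changelog: 'a feature\n', tags: ['v1.0.0'] });
//   await exec();
//   verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
//   verifyNewChangelogContentMatches({
//     writeFileSyncSpy,
//     expectedContent: /a feature/,
//   });
//   unmock();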
function unmock () {
mockery.deregisterAll()
mockery.disable()
mockFS.restore()
stdMocks.restore()
standardVersion = null
function clearCapturedSpyCalls() {
consoleInfoSpy.mockClear();
consoleErrorSpy.mockClear();
}
// push out prints from the Mocha reporter
const { stdout } = stdMocks.flush()
for (const str of stdout) {
if (str.startsWith(' ')) process.stdout.write(str)
}
function restoreMocksToRealImplementation() {
readFileSyncSpy.mockRestore();
writeFileSyncSpy.mockRestore();
lstatSyncSpy.mockRestore();
}
function unmock() {
clearCapturedSpyCalls();
restoreMocksToRealImplementation();
standardVersion = null;
}
describe('format-commit-message', function () {
it('works for no {{currentTag}}', function () {
formatCommitMessage('chore(release): 1.0.0', '1.0.0').should.equal(
'chore(release): 1.0.0'
)
})
expect(formatCommitMessage('chore(release): 1.0.0', '1.0.0')).toEqual(
'chore(release): 1.0.0',
);
});
it('works for one {{currentTag}}', function () {
formatCommitMessage('chore(release): {{currentTag}}', '1.0.0').should.equal(
'chore(release): 1.0.0'
)
})
expect(
formatCommitMessage('chore(release): {{currentTag}}', '1.0.0'),
).toEqual('chore(release): 1.0.0');
});
it('works for two {{currentTag}}', function () {
formatCommitMessage(
'chore(release): {{currentTag}} \n\n* CHANGELOG: https://github.com/absolute-version/commit-and-tag-version/blob/v{{currentTag}}/CHANGELOG.md',
'1.0.0'
).should.equal(
'chore(release): 1.0.0 \n\n* CHANGELOG: https://github.com/absolute-version/commit-and-tag-version/blob/v1.0.0/CHANGELOG.md'
)
})
})
expect(
formatCommitMessage(
'chore(release): {{currentTag}} \n\n* CHANGELOG: https://github.com/absolute-version/commit-and-tag-version/blob/v{{currentTag}}/CHANGELOG.md',
'1.0.0',
),
).toEqual(
'chore(release): 1.0.0 \n\n* CHANGELOG: https://github.com/absolute-version/commit-and-tag-version/blob/v1.0.0/CHANGELOG.md',
);
});
});
describe('cli', function () {
afterEach(unmock)
afterEach(unmock);
describe('CHANGELOG.md does not exist', function () {
it('populates changelog with commits since last tag by default', async function () {
mock({ bump: 'patch', changelog: 'patch release\n', tags: ['v1.0.0'] })
await exec()
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.match(/patch release/)
})
mock({ bump: 'patch', changelog: 'patch release\n', tags: ['v1.0.0'] });
await exec();
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: /patch release/,
});
});

@@ -152,51 +261,69 @@ it('includes all commits if --first-release is true', async function () {

changelog: 'first commit\npatch release\n',
pkg: { version: '1.0.1' }
})
await exec('--first-release')
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.match(/patch release/)
content.should.match(/first commit/)
})
testFiles: [{ path: 'package.json', value: { version: '1.0.1' } }],
});
await exec('--first-release');
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: /patch release/,
});
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: /first commit/,
});
});
it('skipping changelog will not create a changelog file', async function () {
mock({ bump: 'minor', changelog: 'foo\n' })
await exec('--skip.changelog true')
getPackageVersion().should.equal('1.1.0')
expect(() => fs.readFileSync('CHANGELOG.md', 'utf-8')).to.throw(/ENOENT/)
})
})
mock({ bump: 'minor', changelog: 'foo\n' });
await exec('--skip.changelog true');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
expect(writeFileSyncSpy).not.toHaveBeenCalledWith('CHANGELOG.md');
});
});
describe('CHANGELOG.md exists', function () {
afterEach(unmock);
it('appends the new release above the last release, removing the old header (legacy format), and does not retain any front matter', async function () {
const frontMatter =
'---\nstatus: new\n---\n'
const frontMatter = '---\nstatus: new\n---\n';
mock({
bump: 'patch',
changelog: 'release 1.0.1\n',
fs: { 'CHANGELOG.md': frontMatter + 'legacy header format<a name="1.0.0">\n' },
tags: ['v1.0.0']
})
await exec()
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.match(/1\.0\.1/)
content.should.not.match(/legacy header format/)
content.should.not.match(/---status: new---/)
})
existingChangelog:
frontMatter + 'legacy header format<a name="1.0.0">\n',
tags: ['v1.0.0'],
});
await exec();
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: /1\.0\.1/,
});
verifyNewChangelogContentDoesNotMatch({
writeFileSyncSpy,
expectedContent: /legacy header format/,
});
verifyNewChangelogContentDoesNotMatch({
writeFileSyncSpy,
expectedContent: /---status: new---/,
});
});
it('appends the new release above the last release, replacing the old header (standard-version format) with header (new format), and retains any front matter', async function () {
const { header } = require('../defaults')
const { header } = require('../defaults');
const standardVersionHeader =
'# Changelog\n\nAll notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.'
'# Changelog\n\nAll notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.';
const frontMatter =
'---\nstatus: new\n---\n'
const frontMatter = '---\nstatus: new\n---\n';
const changelog101 =
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n'
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n';
const changelog100 =
'### [1.0.0](/compare/v0.0.1...v1.0.0) (YYYY-MM-DD)\n\n\n### Features\n\n* Version one feature set\n'
'### [1.0.0](/compare/v0.0.1...v1.0.0) (YYYY-MM-DD)\n\n\n### Features\n\n* Version one feature set\n';
const initialChangelog = frontMatter + '\n' + standardVersionHeader + '\n' + changelog100
const initialChangelog =
frontMatter + '\n' + standardVersionHeader + '\n' + changelog100;

@@ -206,22 +333,26 @@ mock({

changelog: changelog101,
fs: { 'CHANGELOG.md': initialChangelog },
tags: ['v1.0.0']
})
await exec()
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.equal(frontMatter + '\n' + header + '\n' + changelog101 + changelog100)
})
existingChangelog: initialChangelog,
tags: ['v1.0.0'],
});
await exec();
verifyNewChangelogContentEquals({
writeFileSyncSpy,
expectedContent:
frontMatter + '\n' + header + '\n' + changelog101 + changelog100,
});
});
it('appends the new release above the last release, removing the old header (new format), and retains any front matter', async function () {
const { header } = require('../defaults')
const frontMatter =
'---\nstatus: new\n---\n'
const { header } = require('../defaults');
const frontMatter = '---\nstatus: new\n---\n';
const changelog101 =
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n'
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n';
const changelog100 =
'### [1.0.0](/compare/v0.0.1...v1.0.0) (YYYY-MM-DD)\n\n\n### Features\n\n* Version one feature set\n'
'### [1.0.0](/compare/v0.0.1...v1.0.0) (YYYY-MM-DD)\n\n\n### Features\n\n* Version one feature set\n';
const initialChangelog = frontMatter + '\n' + header + '\n' + changelog100
const initialChangelog =
frontMatter + '\n' + header + '\n' + changelog100;

@@ -231,99 +362,116 @@ mock({

changelog: changelog101,
fs: { 'CHANGELOG.md': initialChangelog },
tags: ['v1.0.0']
})
await exec()
existingChangelog: initialChangelog,
tags: ['v1.0.0'],
});
await exec();
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.equal(frontMatter + '\n' + header + '\n' + changelog101 + changelog100)
})
verifyNewChangelogContentEquals({
writeFileSyncSpy,
expectedContent:
frontMatter + '\n' + header + '\n' + changelog101 + changelog100,
});
});
it('appends the new release above the last release, removing the old header (new format)', async function () {
const { header } = require('../defaults')
const { header } = require('../defaults');
const changelog1 =
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n'
mock({ bump: 'patch', changelog: changelog1, tags: ['v1.0.0'] })
await exec()
let content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.equal(header + '\n' + changelog1)
'### [1.0.1](/compare/v1.0.0...v1.0.1) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* patch release ABCDEFXY\n';
mock({ bump: 'patch', changelog: changelog1, tags: ['v1.0.0'] });
await exec();
const content = header + '\n' + changelog1;
verifyNewChangelogContentEquals({
writeFileSyncSpy,
expectedContent: content,
});
const changelog2 =
'### [1.0.2](/compare/v1.0.1...v1.0.2) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* another patch release ABCDEFXY\n'
unmock()
'### [1.0.2](/compare/v1.0.1...v1.0.2) (YYYY-MM-DD)\n\n\n### Bug Fixes\n\n* another patch release ABCDEFXY\n';
unmock();
mock({
bump: 'patch',
changelog: changelog2,
fs: { 'CHANGELOG.md': content },
tags: ['v1.0.0', 'v1.0.1']
})
await exec()
content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.equal(header + '\n' + changelog2 + changelog1)
})
existingChangelog: content,
tags: ['v1.0.0', 'v1.0.1'],
});
await exec();
verifyNewChangelogContentEquals({
writeFileSyncSpy,
expectedContent: header + '\n' + changelog2 + changelog1,
});
});
it('[DEPRECATED] (--changelogHeader) allows for a custom changelog header', async function () {
const header = '# Pork Chop Log'
const header = '# Pork Chop Log';
mock({
bump: 'minor',
changelog: header + '\n',
fs: { 'CHANGELOG.md': '' }
})
await exec(`--changelogHeader="${header}"`)
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.match(new RegExp(header))
})
existingChangelog: '',
});
await exec(`--changelogHeader="${header}"`);
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: new RegExp(header),
});
});
it('[DEPRECATED] (--changelogHeader) exits with error if changelog header matches last version search regex', async function () {
mock({ bump: 'minor', fs: { 'CHANGELOG.md': '' } })
await expect(exec('--changelogHeader="## 3.0.2"')).to.be.rejectedWith(/custom changelog header must not match/)
})
})
mock({ bump: 'minor', existingChangelog: '' });
await expect(exec('--changelogHeader="## 3.0.2"')).rejects.toThrow(
/custom changelog header must not match/,
);
});
});
describe('lifecycle scripts', function () {
afterEach(unmock);
describe('prerelease hook', function () {
it('should run the prerelease hook when provided', async function () {
const flush = mock({
mock({
bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
prerelease: "node -e \"console.error('prerelease' + ' ran')\""
}
})
const { stderr } = flush()
stderr.join('\n').should.match(/prerelease ran/)
})
prerelease: "node -e \"console.error('prerelease' + ' ran')\"",
},
});
const expectedLog = 'prerelease ran';
verifyLogPrinted({ consoleInfoSpy: consoleErrorSpy, expectedLog });
});
it('should abort if the hook returns a non-zero exit code', async function () {
mock({
bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await expect(exec({
scripts: {
prerelease: "node -e \"throw new Error('prerelease' + ' fail')\""
}
})).to.be.rejectedWith(/prerelease fail/)
})
})
await expect(
exec({
scripts: {
prerelease: "node -e \"throw new Error('prerelease' + ' fail')\"",
},
}),
).rejects.toThrow(/prerelease fail/);
});
});
describe('prebump hook', function () {
it('should allow prebump hook to return an alternate version #', async function () {
const flush = mock({
mock({
bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
prebump: "node -e \"console.log(Array.of(9, 9, 9).join('.'))\""
}
})
const { stdout } = flush()
stdout.join('').should.match(/9\.9\.9/)
getPackageVersion().should.equal('9.9.9')
})
prebump: 'node -e "console.log(Array.of(9, 9, 9).join(\'.\'))"',
},
});
verifyLogPrinted({ consoleInfoSpy, expectedLog: '9.9.9' });
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '9.9.9' });
});

@@ -333,12 +481,12 @@ it('should not allow prebump hook to return a releaseAs command', async function () {

bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
prebump: "node -e \"console.log('major')\""
}
})
getPackageVersion().should.equal('1.1.0')
})
prebump: 'node -e "console.log(\'major\')"',
},
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});

@@ -348,12 +496,12 @@ it('should allow prebump hook to return an arbitrary string', async function () {

bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
prebump: "node -e \"console.log('Hello World')\""
}
})
getPackageVersion().should.equal('1.1.0')
})
prebump: 'node -e "console.log(\'Hello World\')"',
},
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});

@@ -363,48 +511,54 @@ it('should allow prebump hook to return a version with build info', async function () {

bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
prebump: "node -e \"console.log('9.9.9-test+build')\""
}
})
getPackageVersion().should.equal('9.9.9-test+build')
})
})
prebump: 'node -e "console.log(\'9.9.9-test+build\')"',
},
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '9.9.9-test+build',
});
});
});
describe('postbump hook', function () {
it('should run the postbump hook when provided', async function () {
const flush = mock({
mock({
bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec({
scripts: {
postbump: "node -e \"console.error('postbump' + ' ran')\""
}
})
const { stderr } = flush()
stderr.join('\n').should.match(/postbump ran/)
})
postbump: "node -e \"console.error('postbump' + ' ran')\"",
},
});
const expectedLog = 'postbump ran';
verifyLogPrinted({ consoleInfoSpy: consoleErrorSpy, expectedLog });
});
it('should run the postbump and exit with error when postbump fails', async function () {
mock({
bump: 'minor',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await expect(exec({
scripts: {
postbump: "node -e \"throw new Error('postbump' + ' fail')\""
}
})).to.be.rejectedWith(/postbump fail/)
})
})
await expect(
exec({
scripts: {
postbump: "node -e \"throw new Error('postbump' + ' fail')\"",
},
}),
).rejects.toThrow(/postbump fail/);
});
});
describe('manual-release', function () {
describe('release-types', function () {
const regularTypes = ['major', 'minor', 'patch']
const nextVersion = { major: '2.0.0', minor: '1.1.0', patch: '1.0.1' }
const regularTypes = ['major', 'minor', 'patch'];
const nextVersion = { major: '2.0.0', minor: '1.1.0', patch: '1.0.1' };

@@ -415,8 +569,11 @@ regularTypes.forEach(function (type) {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
await exec('--release-as ' + type)
getPackageVersion().should.equal(nextVersion[type])
})
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec('--release-as ' + type);
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: nextVersion[type],
});
});
});

@@ -428,15 +585,20 @@ // this is for pre-releases

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
await exec('--release-as ' + type + ' --prerelease ' + type)
getPackageVersion().should.equal(`${nextVersion[type]}-${type}.0`)
})
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec('--release-as ' + type + ' --prerelease ' + type);
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: `${nextVersion[type]}-${type}.0`,
});
});
});
it('exits with error if an invalid release type is provided', async function () {
mock({ bump: 'minor', fs: { 'CHANGELOG.md': '' } })
mock({ bump: 'minor', existingChangelog: '' });
await expect(exec('--release-as invalid')).to.be.rejectedWith(/releaseAs must be one of/)
})
})
await expect(exec('--release-as invalid')).rejects.toThrow(
/releaseAs must be one of/,
);
});
});

@@ -447,7 +609,10 @@ describe('release-as-exact', function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
await exec('--release-as v100.0.0')
getPackageVersion().should.equal('100.0.0')
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec('--release-as v100.0.0');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0',
});
});

@@ -457,7 +622,10 @@ it('releases as 200.0.0-amazing', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
await exec('--release-as 200.0.0-amazing')
getPackageVersion().should.equal('200.0.0-amazing')
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec('--release-as 200.0.0-amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '200.0.0-amazing',
});
});

@@ -467,10 +635,18 @@ it('releases as 100.0.0 with prerelease amazing', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' },
pkg: {
version: '1.0.0'
}
})
await exec('--release-as 100.0.0 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.0')
})
existingChangelog: 'legacy header format<a name="1.0.0">\n',
testFiles: [
{
path: 'package.json',
value: {
version: '1.0.0',
},
},
],
});
await exec('--release-as 100.0.0 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.0',
});
});

@@ -480,10 +656,21 @@ it('release 100.0.0 with prerelease amazing bumps build', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '100.0.0-amazing.0'
}
})
await exec('--release-as 100.0.0 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.1')
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '100.0.0-amazing.0',
},
},
],
});
await exec('--release-as 100.0.0 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.1',
});
});

@@ -493,10 +680,21 @@ it('release 100.0.0-amazing.0 with prerelease amazing bumps build', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '100.0.0-amazing.1'
}
})
await exec('--release-as 100.0.0-amazing.0 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.2')
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '100.0.0-amazing.1',
},
},
],
});
await exec('--release-as 100.0.0-amazing.0 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.2',
});
});

@@ -506,10 +704,21 @@ it('release 100.0.0 with prerelease amazing correctly sets version', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '99.0.0-amazing.0'
}
})
await exec('--release-as 100.0.0 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.0')
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '99.0.0-amazing.0',
},
},
],
});
await exec('--release-as 100.0.0 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.0',
});
});

@@ -519,10 +728,21 @@ it('release 100.0.0-amazing.0 with prerelease amazing correctly sets version', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '99.0.0-amazing.0'
}
})
await exec('--release-as 100.0.0-amazing.0 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.0')
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '99.0.0-amazing.0',
},
},
],
});
await exec('--release-as 100.0.0-amazing.0 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.0',
});
});

@@ -532,10 +752,23 @@ it('release 100.0.0-amazing.0 with prerelease amazing retains build metadata', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '100.0.0-amazing.0'
}
})
await exec('--release-as 100.0.0-amazing.0+build.1234 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.1+build.1234')
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '100.0.0-amazing.0',
},
},
],
});
await exec(
'--release-as 100.0.0-amazing.0+build.1234 --prerelease amazing',
);
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.1+build.1234',
});
});

@@ -545,57 +778,107 @@ it('release 100.0.0-amazing.3 with prerelease amazing correctly sets prerelease version', async function () {

bump: 'patch',
fs: { 'CHANGELOG.md': 'legacy header format<a name="100.0.0-amazing.0">\n' },
pkg: {
version: '100.0.0-amazing.0'
}
})
await exec('--release-as 100.0.0-amazing.3 --prerelease amazing')
should.equal(getPackageVersion(), '100.0.0-amazing.3')
})
})
fs: {
'CHANGELOG.md':
'legacy header format<a name="100.0.0-amazing.0">\n',
},
testFiles: [
{
path: 'package.json',
value: {
version: '100.0.0-amazing.0',
},
},
],
});
await exec('--release-as 100.0.0-amazing.3 --prerelease amazing');
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '100.0.0-amazing.3',
});
});
});
it('creates a prerelease with a new minor version after two prerelease patches', async function () {
let releaseType = 'patch'
const bump = (_, __, cb) => cb(null, { releaseType })
let releaseType = 'patch';
mock({
bump,
fs: { 'CHANGELOG.md': 'legacy header format<a name="1.0.0">\n' }
})
bump: (_, __, cb) => cb(null, { releaseType }),
existingChangelog: 'legacy header format<a name="1.0.0">\n',
});
await exec('--release-as patch --prerelease dev')
getPackageVersion().should.equal('1.0.1-dev.0')
let version = '1.0.1-dev.0';
await exec('--release-as patch --prerelease dev');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: version });
await exec('--prerelease dev')
getPackageVersion().should.equal('1.0.1-dev.1')
unmock();
mock({
bump: (_, __, cb) => cb(null, { releaseType }),
existingChangelog: 'legacy header format<a name="1.0.0">\n',
testFiles: [{ path: 'package.json', value: { version } }],
});
releaseType = 'minor'
await exec('--release-as minor --prerelease dev')
getPackageVersion().should.equal('1.1.0-dev.0')
version = '1.0.1-dev.1';
await exec('--prerelease dev');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: version });
await exec('--release-as minor --prerelease dev')
getPackageVersion().should.equal('1.1.0-dev.1')
releaseType = 'minor';
unmock();
mock({
bump: (_, __, cb) => cb(null, { releaseType }),
existingChangelog: 'legacy header format<a name="1.0.0">\n',
testFiles: [{ path: 'package.json', value: { version } }],
});
await exec('--prerelease dev')
getPackageVersion().should.equal('1.1.0-dev.2')
})
version = '1.1.0-dev.0';
await exec('--release-as minor --prerelease dev');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: version });
unmock();
mock({
bump: (_, __, cb) => cb(null, { releaseType }),
existingChangelog: 'legacy header format<a name="1.0.0">\n',
testFiles: [{ path: 'package.json', value: { version } }],
});
version = '1.1.0-dev.1';
await exec('--release-as minor --prerelease dev');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: version });
unmock();
mock({
bump: (_, __, cb) => cb(null, { releaseType }),
existingChangelog: 'legacy header format<a name="1.0.0">\n',
testFiles: [{ path: 'package.json', value: { version } }],
});
version = '1.1.0-dev.2';
await exec('--prerelease dev');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: version });
});
it('exits with error if an invalid release version is provided', async function () {
mock({ bump: 'minor', fs: { 'CHANGELOG.md': '' } })
mock({ bump: 'minor', existingChangelog: '' });
await expect(exec('--release-as 10.2')).to.be.rejectedWith(/releaseAs must be one of/)
})
await expect(exec('--release-as 10.2')).rejects.toThrow(
/releaseAs must be one of/,
);
});
it('exits with error if release version conflicts with prerelease', async function () {
mock({ bump: 'minor', fs: { 'CHANGELOG.md': '' } })
mock({ bump: 'minor', existingChangelog: '' });
await expect(exec('--release-as 1.2.3-amazing.2 --prerelease awesome')).to.be
.rejectedWith(/releaseAs and prerelease have conflicting prerelease identifiers/)
})
})
await expect(
exec('--release-as 1.2.3-amazing.2 --prerelease awesome'),
).rejects.toThrow(
/releaseAs and prerelease have conflicting prerelease identifiers/,
);
});
});
it('appends line feed at end of package.json', async function () {
mock({ bump: 'patch' })
await exec()
const pkgJson = fs.readFileSync('package.json', 'utf-8')
pkgJson.should.equal('{\n "version": "1.0.1"\n}\n')
})
mock({ bump: 'patch' });
await exec();
verifyFileContentEquals({
writeFileSyncSpy,
content: '{\n "version": "1.0.1"\n}\n',
});
});

@@ -605,8 +888,13 @@ it('preserves indentation of tabs in package.json', async function () {

bump: 'patch',
fs: { 'package.json': '{\n\t"version": "1.0.0"\n}\n' }
})
await exec()
const pkgJson = fs.readFileSync('package.json', 'utf-8')
pkgJson.should.equal('{\n\t"version": "1.0.1"\n}\n')
})
testFiles: [
{ path: 'package.json', value: '{\n\t"version": "1.0.0"\n}\n' },
],
});
await exec();
// TODO: a) not bumping to 1.0.1, b) need to check how Jest handles tab indentation
verifyFileContentEquals({
writeFileSyncSpy,
content: '{\n\t"version": "1.0.1"\n}\n',
});
});

@@ -616,8 +904,12 @@ it('preserves indentation of spaces in package.json', async function () {

bump: 'patch',
fs: { 'package.json': '{\n "version": "1.0.0"\n}\n' }
})
await exec()
const pkgJson = fs.readFileSync('package.json', 'utf-8')
pkgJson.should.equal('{\n "version": "1.0.1"\n}\n')
})
testFiles: [
{ path: 'package.json', value: '{\n "version": "1.0.0"\n}\n' },
],
});
await exec();
verifyFileContentEquals({
writeFileSyncSpy,
content: '{\n "version": "1.0.1"\n}\n',
});
});

@@ -627,36 +919,43 @@ it('preserves carriage return + line feed in package.json', async function () {

bump: 'patch',
fs: { 'package.json': '{\r\n "version": "1.0.0"\r\n}\r\n' }
})
await exec()
const pkgJson = fs.readFileSync('package.json', 'utf-8')
pkgJson.should.equal('{\r\n "version": "1.0.1"\r\n}\r\n')
})
testFiles: [
{ path: 'package.json', value: '{\r\n "version": "1.0.0"\r\n}\r\n' },
],
});
await exec();
verifyFileContentEquals({
writeFileSyncSpy,
content: '{\r\n "version": "1.0.1"\r\n}\r\n',
});
});
it('does not print output when the --silent flag is passed', async function () {
const flush = mock()
await exec('--silent')
flush().should.eql({ stdout: [], stderr: [] })
})
})
mock();
await exec('--silent');
expect(consoleErrorSpy).not.toHaveBeenCalled();
expect(consoleInfoSpy).not.toHaveBeenCalled();
});
});
describe('commit-and-tag-version', function () {
afterEach(unmock)
afterEach(unmock);
it('should exit on bump error', async function () {
mock({ bump: new Error('bump err') })
mock({ bump: new Error('bump err') });
await expect(exec()).to.be.rejectedWith(/bump err/)
})
await expect(exec()).rejects.toThrow(/bump err/);
});
it('should exit on changelog error', async function () {
mock({ bump: 'minor', changelog: new Error('changelog err') })
mock({ bump: 'minor', changelog: new Error('changelog err') });
await expect(exec()).to.be.rejectedWith(/changelog err/)
})
await expect(exec()).rejects.toThrow(/changelog err/);
});
it('should exit with error without a package file to bump', async function () {
mock({ bump: 'patch', pkg: false })
mock({ bump: 'patch', testFiles: null });
await expect(exec({ gitTagFallback: false })).to.be.rejectedWith('no package file found')
})
await expect(exec({ gitTagFallback: false })).rejects.toThrow(
'no package file found',
);
});

@@ -666,38 +965,43 @@ it('bumps version # in bower.json', async function () {

bump: 'minor',
fs: { 'bower.json': JSON.stringify({ version: '1.0.0' }) },
tags: ['v1.0.0']
})
await exec()
JSON.parse(fs.readFileSync('bower.json', 'utf-8')).version.should.equal(
'1.1.0'
)
getPackageVersion().should.equal('1.1.0')
})
testFiles: [{ path: 'bower.json', value: { version: '1.0.0' } }],
tags: ['v1.0.0'],
});
await exec();
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'bower.json',
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});
it('bumps version # in manifest.json', async function () {
mock({
bump: 'minor',
fs: { 'manifest.json': JSON.stringify({ version: '1.0.0' }) },
tags: ['v1.0.0']
})
await exec()
JSON.parse(fs.readFileSync('manifest.json', 'utf-8')).version.should.equal(
'1.1.0'
)
getPackageVersion().should.equal('1.1.0')
})
testFiles: [{ path: 'manifest.json', value: { version: '1.0.0' } }],
tags: ['v1.0.0'],
});
await exec();
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'manifest.json',
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});
describe('custom `bumpFiles` support', function () {
afterEach(unmock);
it('mix.exs + version.txt', async function () {
const updater = 'custom-updater.js'
const updaterModule = require('./mocks/updater/customer-updater')
mock({
bump: 'minor',
fs: {
'mix.exs': fs.readFileSync('./test/mocks/mix.exs'),
'version.txt': fs.readFileSync('./test/mocks/version.txt')
},
tags: ['v1.0.0']
})
mockery.registerMock(resolve(process.cwd(), updater), updaterModule)
realTestFiles: [
{ filename: 'mix.exs', path: './test/mocks/mix.exs' },
{ filename: 'version.txt', path: './test/mocks/version.txt' },
],
tags: ['v1.0.0'],
});

@@ -707,95 +1011,147 @@ await exec({

'version.txt',
{ filename: 'mix.exs', updater: 'custom-updater.js' }
]
})
fs.readFileSync('mix.exs', 'utf-8').should.contain('version: "1.1.0"')
fs.readFileSync('version.txt', 'utf-8').should.equal('1.1.0')
})
{
filename: 'mix.exs',
updater: './test/mocks/updater/customer-updater',
},
],
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'mix.exs',
asString: true,
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'version.txt',
asString: true,
});
});
it('bumps a custom `plain-text` file', async function () {
mock({
bump: 'minor',
fs: {
'VERSION_TRACKER.txt': fs.readFileSync(
'./test/mocks/VERSION-1.0.0.txt'
)
}
})
realTestFiles: [
{
filename: 'VERSION_TRACKER.txt',
path: './test/mocks/VERSION-1.0.0.txt',
},
],
});
await exec({
bumpFiles: [{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' }]
})
fs.readFileSync('VERSION_TRACKER.txt', 'utf-8').should.equal('1.1.0')
})
bumpFiles: [{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' }],
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'VERSION_TRACKER.txt',
asString: true,
});
});
it('displays the new version from custom bumper with --dry-run', async function () {
const updater = 'increment-updater.js'
const updaterModule = require('./mocks/updater/increment-updater')
mock({
bump: 'minor',
fs: {
'increment-version.txt': fs.readFileSync(
'./test/mocks/increment-version.txt'
)
}
})
mockery.registerMock(resolve(process.cwd(), updater), updaterModule)
realTestFiles: [
{
filename: 'increment-version.txt',
path: './test/mocks/increment-version.txt',
},
],
});
const origInfo = console.info
const capturedOutput = []
const origInfo = console.info;
const capturedOutput = [];
console.info = (...args) => {
capturedOutput.push(...args)
origInfo(...args)
}
capturedOutput.push(...args);
origInfo(...args);
};
try {
await exec({
bumpFiles: [{ filename: 'increment-version.txt', updater: 'increment-updater.js' }],
dryRun: true
})
const logOutput = capturedOutput.join(' ')
stripAnsi(logOutput).should.include('bumping version in increment-version.txt from 1 to 2')
bumpFiles: [
{
filename: 'increment-version.txt',
updater: './test/mocks/updater/increment-updater',
},
],
dryRun: true,
});
const logOutput = capturedOutput.join(' ');
expect(stripAnsi(logOutput)).toContain(
'bumping version in increment-version.txt from 1 to 2',
);
} finally {
console.info = origInfo
console.info = origInfo;
}
})
})
});
});
describe('custom `packageFiles` support', function () {
afterEach(unmock);
it('reads and writes to a custom `plain-text` file', async function () {
mock({
bump: 'minor',
fs: {
'VERSION_TRACKER.txt': fs.readFileSync(
'./test/mocks/VERSION-6.3.1.txt'
)
}
})
realTestFiles: [
{
filename: 'VERSION_TRACKER.txt',
path: './test/mocks/VERSION-6.3.1.txt',
},
],
});
await exec({
packageFiles: [{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' }],
bumpFiles: [{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' }]
})
fs.readFileSync('VERSION_TRACKER.txt', 'utf-8').should.equal('6.4.0')
})
packageFiles: [
{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' },
],
bumpFiles: [{ filename: 'VERSION_TRACKER.txt', type: 'plain-text' }],
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '6.4.0',
filename: 'VERSION_TRACKER.txt',
asString: true,
});
});
it('allows same object to be used in packageFiles and bumpFiles', async function () {
mock({
bump: 'minor',
fs: {
'VERSION_TRACKER.txt': fs.readFileSync(
'./test/mocks/VERSION-6.3.1.txt'
)
}
})
const origWarn = console.warn
realTestFiles: [
{
filename: 'VERSION_TRACKER.txt',
path: './test/mocks/VERSION-6.3.1.txt',
},
],
});
const origWarn = console.warn;
console.warn = () => {
throw new Error('console.warn should not be called')
}
const filedesc = { filename: 'VERSION_TRACKER.txt', type: 'plain-text' }
throw new Error('console.warn should not be called');
};
const filedesc = {
filename: 'VERSION_TRACKER.txt',
type: 'plain-text',
};
try {
await exec({ packageFiles: [filedesc], bumpFiles: [filedesc] })
fs.readFileSync('VERSION_TRACKER.txt', 'utf-8').should.equal('6.4.0')
await exec({ packageFiles: [filedesc], bumpFiles: [filedesc] });
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '6.4.0',
filename: 'VERSION_TRACKER.txt',
asString: true,
});
} finally {
console.warn = origWarn
console.warn = origWarn;
}
})
})
});
});

@@ -805,155 +1161,252 @@ it('`packageFiles` are bumped along with `bumpFiles` defaults [commit-and-tag-version#533]', async function () {

bump: 'minor',
fs: {
'.gitignore': '',
'package-lock.json': JSON.stringify({ version: '1.0.0' }),
'manifest.json': fs.readFileSync('./test/mocks/manifest-6.3.1.json')
},
tags: ['v1.0.0']
})
testFiles: [
{
path: '.gitignore',
value: '',
},
{
path: 'package-lock.json',
value: { version: '1.0.0' },
},
],
realTestFiles: [
{
filename: 'manifest.json',
path: './test/mocks/manifest-6.3.1.json',
},
],
tags: ['v1.0.0'],
});
await exec({
silent: true,
packageFiles: [
{
filename: 'manifest.json',
type: 'json'
}
]
})
type: 'json',
},
],
});
JSON.parse(fs.readFileSync('manifest.json', 'utf-8')).version.should.equal(
'6.4.0'
)
JSON.parse(fs.readFileSync('package.json', 'utf-8')).version.should.equal(
'6.4.0'
)
JSON.parse(
fs.readFileSync('package-lock.json', 'utf-8')
).version.should.equal('6.4.0')
})
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '6.4.0',
filename: 'package.json',
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '6.4.0',
filename: 'package-lock.json',
});
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '6.4.0',
filename: 'manifest.json',
});
});
it('bumps version in Gradle `build.gradle.kts` file', async function () {
const expected = fs.readFileSync('./test/mocks/build-6.4.0.gradle.kts', 'utf-8')
const expected = fs.readFileSync(
'./test/mocks/build-6.4.0.gradle.kts',
'utf-8',
);
mock({
bump: 'minor',
fs: {
'build.gradle.kts': fs.readFileSync('./test/mocks/build-6.3.1.gradle.kts')
}
})
realTestFiles: [
{
filename: 'build.gradle.kts',
path: './test/mocks/build-6.3.1.gradle.kts',
},
],
});
await exec({
packageFiles: [{ filename: 'build.gradle.kts', type: 'gradle' }],
bumpFiles: [{ filename: 'build.gradle.kts', type: 'gradle' }]
})
fs.readFileSync('build.gradle.kts', 'utf-8').should.equal(expected)
})
bumpFiles: [{ filename: 'build.gradle.kts', type: 'gradle' }],
});
// filePath is the first arg passed to writeFileSync
const filename = 'build.gradle.kts';
const packageJsonWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename,
});
if (!packageJsonWriteFileSynchCall) {
throw new Error(`writeFileSync not invoked with path ${filename}`);
}
const calledWithContentStr = packageJsonWriteFileSynchCall[1];
expect(calledWithContentStr).toEqual(expected);
});
it('bumps version in .NET `Project.csproj` file', async function () {
const expected = fs.readFileSync('./test/mocks/Project-6.4.0.csproj', 'utf-8')
const filename = 'Project.csproj'
const expected = fs.readFileSync(
'./test/mocks/Project-6.4.0.csproj',
'utf-8',
);
const filename = 'Project.csproj';
mock({
bump: 'minor',
fs: {
[filename]: fs.readFileSync('./test/mocks/Project-6.3.1.csproj')
}
})
realTestFiles: [
{
filename,
path: './test/mocks/Project-6.3.1.csproj',
},
],
});
await exec({
packageFiles: [{ filename, type: 'csproj' }],
bumpFiles: [{ filename, type: 'csproj' }]
})
fs.readFileSync(filename, 'utf-8').should.equal(expected)
})
bumpFiles: [{ filename, type: 'csproj' }],
});
// filePath is the first arg passed to writeFileSync
const packageJsonWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename,
});
if (!packageJsonWriteFileSynchCall) {
throw new Error(`writeFileSync not invoked with path ${filename}`);
}
const calledWithContentStr = packageJsonWriteFileSynchCall[1];
expect(calledWithContentStr).toEqual(expected);
});
it('bumps version # in npm-shrinkwrap.json', async function () {
mock({
bump: 'minor',
fs: {
'npm-shrinkwrap.json': JSON.stringify({ version: '1.0.0' })
},
tags: ['v1.0.0']
})
await exec()
JSON.parse(
fs.readFileSync('npm-shrinkwrap.json', 'utf-8')
).version.should.equal('1.1.0')
getPackageVersion().should.equal('1.1.0')
})
testFiles: [
{
path: 'npm-shrinkwrap.json',
value: { version: '1.0.0' },
},
],
tags: ['v1.0.0'],
});
await exec();
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'npm-shrinkwrap.json',
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});
it('bumps version # in package-lock.json', async function () {
mock({
bump: 'minor',
fs: {
'.gitignore': '',
'package-lock.json': JSON.stringify({ version: '1.0.0' })
},
tags: ['v1.0.0']
})
await exec()
JSON.parse(
fs.readFileSync('package-lock.json', 'utf-8')
).version.should.equal('1.1.0')
getPackageVersion().should.equal('1.1.0')
})
testFiles: [
{
path: '.gitignore',
value: '',
},
{
path: 'package-lock.json',
value: { version: '1.0.0' },
},
],
tags: ['v1.0.0'],
});
await exec();
verifyPackageVersion({
writeFileSyncSpy,
expectedVersion: '1.1.0',
filename: 'package-lock.json',
});
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
});
describe('skip', function () {
it('allows bump and changelog generation to be skipped', async function () {
const changelogContent = 'legacy header format<a name="1.0.0">\n'
const changelogContent = 'legacy header format<a name="1.0.0">\n';
mock({
bump: 'minor',
changelog: 'foo\n',
fs: { 'CHANGELOG.md': changelogContent }
})
existingChangelog: changelogContent,
});
await exec('--skip.bump true --skip.changelog true')
getPackageVersion().should.equal('1.0.0')
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.equal(changelogContent)
})
})
await exec('--skip.bump true --skip.changelog true');
expect(writeFileSyncSpy).not.toHaveBeenCalledWith('package.json');
expect(writeFileSyncSpy).not.toHaveBeenCalledWith('CHANGELOG.md');
});
});
it('does not update files present in .gitignore', async function () {
const DotGitIgnore = require('dotgitignore');
jest.mock('dotgitignore');
DotGitIgnore.mockImplementation(() => {
return {
ignore: (filename) => {
if (filename === 'package-lock.json' || filename === 'bower.json') {
return true;
}
return false;
},
};
});
mock({
bump: 'minor',
fs: {
'.gitignore': 'package-lock.json\nbower.json',
// test a defaults.packageFiles
'bower.json': JSON.stringify({ version: '1.0.0' }),
// test a defaults.bumpFiles
'package-lock.json': JSON.stringify({
name: '@org/package',
version: '1.0.0',
lockfileVersion: 1
})
},
tags: ['v1.0.0']
})
await exec()
JSON.parse(
fs.readFileSync('package-lock.json', 'utf-8')
).version.should.equal('1.0.0')
JSON.parse(fs.readFileSync('bower.json', 'utf-8')).version.should.equal(
'1.0.0'
)
getPackageVersion().should.equal('1.1.0')
})
testFiles: [
{
path: 'bower.json',
value: { version: '1.0.0' },
},
{
path: 'package-lock.json',
value: {
name: '@org/package',
version: '1.0.0',
lockfileVersion: 1,
},
},
],
tags: ['v1.0.0'],
});
await exec();
// does not bump these since they are listed in .gitignore
expect(writeFileSyncSpy).not.toHaveBeenCalledWith('package-lock.json');
expect(writeFileSyncSpy).not.toHaveBeenCalledWith('bower.json');
// should still bump version in package.json
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
DotGitIgnore.mockRestore();
});
describe('configuration', function () {
it('--header', async function () {
mock({ bump: 'minor', fs: { 'CHANGELOG.md': '' } })
await exec('--header="# Welcome to our CHANGELOG.md"')
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.match(/# Welcome to our CHANGELOG.md/)
})
mock({ bump: 'minor', existingChangelog: '' });
await exec('--header="# Welcome to our CHANGELOG.md"');
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: /# Welcome to our CHANGELOG.md/,
});
});
it('--issuePrefixes and --issueUrlFormat', async function () {
const format = 'http://www.foo.com/{{prefix}}{{id}}'
const prefix = 'ABC-'
const format = 'http://www.foo.com/{{prefix}}{{id}}';
const prefix = 'ABC-';
const changelog = ({ preset }) =>
preset.issueUrlFormat + ':' + preset.issuePrefixes
mock({ bump: 'minor', changelog })
await exec(`--issuePrefixes="${prefix}" --issueUrlFormat="${format}"`)
const content = fs.readFileSync('CHANGELOG.md', 'utf-8')
content.should.include(`${format}:${prefix}`)
})
})
preset.issueUrlFormat + ':' + preset.issuePrefixes;
mock({ bump: 'minor', changelog });
await exec(`--issuePrefixes="${prefix}" --issueUrlFormat="${format}"`);
verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent: `${format}:${prefix}`,
});
});
});
describe('pre-major', function () {

@@ -963,10 +1416,15 @@ it('bumps the minor rather than major, if version < 1.0.0', async function () {

bump: 'minor',
pkg: {
version: '0.5.0',
repository: { url: 'https://github.com/yargs/yargs.git' }
}
})
await exec()
getPackageVersion().should.equal('0.6.0')
})
testFiles: [
{
path: 'package.json',
value: {
version: '0.5.0',
repository: { url: 'https://github.com/yargs/yargs.git' },
},
},
],
});
await exec();
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '0.6.0' });
});

@@ -976,34 +1434,38 @@ it('bumps major if --release-as=major specified, if version < 1.0.0', async function () {

bump: 'major',
pkg: {
version: '0.5.0',
repository: { url: 'https://github.com/yargs/yargs.git' }
}
})
await exec('-r major')
getPackageVersion().should.equal('1.0.0')
})
})
})
testFiles: [
{
path: 'package.json',
value: {
version: '0.5.0',
repository: { url: 'https://github.com/yargs/yargs.git' },
},
},
],
});
await exec('-r major');
verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.0.0' });
});
});
});
describe('GHSL-2020-111', function () {
afterEach(unmock)
afterEach(unmock);
it('does not allow command injection via basic configuration', async function () {
mock({ bump: 'patch' })
mock({ bump: 'patch' });
await exec({
noVerify: true,
infile: 'foo.txt',
releaseCommitMessageFormat: 'bla `touch exploit`'
})
const stat = shell.test('-f', './exploit')
stat.should.equal(false)
})
})
releaseCommitMessageFormat: 'bla `touch exploit`',
});
const stat = shell.test('-f', './exploit');
expect(stat).toEqual(false);
});
});
describe('with mocked git', function () {
afterEach(unmock)
afterEach(unmock);
it('--sign signs the commit and tag', async function () {
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json'],
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
[

@@ -1014,100 +1476,303 @@ 'commit',

'package.json',
'package-lock.json',
'-m',
'chore(release): 1.0.1'
'chore(release): 1.0.1',
],
['tag', '-s', 'v1.0.1', '-m', 'chore(release): 1.0.1'],
['rev-parse', '--abbrev-ref', 'HEAD']
]
const execFile = (_args, cmd, cmdArgs) => {
cmd.should.equal('git')
const expected = gitArgs.shift()
cmdArgs.should.deep.equal(expected)
if (expected[0] === 'rev-parse') return Promise.resolve('master')
return Promise.resolve('')
}
mock({ bump: 'patch', changelog: 'foo\n', execFile })
['rev-parse', '--abbrev-ref', 'HEAD'],
];
await exec('--sign', true)
gitArgs.should.have.lengthOf(0)
})
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'rev-parse') return Promise.resolve('master');
return Promise.resolve('');
});
mock({
bump: 'patch',
changelog: 'foo\n',
});
await exec('--sign', true);
expect(gitArgs).toHaveLength(0);
});
it('--signedoff adds signed-off-by to the commit message', async function () {
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
[
'commit',
'--signoff',
'CHANGELOG.md',
'package.json',
'package-lock.json',
'-m',
'chore(release): 1.0.1',
],
['tag', '-a', 'v1.0.1', '-m', 'chore(release): 1.0.1'],
['rev-parse', '--abbrev-ref', 'HEAD'],
];
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'rev-parse') return Promise.resolve('master');
return Promise.resolve('');
});
mock({
bump: 'patch',
changelog: 'foo\n',
});
await exec('--signoff', true);
expect(gitArgs).toHaveLength(0);
});
it('--tag-force forces tag replacement', async function () {
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json'],
['commit', 'CHANGELOG.md', 'package.json', '-m', 'chore(release): 1.0.1'],
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
[
'commit',
'CHANGELOG.md',
'package.json',
'package-lock.json',
'-m',
'chore(release): 1.0.1',
],
['tag', '-a', '-f', 'v1.0.1', '-m', 'chore(release): 1.0.1'],
['rev-parse', '--abbrev-ref', 'HEAD']
]
const execFile = (_args, cmd, cmdArgs) => {
cmd.should.equal('git')
const expected = gitArgs.shift()
cmdArgs.should.deep.equal(expected)
if (expected[0] === 'rev-parse') return Promise.resolve('master')
return Promise.resolve('')
}
mock({ bump: 'patch', changelog: 'foo\n', execFile })
['rev-parse', '--abbrev-ref', 'HEAD'],
];
await exec('--tag-force', true)
gitArgs.should.have.lengthOf(0)
})
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'rev-parse') return Promise.resolve('master');
return Promise.resolve('');
});
mock({ bump: 'patch', changelog: 'foo\n' });
await exec('--tag-force', true);
expect(gitArgs).toHaveLength(0);
});
it('fails if git add fails', async function () {
const gitArgs = [['add', 'CHANGELOG.md', 'package.json']]
const gitError = new Error('Command failed: git\nfailed add')
const execFile = (_args, cmd, cmdArgs) => {
cmd.should.equal('git')
const expected = gitArgs.shift()
cmdArgs.should.deep.equal(expected)
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
];
const gitError = new Error('Command failed: git\nfailed add');
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'add') {
return Promise.reject(gitError)
return Promise.reject(gitError);
}
return Promise.resolve('')
}
mock({ bump: 'patch', changelog: 'foo\n', execFile })
return Promise.resolve('');
});
await expect(exec({}, true)).to.be.rejectedWith(gitError)
})
mock({ bump: 'patch', changelog: 'foo\n' });
await expect(exec({}, true)).rejects.toThrow(gitError);
});
it('fails if git commit fails', async function () {
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json'],
['commit', 'CHANGELOG.md', 'package.json', '-m', 'chore(release): 1.0.1']
]
const gitError = new Error('Command failed: git\nfailed commit')
const execFile = (_args, cmd, cmdArgs) => {
cmd.should.equal('git')
const expected = gitArgs.shift()
cmdArgs.should.deep.equal(expected)
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
[
'commit',
'CHANGELOG.md',
'package.json',
'package-lock.json',
'-m',
'chore(release): 1.0.1',
],
];
const gitError = new Error('Command failed: git\nfailed commit');
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'commit') {
return Promise.reject(gitError)
return Promise.reject(gitError);
}
return Promise.resolve('')
}
mock({ bump: 'patch', changelog: 'foo\n', execFile })
return Promise.resolve('');
});
await expect(exec({}, true)).to.be.rejectedWith(gitError)
})
mock({ bump: 'patch', changelog: 'foo\n' });
await expect(exec({}, true)).rejects.toThrow(gitError);
});
it('fails if git tag fails', async function () {
const gitArgs = [
['add', 'CHANGELOG.md', 'package.json'],
['commit', 'CHANGELOG.md', 'package.json', '-m', 'chore(release): 1.0.1'],
['tag', '-a', 'v1.0.1', '-m', 'chore(release): 1.0.1']
]
const gitError = new Error('Command failed: git\nfailed tag')
const execFile = (_args, cmd, cmdArgs) => {
cmd.should.equal('git')
const expected = gitArgs.shift()
cmdArgs.should.deep.equal(expected)
['add', 'CHANGELOG.md', 'package.json', 'package-lock.json'],
[
'commit',
'CHANGELOG.md',
'package.json',
'package-lock.json',
'-m',
'chore(release): 1.0.1',
],
['tag', '-a', 'v1.0.1', '-m', 'chore(release): 1.0.1'],
];
const gitError = new Error('Command failed: git\nfailed tag');
runExecFile.mockImplementation((_args, cmd, cmdArgs) => {
expect(cmd).toEqual('git');
const expected = gitArgs.shift();
expect(cmdArgs).toEqual(expected);
if (expected[0] === 'tag') {
return Promise.reject(gitError)
return Promise.reject(gitError);
}
return Promise.resolve('')
return Promise.resolve('');
});
mock({ bump: 'patch', changelog: 'foo\n' });
await expect(exec({}, true)).rejects.toThrow(gitError);
});
});
// ------- Verifiers ------
function findWriteFileCallForPath({ writeFileSyncSpy, filename }) {
// filePath is the first arg passed to writeFileSync
return writeFileSyncSpy.mock.calls.find((args) =>
args[0].includes(filename),
);
}
function verifyPackageVersion({
writeFileSyncSpy,
expectedVersion,
filename = 'package.json',
asString = false,
}) {
// filePath is the first arg passed to writeFileSync
const packageJsonWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename,
});
if (!packageJsonWriteFileSynchCall) {
throw new Error(`writeFileSync not invoked with path ${filename}`);
}
const calledWithContentStr = packageJsonWriteFileSynchCall[1];
if (!asString) {
// parse to JSON and verify has property
const calledWithContent = JSON.parse(calledWithContentStr);
expect(calledWithContent).toHaveProperty('version');
expect(calledWithContent.version).toEqual(expectedVersion);
} else {
// for non-JSON files (e.g. .exs, .txt) just verify the version string is present
if (filename.includes('.exs')) {
expect(calledWithContentStr).toMatch(`version: "${expectedVersion}"`);
} else {
expect(calledWithContentStr).toMatch(expectedVersion);
}
mock({ bump: 'patch', changelog: 'foo\n', execFile })
}
}
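// Each entry of writeFileSyncSpy.mock.calls is one writeFileSync argument list,
// e.g. (values for illustration only):
//
//   ['package.json', '{\n  "version": "1.1.0"\n}\n']
//
// findWriteFileCallForPath matches on the path (args[0]) and the verifiers
// assert against the written content (args[1]).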
await expect(exec({}, true)).to.be.rejectedWith(gitError)
})
})
})
function verifyFileContentEquals({
writeFileSyncSpy,
content,
filename = 'package.json',
}) {
// filePath is the first arg passed to writeFileSync
const packageJsonWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename,
});
if (!packageJsonWriteFileSynchCall) {
throw new Error(`writeFileSync not invoked with path ${filename}`);
}
const calledWithContentStr = packageJsonWriteFileSynchCall[1];
expect(calledWithContentStr).toEqual(content);
}
function verifyNewChangelogContentMatches({
writeFileSyncSpy,
expectedContent,
}) {
const changelogWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename: 'CHANGELOG.md',
});
if (!changelogWriteFileSynchCall) {
throw new Error('writeFileSync not invoked with path CHANGELOG.md');
}
const calledWithContent = changelogWriteFileSynchCall[1];
expect(calledWithContent).toMatch(expectedContent);
}
function verifyNewChangelogContentEquals({
writeFileSyncSpy,
expectedContent,
}) {
const changelogWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename: 'CHANGELOG.md',
});
if (!changelogWriteFileSynchCall) {
throw new Error('writeFileSync not invoked with path CHANGELOG.md');
}
const calledWithContent = changelogWriteFileSynchCall[1];
expect(calledWithContent).toEqual(expectedContent);
}
function verifyNewChangelogContentDoesNotMatch({
writeFileSyncSpy,
expectedContent,
}) {
const changelogWriteFileSynchCall = findWriteFileCallForPath({
writeFileSyncSpy,
filename: 'CHANGELOG.md',
});
if (!changelogWriteFileSynchCall) {
throw new Error('writeFileSync not invoked with path CHANGELOG.md');
}
const calledWithContent = changelogWriteFileSynchCall[1];
expect(calledWithContent).not.toMatch(expectedContent);
}
function verifyLogPrinted({ consoleInfoSpy, expectedLog }) {
const consoleInfoLogs = consoleInfoSpy.mock.calls.map((args) => args[0]);
const desiredLog = consoleInfoLogs.find((log) => log.includes(expectedLog));
expect(desiredLog).not.toBeUndefined();
expect(desiredLog).toMatch(expectedLog);
}
});
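The verifier helpers above assert against the writeFileSync spy rather than re-reading files from disk. A minimal sketch of how a test could combine them; the spy setup, mock arguments, and expected version are illustrative assumptions, not copied from the suite:

const fs = require('fs');

it('writes the bumped version and the new changelog section (illustrative)', async () => {
  // Assumption: fs.writeFileSync is spied on so no files are actually written.
  const writeFileSyncSpy = jest
    .spyOn(fs, 'writeFileSync')
    .mockImplementation(() => {});

  mock({ bump: 'minor', changelog: '### Features\n' });
  await exec({}, true);

  verifyPackageVersion({ writeFileSyncSpy, expectedVersion: '1.1.0' });
  verifyNewChangelogContentMatches({
    writeFileSyncSpy,
    expectedContent: '### Features',
  });

  writeFileSyncSpy.mockRestore();
});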

@@ -1,12 +0,9 @@

const REPLACER = /version: "(.*)"/
const REPLACER = /version: "(.*)"/;
module.exports.readVersion = function (contents) {
return REPLACER.exec(contents)[1]
}
return REPLACER.exec(contents)[1];
};
module.exports.writeVersion = function (contents, version) {
return contents.replace(
REPLACER.exec(contents)[0],
`version: "${version}"`
)
}
return contents.replace(REPLACER.exec(contents)[0], `version: "${version}"`);
};
module.exports.readVersion = function (contents) {
return Number.parseInt(contents)
}
return Number.parseInt(contents);
};
module.exports.writeVersion = function (contents, version) {
return this.readVersion(contents) + 1
}
module.exports.writeVersion = function (contents) {
return this.readVersion(contents) + 1;
};
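Both modules above implement the readVersion/writeVersion contract that commit-and-tag-version accepts for custom updaters. As a hedged illustration of how such a module could be wired in through the bumpFiles/packageFiles options (the file paths below are examples, not taken from this repository):

// .versionrc.js — example configuration only
module.exports = {
  packageFiles: [{ filename: 'mix.exs', updater: 'scripts/mix-updater.js' }],
  bumpFiles: [
    { filename: 'package.json', type: 'json' },
    { filename: 'mix.exs', updater: 'scripts/mix-updater.js' },
    { filename: 'build-number.txt', updater: 'scripts/build-number-updater.js' },
  ],
};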

@@ -1,39 +0,35 @@

/* global describe it */
'use strict';
'use strict'
const stringifyPackage = require('../lib/stringify-package');
const stringifyPackage = require('../lib/stringify-package')
require('chai').should()
describe('stringifyPackage()', function () {
const dummy = { name: 'dummy' }
const dummy = { name: 'dummy' };
it('with no params uses \\n', function () {
stringifyPackage(dummy).should.match(/\n$/m)
})
expect(stringifyPackage(dummy)).toMatch(/\n$/m);
});
it('uses \\n', function () {
stringifyPackage(dummy, 2, '\n').should.match(/\n$/m)
})
expect(stringifyPackage(dummy, 2, '\n')).toMatch(/\n$/m);
});
it('uses \\r\\n', function () {
stringifyPackage(dummy, 2, '\r\n').should.match(/\r\n$/m)
})
expect(stringifyPackage(dummy, 2, '\r\n')).toMatch(/\r\n$/m);
});
it('with no params uses 2-space indent', function () {
stringifyPackage(dummy).should.match(/^ {2}"name": "dummy"/m)
})
expect(stringifyPackage(dummy)).toMatch(/^ {2}"name": "dummy"/m);
});
it('uses 2-space indent', function () {
stringifyPackage(dummy, 2, '\n').should.match(/^ {2}"name": "dummy"/m)
})
expect(stringifyPackage(dummy, 2, '\n')).toMatch(/^ {2}"name": "dummy"/m);
});
it('uses 4-space indent', function () {
stringifyPackage(dummy, 4, '\n').should.match(/^ {4}"name": "dummy"/m)
})
expect(stringifyPackage(dummy, 4, '\n')).toMatch(/^ {4}"name": "dummy"/m);
});
it('0 works', function () {
stringifyPackage(dummy, 0).split(/\r\n|\r|\n/).length.should.equal(2)
})
})
expect(stringifyPackage(dummy, 0).split(/\r\n|\r|\n/).length).toEqual(2);
});
});
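For context, a minimal sketch of an implementation that would satisfy the assertions above (2-space indent and '\n' by default, a trailing newline, and the requested newline sequence applied throughout); this is an assumption, not the module's actual source:

function stringifyPackage(data, indent = 2, newline = '\n') {
  const json = JSON.stringify(data, null, indent);
  // Normalise to the requested newline and make sure the output ends with one.
  return (newline === '\n' ? json : json.replace(/\n/g, newline)) + newline;
}

module.exports = stringifyPackage;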

@@ -1,52 +0,36 @@

/* global describe it */
const { promises: fsp } = require('fs');
const mockery = require('mockery')
const { promises: fsp } = require('fs')
require('chai').should()
let mockFs;
function mockNpm () {
mockery.enable({ warnOnUnregistered: false, useCleanCache: true })
let lockFile = ''
const fsMock = {
promises: {
access: async function (path) {
if (lockFile && path.endsWith(lockFile)) {
return true
}
await fsp.access(path)
}
}
const setLockFile = (lockFile) => {
if (mockFs) {
mockFs.mockRestore();
}
mockery.registerMock('fs', fsMock)
return {
setLockFile (file) {
lockFile = file
mockFs = jest.spyOn(fsp, 'access').mockImplementation(async (path) => {
if (lockFile && path.endsWith(lockFile)) {
return Promise.resolve();
}
}
}
return Promise.reject(new Error('Invalid lockfile'));
});
};
describe('utils', function () {
it('detectPMByLockFile should work', async function () {
const { setLockFile } = mockNpm()
const { detectPMByLockFile } = require('../lib/detect-package-manager')
const { detectPMByLockFile } = require('../lib/detect-package-manager');
let pm = await detectPMByLockFile()
pm.should.equal('npm')
let pm = await detectPMByLockFile();
expect(pm).toEqual('npm');
setLockFile('yarn.lock')
pm = await detectPMByLockFile()
pm.should.equal('yarn')
setLockFile('yarn.lock');
pm = await detectPMByLockFile();
expect(pm).toEqual('yarn');
setLockFile('package-lock.json')
pm = await detectPMByLockFile()
pm.should.equal('npm')
setLockFile('package-lock.json');
pm = await detectPMByLockFile();
expect(pm).toEqual('npm');
setLockFile('pnpm-lock.yaml')
pm = await detectPMByLockFile()
pm.should.equal('pnpm')
mockery.deregisterAll()
mockery.disable()
})
})
setLockFile('pnpm-lock.yaml');
pm = await detectPMByLockFile();
expect(pm).toEqual('pnpm');
});
});
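A hedged sketch of the behaviour detectPMByLockFile is expected to have, inferred only from the assertions above (probe for known lockfiles and fall back to npm); the parameter name and the probe order are assumptions:

const { promises: fsp } = require('fs');
const path = require('path');

async function detectPMByLockFile(cwd = process.cwd()) {
  const candidates = [
    ['yarn.lock', 'yarn'],
    ['package-lock.json', 'npm'],
    ['pnpm-lock.yaml', 'pnpm'],
  ];
  for (const [lockFile, pm] of candidates) {
    try {
      await fsp.access(path.resolve(cwd, lockFile));
      return pm;
    } catch {
      // Lockfile not present; try the next candidate.
    }
  }
  return 'npm'; // default when no lockfile is found
}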
