beachball
Comparing version 2.32.2 to 2.32.3
@@ -24,3 +24,3 @@ "use strict";
}
['dependencies', 'devDependencies', 'peerDependencies'].forEach(depKind => {
for (const depKind of ['dependencies', 'devDependencies', 'peerDependencies']) {
// updatedDeps contains all of the dependencies in the bump info since the beginning of a build job
@@ -32,10 +32,9 @@ const updatedDepsVersions = info[depKind];
// would overwrite those incorrectly!
const modifiedDeps = Object.keys(updatedDepsVersions).filter(dep => modifiedPackages.has(dep));
for (const dep of modifiedDeps) {
if (packageJson[depKind] && packageJson[depKind][dep]) {
packageJson[depKind][dep] = updatedDepsVersions[dep];
for (const [dep, updatedVersion] of Object.entries(updatedDepsVersions)) {
if (modifiedPackages.has(dep) && packageJson[depKind]?.[dep]) {
packageJson[depKind][dep] = updatedVersion;
}
}
}
});
}
fs_extra_1.default.writeJSONSync(info.packageJsonPath, packageJson, { spaces: 2 });
@@ -42,0 +41,0 @@ }
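Note: the rewritten loop above applies an updated version only when the dependency was actually modified in this bump and already appears in that dependency section. A minimal TypeScript sketch of the same pattern; the type and function names are illustrative, not beachball's internals.

type DepMap = Record<string, string>;

function applyUpdatedDeps(
  section: DepMap | undefined,
  updatedVersions: DepMap,
  modifiedPackages: Set<string>
): void {
  // Illustrative sketch; the undefined check mirrors packageJson[depKind]?.[dep] above.
  if (!section) return;
  for (const [dep, version] of Object.entries(updatedVersions)) {
    if (modifiedPackages.has(dep) && section[dep]) {
      section[dep] = version;
    }
  }
}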
@@ -11,24 +11,18 @@ "use strict";
const { packageInfos, scopedPackages } = bumpInfo;
const packages = Object.keys(packageInfos);
const dependents = {};
packages.forEach(pkgName => {
for (const [pkgName, info] of Object.entries(packageInfos)) {
if (!scopedPackages.has(pkgName)) {
return;
continue;
}
const info = packageInfos[pkgName];
const depTypes = ['dependencies', 'devDependencies', 'peerDependencies'];
depTypes.forEach(depType => {
const deps = info[depType];
if (deps) {
for (let dep of Object.keys(deps)) {
if (packages.includes(dep)) {
dependents[dep] = dependents[dep] || [];
if (!dependents[dep].includes(pkgName)) {
dependents[dep].push(pkgName);
}
for (const deps of [info.dependencies, info.devDependencies, info.peerDependencies]) {
for (const dep of Object.keys(deps || {})) {
if (packageInfos[dep]) {
dependents[dep] ?? (dependents[dep] = []);
if (!dependents[dep].includes(pkgName)) {
dependents[dep].push(pkgName);
}
}
}
});
});
}
}
bumpInfo.dependents = dependents;
@@ -35,0 +29,0 @@ }
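Note: the refactor above keeps the same dependents-map logic while switching to for...of and Object.entries. A rough self-contained sketch of that construction (omitting the scopedPackages filter for brevity); PackageInfoLike is a simplified stand-in for beachball's PackageInfo type.

interface PackageInfoLike {
  dependencies?: Record<string, string>;
  devDependencies?: Record<string, string>;
  peerDependencies?: Record<string, string>;
}

function getDependents(packageInfos: Record<string, PackageInfoLike>): Record<string, string[]> {
  const dependents: Record<string, string[]> = {};
  for (const [pkgName, info] of Object.entries(packageInfos)) {
    for (const deps of [info.dependencies, info.devDependencies, info.peerDependencies]) {
      for (const dep of Object.keys(deps || {})) {
        // Only track dependencies that are themselves packages in the repo
        if (packageInfos[dep]) {
          dependents[dep] ??= []; // same effect as the transpiled `?? (... = [])` above
          if (!dependents[dep].includes(pkgName)) {
            dependents[dep].push(pkgName);
          }
        }
      }
    }
  }
  return dependents;
}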
@@ -7,29 +7,27 @@ "use strict";
const dependentChangedBy = {};
Object.keys(packageInfos).forEach(pkgName => {
for (const [pkgName, info] of Object.entries(packageInfos)) {
if (!scopedPackages.has(pkgName)) {
return;
continue;
}
const info = packageInfos[pkgName];
const depTypes = ['dependencies', 'devDependencies', 'peerDependencies'];
depTypes.forEach(depKind => {
const deps = info[depKind];
if (deps) {
Object.keys(deps).forEach(dep => {
const packageInfo = packageInfos[dep];
if (packageInfo) {
const existingVersionRange = deps[dep];
const bumpedVersionRange = bumpMinSemverRange_1.bumpMinSemverRange(packageInfo.version, existingVersionRange);
if (existingVersionRange !== bumpedVersionRange) {
deps[dep] = bumpedVersionRange;
dependentChangedBy[pkgName] = dependentChangedBy[pkgName] || new Set();
dependentChangedBy[pkgName].add(dep);
if (verbose) {
console.log(`${pkgName} needs to be bumped because ${dep} ${existingVersionRange} -> ${bumpedVersionRange}`);
}
}
for (const deps of [info.dependencies, info.devDependencies, info.peerDependencies]) {
if (!deps) {
continue;
}
for (const [dep, existingVersionRange] of Object.entries(deps)) {
const packageInfo = packageInfos[dep];
if (!packageInfo) {
continue;
}
const bumpedVersionRange = bumpMinSemverRange_1.bumpMinSemverRange(packageInfo.version, existingVersionRange);
if (existingVersionRange !== bumpedVersionRange) {
deps[dep] = bumpedVersionRange;
dependentChangedBy[pkgName] ?? (dependentChangedBy[pkgName] = new Set());
dependentChangedBy[pkgName].add(dep);
if (verbose) {
console.log(`${pkgName} needs to be bumped because ${dep} ${existingVersionRange} -> ${bumpedVersionRange}`);
}
});
}
}
});
});
}
}
return dependentChangedBy;
@@ -36,0 +34,0 @@ }
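Note: the loop above rewrites each in-repo dependency range via bumpMinSemverRange and records which dependency forced the bump. As an assumption about that helper's behavior (not beachball's actual implementation), a stand-in that preserves the range style might look like this:

// Hypothetical stand-in for bumpMinSemverRange, only to illustrate the loop above.
// Assumption: the real helper preserves the range style (^, ~, or exact).
function bumpMinSemverRangeSketch(minVersion: string, existingRange: string): string {
  if (existingRange.startsWith('^') || existingRange.startsWith('~')) {
    return `${existingRange[0]}${minVersion}`;
  }
  return minVersion; // treat anything else as an exact pin in this sketch
}

// bumpMinSemverRangeSketch('1.2.0', '^1.1.0') === '^1.2.0'
// -> deps[dep] changes, so the dependent package is recorded in dependentChangedBy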
@@ -115,4 +115,4 @@ "use strict";
// Filter out changed files which are ignored by ignorePatterns.
// Also ignore the CHANGELOG files because they're generated by beachball.
const ignorePatterns = [...(options.ignorePatterns || []), 'CHANGELOG.md', 'CHANGELOG.json'];
// Also ignore the CHANGELOG files and change files because they're generated by beachball.
const ignorePatterns = [...(options.ignorePatterns || []), `${paths_1.changeFolder}/*.json`, 'CHANGELOG.{md,json}'];
const nonIgnoredChanges = changes.filter(moddedFile => {
@@ -172,16 +172,12 @@ const ignorePattern = ignorePatterns.find(pattern => minimatch_1.default(moddedFile, pattern, { matchBase: true }));
const changes = changeFilesResult.stdout.split(/\n/);
const changeFiles = changes.filter(name => path_1.default.dirname(name) === 'change');
const changeFiles = changes.filter(name => path_1.default.dirname(name) === paths_1.changeFolder);
const changeFilePackageSet = new Set();
// Loop through the change files, building up a set of packages that we can skip
changeFiles.forEach(file => {
for (const file of changeFiles) {
try {
const changeInfo = fs_extra_1.default.readJSONSync(file);
if ('changes' in changeInfo) {
for (const change of changeInfo.changes) {
changeFilePackageSet.add(change.packageName);
}
const changeInfo = fs_extra_1.default.readJSONSync(path_1.default.join(cwd, file));
const changes = changeInfo.changes || [changeInfo];
for (const change of changes) {
changeFilePackageSet.add(change.packageName);
}
else {
changeFilePackageSet.add(changeInfo.packageName);
}
}
@@ -191,3 +187,3 @@ catch (e) {
}
});
}
if (changeFilePackageSet.size > 0) {
@@ -194,0 +190,0 @@ console.log('Your local repository already has change files for these packages:' +
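Note: the updated ignore patterns rely on minimatch's matchBase option (already used in the code above), which matches slash-free patterns against a path's basename. A small illustration; the file names are made up and the import style depends on the installed minimatch major version.

import minimatch from 'minimatch';

const ignorePatterns = ['change/*.json', 'CHANGELOG.{md,json}'];

const isIgnored = (file: string): boolean =>
  // matchBase only applies to patterns without slashes, e.g. CHANGELOG.{md,json}
  ignorePatterns.some(pattern => minimatch(file, pattern, { matchBase: true }));

// isIgnored('change/my-pkg-2023-01-01.json') === true   (hypothetical change file)
// isIgnored('packages/foo/CHANGELOG.md') === true       (basename match via matchBase)
// isIgnored('packages/foo/src/index.ts') === false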
@@ -46,4 +46,4 @@ "use strict";
console.log(`git ${commitChangeFiles ? 'committed' : 'staged'} these change files: ${changeFiles
.map(f => ` - ${f}`)
.join('\n')}`);
.map(f => `\n - ${f}`)
.join('')}`);
return changeFiles;
@@ -50,0 +50,0 @@ }
@@ -0,5 +1,7 @@
/** Relative path to the change files folder */
export declare const changeFolder = "change";
/**
* Get the folder containing beachball change files.
* Get the absolute path to the folder containing beachball change files.
*/
export declare function getChangePath(cwd: string): string;
//# sourceMappingURL=paths.d.ts.map
@@ -6,13 +6,15 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getChangePath = void 0;
exports.getChangePath = exports.changeFolder = void 0;
const path_1 = __importDefault(require("path"));
const workspace_tools_1 = require("workspace-tools");
/** Relative path to the change files folder */
exports.changeFolder = 'change';
/**
* Get the folder containing beachball change files.
* Get the absolute path to the folder containing beachball change files.
*/
function getChangePath(cwd) {
const root = workspace_tools_1.findProjectRoot(cwd);
return path_1.default.join(root, 'change');
return path_1.default.join(root, exports.changeFolder);
}
exports.getChangePath = getChangePath;
//# sourceMappingURL=paths.js.map
@@ -11,3 +11,3 @@ "use strict";
async function bumpAndPush(bumpInfo, publishBranch, options) {
const { path: cwd, branch, tag, message, gitTimeout } = options;
const { path: cwd, branch, tag, depth, message, gitTimeout } = options;
const { remote, remoteBranch } = workspace_tools_1.parseRemoteBranch(branch);
@@ -21,14 +21,9 @@ let completed = false;
workspace_tools_1.revertLocalChanges(cwd);
const warnPrefix = `[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]:`;
// pull in latest from origin branch
if (options.fetch !== false) {
console.log('Fetching from remote');
let fetchResult;
if (options.depth) {
fetchResult = workspace_tools_1.git(['fetch', remote, remoteBranch, `--depth=${options.depth}`], { cwd });
}
else {
fetchResult = workspace_tools_1.git(['fetch', remote, remoteBranch], { cwd });
}
const fetchResult = workspace_tools_1.git(['fetch', remote, remoteBranch, ...(depth ? [`--depth=${depth}`] : [])], { cwd });
if (!fetchResult.success) {
console.warn(`[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]: fetch from ${branch} has failed!\n${fetchResult.stderr}`);
console.warn(`${warnPrefix} fetch from ${branch} has failed!\n${fetchResult.stderr}`);
continue;
@@ -39,3 +34,3 @@ }
if (!mergeResult.success) {
console.warn(`[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]: pull from ${branch} has failed!\n${mergeResult.stderr}`);
console.warn(`${warnPrefix} pull from ${branch} has failed!\n${mergeResult.stderr}`);
continue;
@@ -49,3 +44,3 @@ }
if (!mergePublishBranchResult.success) {
console.warn(`[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]: merging to target has failed!`);
console.warn(`${warnPrefix} merging to target has failed!`);
continue;
@@ -64,4 +59,10 @@ }
if (!pushResult.success) {
console.warn(`[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]: push to ${branch} has failed!\n${pushResult.stderr}`);
continue;
// If it timed out, the return value contains an "error" object with ETIMEDOUT code
// (it doesn't throw the error)
if (pushResult.error?.code === 'ETIMEDOUT') {
console.warn(`${warnPrefix} push to ${branch} has timed out!`);
}
else {
console.warn(`${warnPrefix} push to ${branch} has failed!\n${pushResult.stderr}`);
}
}
@@ -75,4 +76,4 @@ else {
catch (e) {
console.warn(`[WARN ${tryNumber}/${BUMP_PUSH_RETRIES}]: push to ${branch} has failed!\n${e}`);
continue;
// This is likely not reachable (see comment above), but leaving it just in case for this critical operation
console.warn(`${warnPrefix} push to ${branch} has failed!\n${e}`);
}
@@ -79,0 +80,0 @@ }
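Note: the fetch refactor above collapses the if/else into a conditional spread so --depth is appended only when a depth is configured. A generic sketch of the same pattern; the function and argument names are illustrative, not beachball's API.

// Illustrative helper showing the conditional-spread argument pattern.
function buildFetchArgs(remote: string, remoteBranch: string, depth?: number): string[] {
  return ['fetch', remote, remoteBranch, ...(depth ? [`--depth=${depth}`] : [])];
}

// buildFetchArgs('origin', 'main')     -> ['fetch', 'origin', 'main']
// buildFetchArgs('origin', 'main', 10) -> ['fetch', 'origin', 'main', '--depth=10']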
import { PackageInfos } from '../types/PackageInfo';
export declare const acceptedKeys: string[];
export declare function performPublishOverrides(packagesToPublish: string[], packageInfos: PackageInfos): void;
//# sourceMappingURL=performPublishOverrides.d.ts.map
@@ -22,5 +22,16 @@ "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.performPublishOverrides = exports.acceptedKeys = void 0;
exports.performPublishOverrides = void 0;
const fs = __importStar(require("fs-extra"));
exports.acceptedKeys = ['types', 'typings', 'main', 'module', 'exports', 'repository', 'bin', 'browser', 'files'];
const acceptedKeys = [
'types',
'typings',
'main',
'module',
'exports',
'repository',
'bin',
'browser',
'files',
];
const workspacePrefix = 'workspace:';
function performPublishOverrides(packagesToPublish, packageInfos) {
@@ -39,6 +50,8 @@ for (const pkgName of packagesToPublish) {
if (packageJson.publishConfig) {
for (const key of exports.acceptedKeys) {
const value = packageJson.publishConfig[key] || packageJson[key];
packageJson[key] = value;
delete packageJson.publishConfig[key];
for (const [k, value] of Object.entries(packageJson.publishConfig)) {
const key = k;
if (acceptedKeys.includes(key)) {
packageJson[key] = value;
delete packageJson.publishConfig[key];
}
}
@@ -54,14 +67,13 @@ }
function performWorkspaceVersionOverrides(packageJson, packageInfos) {
const depTypes = ['dependencies', 'devDependencies', 'peerDependencies'];
depTypes.forEach(depKind => {
const deps = packageJson[depKind];
if (deps) {
Object.keys(deps).forEach(dep => {
const packageInfo = packageInfos[dep];
if (packageInfo && deps[dep].startsWith('workspace:')) {
deps[dep] = resolveWorkspaceVersionForPublish(deps[dep], packageInfo.version);
}
});
const { dependencies, devDependencies, peerDependencies } = packageJson;
for (const deps of [dependencies, devDependencies, peerDependencies]) {
if (!deps)
continue;
for (const [depName, depVersion] of Object.entries(deps)) {
const packageInfo = packageInfos[depName];
if (packageInfo && depVersion.startsWith(workspacePrefix)) {
deps[depName] = resolveWorkspaceVersionForPublish(depVersion, packageInfo.version);
}
}
});
}
}
@@ -74,13 +86,11 @@ /**
function resolveWorkspaceVersionForPublish(workspaceDependency, packageInfoVersion) {
const versionStartIndex = 'workspace:'.length;
if (workspaceDependency === 'workspace:*') {
const workspaceVersion = workspaceDependency.substring(workspacePrefix.length);
if (workspaceVersion === '*') {
return packageInfoVersion;
}
else if (new Set(['workspace:~', 'workspace:^']).has(workspaceDependency)) {
return `${workspaceDependency[versionStartIndex]}${packageInfoVersion}`;
if (workspaceVersion === '^' || workspaceVersion === '~') {
return `${workspaceVersion}${packageInfoVersion}`;
}
else {
return workspaceDependency.substring(versionStartIndex);
}
return workspaceVersion;
}
//# sourceMappingURL=performPublishOverrides.js.map
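Note: the simplified resolveWorkspaceVersionForPublish keeps the same mapping from workspace: protocol ranges to publishable ranges. A TypeScript sketch mirroring that logic (not an exact copy of the compiled code above):

// Sketch of the workspace: range resolution shown in the diff.
function resolveWorkspaceVersion(workspaceDependency: string, version: string): string {
  const range = workspaceDependency.slice('workspace:'.length);
  if (range === '*') return version;                               // workspace:* -> 1.2.3
  if (range === '^' || range === '~') return `${range}${version}`; // workspace:^ -> ^1.2.3
  return range;                                                    // workspace:^1.0.0 -> ^1.0.0
}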
import { PackageInfos } from '../types/PackageInfo';
/**
* Topological sort the packages based on its dependency graph.
* Topologically sort the packages based on their dependency graph.
* Dependency comes first before dependent.
@@ -5,0 +5,0 @@ * @param packages Packages to be sorted.
@@ -9,3 +9,3 @@ "use strict";
/**
* Topological sort the packages based on its dependency graph.
* Topologically sort the packages based on their dependency graph.
* Dependency comes first before dependent.
@@ -18,20 +18,14 @@ * @param packages Packages to be sorted.
const dependencyGraph = [];
packages.forEach(pkgName => {
let allDeps = [];
['dependencies', 'devDependencies', 'peerDependencies'].forEach(depKind => {
const info = packageInfos[pkgName];
if (!info) {
throw new Error(`Package info is missing for ${pkgName}.`);
for (const pkgName of packageSet) {
const info = packageInfos[pkgName];
if (!info) {
throw new Error(`Package info is missing for ${pkgName}.`);
}
const allDeps = new Set([info.dependencies, info.devDependencies, info.peerDependencies]
.flatMap(deps => Object.keys(deps || {}))
.filter(pkg => packageSet.has(pkg)));
if (allDeps.size) {
for (const depPkgName of allDeps) {
dependencyGraph.push([depPkgName, pkgName]);
}
const deps = info[depKind];
if (deps) {
const depPkgNames = Object.keys(deps);
allDeps = allDeps.concat(depPkgNames);
}
});
allDeps = [...new Set(allDeps)].filter(pkg => packageSet.has(pkg));
if (allDeps.length > 0) {
allDeps.forEach(depPkgName => {
dependencyGraph.push([depPkgName, pkgName]);
});
}
@@ -41,3 +35,3 @@ else {
}
});
}
try {
@@ -47,3 +41,3 @@ return toposort_1.default(dependencyGraph).filter((pkg) => !!pkg);
catch (err) {
throw new Error(`Failed to do toposort for packages: ${err?.message}`);
throw new Error(`Failed to topologically sort packages: ${err?.message}`);
}
@@ -50,0 +44,0 @@ }
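Note: the dependency graph built above is a list of [dependency, dependent] edges passed to the toposort package, which orders nodes so the first element of each edge comes before the second. A small usage sketch with made-up package names:

import toposort from 'toposort';

// Each edge reads "left must come before right": app depends on lib and utils.
const edges: [string, string][] = [
  ['utils', 'lib'], // lib depends on utils
  ['utils', 'app'], // app depends on utils
  ['lib', 'app'],   // app depends on lib
];

const order = toposort(edges); // -> ['utils', 'lib', 'app'] (dependencies first)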
@@ -10,2 +10,10 @@ import { PackageOptions, BeachballOptions } from './BeachballOptions';
main?: string;
module?: string;
types?: string;
typings?: string;
exports?: any;
repository?: any;
bin?: any;
browser?: any;
files?: string[];
dependencies?: PackageDeps;
@@ -16,2 +24,4 @@ devDependencies?: PackageDeps;
beachball?: BeachballOptions;
/** Overrides applied during publishing */
publishConfig?: Pick<PackageJson, 'types' | 'typings' | 'main' | 'module' | 'exports' | 'repository' | 'bin' | 'browser' | 'files'>;
}
@@ -18,0 +28,0 @@ export interface PackageInfo {
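Note: the new publishConfig typing covers the overrides that performPublishOverrides applies: at publish time the accepted keys are copied over the top-level fields and removed from publishConfig. An illustrative example, expressed as a TypeScript object; all field values here are made up.

// Hypothetical package.json contents before publishing.
const packageJsonBeforePublish = {
  name: 'my-pkg',
  version: '1.2.3',
  main: 'src/index.ts', // what local tooling uses during development
  publishConfig: {
    main: 'lib/index.js',    // what consumers should get from the registry
    types: 'lib/index.d.ts',
  },
};
// Published package.json: main === 'lib/index.js', types === 'lib/index.d.ts',
// and those keys no longer appear under publishConfig.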
{
"name": "beachball",
"version": "2.32.2",
"version": "2.32.3",
"description": "The Sunniest Semantic Version Bumper",
@@ -54,3 +54,3 @@ "repository": {
"uuid": "^9.0.0",
"workspace-tools": "^0.34.0",
"workspace-tools": "^0.34.2",
"yargs-parser": "^21.0.0"
@@ -57,0 +57,0 @@ },
Updated workspace-tools@^0.34.2