@changesets/assemble-release-plan

@changesets/assemble-release-plan - npm Package Compare versions

Comparing version 0.1.0 to 0.1.2

dist/assemble-release-plan.cjs.d.ts


CHANGELOG.md
# @changesets/assemble-release-plan
## 0.1.2
### Patch Changes
- [a15abbf9](https://github.com/changesets/changesets/commit/a15abbf9) - Previous release shipped unbuilt code - fixing that
## 0.1.0
### Minor Changes

@@ -5,0 +12,0 @@


dist/assemble-release-plan.cjs.js

@@ -1,7 +0,7 @@

"use strict";
'use strict';
let unregister = require("/Users/bconolly/Development/changesets/node_modules/@preconstruct/hook/dist/hook.cjs.js").___internalHook("/Users/bconolly/Development/changesets");
module.exports = require("/Users/bconolly/Development/changesets/packages/assemble-release-plan/src/index.ts");
unregister();
if (process.env.NODE_ENV === "production") {
module.exports = require("./assemble-release-plan.cjs.prod.js");
} else {
module.exports = require("./assemble-release-plan.cjs.dev.js");
}
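// Note on the change above: the removed 0.1.0 entry registered the @preconstruct/hook and
// re-exported the untranspiled src/index.ts from an absolute path on the maintainer's machine,
// which is the "unbuilt code" the changelog refers to; the 0.1.2 entry instead picks the
// prebuilt prod or dev CJS bundle based on NODE_ENV.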

dist/assemble-release-plan.esm.js

@@ -1,15 +0,261 @@

// 👋 hey!!
// you might be reading this and seeing .esm in the filename
// and being confused why there is commonjs below this filename
// DON'T WORRY!
// this is intentional
// it's only commonjs with `preconstruct dev`
// when you run `preconstruct build`, it will be ESM
// why is it commonjs?
// we need to re-export every export from the source file
// but we can't do that with ESM without knowing what the exports are (because default exports aren't included in export/import *)
// and they could change after running `preconstruct dev` so we can't look at the file without forcing people to
// run preconstruct dev again which wouldn't be ideal
// this solution could change but for now, it's working
import _objectSpread from '@babel/runtime/helpers/esm/objectSpread';
import semver from 'semver';
module.exports = require("/Users/bconolly/Development/changesets/packages/assemble-release-plan/src/index.ts")
/*
WARNING:
Important note for understanding how this package works:
We are doing some kind of wacky things with manipulating the objects within the
releases array, despite the fact that this was passed to us as an argument. We are
aware that this is generally bad practice, but have decided to do this here as
we control the entire flow of releases.
We could solve this by inlining this function, or by returning a deep-cloned then
modified array, but we decided both of those are worse than this solution.
*/
function getDependents(releases, workspaces, dependencyGraph) {
let updated = false; // NOTE this is intended to be called recursively
let pkgsToSearch = [...releases];
let pkgJsonsByName = new Map( // TODO this seems an inefficient use of getting the whole workspaces
workspaces.map(({
name,
config
}) => [name, config]));
while (pkgsToSearch.length > 0) {
// nextRelease is our dependency, think of it as "avatar"
const nextRelease = pkgsToSearch.shift();
if (!nextRelease) continue; // pkgDependents will be a list of packages that depend on nextRelease, e.g. ['avatar-group', 'comment']
const pkgDependents = dependencyGraph.get(nextRelease.name);
if (!pkgDependents) {
throw new Error(`Error in determining dependents - could not find package in repository: ${nextRelease.name}`);
} // For each dependent we are going to see whether it needs to be bumped because its dependency
// is leaving the version range.
pkgDependents.map(dependent => {
let type;
const dependentPkgJSON = pkgJsonsByName.get(dependent);
if (!dependentPkgJSON) throw new Error("Dependency map is incorrect");
const {
depTypes,
versionRange
} = getDependencyVersionRange(dependentPkgJSON, nextRelease.name); // Firstly we check if it is a peerDependency because if it is, our dependent bump type needs to be major.
if (depTypes.includes("peerDependencies") && nextRelease.type !== "patch" && (!releases.some(dep => dep.name === dependent) || releases.some(dep => dep.name === dependent && dep.type !== "major"))) {
type = "major";
} else {
if ( // TODO validate this - I don't think it's right anymore
!releases.some(dep => dep.name === dependent) && !semver.satisfies(semver.inc(nextRelease.oldVersion, nextRelease.type), versionRange)) {
type = "patch";
}
}
return {
name: dependent,
type,
pkgJSON: dependentPkgJSON
};
}).filter(({
type
}) => type).forEach( // @ts-ignore - I don't know how to make typescript understand that the filter above guarantees this and I got sick of trying
({
name,
type,
pkgJSON
}) => {
// At this point, we know if we are making a change
updated = true;
const existing = releases.find(dep => dep.name === name); // For things that are being given a major bump, we check if we have already
// added them here. If we have, we update the existing item instead of pushing it on to search.
// It is safe to not add it to pkgsToSearch because it should have already been searched at the
// largest possible bump type.
if (existing && type === "major" && existing.type !== "major") {
existing.type = "major";
pkgsToSearch.push(existing);
} else {
let newDependent = {
name,
type,
oldVersion: pkgJSON.version,
changesets: []
};
pkgsToSearch.push(newDependent);
releases.push(newDependent);
}
});
}
return updated;
}
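// An illustration of the logic above with hypothetical packages (not data from this
// repository): suppose "avatar" is getting a minor release from 1.0.0, so its next version
// is semver.inc("1.0.0", "minor") === "1.1.0", and "avatar-group" depends on it.
// - If "avatar-group" declares "avatar": "^1.0.0", the new version still satisfies the range,
//   so no release is added for it.
// - If it declares "avatar": "~1.0.0", the new version leaves the range, so a "patch" release
//   is pushed for "avatar-group" (and it is then searched for dependents of its own).
// - If "avatar" appears in its peerDependencies and the bump is not a patch, "avatar-group"
//   gets a "major" release instead, unless a major release for it is already planned.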
/*
Returns an object in the shape { depTypes: [], versionRange: '' } with a list of different depTypes
matched ('dependencies', 'peerDependencies', etc) and the versionRange itself ('^1.0.0')
*/
function getDependencyVersionRange(dependentPkgJSON, dependencyName) {
const DEPENDENCY_TYPES = ["dependencies", "devDependencies", "peerDependencies", "optionalDependencies"];
const dependencyVersionRange = {
depTypes: [],
versionRange: ""
};
for (const type of DEPENDENCY_TYPES) {
const deps = dependentPkgJSON[type];
if (!deps) continue;
if (deps[dependencyName]) {
dependencyVersionRange.depTypes.push(type); // We'll just override this each time, *hypothetically* it *should* be the same...
dependencyVersionRange.versionRange = deps[dependencyName];
}
}
return dependencyVersionRange;
}
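// For example, with a hypothetical dependent package.json of
// { dependencies: { "avatar": "^1.0.0" }, peerDependencies: { "avatar": "^1.0.0" } },
// getDependencyVersionRange(pkgJSON, "avatar") returns
// { depTypes: ["dependencies", "peerDependencies"], versionRange: "^1.0.0" }.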
// This function takes in changesets and returns one release per
// package listed in the changesets
function flattenReleases(changesets, workspaces) {
let releases = new Map();
changesets.forEach(changeset => {
changeset.releases.forEach(({
name,
type
}) => {
let release = releases.get(name);
let ws = workspaces.find(ws => ws.name === name);
if (!ws) {
throw new Error(`Could not find package information for ${name}`);
}
let {
config
} = ws;
if (!release) {
release = {
name,
type,
oldVersion: config.version,
changesets: [changeset.id]
};
} else {
// If the type was already major, we never need to update it
if (release.type === "minor" && type === "major") {
release.type = type;
} else if (release.type === "patch" && (type === "major" || type === "minor")) {
release.type = type;
} // Check whether the bumpType will change
// If the bumpType has changed recalc newVersion
// push new changeset to releases
release.changesets.push(changeset.id);
}
releases.set(name, release);
});
});
return [...releases.values()];
}
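// A small example of the flattening above, with hypothetical changesets: if one changeset
// lists { name: "avatar", type: "patch" } and a second lists { name: "avatar", type: "minor" },
// the result is a single release { name: "avatar", type: "minor", oldVersion: <version from
// the workspace's package.json>, changesets: [<both changeset ids>] } - the highest bump type
// wins and every contributing changeset id is kept.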
/*
WARNING:
Important note for understanding how this package works:
We are doing some kind of wacky things with manipulating the objects within the
releases array, despite the fact that this was passed to us as an argument. We are
aware that this is generally bad practice, but have decided to do this here as
we control the entire flow of releases.
We could solve this by inlining this function, or by returning a deep-cloned then
modified array, but we decided both of those are worse than this solution.
*/
function applyLinks(releases, workspaces, linked) {
let updated = false;
if (!linked) return updated; // We do this for each set of linked packages
for (let linkedPackages of linked) {
// First we filter down to all the relevant releases for one set of linked packages
let releasingLinkedPackages = releases.filter(release => linkedPackages.includes(release.name)); // If we proceed any further we do extra work with calculating highestVersion for things that might
// not need one, as they only have workspace based packages
if (releasingLinkedPackages.length < 1) continue;
let highestReleaseType;
let highestVersion;
for (let pkg of releasingLinkedPackages) {
// Note that patch is implicitly set here, but never needs to override another value
if (!highestReleaseType) {
highestReleaseType = pkg.type;
} else if (pkg.type === "major") {
highestReleaseType = pkg.type;
} else if (pkg.type === "minor" && highestReleaseType !== "major") {
highestReleaseType = pkg.type;
}
} // Next we determine what the highest version among the linked packages will be
for (let linkedPackage of linkedPackages) {
let workspace = workspaces.find(workspace => workspace.name === linkedPackage);
if (workspace) {
if (highestVersion === undefined || semver.gt(workspace.config.version, highestVersion)) {
highestVersion = workspace.config.version;
}
} else {
console.error(`FATAL ERROR IN CHANGESETS! We were unable to version for linked package: ${linkedPackage} in linkedPackages: ${linkedPackages.toString()}`);
throw new Error(`fatal: could not resolve linked packages`);
}
}
if (!highestVersion || !highestReleaseType) throw new Error(`Large internal changesets error in calculating linked versions. Please contact the maintainers`); // Finally, we update the packages so all of them are on the highest version
for (let linkedPackage of releasingLinkedPackages) {
if (linkedPackage.type !== highestReleaseType) {
updated = true;
linkedPackage.type = highestReleaseType;
}
if (linkedPackage.oldVersion !== highestVersion) {
updated = true;
linkedPackage.oldVersion = highestVersion;
}
}
}
return updated;
}
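// A small example of the linking above, with hypothetical data: given config.linked of
// [["pkg-a", "pkg-b"]], pkg-a at 1.0.0 with a "major" release and pkg-b at 1.2.0 with a
// "patch" release, both releases end up with type "major" and oldVersion "1.2.0" (the highest
// version among the linked packages), so both are later bumped to 2.0.0.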
function assembleReleasePlan(changesets, workspaces, dependentsGraph, config) {
// releases is, at this point, a list of all packages we are going to release,
// flattened down to one release per package, having a reference back to their
// changesets, and with a calculated new version
let releases = flattenReleases(changesets, workspaces);
let releaseObjectValidated = false;
while (releaseObjectValidated === false) {
// The map passed in to determineDependents will be mutated
let dependentAdded = getDependents(releases, workspaces || [], dependentsGraph); // The map passed in to determineDependents will be mutated
let linksUpdated = applyLinks(releases, workspaces, config.linked);
releaseObjectValidated = !linksUpdated && !dependentAdded;
}
return {
changesets,
releases: releases.map(incompleteRelease => {
return _objectSpread({}, incompleteRelease, {
newVersion: semver.inc(incompleteRelease.oldVersion, incompleteRelease.type)
});
})
};
}
export default assembleReleasePlan;
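
A rough usage sketch of the default export follows; every input below is hypothetical and heavily simplified (the real changeset, workspace, and config shapes come from @changesets/types), so read it as an illustration rather than the package's documented API:

import assembleReleasePlan from "@changesets/assemble-release-plan";

// Two hypothetical workspaces: "avatar-group" depends on "avatar" with a tilde range.
const workspaces = [
  { name: "avatar", config: { version: "1.0.0" } },
  { name: "avatar-group", config: { version: "1.0.0", dependencies: { avatar: "~1.0.0" } } }
];
// The dependents graph maps each package name to the packages that depend on it.
const dependentsGraph = new Map([
  ["avatar", ["avatar-group"]],
  ["avatar-group", []]
]);
// One changeset asking for a minor release of "avatar".
const changesets = [
  { id: "brave-foxes-jump", releases: [{ name: "avatar", type: "minor" }] }
];

const plan = assembleReleasePlan(changesets, workspaces, dependentsGraph, { linked: [] });
// plan.releases now holds "avatar" with newVersion "1.1.0" and "avatar-group" with
// newVersion "1.0.1", because "~1.0.0" no longer covers 1.1.0.
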
package.json

{
"name": "@changesets/assemble-release-plan",
"version": "0.1.0",
"version": "0.1.2",
"description": "Reads changesets and adds information on dependents that need bumping",

@@ -11,8 +11,8 @@ "main": "dist/assemble-release-plan.cjs.js",

"@babel/runtime": "^7.4.4",
"@changesets/types": "^0.1.0",
"@changesets/types": "^0.1.2",
"semver": "^5.4.1"
},
"devDependencies": {
"@changesets/config": "^0.1.0"
"@changesets/config": "^0.1.2"
}
}