@npmcli/arborist
Advanced tools
Comparing version 0.0.0-pre.3 to 0.0.0-pre.4
@@ -27,2 +27,5 @@ // mixin implementing the buildIdealTree method | ||
const {resolve} = require('path') | ||
const relpath = require('../relpath.js') | ||
const _depsSeen = Symbol('depsSeen') | ||
@@ -43,2 +46,6 @@ const _depsQueue = Symbol('depsQueue') | ||
const _nodeFromSpec = Symbol('nodeFromSpec') | ||
const _fetchManifest = Symbol('fetchManifest') | ||
const _problemEdges = Symbol('problemEdges') | ||
const _manifests = Symbol('manifests') | ||
const _linkFromSpec = Symbol('linkFromSpec') | ||
const _loadPeerSet = Symbol('loadPeerSet') | ||
@@ -52,2 +59,3 @@ // shared symbols so we can hit them with unit tests | ||
const _fixDepFlags = Symbol('fixDepFlags') | ||
const _resolveLinks = Symbol('resolveLinks') | ||
const _rootNodeFromPackage = Symbol('rootNodeFromPackage') | ||
@@ -61,2 +69,3 @@ const _addRm = Symbol('addRm') | ||
const _pruneFailedOptional = Symbol('pruneFailedOptional') | ||
const _linkNodes = Symbol('linkNodes') | ||
@@ -89,2 +98,4 @@ // used by Reify mixin | ||
this[_loadFailures] = new Set() | ||
this[_linkNodes] = new Set() | ||
this[_manifests] = new Map() | ||
} | ||
@@ -266,3 +277,3 @@ | ||
if (!this[_depsQueue].length) | ||
return | ||
return this[_resolveLinks]() | ||
@@ -329,18 +340,2 @@ // sort physically shallower deps up to the front of the queue, | ||
// skip over any bundled deps, they're not our problem. | ||
// Note that this WILL fetch bundled meta-deps which are also dependencies | ||
// but not listed as bundled deps. When reifying, we first unpack any | ||
// nodes that have bundleDependencies, then do a loadActual on them, move | ||
// the nodes into the ideal tree, and then prune. So, fetching those | ||
// possibly-bundled meta-deps at this point doesn't cause any worse | ||
// problems than a few unnecessary packument fetches. | ||
const bd = node.isRoot ? null : node.package.bundleDependencies | ||
const bundled = new Set(bd || []) | ||
const problems = [...node.edgesOut.values()] | ||
.filter(edge => !bundled.has(edge.name) && | ||
!(edge.to && edge.to.inShrinkwrap) && | ||
(!edge.valid || !edge.to || this[_updateNames].includes(edge.name) || | ||
node.isRoot && this[_explicitRequests].has(edge.name))) | ||
return Promise.all( | ||
@@ -353,3 +348,3 @@ // resolve all the edges into nodes using pacote.manifest | ||
// the new and/or updated dependency. | ||
problems.map(edge => this[_nodeFromEdge](edge) | ||
this[_problemEdges](node).map(edge => this[_nodeFromEdge](edge) | ||
.then(dep => ({edge, dep}))) | ||
@@ -363,2 +358,3 @@ ) | ||
.then(placed => { | ||
const promises = [] | ||
for (const set of placed) { | ||
@@ -369,6 +365,15 @@ for (const node of set) { | ||
this[_depsQueue].push(node) | ||
// we're certainly going to need these soon, fetch them asap | ||
// if it fails at this point, though, dont' worry because it | ||
// may well be an optional dep that has gone missing. it'll | ||
// fail later anyway. | ||
promises.push(...this[_problemEdges](node).map(e => | ||
this[_fetchManifest](npa.resolve(e.name, e.spec, node.path)) | ||
.catch(er => null))) | ||
} | ||
} | ||
return this[_buildDepStep]() | ||
return Promise.all(promises) | ||
}) | ||
.then(() => this[_buildDepStep]()) | ||
} | ||
@@ -391,2 +396,31 @@ | ||
[_problemEdges] (node) { | ||
// skip over any bundled deps, they're not our problem. | ||
// Note that this WILL fetch bundled meta-deps which are also dependencies | ||
// but not listed as bundled deps. When reifying, we first unpack any | ||
// nodes that have bundleDependencies, then do a loadActual on them, move | ||
// the nodes into the ideal tree, and then prune. So, fetching those | ||
// possibly-bundled meta-deps at this point doesn't cause any worse | ||
// problems than a few unnecessary packument fetches. | ||
const bd = node.isRoot ? null : node.package.bundleDependencies | ||
const bundled = new Set(bd || []) | ||
return [...node.edgesOut.values()] | ||
.filter(edge => !bundled.has(edge.name) && | ||
!(edge.to && edge.to.inShrinkwrap) && | ||
(!edge.valid || !edge.to || this[_updateNames].includes(edge.name) || | ||
node.isRoot && this[_explicitRequests].has(edge.name))) | ||
} | ||
[_fetchManifest] (spec) { | ||
if (this[_manifests].has(spec.raw)) | ||
return this[_manifests].get(spec.raw) | ||
else { | ||
const options = Object.create(this.options) | ||
const p = pacote.manifest(spec, options) | ||
this[_manifests].set(spec.raw, p) | ||
return p | ||
} | ||
} | ||
[_nodeFromSpec] (name, spec, parent, edge) { | ||
@@ -398,4 +432,4 @@ // pacote will slap integrity on its options, so we have to clone | ||
return spec.type === 'directory' | ||
? Promise.resolve(new Link({ name, parent, realpath: spec.fetchSpec })) | ||
: pacote.manifest(spec, Object.create(this.options)) | ||
? this[_linkFromSpec](name, spec, parent, edge) | ||
: this[_fetchManifest](spec) | ||
.then(pkg => new Node({ name, pkg, parent }), error => { | ||
@@ -416,2 +450,11 @@ error.requiredBy = edge.from.location || '.' | ||
[_linkFromSpec] (name, spec, parent, edge) { | ||
const realpath = spec.fetchSpec | ||
return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => { | ||
const link = new Link({ name, parent, realpath, pkg }) | ||
this[_linkNodes].add(link) | ||
return link | ||
}) | ||
} | ||
// load all peer deps and meta-peer deps into the node's parent | ||
@@ -724,2 +767,47 @@ // At the end of this, the node's peer-type outward edges are all | ||
// go through all the links in the this[_linkNodes] set | ||
// for each one: | ||
// - if outside the root, ignore it, assume it's fine, it's not our problem | ||
// - if a node in the tree already, assign the target to that node. | ||
// - if a path under an existing node, then assign that as the fsParent, | ||
// and add it to the _depsQueue | ||
// | ||
// call buildDepStep if anything was added to the queue, otherwise we're done | ||
[_resolveLinks] () { | ||
for (const link of this[_linkNodes]) { | ||
this[_linkNodes].delete(link) | ||
const realpath = link.realpath | ||
const loc = relpath(this.path, realpath) | ||
if (/^\.\.[\\\/]/.test(loc)) { | ||
// outside the root, somebody else's problem, ignore it | ||
// TODO: deep updates somehow, with a flag | ||
continue | ||
} | ||
const fromInv = this.idealTree.inventory.get(loc) | ||
if (fromInv && fromInv !== link.target) | ||
link.target = fromInv | ||
if (!link.target.parent && !link.target.fsParent) { | ||
// the fsParent MUST be some node in the tree, possibly the root. | ||
// find it by walking up. Note that this is where its deps may | ||
// end up being installed, if possible. | ||
const parts = loc.split('/') | ||
let p = parts.length - 1 | ||
for (let p = parts.length - 1; p > -1; p--) { | ||
const path = parts.slice(0, p).join('/') | ||
const node = !path ? this.idealTree : this.idealTree.inventory.get(path) | ||
if (node) { | ||
link.target.fsParent = node | ||
this[_depsQueue].push(link.target) | ||
p = -1 | ||
} | ||
} | ||
} | ||
} | ||
if (this[_depsQueue].length) | ||
return this[_buildDepStep]() | ||
} | ||
[_fixDepFlags] () { | ||
@@ -726,0 +814,0 @@ const metaFromDisk = this.idealTree.meta.loadedFromDisk |
@@ -27,2 +27,4 @@ // mix-in implementing the loadActual method | ||
const _cache = Symbol('nodeLoadingCache') | ||
const _loadActualVirtually = Symbol('loadActualVirtually') | ||
const _loadActualActually = Symbol('loadActualActually') | ||
@@ -68,8 +70,44 @@ module.exports = cls => class ActualLoader extends cls { | ||
.then(node => { | ||
// XXX only rely on this if the hidden lockfile is the newest thing? | ||
// need some kind of heuristic, like if the package.json or sw have | ||
// been touched more recently, then ignore it? This is a hazard if | ||
// user switches back and forth between Arborist and another way of | ||
// mutating the node_modules folder. | ||
this.actualTree = node | ||
this[_actualTreeLoaded] = new Set() | ||
// did is a set of all realpaths visited on this walk | ||
// important when a link points at a node we end up visiting later. | ||
return this[loadFSTree](node) | ||
return Shrinkwrap.load({ | ||
path: node.realpath, | ||
hiddenLockfile: true, | ||
}).then(meta => { | ||
if (meta.loadedFromDisk) { | ||
node.meta = meta | ||
meta.add(node) | ||
return this[_loadActualVirtually]() | ||
} | ||
return Shrinkwrap.load({ | ||
path: node.realpath, | ||
}).then(meta => { | ||
node.meta = meta | ||
meta.add(node) | ||
return this[_loadActualActually]() | ||
}) | ||
}) | ||
}) | ||
} | ||
[_loadActualVirtually] () { | ||
// have to load on a new Arborist object, so we don't assign | ||
// the virtualTree on this one! Also, the weird reference is because | ||
// we can't easily get a ref to Arborist in this module, without | ||
// creating a circular reference, since this class is a mixin used | ||
// to build up the Arborist class itself. | ||
return new this.constructor({...this.options}).loadVirtual({ | ||
root: this.actualTree, | ||
}) | ||
} | ||
[_loadActualActually] () { | ||
this[_actualTreeLoaded] = new Set() | ||
// did is a set of all realpaths visited on this walk | ||
// important when a link points at a node we end up visiting later. | ||
return this[loadFSTree](this.actualTree) | ||
.then(() => this[findFSParents]()) | ||
@@ -102,33 +140,15 @@ .then(() => calcDepFlags(this.actualTree)) | ||
// if we're at the root level, then we always load a shrinkwrap/lockfile | ||
// top-of-tree nodes also load a lockfile, so we can update it if we | ||
// make changes within their trees. | ||
// beyond the root level, we only load the shrinkwrap, because it can | ||
// affect how we build up the ideal tree. | ||
// basically, if we have a parent, only load shrinkwrap, not pkg-lock | ||
const shrinkwrapOnly = !!parent | ||
const p = Shrinkwrap.load({ | ||
path: real, | ||
shrinkwrapOnly, | ||
}).then(meta => { | ||
// TODO --no-shrinkwrap support | ||
if (path !== this.path && !meta.loadedFromDisk) | ||
meta = null | ||
return rpj(join(real, 'package.json')) | ||
// soldier on if read-package-json raises an error | ||
.then(pkg => [pkg, null], error => [null, error]) | ||
.then(([pkg, error]) => { | ||
return this[path === real ? newNode : newLink]({ | ||
path, | ||
realpath: real, | ||
pkg, | ||
error, | ||
parent, | ||
meta, | ||
root, | ||
hasShrinkwrap: meta && shrinkwrapOnly, | ||
}) | ||
const p = rpj(join(real, 'package.json')) | ||
// soldier on if read-package-json raises an error | ||
.then(pkg => [pkg, null], error => [null, error]) | ||
.then(([pkg, error]) => { | ||
return this[path === real ? newNode : newLink]({ | ||
path, | ||
realpath: real, | ||
pkg, | ||
error, | ||
parent, | ||
root, | ||
}) | ||
}) | ||
}) | ||
@@ -135,0 +155,0 @@ this[_cache].set(path, p) |
@@ -31,2 +31,5 @@ // mixin providing the loadVirtual method | ||
if (options.root && options.root.meta) | ||
return this[loadFromShrinkwrap](options.root.meta, options.root) | ||
return Shrinkwrap.load({ path: this.path }).then(s => { | ||
@@ -53,3 +56,3 @@ if (!s.loadedFromDisk && !options.root) | ||
this[assignParentage](nodes) | ||
return root | ||
return Promise.resolve(root) | ||
} | ||
@@ -103,3 +106,11 @@ | ||
if (parent) { | ||
node[ name === node.name ? 'parent' : 'fsParent' ] = parent | ||
// if the node location doesn't actually start with node_modules, but | ||
// the node name DOES match the folder it's in, like if you have a | ||
// link from `node_modules/app` to `./app`, then split won't contain | ||
// anything, but the name will still match. In that case, it is an | ||
// fsParent, though, not a parent. | ||
const parentType = name === node.name && split.length | ||
? 'parent' | ||
: 'fsParent' | ||
node[ parentType ] = parent | ||
// read inBundle from package because 'package' here is | ||
@@ -106,0 +117,0 @@ // actually a v2 lockfile metadata entry. |
@@ -29,2 +29,3 @@ // mixin implementing the reify method | ||
const relpath = require('../relpath.js') | ||
const Diff = require('../diff.js') | ||
@@ -56,2 +57,3 @@ const retirePath = require('../retire-path.js') | ||
const _extractOrLink = Symbol('extractOrLink') | ||
const _symlink = Symbol('symlink') | ||
const _recheckEngineAndPlatform = Symbol('recheckEngineAndPlatform') | ||
@@ -73,2 +75,3 @@ const _checkEngine = Symbol('checkEngine') | ||
const _saveIdealTree = Symbol.for('saveIdealTree') | ||
const _copyIdealToActual = Symbol('copyIdealToActual') | ||
@@ -106,2 +109,3 @@ // defined by Ideal mixin | ||
.then(() => this[_saveIdealTree](options)) | ||
.then(() => this[_copyIdealToActual]()) | ||
.then(() => { | ||
@@ -163,3 +167,3 @@ this.finishTracker('reify') | ||
[_renamePath] (from, to) { | ||
[_renamePath] (from, to, didMkdirp = false) { | ||
return rename(from, to) | ||
@@ -172,3 +176,7 @@ .catch(er => { | ||
return rimraf(to).then(() => rename(from, to)) | ||
else | ||
else if (er.code === 'ENOENT' && !didMkdirp) { | ||
// often this is a matter of the target dir not existing | ||
return mkdirp(dirname(to)).then(() => | ||
this[_renamePath](from, to, true)) | ||
} else | ||
throw er | ||
@@ -272,3 +280,3 @@ }) | ||
return node.isLink | ||
? rimraf(node.path).then(() => symlink(node.realpath, node.path, 'dir')) | ||
? rimraf(node.path).then(() => this[_symlink](node)) | ||
: pacote.extract(this[_registryResolved](node.resolved), node.path, { | ||
@@ -281,2 +289,9 @@ ...this.options, | ||
[_symlink] (node) { | ||
const dir = dirname(node.path) | ||
const target = node.realpath | ||
const rel = relative(dir, target) | ||
return symlink(rel, node.path, 'dir') | ||
} | ||
[_recheckEngineAndPlatform] (node) { | ||
@@ -296,4 +311,4 @@ // If we're loading from a v1 lockfile, then need to do this again later | ||
return Promise.resolve() | ||
.then(() => this[_checkEngine](node)) | ||
.then(() => this[_checkPlatform](node)) | ||
.then(() => this[_checkEngine](node)) | ||
.then(() => this[_checkPlatform](node)) | ||
} | ||
@@ -652,7 +667,5 @@ | ||
// the ideal tree is actualized now, hooray! | ||
// | ||
// XXX this needs to clone rather than copy, so that we can leave failed | ||
// optional deps in the ideal tree, but remove them from the actual. | ||
// But to do that, we need a way to clone a tree efficiently. | ||
this.actualTree = this.idealTree | ||
// it still contains all the references to optional nodes that were removed | ||
// for install failures. Those still end up in the shrinkwrap, so we | ||
// save it first, then prune out the optional trash, and then return it. | ||
@@ -664,3 +677,3 @@ // support save=false option | ||
if (options.add) { | ||
const pkg = this.actualTree.package | ||
const pkg = this.idealTree.package | ||
// need to save these more nicely, now that we know what they are | ||
@@ -672,3 +685,3 @@ for (const [type, specs] of Object.entries(options.add)) { | ||
for (const [name, spec] of Object.entries(specs)) { | ||
const child = this.actualTree.children.get(name) | ||
const child = this.idealTree.children.get(name) | ||
const resolved = child.resolved | ||
@@ -691,11 +704,11 @@ const req = npa(spec) | ||
// refresh the edges so they have the correct specs | ||
this.actualTree.package = pkg | ||
this.idealTree.package = pkg | ||
} | ||
// XXX preserve indentation maybe? | ||
const pj = resolve(this.actualTree.path, 'package.json') | ||
const pj = resolve(this.idealTree.path, 'package.json') | ||
return Promise.all([ | ||
this.actualTree.meta.save(), | ||
this.idealTree.meta.save(), | ||
writeFile(pj, JSON.stringify({ | ||
...this.actualTree.package, | ||
...this.idealTree.package, | ||
_id: undefined, | ||
@@ -705,2 +718,19 @@ }, null, 2) + '\n'), | ||
} | ||
[_copyIdealToActual] () { | ||
// save the ideal's meta as a hidden lockfile after we actualize it | ||
this.idealTree.meta.filename = | ||
this.path + '/node_modules/.package-lock.json' | ||
this.idealTree.meta.hiddenLockfile = true | ||
this.actualTree = this.idealTree | ||
this.idealTree = null | ||
for (const path of this[_trashList]) { | ||
const loc = relpath(this.path, path) | ||
const node = this.actualTree.inventory.get(loc) | ||
if (node && node.root === this.actualTree) | ||
node.parent = null | ||
} | ||
return this.actualTree.meta.save() | ||
} | ||
} |
@@ -26,6 +26,10 @@ // An edge in the dependency graph | ||
if (typeof spec !== 'string') | ||
throw new TypeError('must provide string spec') | ||
this[_spec] = spec | ||
if (typeof name !== 'string') | ||
throw new TypeError('must provide dependency name') | ||
this[_name] = name | ||
if (!types.has(type)) { | ||
@@ -32,0 +36,0 @@ throw new TypeError( |
@@ -5,2 +5,3 @@ const relpath = require('./relpath.js') | ||
const _target = Symbol('_target') | ||
const {dirname} = require('path') | ||
class Link extends Node { | ||
@@ -64,3 +65,3 @@ constructor (options) { | ||
return this.path && this.realpath | ||
? `file:${relpath(this.path, this.realpath)}` | ||
? `file:${relpath(dirname(this.path), this.realpath)}` | ||
: null | ||
@@ -67,0 +68,0 @@ } |
@@ -656,3 +656,3 @@ // inventory, path, realpath, root, and parent | ||
const root = this.root | ||
this.location = relative(root.path, this.path).replace(/\\/g, '/') | ||
this.location = relpath(root.realpath, this.path) | ||
@@ -671,5 +671,5 @@ root.inventory.add(this) | ||
: !/^file:/.test(resolved) ? resolved | ||
// resolve onto the metadata path, then relpath to there from here | ||
// resolve onto the metadata path, then realpath to there from here | ||
: `file:${relpath(this.path, | ||
resolve(root.meta.path, resolved.substr(5)))}` | ||
resolve(root.realpath, resolved.substr(5)))}` | ||
@@ -676,0 +676,0 @@ // if we have one, only set the other if it matches |
@@ -91,2 +91,3 @@ // a module that manages a shrinkwrap file (npm-shrinkwrap.json or | ||
'hasInstallScript', | ||
'bin', | ||
] | ||
@@ -166,3 +167,8 @@ | ||
constructor (options = {}) { | ||
const { path, indent = 2, shrinkwrapOnly = false } = options | ||
const { | ||
path, | ||
indent = 2, | ||
shrinkwrapOnly = false, | ||
hiddenLockfile = false, | ||
} = options | ||
this[_awaitingUpdate] = new Map() | ||
@@ -176,2 +182,3 @@ this.path = resolve(path || '.') | ||
this.yarnLock = null | ||
this.hiddenLockfile = hiddenLockfile | ||
// only load npm-shrinkwrap.json in dep trees, not package-lock | ||
@@ -197,2 +204,5 @@ this.shrinkwrapOnly = shrinkwrapOnly | ||
maybeReadFile(this.path + '/npm-shrinkwrap.json'), | ||
] : this.hiddenLockfile ? [ | ||
null, | ||
maybeReadFile(this.path + '/node_modules/.package-lock.json'), | ||
] : [ | ||
@@ -212,3 +222,5 @@ maybeReadFile(this.path + '/npm-shrinkwrap.json'), | ||
this.filename = `${this.path}/${ | ||
this.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap' : 'package-lock' | ||
this.hiddenLockfile ? 'node_modules/.package-lock' | ||
: this.shrinkwrapOnly || sw && !lock ? 'npm-shrinkwrap' | ||
: 'package-lock' | ||
}.json` | ||
@@ -234,3 +246,3 @@ this.type = basename(this.filename) | ||
packages: lock.packages || {}, | ||
dependencies: lock.dependencies || {}, | ||
...(this.hiddenLockfile ? {} : {dependencies: lock.dependencies || {}}), | ||
} | ||
@@ -514,2 +526,7 @@ this.originalLockfileVersion = lock.lockfileVersion | ||
this.data.packages[loc] = meta | ||
// hidden lockfiles don't include legacy metadata | ||
if (this.hiddenLockfile) | ||
return | ||
const path = loc.split(/(?:^|\/)node_modules\//) | ||
@@ -626,2 +643,7 @@ | ||
} | ||
// hidden lockfiles don't include legacy metadata | ||
if (this.hiddenLockfile) | ||
delete this.data.dependencies | ||
return this.data | ||
@@ -628,0 +650,0 @@ } |
@@ -33,2 +33,3 @@ // parse a yarn lock file | ||
const consistentResolve = require('./consistent-resolve.js') | ||
const {dirname} = require('path') | ||
@@ -172,4 +173,7 @@ const prefix = | ||
if (node.resolved) | ||
this.current.resolved = | ||
consistentResolve(node.resolved, node.path, node.root.path) | ||
this.current.resolved = consistentResolve( | ||
node.resolved, | ||
node.isLink ? dirname(node.path) : node.path, | ||
node.root.path | ||
) | ||
if (node.integrity) | ||
@@ -176,0 +180,0 @@ this.current.integrity = node.integrity |
{ | ||
"name": "@npmcli/arborist", | ||
"version": "0.0.0-pre.3", | ||
"version": "0.0.0-pre.4", | ||
"description": "Manage node_modules trees", | ||
@@ -11,3 +11,3 @@ "dependencies": { | ||
"mkdirp-infer-owner": "^1.0.2", | ||
"npm-install-checks": "github:npm/npm-install-checks#v4", | ||
"npm-install-checks": "^4.0.0", | ||
"npm-package-arg": "^8.0.0", | ||
@@ -20,3 +20,3 @@ "pacote": "^10.3.1", | ||
"readdir-scoped-modules": "^1.1.0", | ||
"semver": "^6.1.2", | ||
"semver": "^7.1.2", | ||
"treeverse": "^1.0.1" | ||
@@ -28,3 +28,3 @@ }, | ||
"require-inject": "^1.4.4", | ||
"tap": "^14.10.5", | ||
"tap": "^14.10.6", | ||
"tcompare": "^3.0.4" | ||
@@ -31,0 +31,0 @@ }, |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
GitHub dependency
Supply chain risk: Contains a dependency which resolves to a GitHub URL. Dependencies fetched from GitHub specifiers are not immutable and can be used to inject untrusted code, or can reduce the likelihood of a reproducible install.
Found 1 instance in 1 package
4344
1
195640
32
+ Added npm-install-checks@4.0.0 (transitive)
- Removed semver@6.3.1 (transitive)
Updated npm-install-checks@^4.0.0
Updated semver@^7.1.2