watch-dependency-graph
Advanced tools
Comparing version 0.0.2 to 0.1.0
225
index.js
const path = require('path') | ||
const { EventEmitter } = require('events') | ||
const assert = require('assert') | ||
const chokidar = require('chokidar') | ||
@@ -7,3 +8,10 @@ const matched = require('matched') | ||
function walk(children, parentIndex, parentChildren, ids, register, visited = []) { | ||
function walk ( | ||
children, | ||
entryPointer, | ||
parentChildren, | ||
ids, | ||
register, | ||
visited = [] | ||
) { | ||
for (const { id, children: childs } of children) { | ||
@@ -13,10 +21,11 @@ // push to all files | ||
const index = ids.indexOf(id) | ||
const pointer = ids.indexOf(id) | ||
// push to previous parent's children | ||
if (!parentChildren.includes(index)) parentChildren.push(index) | ||
if (!parentChildren.includes(pointer)) parentChildren.push(pointer) | ||
// set module values | ||
if (!register[id]) register[id] = { roots: [], children: [] } // setup | ||
if (!register[id].roots.includes(parentIndex)) register[id].roots.push(parentIndex) // set roots | ||
if (!register[id]) register[id] = { entries: [], children: [] } // setup | ||
if (!register[id].entries.includes(entryPointer)) | ||
register[id].entries.push(entryPointer) // set entries | ||
@@ -26,3 +35,3 @@ // recurse, but only if we haven't walked these children yet | ||
visited.push(id) | ||
walk(childs, parentIndex, register[id].children, ids, register, visited) | ||
walk(childs, entryPointer, register[id].children, ids, register, visited) | ||
} | ||
@@ -32,102 +41,168 @@ } | ||
module.exports = function graph(...inputs) { | ||
const events = new EventEmitter() | ||
function getEntries (globs) { | ||
const files = uniq( | ||
inputs.flat(2).map(matched.sync).flat(2).map(f => require.resolve(path.resolve(process.cwd(), f))) | ||
globs | ||
.flat(2) | ||
.map(matched.sync) | ||
.flat(2) | ||
.map(f => require.resolve(path.resolve(process.cwd(), f))) | ||
) | ||
// required, load this module.children | ||
files.map(require) | ||
files.map(require) // load modules | ||
const ids = [] | ||
const register = {} | ||
const entries = module.children.filter(c => files.includes(c.id)) | ||
return module.children.filter(({ id }) => files.includes(id)) | ||
} | ||
// kick it off | ||
for (const { id, children } of entries) { | ||
ids.push(id) | ||
module.exports = function graph (...globs) { | ||
// once instance | ||
const events = new EventEmitter() | ||
const index = ids.indexOf(id) | ||
// all generated from factory | ||
let ids = [] | ||
let register = {} | ||
let entries = [] | ||
let watcher | ||
register[id] = { | ||
roots: [index], | ||
children: [], | ||
// kick it off | ||
;(function init () { | ||
ids = [] | ||
register = {} | ||
entries = getEntries(globs) | ||
assert(entries.length, 'No entries found') | ||
for (const { id, children } of entries) { | ||
ids.push(id) | ||
const entryPointer = ids.indexOf(id) // get pointer | ||
register[id] = { | ||
entries: [entryPointer], // self-referential | ||
children: [] | ||
} | ||
if (children) | ||
walk(children, entryPointer, register[id].children, ids, register) | ||
} | ||
if (children) walk(children, index, register[id].children, ids, register) | ||
} | ||
watcher = chokidar.watch(ids, { ignoreInitial: true }) | ||
const watcher = chokidar.watch(ids, { ignoreInitial: true }) | ||
watcher.on('all', (e, f) => { | ||
if (e === 'add') { | ||
// shouldn't ever happen | ||
} else if (e === 'unlink') { | ||
const removedModule = entries.find(e => e.id === f) | ||
// an *entry* was renamed or removed | ||
if (removedModule) { | ||
watcher.close() | ||
events.emit('remove', [removedModule.id]) | ||
init() | ||
// const pointer = ids.indexOf(f) | ||
watcher | ||
.on('all', (e, f) => { | ||
const updatedFilepath = require.resolve(f) | ||
const { roots: parentsToUpdate } = register[updatedFilepath] | ||
// // delete from ids and register | ||
// ids.splice(pointer, 1) | ||
// delete register[f] | ||
const prev = require.cache[updatedFilepath] | ||
delete require.cache[updatedFilepath] | ||
require(updatedFilepath) | ||
const next = require.cache[updatedFilepath] | ||
// // remove any references | ||
// for (const filepath of Object.keys(register)) { | ||
// const { entries, children } = register[filepath] | ||
// entries.splice(entries.indexOf(pointer), 1) | ||
// children.splice(children.indexOf(pointer), 1) | ||
// } | ||
} else { | ||
watcher.unwatch(f) | ||
} | ||
} else if (e === 'change') { | ||
const updatedFilepath = require.resolve(f) | ||
const { entries } = register[updatedFilepath] | ||
// diff prev/next | ||
const removedModules = prev.children | ||
.filter(c => !next.children.find(_c => _c.id === c.id)) | ||
const addedModules = next.children | ||
.filter(c => !prev.children.find(_c => _c.id === c.id)) | ||
const prev = require.cache[updatedFilepath] | ||
delete require.cache[updatedFilepath] | ||
require(updatedFilepath) | ||
const next = require.cache[updatedFilepath] | ||
for (const removedModule of removedModules) { | ||
let isModuleStillInUse = false | ||
const removedModuleIndex = ids.indexOf(removedModule.id) | ||
// diff prev/next | ||
const removedModuleIds = prev.children | ||
.filter(c => !next.children.find(_c => _c.id === c.id)) | ||
.map(c => c.id) | ||
for (const filepath of Object.keys(register)) { | ||
if (filepath === updatedFilepath) { | ||
const localChildren = register[filepath].children | ||
const localIndex = localChildren.indexOf(removedModuleIndex) | ||
// add to watch instance | ||
next.children | ||
.filter(c => !prev.children.find(_c => _c.id === c.id)) | ||
.forEach(c => watcher.add(c.id)) | ||
/* | ||
* for any roots of the file that changed, remove them from childen | ||
* of this file | ||
*/ | ||
for (const rootIndex of register[filepath].roots) { | ||
for (const localChildIndex of localChildren) { | ||
const localChildFile = ids[localChildIndex] | ||
const localChildFileRoots = register[localChildFile].roots | ||
localChildFileRoots.splice(localChildFileRoots.indexOf(rootIndex), 1) | ||
for (const removedModuleId of removedModuleIds) { | ||
let isModuleStillInUse = false | ||
const removedModulePointer = ids.indexOf(removedModuleId) | ||
for (const filepath of Object.keys(register)) { | ||
if (filepath === updatedFilepath) { | ||
const localChildren = register[filepath].children | ||
const localPointer = localChildren.indexOf(removedModulePointer) | ||
/* | ||
* for any entries of the file that changed, remove them from childen | ||
* of this file | ||
*/ | ||
for (const entryPointer of register[filepath].entries) { | ||
for (const localChildPointer of localChildren) { | ||
const localChildFile = ids[localChildPointer] | ||
const localChildEntries = register[localChildFile].entries | ||
localChildEntries.splice( | ||
localChildEntries.indexOf(entryPointer), | ||
1 | ||
) | ||
} | ||
} | ||
} | ||
// clean up the children of this file last | ||
register[filepath].children.splice(localIndex, 1) | ||
} else { | ||
// don't accidentally reset back to false on another iteration | ||
if (isModuleStillInUse) continue | ||
// clean up the children of this file last | ||
register[filepath].children.splice(localPointer, 1) | ||
} else { | ||
// don't accidentally reset back to false on another iteration | ||
if (isModuleStillInUse) continue | ||
isModuleStillInUse = register[filepath].children.includes(removedModuleIndex) | ||
isModuleStillInUse = register[filepath].children.includes( | ||
removedModulePointer | ||
) | ||
} | ||
} | ||
if (!isModuleStillInUse) { | ||
ids.splice(removedModulePointer, 1) | ||
delete register[removedModuleId] | ||
watcher.unwatch(removedModuleId) | ||
} | ||
} | ||
if (!isModuleStillInUse) { | ||
ids.splice(removedModuleIndex, 1) | ||
delete register[removedModule.id] | ||
watcher.unwatch(removedModule.id) | ||
for (const entryPointer of entries) { | ||
const parentFile = ids[entryPointer] | ||
walk( | ||
next.children, | ||
entryPointer, | ||
register[parentFile].children, | ||
ids, | ||
register | ||
) | ||
} | ||
} | ||
watcher.add(addedModules.map(a => a.id)) | ||
for (const parentIndex of parentsToUpdate) { | ||
const parentFile = ids[parentIndex] | ||
walk(next.children, parentIndex, register[parentFile].children, ids, register) | ||
events.emit('update', ids[parentIndex]) | ||
events.emit( | ||
'update', | ||
entries.map(p => ids[p]) | ||
) | ||
} | ||
}) | ||
})() | ||
return { | ||
ids, | ||
register, | ||
on(ev, fn) { | ||
get ids () { | ||
return ids | ||
}, | ||
get register () { | ||
return register | ||
}, | ||
on (ev, fn) { | ||
events.on(ev, fn) | ||
return () => events.removeListener(ev, fn) | ||
}, | ||
async close() { | ||
async close () { | ||
return watcher.close() | ||
@@ -134,0 +209,0 @@ } |
{ | ||
"name": "watch-dependency-graph", | ||
"version": "0.0.2", | ||
"version": "0.1.0", | ||
"description": "", | ||
"main": "index.js", | ||
"scripts": { | ||
"test": "node -r esm test", | ||
"test:watch": "nodemon -r esm test -i ./fixtures" | ||
"fixtures": "node fixtures", | ||
"test": "npm run fixtures && node -r esm test", | ||
"test:watch": "npm run fixtures && nodemon -r esm test -i ./fixtures", | ||
"format": "prettier-standard --format" | ||
}, | ||
"husky": { | ||
"hooks": { | ||
"pre-commit": "prettier-standard --format --staged && npm run test" | ||
} | ||
}, | ||
"keywords": [], | ||
@@ -22,4 +29,6 @@ "author": "", | ||
"fs-extra": "^9.0.1", | ||
"nodemon": "^2.0.4" | ||
"husky": "^4.3.0", | ||
"nodemon": "^2.0.4", | ||
"prettier-standard": "^16.4.1" | ||
} | ||
} |
172
test.js
const fs = require('fs-extra') | ||
const path = require('path') | ||
const test = require('baretest')('presta') | ||
const assert = require('assert') | ||
const DELAY = 300 | ||
const { fixtures, fixturesRoot } = require('./fixtures.js') | ||
const wait = t => new Promise(r => setTimeout(r, t)) | ||
const fixturesRoot = path.join(__dirname, 'fixtures') | ||
const fixtures = { | ||
childOfChildren: path.join(fixturesRoot, 'childOfChildren.js'), | ||
commonDep: path.join(fixturesRoot, 'commonDep.js'), | ||
childOfA: path.join(fixturesRoot, 'childOfA.js'), | ||
childOfB: path.join(fixturesRoot, 'childOfB.js'), | ||
A: path.join(fixturesRoot, 'A.js'), | ||
B: path.join(fixturesRoot, 'B.js'), | ||
function subscribe (event, instance) { | ||
return new Promise(r => { | ||
const close = instance.on(event, ids => { | ||
close() | ||
r(ids) | ||
}) | ||
}) | ||
} | ||
test.before(() => { | ||
fs.ensureDirSync(fixturesRoot) | ||
fs.outputFileSync(fixtures.childOfChildren, `module.exports = {}`) | ||
fs.outputFileSync(fixtures.commonDep, `module.exports = {}`) | ||
fs.outputFileSync(fixtures.childOfA, `require('${fixtures.childOfChildren}')`) | ||
fs.outputFileSync(fixtures.childOfB, `require('${fixtures.childOfChildren}')`) | ||
fs.outputFileSync(fixtures.A, `import * as A from '${fixtures.childOfA}';import * as commonDep from '${fixtures.commonDep}'`) // works with imports | ||
fs.outputFileSync(fixtures.B, `require('${fixtures.childOfB}');require('${fixtures.commonDep}')`) | ||
}) | ||
test('update main entries', async () => { | ||
const instance = require('./')(fixtures.A, fixtures.B) | ||
test.after(async () => { | ||
fs.removeSync(fixturesRoot) | ||
}) | ||
const A = subscribe('update', instance) | ||
test('update main entries', async () => { | ||
const updated = [] | ||
const instance = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const close = instance.on('update', mod => updated.push(mod)) | ||
fs.outputFileSync(fixtures.A, fs.readFileSync(fixtures.A)) | ||
fs.outputFileSync(fixtures.A, fs.readFileSync(fixtures.A)) | ||
assert((await A).includes(fixtures.A)) | ||
const B = subscribe('update', instance) | ||
fs.outputFileSync(fixtures.B, fs.readFileSync(fixtures.B)) | ||
await wait(DELAY) | ||
assert((await B).includes(fixtures.B)) | ||
assert(updated.length >= 2) | ||
assert(updated.includes(fixtures.A)) | ||
assert(updated.includes(fixtures.B)) | ||
close() | ||
await instance.close() | ||
@@ -52,9 +37,9 @@ }) | ||
test('update single child', async () => { | ||
const updated = [] | ||
const instance = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const close = instance.on('update', mod => updated.push(mod)) | ||
const instance = require('./')(fixtures.A, fixtures.B) | ||
const subscriber = subscribe('update', instance) | ||
fs.outputFileSync(fixtures.childOfA, fs.readFileSync(fixtures.childOfA)) | ||
await wait(DELAY) | ||
const updated = await subscriber | ||
@@ -64,3 +49,2 @@ assert(updated.length >= 1) | ||
close() | ||
await instance.close() | ||
@@ -70,10 +54,13 @@ }) | ||
test('update common nested child', async () => { | ||
const updated = [] | ||
const instance = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const close = instance.on('update', mod => updated.push(mod)) | ||
const instance = require('./')(fixtures.A, fixtures.B) | ||
fs.outputFileSync(fixtures.childOfChildren, fs.readFileSync(fixtures.childOfChildren)) | ||
const subscriber = subscribe('update', instance) | ||
await wait(DELAY) | ||
fs.outputFileSync( | ||
fixtures.childOfChildren, | ||
fs.readFileSync(fixtures.childOfChildren) | ||
) | ||
const updated = await subscriber | ||
assert(updated.length >= 2) | ||
@@ -83,3 +70,2 @@ assert(updated.includes(fixtures.A)) | ||
close() | ||
await instance.close() | ||
@@ -89,21 +75,22 @@ }) | ||
test('update common nested child after ancestor removal', async () => { | ||
const updated = [] | ||
const instance = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const close = instance.on('update', mod => updated.push(mod)) | ||
const instance = require('./')(fixtures.A, fixtures.B) | ||
const A = subscribe('update', instance) | ||
fs.outputFileSync(fixtures.childOfA, '') // remove child | ||
await wait(DELAY) | ||
const updatedA = await A | ||
assert(updated.pop() === fixtures.A) | ||
assert(updatedA.length === 1) | ||
assert(updatedA[0] === fixtures.A) | ||
await wait(DELAY) | ||
const child = subscribe('update', instance) | ||
fs.outputFileSync(fixtures.childOfChildren, fs.readFileSync(fixtures.childOfChildren)) | ||
fs.outputFileSync( | ||
fixtures.childOfChildren, | ||
fs.readFileSync(fixtures.childOfChildren) | ||
) | ||
await wait(DELAY) | ||
assert((await child).pop() === fixtures.B) | ||
assert(updated.pop() === fixtures.B) | ||
close() | ||
await instance.close() | ||
@@ -113,5 +100,5 @@ }) | ||
test('ensure shared deps are both mapped to entries', async () => { | ||
const { register, close } = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const { register, close } = require('./')(fixtures.A, fixtures.B) | ||
assert(register[fixtures.commonDep].roots.length === 2) | ||
assert(register[fixtures.commonDep].entries.length === 2) | ||
@@ -122,9 +109,12 @@ await close() | ||
test('handles circular deps', async () => { | ||
fs.outputFileSync(fixtures.childOfA, `require('${fixtures.childOfChildren}');require('${fixtures.commonDep}')`) | ||
fs.outputFileSync( | ||
fixtures.childOfA, | ||
`require('${fixtures.childOfChildren}');require('${fixtures.commonDep}')` | ||
) | ||
await wait(DELAY) | ||
await wait(500) | ||
const { register, close } = require('./')(['./fixtures/A.js', './fixtures/B.js']) | ||
const { register, close } = require('./')(fixtures.A, fixtures.B) | ||
assert(register[fixtures.commonDep].roots.length === 2) | ||
assert(register[fixtures.commonDep].entries.length === 2) | ||
@@ -134,6 +124,66 @@ await close() | ||
test('handles case rename as change', async () => { | ||
const instance = require('./')(fixtures.renameableEntry) | ||
const subscriber = subscribe('update', instance) | ||
fs.renameSync( | ||
fixtures.renameable, | ||
fixtures.renameable.replace('renameable', 'Renameable') | ||
) | ||
const ids = await subscriber | ||
assert(ids.includes(fixtures.renameableEntry)) | ||
await instance.close() | ||
}) | ||
test('handles file rename by unwatching', async () => { | ||
const instance = require('./')(fixtures.renameableEntry) | ||
const subscriber = subscribe('update', instance) | ||
const newFile = fixtures.renameable.replace('renameable', 'renameabl') | ||
fs.renameSync(fixtures.renameable, newFile) | ||
fs.outputFileSync(newFile, fs.readFileSync(newFile)) | ||
// bump, otherwise ^ those won't fire | ||
fs.outputFileSync( | ||
fixtures.renameableEntry, | ||
fs.readFileSync(fixtures.renameableEntry) | ||
) | ||
const ids = await subscriber | ||
assert(ids.length === 1) | ||
assert(ids.includes(fixtures.renameableEntry)) | ||
await instance.close() | ||
}) | ||
test('handles entry rename by restarting', async () => { | ||
const instance = require('./')('./fixtures/*.entry.js') | ||
const removed = subscribe('remove', instance) | ||
const newFile = fixtures.renameableEntry.replace( | ||
'renameableEntry', | ||
'renameableEntr' | ||
) | ||
fs.renameSync(fixtures.renameableEntry, newFile) | ||
const removedIds = await removed | ||
assert(removedIds.includes(fixtures.renameableEntry)) | ||
assert(instance.ids.includes(newFile)) | ||
await instance.close() | ||
}) | ||
!(async function () { | ||
console.time('test') | ||
await test.run() | ||
fs.removeSync(fixturesRoot) | ||
console.timeEnd('test') | ||
})() |
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: This package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
14023
10
390
6
6
1