broccoli-caching-writer
Comparing version 0.4.2 to 0.5.0
index.js
var fs = require('fs');
var path = require('path');
var RSVP = require('rsvp');
var mkdirp = require('mkdirp')
var walkSync = require('walk-sync');
var rimraf = RSVP.denodeify(require('rimraf'));
var mapSeries = require('promise-map-series');
var quickTemp = require('quick-temp')
var Writer = require('broccoli-writer');
var helpers = require('broccoli-kitchen-sink-helpers');
var symlinkOrCopy = require('symlink-or-copy');
var generateRandomString = require('./lib/generate-random-string');
function CachingWriter (inputTrees, options) {
if (!(this instanceof CachingWriter)) return new CachingWriter(inputTrees, options);
var canLink = testCanLink();
this._inputTreeCacheHash = [];
this._shouldBeIgnoredCache = Object.create(null);
this.destDir = path.resolve(path.join('tmp', 'caching-writer-dest-dir_' + generateRandomString(6) + '.tmp'));
CachingWriter.prototype = Object.create(Writer.prototype);
CachingWriter.prototype.constructor = CachingWriter;
function CachingWriter (inputTree, options) {
if (!(this instanceof CachingWriter)) return new CachingWriter(inputTree, options);
this.inputTree = inputTree;
options = options || {};
@@ -27,4 +25,38 @@
}
if (Array.isArray(inputTrees)) {
if (this.enforceSingleInputTree) {
throw new Error('You passed an array of input trees, but only a single tree is allowed.');
}
this.inputTrees = inputTrees;
} else {
this.inputTrees = [inputTrees];
}
if (this.filterFromCache === undefined) {
this.filterFromCache = {};
}
if (this.filterFromCache.include === undefined) {
this.filterFromCache.include = [];
}
if (this.filterFromCache.exclude === undefined) {
this.filterFromCache.exclude = [];
}
if (!Array.isArray(this.filterFromCache.include)) {
throw new Error("Invalid filterFromCache.include option, it must be an array or undefined.")
}
if (!Array.isArray(this.filterFromCache.exclude)) {
throw new Error("Invalid filterFromCache.exclude option, it must be an array or undefined.")
}
};
CachingWriter.prototype.constructor = CachingWriter;
CachingWriter.prototype.enforceSingleInputTree = false;
CachingWriter.prototype.getCacheDir = function () {
@@ -38,26 +70,39 @@ return quickTemp.makeOrReuse(this, 'tmpCacheDir');
CachingWriter.prototype.write = function (readTree, destDir) {
CachingWriter.prototype.read = function (readTree) {
var self = this;
return readTree(this.inputTree).then(function (srcDir) {
var inputTreeKeys = keysForTree(srcDir);
var inputTreeHash = helpers.hashStrings(inputTreeKeys);
return mapSeries(this.inputTrees, readTree)
.then(function(inputPaths) {
var inputTreeHashes = [];
var invalidateCache = false;
var keys, dir, updateCacheResult;
return RSVP.resolve()
.then(function() {
var updateCacheResult;
for (var i = 0, l = inputPaths.length; i < l; i++) {
dir = inputPaths[i];
keys = self.keysForTree(dir);
inputTreeHashes[i] = helpers.hashStrings(keys);
if (inputTreeHash !== self._cacheHash) {
updateCacheResult = self.updateCache(srcDir, self.getCleanCacheDir());
self._cacheHash = inputTreeHash;
self._cacheTreeKeys = inputTreeKeys;
if (self._inputTreeCacheHash[i] !== inputTreeHashes[i]) {
invalidateCache = true;
}
}
return updateCacheResult;
})
.finally(function() {
linkFromCache(self.getCacheDir(), destDir);
});
});
if (invalidateCache) {
var updateCacheSrcArg = self.enforceSingleInputTree ? inputPaths[0] : inputPaths;
updateCacheResult = self.updateCache(updateCacheSrcArg, self.getCleanCacheDir());
self._inputTreeCacheHash = inputTreeHashes;
}
return updateCacheResult;
})
.then(function() {
return rimraf(self.destDir);
})
.then(function() {
symlinkOrCopy.sync(self.getCacheDir(), self.destDir);
})
.then(function() {
return self.destDir;
});
};
@@ -67,3 +112,5 @@
quickTemp.remove(this, 'tmpCacheDir');
Writer.prototype.cleanup.call(this);
// sadly we must use sync removal for now
rimraf.sync(this.destDir);
};
@@ -75,36 +122,43 @@
module.exports = CachingWriter;
// Takes in a path and { include, exclude }. Tests the path using regular expressions and
// returns true if the path does not match any exclude patterns AND matches atleast
// one include pattern.
CachingWriter.prototype.shouldBeIgnored = function (fullPath) {
if (this._shouldBeIgnoredCache[fullPath] !== undefined) {
return this._shouldBeIgnoredCache[fullPath];
}
function linkFromCache(srcDir, destDir) {
var files = walkSync(srcDir);
var length = files.length;
var file;
var excludePatterns = this.filterFromCache.exclude;
var includePatterns = this.filterFromCache.include;
var i = null;
for (var i = 0; i < length; i++) {
file = files[i];
// Check exclude patterns
for (i = 0; i < excludePatterns.length; i++) {
// An exclude pattern that returns true should be ignored
if (excludePatterns[i].test(fullPath) === true) {
return this._shouldBeIgnoredCache[fullPath] = true;
}
}
var srcFile = path.join(srcDir, file);
var stats = fs.statSync(srcFile);
// Check include patterns
if (includePatterns !== undefined && includePatterns.length > 0) {
for (i = 0; i < includePatterns.length; i++) {
// An include pattern that returns true (and wasn't excluded at all)
// should _not_ be ignored
if (includePatterns[i].test(fullPath) === true) {
return this._shouldBeIgnoredCache[fullPath] = false;
}
}
if (stats.isDirectory()) { continue; }
// If no include patterns were matched, ignore this file.
return this._shouldBeIgnoredCache[fullPath] = true;
}
if (!stats.isFile()) { throw new Error('Can not link non-file.'); }
destFile = path.join(destDir, file);
mkdirp.sync(path.dirname(destFile));
if (canLink) {
fs.linkSync(srcFile, destFile);
}
else {
fs.writeFileSync(destFile, fs.readFileSync(srcFile));
}
}
// Otherwise, don't ignore this file
return this._shouldBeIgnoredCache[fullPath] = false;
}
function keysForTree (fullPath, options) {
options = options || {}
var _stack = options._stack;
var _followSymlink = options._followSymlink;
var relativePath = options.relativePath || '.';
CachingWriter.prototype.keysForTree = function (fullPath, initialRelativePath) {
var relativePath = initialRelativePath || '.'
var stats;
@@ -114,7 +168,3 @@ var statKeys;
try {
if (_followSymlink) {
stats = fs.statSync(fullPath);
} else {
stats = fs.lstatSync(fullPath);
}
stats = fs.statSync(fullPath);
} catch (err) {
@@ -127,3 +177,3 @@ console.warn('Warning: failed to stat ' + fullPath);
if (stats) {
statKeys = ['stats', stats.mode, stats.size];
statKeys = ['stats', stats.mode];
} else {
@@ -134,38 +184,26 @@ statKeys = ['stat failed'];
var fileIdentity = stats.dev + '\x00' + stats.ino;
if (_stack != null && _stack.indexOf(fileIdentity) !== -1) {
console.warn('Symlink directory loop detected at ' + fullPath + ' (note: loop detection may have false positives on Windows)');
} else {
if (_stack != null) _stack = _stack.concat([fileIdentity]);
var entries;
try {
entries = fs.readdirSync(fullPath).sort();
} catch (err) {
console.warn('Warning: Failed to read directory ' + fullPath);
console.warn(err.stack);
childKeys = ['readdir failed'];
// That's all there is to say about this directory.
}
if (entries != null) {
for (var i = 0; i < entries.length; i++) {
var entries;
try {
entries = fs.readdirSync(fullPath).sort();
} catch (err) {
console.warn('Warning: Failed to read directory ' + fullPath);
console.warn(err.stack);
childKeys = ['readdir failed'];
// That's all there is to say about this directory.
}
if (entries != null) {
for (var i = 0; i < entries.length; i++) {
var keys = keysForTree(path.join(fullPath, entries[i]), {
_stack: _stack,
relativePath: path.join(relativePath, entries[i])
});
childKeys = childKeys.concat(keys);
}
var keys = this.keysForTree(
path.join(fullPath, entries[i]),
path.join(relativePath, entries[i])
);
childKeys = childKeys.concat(keys);
}
}
} else if (stats && stats.isSymbolicLink()) {
if (_stack == null) {
// From here on in the traversal, we need to guard against symlink
// directory loops. _stack is kept null in the absence of symlinks to we
// don't have to deal with Windows for now, as long as it doesn't use
// symlinks.
_stack = [];
} else if (stats && stats.isFile()) {
if (this.shouldBeIgnored(fullPath)) {
return [];
}
childKeys = keysForTree(fullPath, {_stack: _stack, relativePath: relativePath, _followSymlink: true}); // follow symlink
statKeys.push(stats.mtime.getTime());
} else if (stats && stats.isFile()) {
statKeys.push(stats.mtime.getTime());
statKeys.push(stats.mtime.getTime(), stats.size);
}
@@ -179,23 +217,2 @@
function testCanLink () {
var canLinkSrc = path.join(__dirname, "canLinkSrc.tmp");
var canLinkDest = path.join(__dirname, "canLinkDest.tmp");
try {
fs.writeFileSync(canLinkSrc);
} catch (e) {
return false;
}
try {
fs.linkSync(canLinkSrc, canLinkDest);
} catch (e) {
fs.unlinkSync(canLinkSrc);
return false;
}
fs.unlinkSync(canLinkDest);
return true;
}
module.exports = CachingWriter;
{
"name": "broccoli-caching-writer",
"version": "0.4.2",
"version": "0.5.0",
"description": "Broccoli plugin that allows simple caching (while still allowing N:N) based on the input tree hash.",
@@ -20,16 +20,14 @@ "main": "index.js",
"dependencies": {
"broccoli-writer": "~0.1.1",
"broccoli-kitchen-sink-helpers": "~0.2.0",
"mkdirp": "~0.4.0",
"quick-temp": "~0.1.2",
"walk-sync": "~0.1.2",
"rsvp": "~3.0.6"
"broccoli-kitchen-sink-helpers": "^0.2.5",
"promise-map-series": "^0.2.0",
"quick-temp": "^0.1.2",
"rimraf": "^2.2.8",
"rsvp": "^3.0.14",
"symlink-or-copy": "^1.0.0"
},
"devDependencies": {
"mocha": "~1.18.2",
"rimraf": "~2.2.6",
"broccoli": "~0.9.0",
"expect.js": "~0.3.1",
"mkdirp": "~0.4.0"
"broccoli": "^0.13.0",
"expect.js": "^0.3.1"
}
}
@@ -11,2 +11,49 @@ # Broccoli Caching Writer
## Documentation
### `CachingWriter(inputTrees, options)`
`inputTrees` *{Array of Trees | Single Tree}*
Can either be a single tree or an array of trees. If an array was specified, an array of source paths will be provided when calling `updateCache`.
#### Options
`filterFromCache.include` *{Array of RegExps}*
An array of regular expressions that files and directories in the input tree must pass (match at least one pattern) in order to be included in the cache hash for rebuilds. In other words, a whitelist of patterns that identify which files and/or directories can trigger a rebuild.
Default: `[]`
----
`filterFromCache.exclude` *{Array of RegExps}*
An array of regular expressions that files and directories in the input tree must not match in order to be included in the cache hash for rebuilds. In other words, a blacklist of patterns that identify which files and/or directories will never trigger a rebuild.
*Note: when a file or directory matches both an include and an exclude pattern, the exclude pattern wins.*
Default: `[]`
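For example, the options might be combined like the minimal sketch below. `MySassCompiler` is a hypothetical subclass of `CachingWriter` (built the same way as in the "Switching from `broccoli-writer`" section that follows); only `.scss` changes would invalidate its cache, while `.md` edits never would.

```js
// Hypothetical CachingWriter subclass; `MySassCompiler` is not part of this package.
var tree = new MySassCompiler(['app/styles', 'vendor/styles'], {
  filterFromCache: {
    include: [ /\.scss$/ ],   // only these files participate in the cache hash
    exclude: [ /\.md$/ ]      // these files can never trigger a rebuild
  }
});
```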
## Switching from `broccoli-writer`
If your broccoli plugin currently extends `broccoli-writer` and you wish to extend `broccoli-caching-writer` instead (a before/after sketch follows these steps):
1. Switch the constructor
    - Require this module: `var CachingWriter = require('broccoli-caching-writer');`
    - Change the prototype to use `CachingWriter`: `MyBroccoliWriter.prototype = Object.create(CachingWriter.prototype);`
    - In the constructor, ensure that you are calling `CachingWriter.apply(this, arguments);`.
2. Switch the `write` function for an `updateCache` function.
    - Switch the function signatures:
        - From: `MyBroccoliWriter.prototype.write = function(readTree, destDir) {`
        - To: `MyBroccoliWriter.prototype.updateCache = function(srcDir, destDir) {`
    - Get rid of `readTree`, as `srcPaths` (array of paths from input trees) is already provided:
        - Code that looks like: `return readTree(this.inputTree).then(function (srcPaths) { /* Do the main processing */ });`
        - Simply extract the code, `/* Do the main processing */`, and get rid of the function wrapping it.
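Put together, the migration might look roughly like this sketch. `MyBroccoliWriter` and `processInto` are hypothetical placeholders for your own plugin and its processing step, not part of this package:

```js
var CachingWriter = require('broccoli-caching-writer');

// `processInto` stands in for whatever work your plugin does (hypothetical).
function processInto(srcDir, destDir) { /* compile srcDir into destDir */ }

function MyBroccoliWriter(inputTrees, options) {
  if (!(this instanceof MyBroccoliWriter)) {
    return new MyBroccoliWriter(inputTrees, options);
  }
  CachingWriter.apply(this, arguments);
}

MyBroccoliWriter.prototype = Object.create(CachingWriter.prototype);
MyBroccoliWriter.prototype.constructor = MyBroccoliWriter;

// Before (broccoli-writer):
//
//   MyBroccoliWriter.prototype.write = function (readTree, destDir) {
//     return readTree(this.inputTree).then(function (srcDir) {
//       processInto(srcDir, destDir);
//     });
//   };

// After (broccoli-caching-writer): srcPaths is an array of already-read
// input paths, so the readTree wrapper goes away entirely.
MyBroccoliWriter.prototype.updateCache = function (srcPaths, destDir) {
  srcPaths.forEach(function (srcDir) {
    processInto(srcDir, destDir);
  });
};
```

Note that `updateCache` only runs when the hash of the input trees changes; on rebuilds where nothing changed, the cached output directory is symlinked (or copied) into place instead.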
## ZOMG!!! TESTS?!?!!?
@@ -13,0 +60,0 @@
@@ -7,3 +7,2 @@ 'use strict';
var RSVP = require('rsvp');
var mkdirp = require('mkdirp');
var rimraf = require('rimraf');
@@ -19,12 +18,53 @@ var root = process.cwd();
var sourcePath = 'tests/fixtures/sample-project';
var dummyChangedFile = sourcePath + '/dummy-changed-file';
var secondaryPath = 'tests/fixtures/other-tree';
var existingJSFile = sourcePath + '/core.js';
var dummyChangedFile = sourcePath + '/dummy-changed-file.txt';
var dummyJSChangedFile = sourcePath + '/dummy-changed-file.js';
afterEach(function() {
if (fs.existsSync(dummyChangedFile)) {
fs.unlinkSync(dummyChangedFile);
}
if (fs.existsSync(dummyJSChangedFile)) {
fs.unlinkSync(dummyJSChangedFile);
}
fs.writeFileSync(existingJSFile, '"YIPPIE"\n');
if (builder) {
builder.cleanup();
return builder.cleanup();
}
});
if (fs.existsSync(dummyChangedFile)) {
fs.unlinkSync(dummyChangedFile);
function buildInSeries(count) {
var promise = RSVP.resolve();
for (var i = 0; i < count; i++) {
promise = promise.then(function() {
return builder.build();
});
}
return promise;
}
describe('enforceSingleInputTree', function() {
it('defaults `enforceSingleInputTree` to false', function() {
var tree = cachingWriter(sourcePath, {
updateCache: function() { }
});
expect(tree.enforceSingleInputTree).to.not.be.ok();
});
it('throws an error if enforceSingleInputTree is true, and an array is passed', function() {
expect(function() {
var tree = cachingWriter([sourcePath, secondaryPath], {
enforceSingleInputTree: true,
updateCache: function() { }
});
}).throwException(/You passed an array of input trees, but only a single tree is allowed./);
});
});
@@ -47,5 +87,6 @@
it('is provided a source and destination directory', function(){
it('calls updateCache with a single path if enforceSingleInputTree is true', function(){
var updateCacheCalled = false;
var tree = cachingWriter(sourcePath, {
enforceSingleInputTree: true,
updateCache: function(srcDir, destDir) {
@@ -61,2 +102,15 @@ expect(fs.statSync(srcDir).isDirectory()).to.be.ok();
it('is provided a source and destination directory', function(){
var updateCacheCalled = false;
var tree = cachingWriter(sourcePath, {
updateCache: function(srcDir, destDir) {
expect(fs.statSync(srcDir[0]).isDirectory()).to.be.ok();
expect(fs.statSync(destDir).isDirectory()).to.be.ok();
}
});
builder = new broccoli.Builder(tree);
return builder.build()
});
it('only calls updateCache once if input is not changing', function(){
@@ -71,3 +125,3 @@ var updateCacheCount = 0;
builder = new broccoli.Builder(tree);
return RSVP.all([builder.build(), builder.build(), builder.build()])
return buildInSeries(3)
.then(function() {
@@ -80,2 +134,34 @@ expect(updateCacheCount).to.eql(1);
var updateCacheCount = 0;
var tree = cachingWriter([sourcePath, secondaryPath], {
updateCache: function() {
updateCacheCount++;
}
});
builder = new broccoli.Builder(tree);
return builder.build()
.finally(function() {
expect(updateCacheCount).to.eql(1);
})
.then(function() {
fs.writeFileSync(dummyChangedFile, 'bergh');
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(2);
})
.then(function() {
fs.writeFileSync(secondaryPath + '/foo-baz.js', 'bergh');
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(3);
});
});
it('calls updateCache again if existing file is changed', function(){
var updateCacheCount = 0;
var tree = cachingWriter(sourcePath, {
@@ -94,11 +180,88 @@ updateCache: function() {
.then(function() {
fs.writeFileSync(existingJSFile, '"YIPPIE"\n"KI-YAY"\n');
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(2);
});
});
it('does not call updateCache again if input is changed but filtered from cache (via exclude)', function(){
var updateCacheCount = 0;
var tree = cachingWriter(sourcePath, {
updateCache: function() {
updateCacheCount++;
},
filterFromCache: {
exclude: [/.*\.txt$/]
}
});
builder = new broccoli.Builder(tree);
return builder.build()
.finally(function() {
expect(updateCacheCount).to.eql(1);
})
.then(function() {
fs.writeFileSync(dummyChangedFile, 'bergh');
return RSVP.all([
builder.build(),
builder.build(),
builder.build()
])
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(1);
});
});
it('does not call updateCache again if input is changed but filtered from cache (via include)', function(){
var updateCacheCount = 0;
var tree = cachingWriter(sourcePath, {
updateCache: function() {
updateCacheCount++;
},
filterFromCache: {
include: [/.*\.js$/]
}
});
builder = new broccoli.Builder(tree);
return builder.build()
.finally(function() {
expect(updateCacheCount).to.eql(1);
})
.then(function() {
fs.writeFileSync(dummyChangedFile, 'bergh');
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(1);
});
});
it('does call updateCache again if input is changed is included in the cache filter', function(){
var updateCacheCount = 0;
var tree = cachingWriter(sourcePath, {
updateCache: function() {
updateCacheCount++;
},
filterFromCache: {
include: [/.*\.js$/]
}
});
builder = new broccoli.Builder(tree);
return builder.build()
.finally(function() {
expect(updateCacheCount).to.eql(1);
})
.then(function() {
fs.writeFileSync(dummyJSChangedFile, 'bergh');
return buildInSeries(3);
})
.finally(function() {
expect(updateCacheCount).to.eql(2);
@@ -110,2 +273,20 @@ });
describe('updateCache', function() {
it('provides array of paths if array of sourceTrees was provided', function() {
var tree = cachingWriter([sourcePath, secondaryPath], {
updateCache: function(srcDirs, destDir) {
expect(fs.readFileSync(srcDirs[0] + '/core.js', {encoding: 'utf8'})).to.eql('"YIPPIE"\n');
expect(fs.readFileSync(srcDirs[1] + '/bar.js', {encoding: 'utf8'})).to.eql('"BLAMMO!"\n');
fs.writeFileSync(destDir + '/something-cool.js', 'zomg blammo', {encoding: 'utf8'});
}
});
builder = new broccoli.Builder(tree);
return builder.build().then(function(result) {
var dir = result.directory;
expect(fs.readFileSync(dir + '/something-cool.js', {encoding: 'utf8'})).to.eql('zomg blammo');
});
});
it('can write files to destDir, and they will be in the final output', function(){
@@ -119,3 +300,4 @@ var tree = cachingWriter(sourcePath, {
builder = new broccoli.Builder(tree);
return builder.build().then(function(dir) {
return builder.build().then(function(result) {
var dir = result.directory;
expect(fs.readFileSync(dir + '/something-cool.js', {encoding: 'utf8'})).to.eql('zomg blammo');
@@ -147,3 +329,3 @@ });
builder = new broccoli.Builder(tree);
return builder.build().then(function(dir) {
return builder.build().then(function() {
expect(thenCalled).to.be.ok();
@@ -153,2 +335,45 @@ });
});
describe('shouldBeIgnored', function() {
var tree;
beforeEach(function() {
tree = cachingWriter(sourcePath);
});
it('returns true if the path is included in an exclude filter', function() {
tree.filterFromCache.exclude = [ /.foo$/, /.bar$/ ];
expect(tree.shouldBeIgnored('blah/blah/blah.foo')).to.be.ok();
expect(tree.shouldBeIgnored('blah/blah/blah.bar')).to.be.ok();
expect(tree.shouldBeIgnored('blah/blah/blah.baz')).to.not.be.ok();
});
it('returns false if the path is included in an include filter', function() {
tree.filterFromCache.include = [ /.foo$/, /.bar$/ ];
expect(tree.shouldBeIgnored('blah/blah/blah.foo')).to.not.be.ok();
expect(tree.shouldBeIgnored('blah/blah/blah.bar')).to.not.be.ok();
});
it('returns true if the path is not included in an include filter', function() {
tree.filterFromCache.include = [ /.foo$/, /.bar$/ ];
expect(tree.shouldBeIgnored('blah/blah/blah.baz')).to.be.ok();
});
it('returns false if no patterns were used', function() {
expect(tree.shouldBeIgnored('blah/blah/blah.baz')).to.not.be.ok();
});
it('uses a cache to ensure we do not recalculate the filtering on subsequent attempts', function() {
expect(tree.shouldBeIgnored('blah/blah/blah.baz')).to.not.be.ok();
// changing the filter mid-run should have no result on
// previously calculated paths
tree.filterFromCache.include = [ /.foo$/, /.bar$/ ];
expect(tree.shouldBeIgnored('blah/blah/blah.baz')).to.not.be.ok();
});
});
});
Major refactor (supply chain risk)
Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
+ Added promise-map-series@^0.2.0
+ Added rimraf@^2.2.8
+ Added symlink-or-copy@^1.0.0
+ Added promise-map-series@0.2.3 (transitive)
+ Added rsvp@3.6.2 (transitive)
+ Added symlink-or-copy@1.3.1 (transitive)
- Removed broccoli-writer@~0.1.1
- Removed mkdirp@~0.4.0
- Removed walk-sync@~0.1.2
- Removed broccoli-writer@0.1.1 (transitive)
- Removed minimist@0.0.8 (transitive)
- Removed mkdirp@0.4.2 (transitive)
- Removed rsvp@3.0.21 (transitive)
- Removed walk-sync@0.1.3 (transitive)
Updated quick-temp@^0.1.2
Updated rsvp@^3.0.14