
broccoli-persistent-filter

Comparing version 2.2.3 to 2.3.0

lib/addPatches.js


CHANGELOG.md
# master
# 2.3.0
* add optional enhanced dependency tracking
# 2.2.3

@@ -4,0 +8,0 @@

@@ -0,1 +1,2 @@

// @ts-check
'use strict';

@@ -15,3 +16,4 @@

const Processor = require('./lib/processor');
const defaultProccessor = require('./lib/strategies/default');
const Dependencies = require('./lib/dependencies'); // jshint ignore:line
const addPatches = require('./lib/addPatches');
const hashForDep = require('hash-for-dep');

@@ -83,2 +85,3 @@ const FSTree = require('fs-tree-diff');

/** @type {{debug(...s: any[]): void; info(...s: any[]): void}} */
this._logger = debugGenerator(loggerName);

@@ -92,4 +95,7 @@

/** @type {Processor} */
this.processor = new Processor(options);
this.processor.setStrategy(defaultProccessor);
/** @type {Dependencies | null} */
this.dependencies = null;
this.currentTree = new FSTree();

@@ -111,2 +117,5 @@

// TODO: don't enable this by default. it's just for testing.
/** @type {boolean} */
this.dependencyInvalidation = options && options.dependencyInvalidation || false;
this._canProcessCache = Object.create(null);

@@ -130,7 +139,51 @@ this._destFilePathCache = Object.create(null);

/**
 * @param invalidated {Array<string>} The files that have been invalidated.
 * @param currentTree {FSTree} the current tree - for entry lookup.
 * @param nextTree {FSTree} The next tree - for entry lookup.
 */
function invalidationsAsPatches(invalidated, currentTree, nextTree) {
  if (invalidated.length === 0) {
    return [];
  }
  /** @type {Array<FSTree.Operation>} */
  let patches = [];
  let currentEntries = {};
  for (let entry of currentTree.entries) {
    currentEntries[entry.relativePath] = entry;
  }
  let nextEntries = {};
  for (let entry of nextTree.entries) {
    nextEntries[entry.relativePath] = entry;
  }
  for (let file of invalidated) {
    if (currentEntries[file]) {
      patches.push(['change', file, currentEntries[file]]);
    } else if (nextEntries[file]) {
      patches.push(['create', file, nextEntries[file]]);
    }
  }
  return patches;
}
Filter.prototype.build = function() {
  // @ts-ignore
  let srcDir = this.inputPaths[0];
  // @ts-ignore
  let destDir = this.outputPath;
  if (this.dependencyInvalidation && !this.dependencies) {
    this.dependencies = this.processor.initialDependencies(srcDir);
  }
  if (this._needsReset) {
    this.currentTree = new FSTree();
    // @ts-ignore
    let instrumentation = heimdall.start('reset');
    if (this.dependencies) {
      this.dependencies = this.processor.initialDependencies(srcDir);
    }
    // @ts-ignore
    rimraf.sync(this.outputPath);
    // @ts-ignore
    mkdirp.sync(this.outputPath);

@@ -140,7 +193,4 @@ instrumentation.stop();

let srcDir = this.inputPaths[0];
let destDir = this.outputPath;
let prevTime = process.hrtime();
// @ts-ignore
let instrumentation = heimdall.start('derivePatches', DerivePatchesSchema);

@@ -150,13 +200,21 @@

let entries = walkSync.entries(srcDir);
let nextTree = FSTree.fromEntries(entries);
let walkDuration = timeSince(walkStart);
let nextTree = FSTree.fromEntries(entries);
let currentTree = this.currentTree;
let invalidationsStart = process.hrtime();
let invalidated = this.dependencies && this.dependencies.getInvalidatedFiles() || [];
this._logger.info('found', invalidated.length, 'files invalidated due to dependency changes.');
let invalidationPatches = invalidationsAsPatches(invalidated, this.currentTree, nextTree);
let invalidationsDuration = timeSince(invalidationsStart);
this.currentTree = nextTree;
let patches = this.currentTree.calculatePatch(nextTree);
patches = addPatches(invalidationPatches, patches);
let patches = currentTree.calculatePatch(nextTree);
instrumentation.stats.patches = patches.length;
instrumentation.stats.entries = entries.length;
instrumentation.stats.invalidations = {
dependencies: this.dependencies ? this.dependencies.countUnique() : 0,
count: invalidationPatches.length,
duration: invalidationsDuration
};
instrumentation.stats.walk = {

@@ -167,2 +225,4 @@ entries: entries.length,

this.currentTree = nextTree;
this._logger.info('derivePatches', 'duration:', timeSince(prevTime), JSON.stringify(instrumentation.stats));

@@ -172,2 +232,7 @@

if (this.dependencies && patches.length > 0) {
let files = patches.filter(p => p[0] === 'unlink').map(p => p[1]);
this.dependencies = this.dependencies.copyWithout(files);
}
if (patches.length === 0) {

@@ -183,2 +248,3 @@ // no work, exit early

const pendingWork = [];
// @ts-ignore
return heimdall.node('applyPatches', ApplyPatchesSchema, instrumentation => {

@@ -192,2 +258,3 @@ let prevTime = process.hrtime();

let outputFilePath = outputPath;
let forceInvalidation = invalidated.includes(relativePath);

@@ -210,3 +277,3 @@ this._logger.debug('[operation:%s] %s', operation, relativePath);

instrumentation.change++;
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, true, instrumentation);
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, forceInvalidation, true, instrumentation);
};

@@ -222,3 +289,3 @@ if (this.async) {

instrumentation.create++;
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, false, instrumentation);
return this._handleFile(relativePath, srcDir, destDir, entry, outputFilePath, forceInvalidation, false, instrumentation);
};

@@ -238,2 +305,5 @@ if (this.async) {

this._logger.info('applyPatches', 'duration:', timeSince(prevTime), JSON.stringify(instrumentation));
if (this.dependencies) {
this.processor.sealDependencies(this.dependencies);
}
this._needsReset = false;

@@ -245,3 +315,3 @@ return result;

Filter.prototype._handleFile = function(relativePath, srcDir, destDir, entry, outputPath, isChange, stats) {
Filter.prototype._handleFile = function(relativePath, srcDir, destDir, entry, outputPath, forceInvalidation, isChange, stats) {
stats.handleFile++;

@@ -261,3 +331,3 @@

}
result = this.processAndCacheFile(srcDir, destDir, entry, isChange, stats);
result = this.processAndCacheFile(srcDir, destDir, entry, forceInvalidation, isChange, stats);
} else {

@@ -322,2 +392,3 @@ stats.linked++;

Filter.prototype.isDirectory = function(relativePath, entry) {
// @ts-ignore
if (this.inputPaths === undefined) {

@@ -327,2 +398,3 @@ return false;

// @ts-ignore
let srcDir = this.inputPaths[0];

@@ -357,3 +429,3 @@ let path = srcDir + '/' + relativePath;

Filter.prototype.processAndCacheFile = function(srcDir, destDir, entry, isChange, instrumentation) {
Filter.prototype.processAndCacheFile = function(srcDir, destDir, entry, forceInvalidation, isChange, instrumentation) {
let filter = this;

@@ -364,3 +436,3 @@ let relativePath = entry.relativePath;

then(() => {
return filter.processFile(srcDir, destDir, relativePath, isChange, instrumentation, entry);
return filter.processFile(srcDir, destDir, relativePath, forceInvalidation, isChange, instrumentation, entry);
}).

@@ -385,3 +457,3 @@ then(undefined,

Filter.prototype.processFile = function(srcDir, destDir, relativePath, isChange, instrumentation, entry) {
Filter.prototype.processFile = function(srcDir, destDir, relativePath, forceInvalidation, isChange, instrumentation, entry) {
let filter = this;

@@ -400,3 +472,3 @@ let inputEncoding = this.inputEncoding;

let processStringStart = process.hrtime();
let string = invoke(this.processor, this.processor.processString, [this, contents, relativePath, instrumentation]);
let string = invoke(this.processor, this.processor.processString, [this, contents, relativePath, forceInvalidation, instrumentation]);

@@ -418,3 +490,2 @@ return string.then(outputString => {

if (isSame) {
this._logger.debug('[change:%s] but was the same, skipping', relativePath, isSame);

@@ -444,3 +515,8 @@ return;

Filter.prototype.processString = function(/* contents, relativePath */) {
/**
* @param contents {string}
* @param relativePath {string}
* @returns {string}
*/
Filter.prototype.processString = function(contents, relativePath) { // jshint ignore:line
throw new Error(

@@ -447,0 +523,0 @@ 'When subclassing broccoli-persistent-filter you must implement the ' +

@@ -0,1 +1,2 @@

// @ts-check
'use strict';

@@ -5,8 +6,19 @@

module.exports = function (input) {
/**
 * @param input {Buffer | string}
 * @returns {string}
 */
function md5sum(input) {
  let hash = crypto.createHash('md5');
  /**
   * @param buf {Buffer | string}
   */
  function update(buf) {
    let inputEncoding = typeof buf === 'string' ? 'utf8' : undefined;
    hash.update(buf, inputEncoding);
    /** @type {'utf8' | undefined} */
    if (typeof buf === 'string') {
      hash.update(buf, 'utf8');
    } else {
      hash.update(buf);
    }
  }

@@ -25,2 +37,4 @@

return hash.digest('hex');
};
}
module.exports = md5sum;

@@ -0,7 +1,10 @@

// @ts-check
'use strict';
const defaultProcessor = require('./strategies/default');
module.exports = class Processor {
  constructor(options) {
    options = options || {};
    this.processor = {};
    this.processor = defaultProcessor;
    this.persistent = options.persist;

@@ -15,8 +18,29 @@ }

  init(ctx) {
    // @ts-ignore
    this.processor.init(ctx);
  }
  processString(ctx, contents, relativePath, instrumentation) {
    return this.processor.processString(ctx, contents, relativePath, instrumentation);
  processString(ctx, contents, relativePath, forceInvalidation, instrumentation) {
    // @ts-ignore
    return this.processor.processString(ctx, contents, relativePath, forceInvalidation, instrumentation);
  }
  /**
   * Create the initial dependencies.
   * @param srcDir {string}
   * @returns {ReturnType<typeof defaultProcessor['initialDependencies']>}
   */
  initialDependencies(srcDir) {
    return this.processor.initialDependencies(srcDir);
  }
  /**
   * Seals the dependencies and captures the dependency state.
   * May cache the dependency information for the next process.
   * @param dependencies {Parameters<typeof defaultProcessor['sealDependencies']>[0]} The dependencies to seal.
   * @returns {void}
   */
  sealDependencies(dependencies) {
    this.processor.sealDependencies(dependencies);
  }
};

@@ -0,4 +1,6 @@

// @ts-check
'use strict';
const Promise = require('rsvp').Promise;
const Dependencies = require('../dependencies');

@@ -8,2 +10,8 @@ module.exports = {

  /**
   * @param ctx {{processString(contents: string, relativePath: string); postProcess(v: any, relativePath: string)}}
   * @param contents {string}
   * @param relativePath {string}
   * @returns {Promise<string>};
   */
  processString(ctx, contents, relativePath) {

@@ -34,3 +42,21 @@ let string = new Promise(resolve => {

});
},
  /**
   * By default initial dependencies are empty.
   * @returns {Dependencies}
   */
  initialDependencies(srcDir) {
    // Dependencies start out empty and sealed as if they came from
    // the previous build iteration.
    return (new Dependencies(srcDir)).seal().captureDependencyState();
  },
  /**
   * Seals the dependencies and captures the dependency state.
   * @param dependencies {Dependencies} The dependencies to seal.
   */
  sealDependencies(dependencies) {
    dependencies.seal().captureDependencyState();
  }
};

@@ -0,5 +1,9 @@

// @ts-check
'use strict';
const AsyncDiskCache = require('async-disk-cache');
const SyncDiskCache = require('sync-disk-cache');
const Promise = require('rsvp').Promise;
const Dependencies = require('../dependencies');
const rimraf = require('rimraf').sync;

@@ -20,4 +24,11 @@ module.exports = {

    this._syncCache = new SyncDiskCache(ctx.constructor._persistentCacheKey, {
      location: process.env['BROCCOLI_PERSISTENT_FILTER_CACHE_ROOT']
    });
    if (process.env['CLEAR_BROCCOLI_PERSISTENT_FILTER_CACHE'] === 'true') {
      this._cache.clear();
      // this._cache.clear is async and can result in race conditions here.
      // TODO: update async-disk-cache to have a synchronous `clearSync` method.
      rimraf(this._cache.root);
      rimraf(this._syncCache.root);
    }

@@ -30,3 +41,3 @@ },

processString(ctx, contents, relativePath, instrumentation) {
processString(ctx, contents, relativePath, forceInvalidation, instrumentation) {
let key = ctx.cacheKeyProcessString(contents, relativePath);

@@ -37,3 +48,3 @@ let cache = this._cache;

return cache.get(key).then(entry => {
if (entry.isCached) {
if (entry.isCached && !forceInvalidation) {
instrumentation.persistentCacheHit++;

@@ -74,3 +85,33 @@

});
},
  /**
   * By default initial dependencies are empty.
   * @returns {Dependencies}
   */
  initialDependencies(srcDir) {
    let result = this._syncCache.get('__dependencies__');
    let dependencies;
    if (result.isCached) {
      /** @type {ReturnType<Dependencies['serialize']>} */
      let data = JSON.parse(result.value);
      dependencies = Dependencies.deserialize(data, srcDir);
    } else {
      // Dependencies start out empty; they are sealed as if they came from
      // the previous build iteration.
      dependencies = new Dependencies(srcDir);
      dependencies.seal().captureDependencyState();
    }
    return dependencies;
  },
  /**
   * Seals the dependencies and captures the dependency state.
   * @param dependencies {Dependencies} The dependencies to seal.
   */
  sealDependencies(dependencies) {
    dependencies.seal().captureDependencyState();
    let data = dependencies.serialize();
    this._syncCache.set('__dependencies__', JSON.stringify(data));
  }
};


package.json
{
"name": "broccoli-persistent-filter",
"version": "2.2.3",
"version": "2.3.0",
"description": "broccoli filter but with a persistent cache",

@@ -47,2 +47,3 @@ "author": "Stefan Penner <stefan.penner@gmail.com>",

"symlink-or-copy": "^1.0.1",
"sync-disk-cache": "^1.3.3",
"walk-sync": "^1.0.0"

@@ -49,0 +50,0 @@ },

@@ -71,19 +71,26 @@ # broccoli-persistent-filter

* `annotation`: Same as
[broccoli-plugin](https://github.com/broccolijs/broccoli-plugin#new-plugininputnodes-options);
see there.
* `async`: Whether the `create` and `change` file operations are allowed to
complete asynchronously (true|false, default: false)
* `concurrency`: Used with `async: true`. The number of operations that can be
run concurrently. This overrides the value set with `JOBS=n` environment
variable. (default: the number of detected CPU cores - 1, with a min of 1)
* `dependencyInvalidation`: Defaults to false. Setting this option to `true` will
allow the plugin to track other files as dependencies that affect the output
for that file. See *Dependency Invalidation* below for more information.
* `extensions`: An array of file extensions to process, e.g. `['md', 'markdown']`.
* `targetExtension`: The file extension of the corresponding output files, e.g.
`'html'`.
* `inputEncoding`: The character encoding used for reading input files to be
processed (default: `'utf8'`). For binary files, pass `null` to receive a
`Buffer` object in `processString`.
* `name`: Same as
[broccoli-plugin](https://github.com/broccolijs/broccoli-plugin#new-plugininputnodes-options);
see there.
* `outputEncoding`: The character encoding used for writing output files after
processing (default: `'utf8'`). For binary files, pass `null` and return a
`Buffer` object from `processString`.
* `async`: Whether the `create` and `change` file operations are allowed to
complete asynchronously (true|false, default: false)
* `name`, `annotation`: Same as
[broccoli-plugin](https://github.com/broccolijs/broccoli-plugin#new-plugininputnodes-options);
see there.
* `concurrency`: Used with `async: true`. The number of operations that can be
run concurrently. This overrides the value set with `JOBS=n` environment variable.
(default: the number of detected CPU cores - 1, with a min of 1)
* `persist`: Defaults to `false`. When `true`, causes the plugin to cache the results of processing a file to disk so that it can be re-used during the next build. See *Persistent Cache* below for more information.
* `targetExtension`: The file extension of the corresponding output files, e.g.
`'html'`.
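
For orientation, here is a minimal sketch of how a subclass typically passes
these options to the base class; the `MarkdownFilter` name and the specific
option values chosen here are illustrative, not part of this package.

```js
var Filter = require('broccoli-persistent-filter');

MarkdownFilter.prototype = Object.create(Filter.prototype);
MarkdownFilter.prototype.constructor = MarkdownFilter;
function MarkdownFilter(inputNode, options) {
  options = options || {};
  Filter.call(this, inputNode, {
    extensions: ['md', 'markdown'],   // only process these input extensions
    targetExtension: 'html',          // write `.html` output files
    persist: true,                    // reuse results from the on-disk cache
    dependencyInvalidation: true,     // opt in to dependency tracking (2.3.0+)
    annotation: options.annotation
  });
}

MarkdownFilter.prototype.processString = function(contents /*, relativePath */) {
  // A real plugin would transform `contents` here.
  return contents;
};

module.exports = MarkdownFilter;
```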

@@ -152,6 +159,12 @@ All options except `name` and `annotation` can also be set on the prototype

The second key represents the contents of the file. Typically the base-class's functionality
is sufficient, as it merely generates a checksum of the file contents. If for some reason this
is not sufficient, it can be re-configured via subclassing.
The second key represents the contents of the file. Typically the
base-class's functionality is sufficient, as it merely generates a checksum
of the file contents. If for some reason this is not sufficient (e.g. if the
file name should be considered), it can be re-configured via sub-classing.
Note that this method is not useful for general purpose cache invalidation
since it's only used to restore the cache across processes and doesn't apply
for rebuilds. See the `dependencyInvalidation` option above to invalidate
files that have dependencies that affect the output.
```js

@@ -189,2 +202,33 @@ Subclass.prototype.cacheKeyProcessString = function(string, relativePath) {

## Dependency Invalidation

When the output of `processString()` can depend on files other than the
primary input file, the broccoli plugin should use the
`dependencyInvalidation` option and the related APIs so that the output
cache is automatically invalidated when those other input files change.

Plugins that enable the `dependencyInvalidation` option will have an instance
property `dependencies` that can be used to register dependencies for a file.
During either `processString` or `postProcess`, the plugin should call
`this.dependencies.setDependencies(relativeFile, arrayOfDeps)` to establish
which files this file depends on.

Dependency invalidation works during rebuilds as well as when restoring results
from the persistent cache.

When tracking dependencies, `setDependencies()` should always be called when
processing a file that could have dependencies. If a file has no
dependencies, pass an empty array. Failing to do so can leave stale
dependency information associated with the file.

The dependencies passed to `setDependencies()` can be absolute or relative
paths. Relative paths are resolved against the file being processed. The
dependencies can be within the broccoli tree or outside it (note: adding
dependencies outside the tree does not cause those files to be watched).
Files inside the broccoli tree are tracked for changes using a checksum,
because files in broccoli trees do not have stable timestamps; files outside
the tree are tracked using modification time.
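
As a concrete illustration, here is a minimal sketch of registering
dependencies from `processString`. The `ImportAwareFilter` subclass and its
naive `@import` scan are hypothetical; only the
`this.dependencies.setDependencies()` call reflects the documented API.

```js
var Filter = require('broccoli-persistent-filter');

ImportAwareFilter.prototype = Object.create(Filter.prototype);
ImportAwareFilter.prototype.constructor = ImportAwareFilter;
function ImportAwareFilter(inputNode) {
  // Dependency tracking only happens when this option is enabled.
  Filter.call(this, inputNode, { dependencyInvalidation: true });
}

ImportAwareFilter.prototype.processString = function(contents, relativePath) {
  var deps = [];
  var importPattern = /@import\s+"([^"]+)"/g;
  var match;
  while ((match = importPattern.exec(contents)) !== null) {
    // Relative paths are resolved against the file being processed.
    deps.push(match[1]);
  }
  // Always register dependencies, even an empty array, so stale
  // dependency information is not carried over from a previous build.
  this.dependencies.setDependencies(relativePath, deps);
  return contents;
};

module.exports = ImportAwareFilter;
```
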
## FAQ

@@ -191,0 +235,0 @@
