var TimeQueue = require('..')
  , assert = require('assert')
  ;

describe('Create a queue and add to it', function() {
  // Counts how many tasks the worker actually executed.
  var n = 0;

  // Worker completes asynchronously on the next tick so that pushed
  // tasks pile up in the queue instead of finishing inline.
  var q = new TimeQueue(function worker(callback) {
    n++;
    process.nextTick(callback);
  }, {
    concurrency: 3,
    maxQueued: 2
  });

  it('Ignores tasks pushed after it is full', function(done) {
    // 10 pushes: 3 start immediately (concurrency), 2 are queued
    // (maxQueued); the remaining 5 must be silently dropped.
    for (var i = 0; i < 10; i++) {
      q.push();
    }
    q.on('drain', function() {
      assert.equal(n, 5);
      done();
    });
  });
});
+1
-1
| language: node_js | ||
| node_js: | ||
| - 0.6 | ||
| - 0.8 | ||
| - 0.10 |
+22
-17
@@ -8,7 +8,7 @@ var EventEmitter = require('events').EventEmitter | ||
| * @constructor | ||
| * @extends (EventEmitter) | ||
| * @param (Function(..., Function(!Error, ...)) worker | ||
| * @param (Object) options | ||
| * @param (Object.number) options.concurrency | ||
| * @param (Object.number) options.time | ||
| * @extends {EventEmitter} | ||
| * @param {Function(..., Function(!Error, ...))} worker | ||
| * @param {Object} options | ||
| * @param {Number} concurrency | ||
| * @param {Number} time | ||
| */ | ||
@@ -22,2 +22,3 @@ var TimeQueue = module.exports = function TimeQueue(worker, options) { | ||
| this.every = options.every || 0; | ||
| this.maxQueued = options.maxQueued || Infinity; | ||
| this.timeout = options.timeout || 0; | ||
@@ -59,6 +60,10 @@ this._queue = []; | ||
| * | ||
| * @param (Object) args... | ||
| * @param (Function(!Error, ...)) callback | ||
| * @param {Object} args... | ||
| * @param {Function(!Error, ...)} callback | ||
| */ | ||
| TimeQueue.prototype.push = function() { | ||
| if (this.maxQueued === this.queued) { | ||
| return; | ||
| } | ||
| if (this.intransit < this.concurrency) { | ||
@@ -81,3 +86,3 @@ this.intransit++; | ||
| * | ||
| * @param (Object) arguments | ||
| * @param {Object} arguments | ||
| */ | ||
@@ -108,3 +113,3 @@ TimeQueue.prototype._process = function(args) { | ||
| // if `timeout` option is set, set a timeout to check the task doesn't lag | ||
| // If `timeout` option is set, set a timeout to check the task doesn't lag. | ||
| var taskTimedOut = false; | ||
@@ -123,3 +128,3 @@ var callbackCalled = false; | ||
| // add missing arguments | ||
| // Add missing arguments. | ||
| while (args.length < this.worker.length - 1) { | ||
@@ -130,3 +135,3 @@ args.push(undefined); | ||
| function taskCallback(err) { | ||
| // if this task has timed out, and the callback is called again | ||
| // If this task has timed out, and the callback is called again | ||
| // from the worker, ignore it. | ||
@@ -139,3 +144,3 @@ if (!taskTimedOut) { | ||
| // check that this callback is only called once | ||
| // Check that this callback is only called once. | ||
| if (callbackCalled && !taskTimedOut) { | ||
@@ -150,8 +155,8 @@ throw Error('Callback from worker should only be called once'); | ||
| if (typeof callback === 'function') { | ||
| // if a callback was given with the task, | ||
| // call it when the task is finished | ||
| // If a callback was given with the task, | ||
| // call it when the task is finished. | ||
| callback.apply(null, arguments); | ||
| } else if (err) { | ||
| // otherwise emit an `error` event if there was an error with the task | ||
| // Otherwise emit an `error` event if there was an error with the task. | ||
| self.emit('error', err); | ||
@@ -167,7 +172,7 @@ } | ||
| // add custom callback to args | ||
| // Add custom callback to args. | ||
| var args2 = args.slice(); | ||
| args2.push(taskCallback); | ||
| // call the worker | ||
| // Call the worker. | ||
| this.worker.apply(null, args2); | ||
@@ -174,0 +179,0 @@ }; |
+1
-1
@@ -5,3 +5,3 @@ { | ||
| "keywords": ["queue", "flow", "time"], | ||
| "version": "0.2.1", | ||
| "version": "0.2.2", | ||
| "repository": { | ||
@@ -8,0 +8,0 @@ "type": "git", |
+12
-6
@@ -35,10 +35,16 @@ # timequeue.js [](http://travis-ci.org/fent/timequeue.js) | ||
| { | ||
| // how many tasks to execute concurrently. | ||
| // Maximum tasks to execute concurrently. | ||
| concurrency: 1 | ||
| // maximum amount of tasks to execute per a given time limit in milliseconds. | ||
| // if number of tasks are finished faster than the limit, they will be queued. | ||
| // How much time in milliseconds to allow no more than | ||
| // the max number of concurrent tasks to run. | ||
| // If the max amount of concurrent tasks are finished faster than the limit, | ||
| // they will be queued. | ||
| , every: 0 | ||
| // if set, will emit an `error` event if a tasks takes too much time. | ||
| // Maximum number of tasks to keep in the queue. | ||
| // While full, pushed tasks will be ignored. | ||
| , maxQueued: Infinity | ||
| // If set, will emit an `error` event if a task takes too much time. | ||
| // if callback was given to that task, | ||
@@ -50,3 +56,3 @@ // it will be called with the error instead. | ||
| `worker`, `concurrency`, `time`, and `timeout` properties can later be edited on the queue instance. | ||
| All of these options can later be edited on the queue instance. | ||
@@ -88,3 +94,3 @@ ### TimeQueue#active | ||
| Queue is empty, some tasks might still be running. | ||
| Queue is empty, with tasks still running. | ||
@@ -91,0 +97,0 @@ ### Event: 'drain' |
New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
Uses eval
Supply chain risk: Package uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
Uses eval
Supply chain risk: Package uses dynamic code execution (e.g., eval()), which is a dangerous practice. This can prevent the code from running in certain environments and increases the risk that the code may contain exploits or malicious behavior.
Found 1 instance in 1 package
16126
4.54%10
11.11%409
6.79%113
5.61%4
33.33%