batch-cluster
Comparing version 5.6.8 to 5.7.0
@@ -20,2 +20,8 @@ # Changelog
## v5.7.0
- 🐞 Fixed issue where `onStartError` and `onTaskError` didn't get emitted.
- 📦 Updated deps, rebuilt docs.
- 🐞 Deflaked CI tests with longer timeouts and less aggressive `shutdown()`
## v5.6.8
@@ -22,0 +28,0 @@
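Note on the `onStartError` / `onTaskError` fix: those are the internal observer callbacks; on the consumer side they surface as the emitter's `startError` and `taskError` events (the `emitter.emit(...)` calls appear in the `BatchCluster.js` hunks below). A minimal listener sketch — the worker command and option values are illustrative, not part of this diff:

```ts
import { spawn } from "child_process"
import { BatchCluster } from "batch-cluster"

const bc = new BatchCluster({
  processFactory: () => spawn("my-batch-worker"), // hypothetical long-running worker
  versionCommand: "version",
  pass: "PASS",
  fail: "FAIL",
  maxProcs: 4,
})

// The events whose delivery v5.7.0 fixes:
bc.on("startError", (err) => console.error("child failed to start:", err))
bc.on("taskError", (err, task) => console.error("task failed:", task.command, err))
```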
@@ -6,14 +6,4 @@ export declare function delay(millis: number, unref?: boolean): Promise<void>;
*/
export declare function until(f: () => boolean | Promise<boolean>, timeoutMs: number): Promise<boolean>;
export declare function until(f: (count: number) => boolean | Promise<boolean>, timeoutMs: number, delayMs?: number): Promise<boolean>;
/**
* Return a function that will, at most, run the given function once at a time.
* Calls that occur during prior calls will no-op.
*/
export declare function atMostOne<T>(f: () => Promise<T>): () => Promise<T | undefined>;
/**
* Return a function that will only invoke the given thunk after all prior given
* promises have resolved or rejected.
*/
export declare function serial<T>(): (f: () => Promise<T>) => Promise<T>;
/**
* Return a thunk that will call the underlying thunk at most every `minDelayMs`
@@ -20,0 +10,0 @@ * milliseconds. The thunk will accept a boolean, that, when set, will force the
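The `until()` helper (above) now passes the attempt count to the predicate and takes an optional `delayMs` (previously a hard-coded 50 ms between polls). A usage sketch; the deep import path of the compiled module is an assumption:

```ts
import { until } from "batch-cluster/dist/Async" // internal helper; import path is an assumption

async function waitForReady(isReady: () => boolean): Promise<void> {
  // Poll every 100 ms for up to 2 seconds; the predicate now receives the
  // number of attempts already made, handy for logging or backoff decisions.
  const ok = await until(
    (count) => {
      console.log(`readiness check #${count}`)
      return isReady()
    },
    2000,
    100
  )
  if (!ok) throw new Error("timed out waiting for readiness")
}
```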
@@ -40,4 +40,2 @@ "use strict";
var timers_1 = require("timers");
var Array_1 = require("./Array");
var Deferred_1 = require("./Deferred");
var Object_1 = require("./Object");
@@ -57,5 +55,6 @@ function delay(millis, unref) {
*/
function until(f, timeoutMs) {
function until(f, timeoutMs, delayMs) {
if (delayMs === void 0) { delayMs = 50; }
return __awaiter(this, void 0, void 0, function () {
var timeoutAt;
var timeoutAt, count;
return __generator(this, function (_a) {
@@ -65,10 +64,13 @@ switch (_a.label) {
timeoutAt = Date.now() + timeoutMs;
count = 0;
_a.label = 1;
case 1:
if (!(Date.now() < timeoutAt)) return [3 /*break*/, 6];
return [4 /*yield*/, f()];
return [4 /*yield*/, f(count)];
case 2:
if (!_a.sent()) return [3 /*break*/, 3];
return [2 /*return*/, true];
case 3: return [4 /*yield*/, delay(50)];
case 3:
count++;
return [4 /*yield*/, delay(delayMs)];
case 4:
@@ -85,48 +87,2 @@ _a.sent();
/**
* Return a function that will, at most, run the given function once at a time.
* Calls that occur during prior calls will no-op.
*/
function atMostOne(f) {
var _this = this;
var running = false;
return function () { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
if (running)
return [2 /*return*/];
running = true;
_a.label = 1;
case 1:
_a.trys.push([1, , 3, 4]);
return [4 /*yield*/, f()];
case 2: return [2 /*return*/, _a.sent()];
case 3:
running = false;
return [7 /*endfinally*/];
case 4: return [2 /*return*/];
}
});
}); };
}
exports.atMostOne = atMostOne;
/**
* Return a function that will only invoke the given thunk after all prior given
* promises have resolved or rejected.
*/
function serial() {
var priors = [];
return function (f) {
Array_1.filterInPlace(priors, function (ea) { return ea.pending; });
var d = new Deferred_1.Deferred();
// tslint:disable-next-line: no-floating-promises
Promise.all(priors.map(function (ea) { return ea.promise; }))
.then(function () { return f(); })
.then(function (ea) { return d.resolve(ea); });
priors.push(d);
return d.promise;
};
}
exports.serial = serial;
/**
* Return a thunk that will call the underlying thunk at most every `minDelayMs`
@@ -133,0 +89,0 @@ * milliseconds. The thunk will accept a boolean, that, when set, will force the
@@ -6,2 +6,3 @@ /// <reference types="node" />
import { BatchProcessOptions } from "./BatchProcessOptions";
import { Deferred } from "./Deferred";
import { Task } from "./Task";
@@ -37,2 +38,3 @@ export { BatchClusterOptions } from "./BatchClusterOptions";
export declare class BatchCluster extends BatchClusterEmitter {
private readonly m;
private readonly _tasksPerProc;
@@ -48,3 +50,3 @@ readonly options: AllOpts;
private _spawnedProcs;
private endprocs?;
private endPromise?;
private _internalErrorCount;
@@ -60,3 +62,3 @@ constructor(opts: Partial<BatchClusterOptions> & BatchProcessOptions & ChildProcessFactory);
*/
end(gracefully?: boolean): Promise<void>;
end(gracefully?: boolean): Deferred<any>;
/**
@@ -63,0 +65,0 @@ * Submits `task` for processing by a `BatchProcess` instance
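`end()` now returns the `Deferred` that tracks shutdown instead of a bare `Promise<void>`. The diff doesn't show whether `Deferred` is itself thenable in this release, so the conservative way to await shutdown is through its `promise` property (used elsewhere in these hunks):

```ts
import { BatchCluster } from "batch-cluster"

async function shutdown(bc: BatchCluster): Promise<void> {
  // end(true) asks children to exit gracefully; end(false) ends them promptly.
  await bc.end(true).promise
  console.log("all child processes have exited")
}
```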
@@ -69,8 +69,9 @@ "use strict";
var Array_1 = require("./Array");
var Async_1 = require("./Async");
var BatchClusterEmitter_1 = require("./BatchClusterEmitter");
var BatchClusterOptions_1 = require("./BatchClusterOptions");
var BatchProcess_1 = require("./BatchProcess");
var Deferred_1 = require("./Deferred");
var Logger_1 = require("./Logger");
var Mean_1 = require("./Mean");
var Mutex_1 = require("./Mutex");
var Object_1 = require("./Object");
@@ -81,4 +82,4 @@ var Pids_1 = require("./Pids");
exports.BatchClusterOptions = BatchClusterOptions_2.BatchClusterOptions;
var Deferred_1 = require("./Deferred");
exports.Deferred = Deferred_1.Deferred;
var Deferred_2 = require("./Deferred");
exports.Deferred = Deferred_2.Deferred;
__export(require("./Logger"));
@@ -107,2 +108,3 @@ var Parser_1 = require("./Parser");
var _this = _super.call(this) || this;
_this.m = new Mutex_1.Mutex();
_this._tasksPerProc = new Mean_1.Mean();
@@ -118,41 +120,50 @@ _this._procs = [];
_this.exitListener = function () { return _this.end(false); };
_this.maybeLaunchNewChild = Async_1.atMostOne(function () { return __awaiter(_this, void 0, void 0, function () {
var child_1, proc_1, err_1;
var _this = this;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Minimize start time system load. Only launch one new proc at a time
if (this.ended ||
this.tasks.length === 0 ||
this._procs.length >= this.options.maxProcs ||
this._lastSpawnedProcTime >
Date.now() - this.options.minDelayBetweenSpawnMillis) {
_this.maybeLaunchNewChild = function () {
return _this.m.runIfIdle(function () { return __awaiter(_this, void 0, void 0, function () {
var child_1, proc_1, err_1;
var _this = this;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
// Minimize start time system load. Only launch one new proc at a time
if (this.ended ||
this.tasks.length === 0 ||
this._procs.length >= this.options.maxProcs ||
this._lastSpawnedProcTime >
Date.now() - this.options.minDelayBetweenSpawnMillis) {
return [2 /*return*/];
}
_a.label = 1;
case 1:
_a.trys.push([1, 3, , 4]);
this._lastSpawnedProcTime = Date.now();
return [4 /*yield*/, this.options.processFactory()];
case 2:
child_1 = _a.sent();
proc_1 = new BatchProcess_1.BatchProcess(child_1, this.options, this.observer);
if (this.ended) {
// This should only happen in tests.
// tslint:disable-next-line: no-floating-promises
proc_1.end(false, "ended");
}
else {
this._procs.push(proc_1);
this.emitter.emit("childStart", child_1);
// tslint:disable-next-line: no-floating-promises
proc_1.exitedPromise.then(function () {
_this._tasksPerProc.push(proc_1.taskCount);
_this.emitter.emit("childExit", child_1);
});
}
this._spawnedProcs++;
return [2 /*return*/, proc_1];
case 3:
err_1 = _a.sent();
this.emitter.emit("startError", err_1);
return [2 /*return*/];
}
_a.label = 1;
case 1:
_a.trys.push([1, 3, , 4]);
this._lastSpawnedProcTime = Date.now();
return [4 /*yield*/, this.options.processFactory()];
case 2:
child_1 = _a.sent();
proc_1 = new BatchProcess_1.BatchProcess(child_1, this.options, this.observer);
this._procs.push(proc_1);
this.emitter.emit("childStart", child_1);
// tslint:disable-next-line: no-floating-promises
proc_1.exitedPromise.then(function () {
_this._tasksPerProc.push(proc_1.taskCount);
_this.emitter.emit("childExit", child_1);
});
this._spawnedProcs++;
return [2 /*return*/, proc_1];
case 3:
err_1 = _a.sent();
this.emitter.emit("startError", err_1);
return [2 /*return*/];
case 4: return [2 /*return*/];
}
});
}); });
case 4: return [2 /*return*/];
}
});
}); });
};
_this.options = Object.freeze(BatchClusterOptions_1.verifyOptions(opts));
@@ -178,3 +189,3 @@ if (_this.options.onIdleIntervalMillis > 0) {
get: function () {
return this.endprocs != null;
return this.endPromise != null;
},
@@ -193,3 +204,3 @@ enumerable: true,
if (gracefully === void 0) { gracefully = true; }
if (this.endprocs == null) {
if (this.endPromise == null) {
this.emitter.emit("beforeEnd");
@@ -200,10 +211,10 @@ Object_1.map(this.onIdleInterval, timers_1.clearInterval);
_p.removeListener("exit", this.exitListener);
this.endprocs = Promise.all(this._procs.map(function (p) {
this.endPromise = new Deferred_1.Deferred().observe(Promise.all(this._procs.map(function (p) {
return p
.end(gracefully, "BatchCluster.end()")
.catch(function (err) { return _this.emitter.emit("endError", err); });
})).then(function () { return _this.emitter.emit("end"); });
})).then(function () { return _this.emitter.emit("end"); }));
this._procs.length = 0;
}
return this.endprocs;
return this.endPromise;
};
@@ -223,3 +234,5 @@ /**
this.tasks.push(task);
setTimeout(function () { return _this.onIdle(); }, 1);
setImmediate(function () { return _this.onIdle(); });
// tslint:disable-next-line: no-floating-promises
task.promise.then(function () { return _this.onIdle(); }).catch(function () { });
return task.promise;
@@ -352,11 +365,18 @@ };
};
// NOT ASYNC: updates internal state.
BatchCluster.prototype.onIdle = function () {
if (this.ended)
return;
this.vacuumProcs();
while (this.execNextTask()) { }
// tslint:disable-next-line: no-floating-promises
this.maybeLaunchNewChild();
return;
var _this = this;
return this.m.runIfIdle(function () { return __awaiter(_this, void 0, void 0, function () {
var _this = this;
return __generator(this, function (_a) {
this.vacuumProcs();
while (this.execNextTask()) { }
// setImmediate because we currently have the mutex
if (this.tasks.length > 0)
setImmediate(function () { return _this.maybeLaunchNewChild(); });
return [2 /*return*/];
});
}); });
};
// NOT ASYNC: updates internal state.
BatchCluster.prototype.vacuumProcs = function () {
@@ -381,4 +401,5 @@ var _this = this;
};
// NOT ASYNC: updates internal state.
BatchCluster.prototype.execNextTask = function () {
if (this.ended)
if (this.tasks.length === 0 || this.ended)
return false;
@@ -389,4 +410,6 @@ var readyProc = Array_1.rrFindResult(this._procs, this._lastUsedProcsIdx + 1, function (ea) { return ea.ready; });
var task = this.tasks.shift();
if (task == null)
if (task == null) {
this.onInternalError(new Error("unexpected null task"));
return false;
}
this._lastUsedProcsIdx = readyProc.index;
@@ -393,0 +416,0 @@ var submitted = readyProc.result.execTask(task);
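The `Async.atMostOne()` wrapper is gone; `maybeLaunchNewChild` and `onIdle` are now serialized through a `Mutex` whose `runIfIdle()` skips the callback when another run is already in flight. The library's `Mutex` source isn't part of this diff, so the sketch below is only an assumed, minimal equivalent to illustrate the "run only if idle" semantics:

```ts
// Hypothetical stand-in for batch-cluster's Mutex (not shown in this diff).
class IdleMutex {
  private busy = false

  // Runs f only when nothing else is in flight; otherwise resolves to undefined.
  async runIfIdle<T>(f: () => Promise<T>): Promise<T | undefined> {
    if (this.busy) return undefined
    this.busy = true
    try {
      return await f()
    } finally {
      this.busy = false
    }
  }
}

// Overlapping onIdle()-style calls collapse into a single run:
const m = new IdleMutex()
void m.runIfIdle(async () => {
  // vacuum procs, dispatch queued tasks, maybe spawn a child
})
```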
@@ -87,7 +87,2 @@ "use strict";
this.startupTask = new Task_1.Task(opts.versionCommand, Parser_1.SimpleParser);
this.startupTask.promise
// Prevent unhandled startup task rejections from killing node:
.catch(function (err) {
BatchCluster_1.logger().warn(_this.name + " startup task was rejected: " + err);
});
if (!this.execTask(this.startupTask)) {
@@ -228,9 +223,9 @@ // This could also be considered a "start error", but if it's just an
task.promise
.catch(function () { })
.catch(function (err) {
return _this.taskCount === 1
? _this.observer.onStartError(err)
: _this.observer.onTaskError(err, task);
})
.then(function () {
if (_this.currentTask === task) {
// logger().debug(
// this.name + ".task resolved, but currentTask wasn't cleared",
// { task }
// )
_this.clearCurrentTask();
@@ -260,3 +255,2 @@ }
if (gracefully === void 0) { gracefully = true; }
// logger().debug(this.name + ".end()", { gracefully, source })
if (this._endPromise == null) {
@@ -272,37 +266,44 @@ this._endPromise = this._end(gracefully, source);
return __awaiter(this, void 0, void 0, function () {
var lastTask, err_1, err, cmd, _a, _b;
var lastTask, _a, msg, msg, cmd, _b, _c;
var _this = this;
return __generator(this, function (_c) {
switch (_c.label) {
return __generator(this, function (_d) {
switch (_d.label) {
case 0:
lastTask = this.currentTask;
if (!(lastTask != null)) return [3 /*break*/, 6];
this.clearCurrentTask();
if (!(lastTask != null)) return [3 /*break*/, 7];
if (!gracefully) return [3 /*break*/, 5];
_c.label = 1;
_d.label = 1;
case 1:
_c.trys.push([1, 3, , 4]);
_d.trys.push([1, 3, , 4]);
return [4 /*yield*/, lastTask.promise];
case 2:
_c.sent();
_d.sent();
return [3 /*break*/, 4];
case 3:
err_1 = _c.sent();
this.observer.onTaskError(err_1, lastTask);
_a = _d.sent();
return [3 /*break*/, 4];
case 4: return [3 /*break*/, 6];
case 5:
// NOTE: not graceful, so clearing the current task is fine.
this.clearCurrentTask();
// This isn't an internal error, as this state would be expected if
// the user calls .end(false) when there are pending tasks.
BatchCluster_1.logger().warn(this.name + ".end(): called while not idle", {
source: source,
gracefully: gracefully,
cmd: lastTask.command
});
err = new Error("end() called when not idle");
this.observer.onTaskError(err, lastTask);
lastTask.reject(err);
_c.label = 6;
msg = String_1.blank(lastTask.stderr)
? source + " called end()"
: lastTask.stderr;
lastTask.reject(new Error(msg));
_d.label = 6;
case 6:
if (this.taskCount > 1) {
msg = this.name + ".end(): called while not idle";
this.observer.onInternalError(new Error(msg));
// NOTE: not graceful, so clearing the current task is fine.
// This isn't an internal error, as this state would be expected if
// the user calls .end(false) when there are pending tasks.
BatchCluster_1.logger().warn(msg, {
source: source,
gracefully: gracefully,
cmd: lastTask.command
});
// We're ending, so there's no point in asking if this error is fatal. It is.
}
_d.label = 7;
case 7:
cmd = Object_1.map(this.opts.exitCommand, function (ea) { return String_1.ensureSuffix(ea, "\n"); });
@@ -316,12 +317,12 @@ // proc cleanup:
]);
_a = this.opts.cleanupChildProcs &&
_b = this.opts.cleanupChildProcs &&
gracefully &&
this.opts.endGracefulWaitTimeMillis > 0;
if (!_a) return [3 /*break*/, 8];
if (!_b) return [3 /*break*/, 9];
return [4 /*yield*/, this.running()];
case 7:
_a = (_c.sent());
_c.label = 8;
case 8:
if (!_a) return [3 /*break*/, 14];
_b = (_d.sent());
_d.label = 9;
case 9:
if (!_b) return [3 /*break*/, 15];
// Wait for the end command to take effect:
@@ -331,36 +332,36 @@ return [4 /*yield*/, this.awaitNotRunning(this.opts.endGracefulWaitTimeMillis / 2)
];
case 9:
case 10:
// Wait for the end command to take effect:
_c.sent();
_d.sent();
return [4 /*yield*/, this.running()];
case 10:
if (!_c.sent()) return [3 /*break*/, 12];
case 11:
if (!_d.sent()) return [3 /*break*/, 13];
return [4 /*yield*/, Pids_1.kill(this.proc.pid)
// Wait for the signal handler to work:
];
case 11:
_c.sent();
_c.label = 12;
case 12:
case 12:
_d.sent();
_d.label = 13;
case 13:
// Wait for the signal handler to work:
return [4 /*yield*/, this.awaitNotRunning(this.opts.endGracefulWaitTimeMillis / 2)];
case 13:
case 14:
// Wait for the signal handler to work:
_c.sent();
_c.label = 14;
case 14:
_b = this.opts.cleanupChildProcs;
if (!_b) return [3 /*break*/, 16];
_d.sent();
_d.label = 15;
case 15:
_c = this.opts.cleanupChildProcs;
if (!_c) return [3 /*break*/, 17];
return [4 /*yield*/, this.running()];
case 15:
_b = (_c.sent());
_c.label = 16;
case 16:
if (!_b) return [3 /*break*/, 18];
_c = (_d.sent());
_d.label = 17;
case 17:
if (!_c) return [3 /*break*/, 19];
BatchCluster_1.logger().warn(this.name + ".end(): force-killing still-running child.");
return [4 /*yield*/, Pids_1.kill(this.proc.pid, true)];
case 17:
_c.sent();
_c.label = 18;
case 18: return [2 /*return*/, this.exitedPromise];
case 18:
_d.sent();
_d.label = 19;
case 19: return [2 /*return*/, this.exitedPromise];
}
@@ -401,3 +402,3 @@ });
this.end(false, "onError(" + source + ")");
if (task === this.startupTask) {
if (task != null && this.taskCount === 1) {
BatchCluster_1.logger().warn(this.name + ".onError(): startup task failed: " + error);
@@ -407,3 +408,2 @@ this.observer.onStartError(error);
if (task != null) {
this.observer.onTaskError(error, task);
if (task.pending) {
@@ -423,5 +423,5 @@ task.reject(error);
BatchProcess.prototype.onStderr = function (data) {
// logger().debug("onStderr(" + this.pid + "):" + data)
if (data == null)
if (String_1.blank(data))
return;
BatchCluster_1.logger().info("onStderr(" + this.pid + "):" + data);
var task = this.currentTask;
@@ -462,3 +462,2 @@ if (task != null && task.pending) {
if (pass != null) {
// logger().debug("onData(" + this.pid + ") PASS", { pass, ...ctx })
return this.resolveCurrentTask(task, String_1.toS(pass[1]).trim(), task.stderr, true);
@@ -468,11 +467,10 @@ }
if (failout != null) {
// logger().debug("onData(" + this.pid + ") FAILOUT", { failout, ...ctx })
return this.resolveCurrentTask(task, String_1.toS(failout[1]).trim(), task.stderr, false);
var msg = String_1.toS(failout[1]).trim();
return this.resolveCurrentTask(task, msg, task.stderr, false);
}
var failerr = this.opts.failRE.exec(task.stderr);
if (failerr != null) {
// logger().debug("onData(" + this.pid + ") FAILERR", { failerr, ...ctx })
return this.resolveCurrentTask(task, task.stdout, failerr[1], false);
var msg = String_1.toS(failerr[1]).trim();
return this.resolveCurrentTask(task, task.stdout, msg, false);
}
// logger().debug("onData(" + this.pid + ") not finished", ctx)
return;
@@ -491,3 +489,2 @@ };
case 0:
// logger().debug("resolveCurrentTask()", { task: String(task), stdout, stderr, passed })
this.clearCurrentTask();
@@ -494,0 +491,0 @@ if (!task.pending) return [3 /*break*/, 2];
@@ -26,2 +26,4 @@ /**
reject(reason?: any): boolean;
observe(p: Promise<T>): this;
observeQuietly(p: Promise<T>): Deferred<T | undefined>;
}
@@ -76,2 +76,16 @@ "use strict";
};
Deferred.prototype.observe = function (p) {
var _this = this;
p.then(function (resolution) {
_this.resolve(resolution);
}).catch(function (err) {
_this.reject(err);
});
return this;
};
Deferred.prototype.observeQuietly = function (p) {
var _this = this;
p.then(function (ea) { return _this.resolve(ea); }).catch(function () { return _this.resolve(undefined); });
return this;
};
return Deferred;
@@ -78,0 +92,0 @@ }());
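The new `observe()` and `observeQuietly()` helpers settle a `Deferred` from an existing promise; `BatchCluster.end()` uses `observe()` to wrap its shutdown `Promise.all`. A small usage sketch, assuming `Deferred` is imported from the package root (it is re-exported there, per the `exports.Deferred` lines above):

```ts
import { Deferred } from "batch-cluster"

// observe(): the Deferred settles exactly as the observed promise does.
const strict = new Deferred<number>().observe(Promise.resolve(42))
strict.promise.then((n) => console.log("observed:", n))

// observeQuietly(): rejections are swallowed and surface as `undefined`,
// for callers that only care that the work finished.
const quiet = new Deferred<number>().observeQuietly(Promise.reject(new Error("boom")))
quiet.promise.then((n) => console.log("quietly observed:", n)) // expected: undefined
```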
@@ -72,3 +72,5 @@ "use strict";
return new Promise(function (resolve, reject) {
_cp.execFile(isWin ? "tasklist" : "ps", isWin ? ["/NH", "/FO", "CSV"] : ["-e"], function (error, stdout, stderr) {
_cp.execFile(isWin ? "tasklist" : "ps",
// NoHeader, FOrmat CSV
isWin ? ["/NH", "/FO", "CSV"] : ["-e"], function (error, stdout, stderr) {
if (error != null) {
@@ -75,0 +77,0 @@ reject(error);
@@ -1,4 +0,5 @@
export declare function blank(s: string | undefined): boolean;
/// <reference types="node" />
export declare function blank(s: string | Buffer | undefined): boolean;
export declare function notBlank(s: string | undefined): s is string;
export declare function ensureSuffix(s: string, suffix: string): string;
export declare function toS(s: any): string;
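`blank()` now accepts a `Buffer` as well as a string, which lets `onStderr` (in the `BatchProcess.js` hunk above) drop empty stderr chunks without first converting them. A tiny illustration; the deep import path is an assumption:

```ts
import { blank } from "batch-cluster/dist/String" // internal module; import path is an assumption

blank(undefined)                       // expected: true
blank("  \n")                          // expected: true
blank(Buffer.from("  \n"))             // expected: true — Buffer support is new in 5.7.0
blank(Buffer.from("warn: low memory")) // expected: false
```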
@@ -122,14 +122,5 @@ "use strict";
_a.trys.push([0, 2, , 3]);
return [4 /*yield*/, this.parser(stdout, stderr, passed)
// logger().trace("Task.onData(): resolved", {
// command: this.command,
// parseResult
// })
];
return [4 /*yield*/, this.parser(stdout, stderr, passed)];
case 1:
parseResult = _a.sent();
// logger().trace("Task.onData(): resolved", {
// command: this.command,
// parseResult
// })
this.d.resolve(parseResult);
@@ -136,0 +127,0 @@ return [3 /*break*/, 3];
{
"name": "batch-cluster",
"version": "5.6.8",
"version": "5.7.0",
"description": "Manage a cluster of child processes",
@@ -36,7 +36,7 @@ "main": "dist/BatchCluster.js",
"devDependencies": {
"@types/chai": "^4.2.0",
"@types/chai": "^4.2.4",
"@types/chai-as-promised": "^7.1.2",
"@types/chai-string": "^1.4.2",
"@types/mocha": "^5.2.7",
"@types/node": "^12.7.3",
"@types/node": "^12.11.6",
"chai": "^4.2.0",
@@ -46,15 +46,15 @@ "chai-as-promised": "^7.1.1",
"chai-withintoleranceof": "^1.0.1",
"mocha": "^6.2.0",
"mocha": "^6.2.2",
"prettier": "^1.18.2",
"rimraf": "^3.0.0",
"seedrandom": "^3.0.3",
"serve": "^11.1.0",
"seedrandom": "^3.0.5",
"serve": "^11.2.0",
"source-map-support": "^0.5.13",
"split2": "^3.1.1",
"timekeeper": "^2.2.0",
"tslint": "^5.19.0",
"tslint": "^5.20.0",
"typedoc": "^0.15.0",
"typescript": "^3.6.2",
"typescript": "^3.6.4",
"wtfnode": "^0.8.0"
}
}
@@ -5,2 +5,3 @@ # batch-cluster
[](https://github.com/users/mceachen/sponsorship)
[](https://www.npmjs.com/package/batch-cluster)
@@ -7,0 +8,0 @@ [](https://travis-ci.org/photostructure/batch-cluster.js)