@ayonli/jsext
Comparing version 0.4.5 to 0.4.6
@@ -10,4 +10,22 @@ import "./string/augment"; | ||
import "./error/augment"; | ||
import { AsyncFunction, AsyncGeneratorFunction } from "."; | ||
declare global { | ||
const AsyncFunction: AsyncFunctionConstructor; | ||
const AsyncGeneratorFunction: AsyncGeneratorFunctionConstructor; | ||
export interface AsyncFunction { | ||
(...args: any[]): Promise<unknown>; | ||
readonly length: number; | ||
readonly name: string; | ||
} | ||
export interface AsyncFunctionConstructor { | ||
new(...args: any[]): AsyncFunction; | ||
(...args: any[]): AsyncFunction; | ||
readonly length: number; | ||
readonly name: string; | ||
readonly prototype: AsyncFunction; | ||
} | ||
interface Constructor<T> extends Function { | ||
@@ -27,1 +45,6 @@ new(...args: any[]): T; | ||
} | ||
// @ts-ignore | ||
globalThis["AsyncFunction"] = AsyncFunction; | ||
// @ts-ignore | ||
globalThis["AsyncGeneratorFunction"] = AsyncGeneratorFunction; |
@@ -12,2 +12,7 @@ "use strict"; | ||
require("./error/augment"); | ||
const _1 = require("."); | ||
// @ts-ignore | ||
globalThis["AsyncFunction"] = _1.AsyncFunction; | ||
// @ts-ignore | ||
globalThis["AsyncGeneratorFunction"] = _1.AsyncGeneratorFunction; | ||
//# sourceMappingURL=augment.js.map |
cjs/index.js
"use strict"; | ||
var _a; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.run = exports.read = exports.isSubclassOf = exports.mixins = exports.throttle = exports.wrap = exports.func = exports._try = exports.AsyncGeneratorFunction = exports.AsyncFunction = void 0; | ||
const check_iterable_1 = require("check-iterable"); | ||
const number_1 = require("./number"); | ||
const isNode = typeof process === "object" && !!((_a = process.versions) === null || _a === void 0 ? void 0 : _a.node); | ||
const throttleCaches = new Map(); | ||
exports.AsyncFunction = (async function () { }).constructor; | ||
exports.AsyncGeneratorFunction = (async function* () { }).constructor; | ||
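For illustration, a small usage sketch (not part of the diff; it assumes the compiled cjs/index.js is required directly): these exported constructors identify async and async generator functions, and the augment module above additionally attaches them to globalThis.

const { AsyncFunction, AsyncGeneratorFunction } = require("./cjs/index.js");

console.log((async () => {}) instanceof AsyncFunction);                  // true
console.log((async function* () {}) instanceof AsyncGeneratorFunction); // true
console.log((() => {}) instanceof AsyncFunction);                        // false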
function _try(fn, ...args) { | ||
if (typeof fn === "function") { | ||
try { | ||
return _try(fn.apply(void 0, args)); | ||
} | ||
catch (err) { | ||
return [err, undefined]; | ||
} | ||
} | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
if ((0, check_iterable_1.isAsyncGenerator)(returns)) { | ||
return (async function* () { | ||
let input; | ||
let result; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that was passed | ||
// to the outer `next()` call, and pass it to | ||
// `returns.next()` in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} | ||
catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating that | ||
// the process was forcibly interrupted. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if ((0, check_iterable_1.isGenerator)(returns)) { | ||
return (function* () { | ||
let input; | ||
let result; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
input = yield [null, value]; | ||
} | ||
} | ||
catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
returns = returns.then((value) => [null, value]); | ||
return Promise.resolve(returns).catch((err) => [err, undefined]); | ||
} | ||
else { | ||
return [null, returns]; | ||
} | ||
} | ||
exports._try = _try; | ||
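As an illustrative sketch (not part of the diff; it assumes the compiled cjs/index.js is required directly), `_try` — also exposed as `jsext.try` on the default export — converts thrown errors and rejections into an `[err, value]` pair:

const { _try } = require("./cjs/index.js");

// Synchronous function: the error is captured instead of thrown.
const [err, data] = _try(() => JSON.parse("{not json}"));
console.log(err instanceof SyntaxError, data); // true undefined

// Promise (or async function): resolves to the [err, value] pair.
_try(Promise.resolve(42)).then(([err2, value]) => {
    console.log(err2, value); // null 42
});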
/** | ||
 * The maximum number of workers allowed to exist at the same time. | ||
 * | ||
 * The primary purpose of the workers is not meant to be running tasks in parallel, but running them | ||
 * separately from the main thread, so that aborting a task can be achieved by terminating the worker | ||
 * thread without affecting the main thread. | ||
 * | ||
 * That said, the worker threads can still be used to achieve parallelism, but note that only as many | ||
 * tasks as there are CPU cores will run at the same time. | ||
 */ | ||
const maxWorkerNum = 16; | ||
const workerIdCounter = (0, number_1.sequence)(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool = []; | ||
// The worker consumer queue is nothing but a callback list; once a worker becomes available, the | ||
// runner pops a consumer and runs its callback, which retries acquiring the worker and re-runs the task. | ||
const workerConsumerQueue = []; | ||
/** | ||
 * Inspired by Golang, creates a function that receives a `defer` function which can be used | ||
 * to carry deferred jobs that will be run after the main function completes. | ||
 * | ||
 * Multiple calls of the `defer` function are supported, and the callbacks are called in | ||
 * LIFO order. Callbacks can be async functions if the main function is an async function or | ||
 * an async generator function, and all the running procedures will be awaited. | ||
 * @example | ||
 * const getVersion = await jsext.func(async (defer) => { | ||
 *     const file = await fs.open("./package.json", "r"); | ||
 *     defer(() => file.close()); | ||
 * | ||
 *     const content = await file.readFile("utf8"); | ||
 *     const pkg = JSON.parse(content); | ||
 * | ||
 *     return pkg.version as string; | ||
 * }); | ||
 */ | ||
function func(fn) { | ||
return function (...args) { | ||
var _a; | ||
const callbacks = []; | ||
const defer = (cb) => void callbacks.push(cb); | ||
let result; | ||
try { | ||
const returns = fn.call(this, defer, ...args); | ||
if ((0, check_iterable_1.isAsyncGenerator)(returns)) { | ||
const gen = (async function* () { | ||
var _a; | ||
let input; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that was passed | ||
// to the outer `next()` call, and pass it to | ||
// `returns.next()` in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} | ||
catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating that the process | ||
// was forcibly interrupted. | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
else if ((0, check_iterable_1.isGenerator)(returns)) { | ||
const gen = (function* () { | ||
var _a; | ||
let input; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
input = yield value; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
return Promise.resolve(returns).then(value => ({ | ||
value, | ||
error: null, | ||
})).catch((error) => ({ | ||
value: void 0, | ||
error, | ||
})).then(async (result) => { | ||
var _a; | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}); | ||
} | ||
else { | ||
result = { value: returns, error: null }; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}; | ||
} | ||
exports.func = func; | ||
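A minimal sketch of the deferred-callback behavior (not part of the diff; assumes cjs/index.js is required directly) — the `defer` callbacks run in LIFO order after the wrapped function returns:

const { func } = require("./cjs/index.js");

const task = func((defer, text) => {
    defer(() => console.log("cleanup A"));
    defer(() => console.log("cleanup B"));
    return text.toUpperCase();
});

console.log(task("ok")); // logs "cleanup B", then "cleanup A", then prints "OK"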
/** | ||
* Wraps a function inside another function and returns a new function | ||
* that copies the original function's name and properties. | ||
*/ | ||
function wrap(fn, wrapper) { | ||
const wrapped = function (...args) { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, "name", Object.getOwnPropertyDescriptor(fn, "name")); | ||
Object.defineProperty(wrapped, "length", Object.getOwnPropertyDescriptor(fn, "length")); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped; | ||
} | ||
exports.wrap = wrap; | ||
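For example (a usage sketch, not part of the diff), `wrap` lets a wrapper intercept calls while the returned function keeps the original `name`, `length`, and `toString`:

const { wrap } = require("./cjs/index.js");

function sum(a, b) {
    return a + b;
}

// The wrapper receives the original function as its first argument.
const logged = wrap(sum, function (fn, ...args) {
    console.log(`calling ${fn.name} with`, args);
    return fn.apply(this, args);
});

console.log(logged(1, 2));   // logs the call, then 3
console.log(logged.name);    // "sum"
console.log(logged.length);  // 2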
const throttleCaches = new Map(); | ||
function throttle(handler, options) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const handleCall = function (cache, ...args) { | ||
var _a; | ||
if (cache.result && Date.now() < ((_a = cache.expires) !== null && _a !== void 0 ? _a : 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
} | ||
else { | ||
return cache.result.value; | ||
} | ||
} | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} | ||
catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache = { for: null }; | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
} | ||
else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
} | ||
} | ||
exports.throttle = throttle; | ||
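A usage sketch (not part of the diff): within the given duration the first result (or error) is cached and returned for subsequent calls; passing `{ duration, for }` stores the cache under a shared key instead of per call site:

const { throttle } = require("./cjs/index.js");

const stamp = throttle(() => Date.now(), 1000);
const a = stamp();
const b = stamp();
console.log(a === b); // true, the second call within 1s reuses the cached value

// A keyed cache, shared by any throttled function created with the same `for` value:
const loadConfig = throttle(() => ({ loadedAt: Date.now() }), { duration: 5000, for: "config" });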
/** | ||
* Merges properties and methods only if they're missing in the class. | ||
@@ -71,701 +331,430 @@ */ | ||
} | ||
const jsext = { | ||
try(fn, ...args) { | ||
if (typeof fn === "function") { | ||
try { | ||
return jsext.try(fn.apply(void 0, args)); | ||
} | ||
catch (err) { | ||
return [err, undefined]; | ||
} | ||
function mixins(base, ...mixins) { | ||
const obj = { ctor: null }; | ||
obj.ctor = class extends base { | ||
}; // make sure this class has no name | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
if ((0, check_iterable_1.isAsyncGenerator)(returns)) { | ||
return (async function* () { | ||
let input; | ||
let result; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that was passed | ||
// to the outer `next()` call, and pass it to | ||
// `returns.next()` in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} | ||
catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating that | ||
// the process was forcibly interrupted. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} | ||
else if ((0, check_iterable_1.isGenerator)(returns)) { | ||
return (function* () { | ||
let input; | ||
let result; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
input = yield [null, value]; | ||
} | ||
} | ||
catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
returns = returns.then((value) => [null, value]); | ||
return Promise.resolve(returns).catch((err) => [err, undefined]); | ||
} | ||
else { | ||
return [null, returns]; | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
} | ||
}, | ||
func(fn) { | ||
return function (...args) { | ||
var _a; | ||
const callbacks = []; | ||
const defer = (cb) => void callbacks.push(cb); | ||
let result; | ||
try { | ||
const returns = fn.call(this, defer, ...args); | ||
if ((0, check_iterable_1.isAsyncGenerator)(returns)) { | ||
const gen = (async function* () { | ||
var _a; | ||
let input; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that was passed | ||
// to the outer `next()` call, and pass it to | ||
// `returns.next()` in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} | ||
catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating that the process | ||
// was forcibly interrupted. | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
return obj.ctor; | ||
} | ||
exports.mixins = mixins; | ||
/** Checks if a class is a subclass of another class. */ | ||
function isSubclassOf(ctor1, ctor2) { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
} | ||
exports.isSubclassOf = isSubclassOf; | ||
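A combined sketch for `mixins` and `isSubclassOf` (not part of the diff; the merge helpers copy members only when the class does not already define them, per the comment above):

const { mixins, isSubclassOf } = require("./cjs/index.js");

class Base {}
class LoggerMixin {
    log(msg) { console.log(`[log] ${msg}`); }
}

// An anonymous subclass of Base that also carries LoggerMixin's methods
// and the properties of the plain-object mixin.
class Service extends mixins(Base, LoggerMixin, { version: "1.0.0" }) {}

const service = new Service();
service.log("ready");                     // from LoggerMixin
console.log(service.version);             // "1.0.0"
console.log(isSubclassOf(Service, Base)); // true
console.log(isSubclassOf(Base, Service)); // false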
function read(source, eventMap = undefined) { | ||
var _a; | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
} | ||
const iterable = { | ||
ended: false, | ||
error: null, | ||
queue: [], | ||
consumers: [], | ||
next() { | ||
return new Promise((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} | ||
else if ((0, check_iterable_1.isGenerator)(returns)) { | ||
const gen = (function* () { | ||
var _a; | ||
let input; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
input = yield value; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
else if (this.ended && !this.queue.length) { | ||
// If the iterator has been closed, resolve the pending consumer with a | ||
// void value. | ||
resolve({ value: void 0, done: true }); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
return Promise.resolve(returns).then(value => ({ | ||
value, | ||
error: null, | ||
})).catch((error) => ({ | ||
value: void 0, | ||
error, | ||
})).then(async (result) => { | ||
var _a; | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}); | ||
else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve with the first piece immediately. | ||
resolve({ value: this.queue.shift(), done: false }); | ||
} | ||
else { | ||
result = { value: returns, error: null }; | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}; | ||
}, | ||
wrap(fn, wrapper) { | ||
const wrapped = function (...args) { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, "name", Object.getOwnPropertyDescriptor(fn, "name")); | ||
Object.defineProperty(wrapped, "length", Object.getOwnPropertyDescriptor(fn, "length")); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped; | ||
}, | ||
throttle(handler, options) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const handleCall = function (cache, ...args) { | ||
var _a; | ||
if (cache.result && Date.now() < ((_a = cache.expires) !== null && _a !== void 0 ? _a : 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
} | ||
else { | ||
return cache.result.value; | ||
} | ||
} | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} | ||
catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache = { for: null }; | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
}); | ||
} | ||
}; | ||
const handleMessage = (data) => { | ||
var _a; | ||
if (iterable.consumers.length > 0) { | ||
(_a = iterable.consumers.shift()) === null || _a === void 0 ? void 0 : _a.resolve({ value: data, done: false }); | ||
} | ||
else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
iterable.queue.push(data); | ||
} | ||
}, | ||
mixins(base, ...mixins) { | ||
const obj = { ctor: null }; | ||
obj.ctor = class extends base { | ||
}; // make sure this class has no name | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} | ||
else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} | ||
else { | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer; | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
return obj.ctor; | ||
}, | ||
isSubclassOf(ctor1, ctor2) { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
}, | ||
read(source, eventMap = undefined) { | ||
var _a; | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
}; | ||
const handleError = (err) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
const iterable = { | ||
ended: false, | ||
error: null, | ||
queue: [], | ||
consumers: [], | ||
next() { | ||
return new Promise((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} | ||
else if (this.ended && !this.queue.length) { | ||
// If the iterator has been closed, resolve the pending consumer with a | ||
// void value. | ||
resolve({ value: void 0, done: true }); | ||
} | ||
else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve with the first piece immediately. | ||
resolve({ value: this.queue.shift(), done: false }); | ||
} | ||
else { | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
}); | ||
} | ||
}; | ||
const handleMessage = (data) => { | ||
var _a; | ||
if (iterable.consumers.length > 0) { | ||
(_a = iterable.consumers.shift()) === null || _a === void 0 ? void 0 : _a.resolve({ value: data, done: false }); | ||
} | ||
else { | ||
iterable.queue.push(data); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer; | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
}; | ||
const handleError = (err) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev) => { | ||
let err; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} | ||
else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if ((msgDesc === null || msgDesc === void 0 ? void 0 : msgDesc.set) && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup; | ||
if ((eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) && | ||
(eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) !== "message" && | ||
typeof source["addEventListener"] === "function") { // for EventSource listening on custom events | ||
const es = source; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} | ||
else { | ||
msgDesc.set.call(source, (ev) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
var _a; | ||
(_a = msgDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
}; | ||
} | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, handleBrowserErrorEvent); | ||
if (closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
var _a, _b; | ||
handleClose(); | ||
(_a = closeDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
(_b = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _b === void 0 ? void 0 : _b.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}); | ||
} | ||
else if (!(closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default, so we need to make sure that | ||
// when its close() function is called, the iterator is closed automatically as well. | ||
const es = source; | ||
const _close = es.close; | ||
es.close = function close() { | ||
var _a; | ||
_close.call(es); | ||
handleClose(); | ||
es.close = _close; | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev) => { | ||
let err; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} | ||
else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source; | ||
ws.onmessage = (ev) => { | ||
else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if ((msgDesc === null || msgDesc === void 0 ? void 0 : msgDesc.set) && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup; | ||
if ((eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) && | ||
(eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) !== "message" && | ||
typeof source["addEventListener"] === "function") { // for EventSource listening on custom events | ||
const es = source; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} | ||
else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source; | ||
const msgEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.message) || "message"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const closeEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
const msgListener = (ev) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
else { | ||
msgDesc.set.call(source, (ev) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
var _a; | ||
(_a = msgDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
}; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
} | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, handleBrowserErrorEvent); | ||
if (closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
var _a, _b; | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
(_a = closeDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
(_b = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _b === void 0 ? void 0 : _b.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}); | ||
} | ||
else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source; | ||
const dataEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.data) || "data"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const endEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
else if (!(closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default, so we need to make sure that | ||
// when its close() function is called, the iterator is closed automatically as well. | ||
const es = source; | ||
const _close = es.close; | ||
es.close = function close() { | ||
var _a; | ||
_close.call(es); | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(dataEvent, handleError); | ||
}); | ||
es.close = _close; | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}; | ||
} | ||
else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source; | ||
ws.onmessage = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
}; | ||
} | ||
else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source; | ||
const msgEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.message) || "message"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const closeEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
const msgListener = (ev) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
}; | ||
}, | ||
async run(script, args = undefined, options = undefined) { | ||
var _a, _b; | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: (options === null || options === void 0 ? void 0 : options.fn) || "default", | ||
args: args !== null && args !== void 0 ? args : [], | ||
}; | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
}); | ||
} | ||
else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source; | ||
const dataEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.data) || "data"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const endEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(dataEvent, handleError); | ||
}); | ||
} | ||
else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
}; | ||
} | ||
exports.read = read; | ||
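A usage sketch for `read` (not part of the diff), turning a Node.js EventEmitter's `data`/`error`/`close` events into an async iterable; the event names can be remapped through the optional `eventMap` argument:

const { EventEmitter } = require("events");
const { read } = require("./cjs/index.js");

const source = new EventEmitter();

(async () => {
    for await (const chunk of read(source)) {
        console.log("received:", chunk);
    }
    console.log("closed");
})();

source.emit("data", "hello");
source.emit("data", "world");
source.emit("close");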
const isNode = typeof process === "object" && !!((_a = process.versions) === null || _a === void 0 ? void 0 : _a.node); | ||
/** | ||
* The maximum number of workers allowed to exist at the same time. | ||
* | ||
 * The primary purpose of the workers is not meant to be running tasks in parallel, but running them | ||
 * separately from the main thread, so that aborting a task can be achieved by terminating the worker | ||
 * thread without affecting the main thread. | ||
 * | ||
 * That said, the worker threads can still be used to achieve parallelism, but note that only as many | ||
 * tasks as there are CPU cores will run at the same time. | ||
*/ | ||
const maxWorkerNum = 16; | ||
const workerIdCounter = (0, number_1.sequence)(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool = []; | ||
// The worker consumer queue is nothing but a callback list; once a worker becomes available, the | ||
// runner pops a consumer and runs its callback, which retries acquiring the worker and re-runs the task. | ||
const workerConsumerQueue = []; | ||
/** | ||
* Runs a task in the `script` in a worker thread that can be aborted during runtime. | ||
* | ||
* In Node.js, the `script` can be either a CommonJS module or an ES module, and is relative to | ||
* the current working directory if not absolute. | ||
* | ||
* In browser or Deno, the `script` can only be an ES module, and is relative to the current URL | ||
* (or working directory for Deno) if not absolute. | ||
*/ | ||
async function run(script, args = undefined, options = undefined) { | ||
var _a, _b; | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: (options === null || options === void 0 ? void 0 : options.fn) || "default", | ||
args: args !== null && args !== void 0 ? args : [], | ||
}; | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
} | ||
else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
} | ||
else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
} | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serve similar purposes for function results. | ||
const buffer = []; | ||
let error = null; | ||
let result; | ||
let resolver; | ||
let iterator; | ||
let workerId; | ||
let poolRecord; | ||
let release; | ||
let terminate = () => Promise.resolve(void 0); | ||
const timeout = (options === null || options === void 0 ? void 0 : options.timeout) ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
else { | ||
error = err; | ||
} | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serve similar purposes for function results. | ||
const buffer = []; | ||
let error = null; | ||
let result; | ||
let resolver; | ||
let iterator; | ||
let workerId; | ||
let poolRecord; | ||
let release; | ||
let terminate = () => Promise.resolve(void 0); | ||
const timeout = (options === null || options === void 0 ? void 0 : options.timeout) ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (resolver) { | ||
resolver.reject(err); | ||
terminate(); | ||
}, options.timeout) : null; | ||
const handleMessage = (msg) => { | ||
var _a; | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
} | ||
else { | ||
error = err; | ||
else if (msg.type === "return") { | ||
if (options === null || options === void 0 ? void 0 : options.keepAlive) { | ||
// Release before resolve. | ||
release === null || release === void 0 ? void 0 : release(); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to gain the worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
} | ||
else { | ||
terminate(); | ||
} | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} | ||
else { | ||
result = { value: msg.value }; | ||
} | ||
} | ||
terminate(); | ||
}, options.timeout) : null; | ||
const handleMessage = (msg) => { | ||
var _a; | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} | ||
else if (msg.type === "return") { | ||
if (options === null || options === void 0 ? void 0 : options.keepAlive) { | ||
// Release before resolve. | ||
release === null || release === void 0 ? void 0 : release(); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to gain the worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} | ||
else { | ||
terminate(); | ||
buffer.push(msg.value); | ||
} | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} | ||
else { | ||
result = { value: msg.value }; | ||
} | ||
} | ||
else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} | ||
else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} | ||
else { | ||
buffer.push(msg.value); | ||
} | ||
} | ||
} | ||
} | ||
}; | ||
const handleError = (err) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
}; | ||
const handleError = (err) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
else if (iterator) { | ||
iterator.emit("error", err); | ||
} | ||
else { | ||
error = err; | ||
} | ||
}; | ||
const handleExit = () => { | ||
var _a; | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to create a new worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
else if (iterator) { | ||
iterator.emit("error", err); | ||
} | ||
else { | ||
error = err; | ||
} | ||
}; | ||
const handleExit = () => { | ||
var _a; | ||
} | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} | ||
else if (iterator) { | ||
iterator.emit("close"); | ||
} | ||
else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await Promise.resolve().then(() => require("path")); | ||
const { fileURLToPath } = await Promise.resolve().then(() => require("url")); | ||
let _filename; | ||
let _dirname; | ||
let entry; | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} | ||
else { | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath("file://{__filename}"); | ||
_dirname = path.dirname(_filename); | ||
} | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} | ||
else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
if ((options === null || options === void 0 ? void 0 : options.adapter) === "child_process") { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to create a new worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} | ||
else if (iterator) { | ||
iterator.emit("close"); | ||
} | ||
else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await Promise.resolve().then(() => require("path")); | ||
const { fileURLToPath } = await Promise.resolve().then(() => require("url")); | ||
let _filename; | ||
let _dirname; | ||
let entry; | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} | ||
else { | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath("file://{__filename}"); | ||
_dirname = path.dirname(_filename); | ||
} | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} | ||
else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
if ((options === null || options === void 0 ? void 0 : options.adapter) === "child_process") { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await Promise.resolve().then(() => require("child_process")); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await Promise.resolve().then(() => require("child_process")); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
workerId = worker.pid; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
workerId = worker.pid; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
// Fill the worker pool regardless of whether the current call should keep the worker alive; | ||
// this makes sure that the total number of workers will not exceed | ||
// maxWorkerNum. If the call doesn't keep the worker alive, it will be | ||
// cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
// Put the current call in the consumer queue if there are no workers available; | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that later calls will not mess up. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
}); | ||
// Fill the worker pool regardless of whether the current call should keep the worker alive; | ||
// this makes sure that the total number of workers will not exceed | ||
// maxWorkerNum. If the call doesn't keep the worker alive, it will be | ||
// cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { Worker } = await Promise.resolve().then(() => require("worker_threads")); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = (_a = worker.threadId) !== null && _a !== void 0 ? _a : workerIdCounter.next().value; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
// Put the current call in the consumer queue if there are no workers available; | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that later calls will not mess up. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} | ||
else { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
@@ -780,29 +769,19 @@ return item.adapter === "worker_threads" && !item.busy; | ||
else if (workerPool.length < maxWorkerNum) { | ||
let url; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of MIME type. | ||
url = [ | ||
...("file://{__filename}".split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} | ||
else { | ||
const _url = (options === null || options === void 0 ? void 0 : options.webWorkerEntry) | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob; | ||
if ((_b = res.headers.get("content-type")) === null || _b === void 0 ? void 0 : _b.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} | ||
else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
url = URL.createObjectURL(blob); | ||
} | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value; | ||
workerPool.push(poolRecord = { | ||
const { Worker } = await Promise.resolve().then(() => require("worker_threads")); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = (_a = worker.threadId) !== null && _a !== void 0 ? _a : workerIdCounter.next().value; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
@@ -817,64 +796,137 @@ worker, | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleMessage(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} | ||
else if (result) { | ||
resolve(result.value); | ||
} | ||
else { | ||
resolver = { resolve, reject }; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} | ||
else { | ||
let worker; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
let url; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of MIME type. | ||
url = [ | ||
...("file://{__filename}".split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} | ||
else { | ||
const _url = (options === null || options === void 0 ? void 0 : options.webWorkerEntry) | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob; | ||
if ((_b = res.headers.get("content-type")) === null || _b === void 0 ? void 0 : _b.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} | ||
else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
url = URL.createObjectURL(blob); | ||
} | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value; | ||
workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleMessage(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
} | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} | ||
else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
resolve(result.value); | ||
} | ||
const { EventEmitter } = await Promise.resolve().then(() => require("events")); | ||
iterator = new EventEmitter(); | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg; | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
else { | ||
resolver = { resolve, reject }; | ||
} | ||
for await (const msg of jsext.read(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
}; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} | ||
else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
} | ||
const { EventEmitter } = await Promise.resolve().then(() => require("events")); | ||
iterator = new EventEmitter(); | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg; | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
} | ||
for await (const msg of read(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
}; | ||
} | ||
exports.run = run; | ||
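A usage sketch for `run` (not part of the diff; the script path and its exported function are hypothetical): the returned handle exposes `workerId`, `result()`, `iterate()` for generator modules, and `abort()` to terminate the worker:

const { run } = require("./cjs/index.js");

(async () => {
    // Calls the default export of the module in a worker thread.
    const task = await run("./examples/heavy-task.mjs", ["input.txt"], { timeout: 5000 });
    console.log("running in worker", task.workerId);

    try {
        console.log("done:", await task.result());
    } catch (err) {
        console.error("failed or timed out:", err);
    }

    // For a generator module, the yielded values could be consumed instead:
    // for await (const chunk of task.iterate()) { ... }

    // A long-running task can be cancelled at any time:
    // await task.abort();
})();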
const jsext = { | ||
try: _try, | ||
func, | ||
wrap, | ||
throttle, | ||
mixins, | ||
isSubclassOf, | ||
read, | ||
run, | ||
}; | ||
exports.default = jsext; | ||
//# sourceMappingURL=index.js.map |
@@ -14,2 +14,3 @@ "use strict"; | ||
Object.as = _1.as; | ||
Object.isValid = _1.isValid; | ||
//# sourceMappingURL=augment.js.map |
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.as = exports.omit = exports.pick = exports.patch = exports.hasOwnMethod = exports.hasOwn = void 0; | ||
exports.isValid = exports.as = exports.omit = exports.pick = exports.patch = exports.hasOwnMethod = exports.hasOwn = void 0; | ||
function hasOwn(obj, key) { | ||
@@ -61,3 +61,3 @@ return Object.prototype.hasOwnProperty.call(obj, key); | ||
exports.omit = omit; | ||
function as(obj, type) { | ||
function as(value, type) { | ||
if (typeof type !== "function") { | ||
@@ -74,12 +74,12 @@ throw new TypeError("type must be a valid constructor"); | ||
}; | ||
if (obj instanceof type) { | ||
if (value instanceof type) { | ||
if ([String, Number, Boolean].includes(type)) { | ||
return obj.valueOf(); // make sure the primitives are returned. | ||
return value.valueOf(); // make sure the primitives are returned. | ||
} | ||
else { | ||
return obj; | ||
return value; | ||
} | ||
} | ||
else if ((_type = typeof obj) && primitiveMap[_type] === type) { | ||
return obj; | ||
else if ((_type = typeof value) && primitiveMap[_type] === type) { | ||
return value; | ||
} | ||
@@ -89,2 +89,17 @@ return null; | ||
exports.as = as; | ||
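For illustration (a sketch, not part of the diff): once the augment module above has been loaded, `as` is attached to the global Object; it returns the value when it matches the requested type (unwrapping boxed primitives) and `null` otherwise.

console.log(Object.as("hello", String));   // "hello"
console.log(Object.as(123, Number));       // 123
console.log(Object.as(new Date(), Date));  // the same Date instance
console.log(Object.as("hello", Number));   // null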
/** | ||
 * Returns `true` if the given value is valid. The following values are considered invalid: | ||
* | ||
* - `undefined` | ||
* - `null` | ||
* - `NaN` | ||
* - `Invalid Date` | ||
*/ | ||
function isValid(value) { | ||
return value !== undefined | ||
&& value !== null | ||
&& !Object.is(value, NaN) | ||
&& !(value instanceof Date && value.toString() === "Invalid Date"); | ||
} | ||
exports.isValid = isValid; | ||
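And a quick sketch of `isValid` (not part of the diff; shown via the `Object.isValid` binding set up by the augment module above), which only rules out the four cases listed in the comment:

console.log(Object.isValid(0));                    // true
console.log(Object.isValid(""));                   // true
console.log(Object.isValid(null));                 // false
console.log(Object.isValid(undefined));            // false
console.log(Object.isValid(NaN));                  // false
console.log(Object.isValid(new Date("nonsense"))); // false (Invalid Date)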
//# sourceMappingURL=index.js.map |
@@ -10,2 +10,8 @@ import './string/augment.js'; | ||
import './error/augment.js'; | ||
import { AsyncFunction, AsyncGeneratorFunction } from './index.js'; | ||
// @ts-ignore | ||
globalThis["AsyncFunction"] = AsyncFunction; | ||
// @ts-ignore | ||
globalThis["AsyncGeneratorFunction"] = AsyncGeneratorFunction; | ||
//# sourceMappingURL=augment.js.map |
esm/index.js
@@ -5,21 +5,276 @@ import { isAsyncGenerator as isAsyncGenerator_1, isGenerator as isGenerator_1 } from './_external/check-iterable/index.js'; | ||
var _a; | ||
const isNode = typeof process === "object" && !!((_a = process.versions) === null || _a === void 0 ? void 0 : _a.node); | ||
const throttleCaches = new Map(); | ||
const AsyncFunction = (async function () { }).constructor; | ||
const AsyncGeneratorFunction = (async function* () { }).constructor; | ||
function _try(fn, ...args) { | ||
if (typeof fn === "function") { | ||
try { | ||
return _try(fn.apply(void 0, args)); | ||
} | ||
catch (err) { | ||
return [err, undefined]; | ||
} | ||
} | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
if (isAsyncGenerator_1(returns)) { | ||
return (async function* () { | ||
let input; | ||
let result; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that passed | ||
// to the outer `next()` call, and pass them to | ||
// `res.next()` in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} | ||
catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating the | ||
// process is forced broken. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if (isGenerator_1(returns)) { | ||
return (function* () { | ||
let input; | ||
let result; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
input = yield [null, value]; | ||
} | ||
} | ||
catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
returns = returns.then((value) => [null, value]); | ||
return Promise.resolve(returns).catch((err) => [err, undefined]); | ||
} | ||
else { | ||
return [null, returns]; | ||
} | ||
} | ||
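A brief sketch of the _try() wrapper above with a plain function and a promise; the [err, val] tuples mirror the non-generator branches of the implementation:

// Synchronous call: the thrown SyntaxError is caught and returned instead of propagating.
const [err1, val1] = _try(() => JSON.parse("{not json"));
console.log(err1 instanceof SyntaxError, val1);   // true undefined

// Promise input (inside an async context): the resolved value lands in the second slot.
const [err2, val2] = await _try(Promise.resolve(42));
console.log(err2, val2);                          // null 42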
/** | ||
* The maximum number of workers allowed to exist at the same time. | ||
* Inspired by Golang, creates a function that receives a `defer` function which can be used | ||
* to carry deferred jobs that will be run after the main function is complete. | ||
* | ||
* The primary purpose of the workers is not to run tasks in parallel, but to run them separately | ||
* from the main thread, so that tasks can be aborted by terminating the worker thread without | ||
* affecting the main thread. | ||
* Multiple calls of the `defer` function are supported, and the callbacks are called in | ||
* LIFO order. Callbacks can be async functions if the main function is an async function or | ||
* an async generator function, and all the running procedures will be awaited. | ||
* | ||
* That said, the worker thread can still be used to achieve parallelism, but note that only | ||
* as many tasks as there are CPU cores will run at the same time. | ||
* @example | ||
* const getVersion = await jsext.func(async (defer) => { | ||
* const file = await fs.open("./package.json", "r"); | ||
* defer(() => file.close()); | ||
* | ||
* const content = await file.readFile("utf8"); | ||
* const pkg = JSON.parse(content); | ||
* | ||
* return pkg.version as string; | ||
* }); | ||
*/ | ||
const maxWorkerNum = 16; | ||
const workerIdCounter = sequence(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool = []; | ||
// The worker consumer queue is nothing but a callback list; once a worker is available, the runner | ||
// pops a consumer and runs its callback, which will retry gaining the worker and retry the task. | ||
const workerConsumerQueue = []; | ||
function func(fn) { | ||
return function (...args) { | ||
var _a; | ||
const callbacks = []; | ||
const defer = (cb) => void callbacks.push(cb); | ||
let result; | ||
try { | ||
const returns = fn.call(this, defer, ...args); | ||
if (isAsyncGenerator_1(returns)) { | ||
const gen = (async function* () { | ||
var _a; | ||
let input; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that passed | ||
// to the outer `next()` call, and pass them to | ||
// `res.next()` in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} | ||
catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating the process is | ||
// forced broken. | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
else if (isGenerator_1(returns)) { | ||
const gen = (function* () { | ||
var _a; | ||
let input; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
input = yield value; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
return Promise.resolve(returns).then(value => ({ | ||
value, | ||
error: null, | ||
})).catch((error) => ({ | ||
value: void 0, | ||
error, | ||
})).then(async (result) => { | ||
var _a; | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}); | ||
} | ||
else { | ||
result = { value: returns, error: null }; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}; | ||
} | ||
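A synchronous sketch of func() above, complementing the async @example in its doc comment: deferred callbacks run in LIFO order after the main body completes:

const order = [];
const task = func((defer) => {
    defer(() => order.push("registered first, runs last"));
    defer(() => order.push("runs first"));
    order.push("body");
    return "done";
});

console.log(task());   // "done"
console.log(order);    // [ "body", "runs first", "registered first, runs last" ]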
/** | ||
* Wraps a function inside another function and returns a new function | ||
* that copies the original function's name and properties. | ||
*/ | ||
function wrap(fn, wrapper) { | ||
const wrapped = function (...args) { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, "name", Object.getOwnPropertyDescriptor(fn, "name")); | ||
Object.defineProperty(wrapped, "length", Object.getOwnPropertyDescriptor(fn, "length")); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped; | ||
} | ||
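A small sketch of the wrap() above, adding logging around an existing function while keeping its name and length (the logging wrapper itself is made up for illustration):

function sum(a, b) {
    return a + b;
}
// The wrapper receives the original function as its first argument.
const loggedSum = wrap(sum, function (fn, ...args) {
    console.log("calling", fn.name, "with", args);
    return fn.apply(this, args);
});

console.log(loggedSum(1, 2));   // logs "calling sum with [ 1, 2 ]", prints 3
console.log(loggedSum.name);    // "sum"  (copied from the original)
console.log(loggedSum.length);  // 2      (copied from the original)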
const throttleCaches = new Map(); | ||
function throttle(handler, options) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const handleCall = function (cache, ...args) { | ||
var _a; | ||
if (cache.result && Date.now() < ((_a = cache.expires) !== null && _a !== void 0 ? _a : 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
} | ||
else { | ||
return cache.result.value; | ||
} | ||
} | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} | ||
catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache = { for: null }; | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
} | ||
else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
} | ||
} | ||
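A sketch of the throttle() above: within the configured duration, calls are served from the cached value (or re-throw the cached error) instead of invoking the handler again; the duration and key below are arbitrary:

let hits = 0;
// Cache the handler's result for 1000 ms.
const stamp = throttle(() => {
    hits++;
    return Date.now();
}, 1000);

const a = stamp();
const b = stamp();              // served from the cache, handler not called again
console.log(a === b, hits);     // true 1

// Passing an options object with `for` shares the cache under that key.
const keyed = throttle(() => Math.random(), { duration: 1000, for: "my-key" });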
/** | ||
* Merges properties and methods only if they're missing in the class. | ||
@@ -71,701 +326,427 @@ */ | ||
} | ||
const jsext = { | ||
try(fn, ...args) { | ||
if (typeof fn === "function") { | ||
try { | ||
return jsext.try(fn.apply(void 0, args)); | ||
} | ||
catch (err) { | ||
return [err, undefined]; | ||
} | ||
function mixins(base, ...mixins) { | ||
const obj = { ctor: null }; | ||
obj.ctor = class extends base { | ||
}; // make sure this class has no name | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
if (isAsyncGenerator_1(returns)) { | ||
return (async function* () { | ||
let input; | ||
let result; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that passed | ||
// to the outer `next()` call, and pass them to | ||
// `res.next()` in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} | ||
catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating the | ||
// process is forced broken. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} | ||
else if (isGenerator_1(returns)) { | ||
return (function* () { | ||
let input; | ||
let result; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} | ||
else { | ||
input = yield [null, value]; | ||
} | ||
} | ||
catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})(); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
returns = returns.then((value) => [null, value]); | ||
return Promise.resolve(returns).catch((err) => [err, undefined]); | ||
} | ||
else { | ||
return [null, returns]; | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
} | ||
}, | ||
func(fn) { | ||
return function (...args) { | ||
var _a; | ||
const callbacks = []; | ||
const defer = (cb) => void callbacks.push(cb); | ||
let result; | ||
try { | ||
const returns = fn.call(this, defer, ...args); | ||
if (isAsyncGenerator_1(returns)) { | ||
const gen = (async function* () { | ||
var _a; | ||
let input; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
// Receive any potential input value that passed | ||
// to the outer `next()` call, and pass them to | ||
// `res.next()` in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} | ||
catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating the process is | ||
// forced broken. | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
} | ||
return obj.ctor; | ||
} | ||
/** Checks if a class is a subclass of another class. */ | ||
function isSubclassOf(ctor1, ctor2) { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
} | ||
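A brief sketch of mixins() and isSubclassOf() above, assuming mergeHierarchy/mergeIfNotExists copy the mixin's members that the base class lacks, as the surrounding doc comment describes; the class names are made up:

class Base {
    greet() { return "hello"; }
}
class LoggerMixin {
    log(msg) { console.log(msg); }
}

// The returned anonymous class extends Base and gains LoggerMixin's log() method.
class App extends mixins(Base, LoggerMixin) { }

const app = new App();
app.log(app.greet());                   // "hello"
console.log(isSubclassOf(App, Base));   // true
console.log(isSubclassOf(Base, App));   // false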
function read(source, eventMap = undefined) { | ||
var _a; | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
} | ||
const iterable = { | ||
ended: false, | ||
error: null, | ||
queue: [], | ||
consumers: [], | ||
next() { | ||
return new Promise((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} | ||
else if (isGenerator_1(returns)) { | ||
const gen = (function* () { | ||
var _a; | ||
let input; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} | ||
else { | ||
input = yield value; | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
})(); | ||
return gen; | ||
else if (this.ended && !this.queue.length) { | ||
// If the iterator is closed, resolve the pending consumer with a void | ||
// value. | ||
resolve({ value: void 0, done: true }); | ||
} | ||
else if (typeof (returns === null || returns === void 0 ? void 0 : returns.then) === "function") { | ||
return Promise.resolve(returns).then(value => ({ | ||
value, | ||
error: null, | ||
})).catch((error) => ({ | ||
value: void 0, | ||
error, | ||
})).then(async (result) => { | ||
var _a; | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await ((_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks)); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}); | ||
else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve the first piece immediately. | ||
resolve({ value: this.queue.shift(), done: false }); | ||
} | ||
else { | ||
result = { value: returns, error: null }; | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
} | ||
catch (error) { | ||
result = { value: void 0, error }; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
(_a = callbacks[i]) === null || _a === void 0 ? void 0 : _a.call(callbacks); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} | ||
else { | ||
return result.value; | ||
} | ||
}; | ||
}, | ||
wrap(fn, wrapper) { | ||
const wrapped = function (...args) { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, "name", Object.getOwnPropertyDescriptor(fn, "name")); | ||
Object.defineProperty(wrapped, "length", Object.getOwnPropertyDescriptor(fn, "length")); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped; | ||
}, | ||
throttle(handler, options) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const handleCall = function (cache, ...args) { | ||
var _a; | ||
if (cache.result && Date.now() < ((_a = cache.expires) !== null && _a !== void 0 ? _a : 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
} | ||
else { | ||
return cache.result.value; | ||
} | ||
} | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} | ||
catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache = { for: null }; | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
}); | ||
} | ||
}; | ||
const handleMessage = (data) => { | ||
var _a; | ||
if (iterable.consumers.length > 0) { | ||
(_a = iterable.consumers.shift()) === null || _a === void 0 ? void 0 : _a.resolve({ value: data, done: false }); | ||
} | ||
else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (...args) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
iterable.queue.push(data); | ||
} | ||
}, | ||
mixins(base, ...mixins) { | ||
const obj = { ctor: null }; | ||
obj.ctor = class extends base { | ||
}; // make sure this class has no name | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} | ||
else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} | ||
else { | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer; | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
return obj.ctor; | ||
}, | ||
isSubclassOf(ctor1, ctor2) { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
}, | ||
read(source, eventMap = undefined) { | ||
var _a; | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
}; | ||
const handleError = (err) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
const iterable = { | ||
ended: false, | ||
error: null, | ||
queue: [], | ||
consumers: [], | ||
next() { | ||
return new Promise((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} | ||
else if (this.ended && !this.queue.length) { | ||
// If the iterator is closed, resolve the pending consumer with a void | ||
// value. | ||
resolve({ value: void 0, done: true }); | ||
} | ||
else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve the first piece immediately. | ||
resolve({ value: this.queue.shift(), done: false }); | ||
} | ||
else { | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
}); | ||
} | ||
}; | ||
const handleMessage = (data) => { | ||
var _a; | ||
if (iterable.consumers.length > 0) { | ||
(_a = iterable.consumers.shift()) === null || _a === void 0 ? void 0 : _a.resolve({ value: data, done: false }); | ||
} | ||
else { | ||
iterable.queue.push(data); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer; | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
}; | ||
const handleError = (err) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev) => { | ||
let err; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} | ||
else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if ((msgDesc === null || msgDesc === void 0 ? void 0 : msgDesc.set) && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup; | ||
if ((eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) && | ||
(eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) !== "message" && | ||
typeof source["addEventListener"] === "function") { // for EventSource listening on custom events | ||
const es = source; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} | ||
else { | ||
msgDesc.set.call(source, (ev) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
var _a; | ||
(_a = msgDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
}; | ||
} | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, handleBrowserErrorEvent); | ||
if (closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
var _a, _b; | ||
handleClose(); | ||
(_a = closeDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
(_b = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _b === void 0 ? void 0 : _b.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}); | ||
} | ||
else if (!(closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default; we need to make sure that when | ||
// its close() function is called, the iterator is automatically closed. | ||
const es = source; | ||
const _close = es.close; | ||
es.close = function close() { | ||
var _a; | ||
_close.call(es); | ||
handleClose(); | ||
es.close = _close; | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev) => { | ||
let err; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} | ||
else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source; | ||
ws.onmessage = (ev) => { | ||
else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if ((msgDesc === null || msgDesc === void 0 ? void 0 : msgDesc.set) && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup; | ||
if ((eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) && | ||
(eventMap === null || eventMap === void 0 ? void 0 : eventMap.event) !== "message" && | ||
typeof source["addEventListener"] === "function") { // for EventSource listening on custom events | ||
const es = source; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} | ||
else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source; | ||
const msgEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.message) || "message"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const closeEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
const msgListener = (ev) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
else { | ||
msgDesc.set.call(source, (ev) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
var _a; | ||
(_a = msgDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
}; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
} | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, handleBrowserErrorEvent); | ||
if (closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
var _a, _b; | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
(_a = closeDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
(_b = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _b === void 0 ? void 0 : _b.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}); | ||
} | ||
else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source; | ||
const dataEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.data) || "data"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const endEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
else if (!(closeDesc === null || closeDesc === void 0 ? void 0 : closeDesc.set) && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default; we need to make sure that when | ||
// its close() function is called, the iterator is automatically closed. | ||
const es = source; | ||
const _close = es.close; | ||
es.close = function close() { | ||
var _a; | ||
_close.call(es); | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(dataEvent, handleError); | ||
}); | ||
es.close = _close; | ||
(_a = errDesc === null || errDesc === void 0 ? void 0 : errDesc.set) === null || _a === void 0 ? void 0 : _a.call(source, null); | ||
cleanup === null || cleanup === void 0 ? void 0 : cleanup(); | ||
}; | ||
} | ||
else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source; | ||
ws.onmessage = (ev) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
}; | ||
} | ||
else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source; | ||
const msgEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.message) || "message"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const closeEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
const msgListener = (ev) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
}; | ||
}, | ||
async run(script, args = undefined, options = undefined) { | ||
var _a, _b; | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: (options === null || options === void 0 ? void 0 : options.fn) || "default", | ||
args: args !== null && args !== void 0 ? args : [], | ||
}; | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
}); | ||
} | ||
else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source; | ||
const dataEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.data) || "data"; | ||
const errEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.error) || "error"; | ||
const endEvent = (eventMap === null || eventMap === void 0 ? void 0 : eventMap.close) || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(dataEvent, handleError); | ||
}); | ||
} | ||
else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
}; | ||
} | ||
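A sketch of read() above using a Node.js EventEmitter source, which is handled by the last branch of the function ('data', 'error' and 'close' are the default event names unless overridden via eventMap):

import { EventEmitter } from "events";

const source = new EventEmitter();
// Emit a couple of messages and then close, after the loop below starts listening.
setTimeout(() => {
    source.emit("data", "foo");
    source.emit("data", "bar");
    source.emit("close");
});

// Inside an async context:
for await (const msg of read(source)) {
    console.log(msg);   // "foo", then "bar"
}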
const isNode = typeof process === "object" && !!((_a = process.versions) === null || _a === void 0 ? void 0 : _a.node); | ||
/** | ||
* The maximum number of workers allowed to exist at the same time. | ||
* | ||
* The primary purpose of the workers is not to run tasks in parallel, but to run them separately | ||
* from the main thread, so that tasks can be aborted by terminating the worker thread without | ||
* affecting the main thread. | ||
* | ||
* That said, the worker thread can still be used to achieve parallelism, but note that only | ||
* as many tasks as there are CPU cores will run at the same time. | ||
*/ | ||
const maxWorkerNum = 16; | ||
const workerIdCounter = sequence(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool = []; | ||
// The worker consumer queue is nothing but a callback list; once a worker is available, the runner | ||
// pops a consumer and runs its callback, which will retry gaining the worker and retry the task. | ||
const workerConsumerQueue = []; | ||
/** | ||
* Runs a task in the `script` in a worker thread that can be aborted during runtime. | ||
* | ||
* In Node.js, the `script` can be either a CommonJS module or an ES module, and is relative to | ||
* the current working directory if not absolute. | ||
* | ||
* In the browser or Deno, the `script` can only be an ES module, and is relative to the current URL | ||
* (or working directory for Deno) if not absolute. | ||
*/ | ||
async function run(script, args = undefined, options = undefined) { | ||
var _a, _b; | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: (options === null || options === void 0 ? void 0 : options.fn) || "default", | ||
args: args !== null && args !== void 0 ? args : [], | ||
}; | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
} | ||
else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
} | ||
else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
} | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serve similar purposes for function results. | ||
const buffer = []; | ||
let error = null; | ||
let result; | ||
let resolver; | ||
let iterator; | ||
let workerId; | ||
let poolRecord; | ||
let release; | ||
let terminate = () => Promise.resolve(void 0); | ||
const timeout = (options === null || options === void 0 ? void 0 : options.timeout) ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
else { | ||
error = err; | ||
} | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serve similar purposes for function results. | ||
const buffer = []; | ||
let error = null; | ||
let result; | ||
let resolver; | ||
let iterator; | ||
let workerId; | ||
let poolRecord; | ||
let release; | ||
let terminate = () => Promise.resolve(void 0); | ||
const timeout = (options === null || options === void 0 ? void 0 : options.timeout) ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (resolver) { | ||
resolver.reject(err); | ||
terminate(); | ||
}, options.timeout) : null; | ||
const handleMessage = (msg) => { | ||
var _a; | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
} | ||
else { | ||
error = err; | ||
else if (msg.type === "return") { | ||
if (options === null || options === void 0 ? void 0 : options.keepAlive) { | ||
// Release before resolve. | ||
release === null || release === void 0 ? void 0 : release(); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to gain the worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
} | ||
else { | ||
terminate(); | ||
} | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} | ||
else { | ||
result = { value: msg.value }; | ||
} | ||
} | ||
terminate(); | ||
}, options.timeout) : null; | ||
const handleMessage = (msg) => { | ||
var _a; | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of the yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} | ||
else if (msg.type === "return") { | ||
if (options === null || options === void 0 ? void 0 : options.keepAlive) { | ||
// Release before resolve. | ||
release === null || release === void 0 ? void 0 : release(); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to gain the worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} | ||
else { | ||
terminate(); | ||
buffer.push(msg.value); | ||
} | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} | ||
else { | ||
result = { value: msg.value }; | ||
} | ||
} | ||
else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of the yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} | ||
else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} | ||
else { | ||
buffer.push(msg.value); | ||
} | ||
} | ||
} | ||
} | ||
}; | ||
const handleError = (err) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
}; | ||
const handleError = (err) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} | ||
else if (iterator) { | ||
iterator.emit("error", err); | ||
} | ||
else { | ||
error = err; | ||
} | ||
}; | ||
const handleExit = () => { | ||
var _a; | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to create a new worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
else if (iterator) { | ||
iterator.emit("error", err); | ||
} | ||
else { | ||
error = err; | ||
} | ||
}; | ||
const handleExit = () => { | ||
var _a; | ||
} | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} | ||
else if (iterator) { | ||
iterator.emit("close"); | ||
} | ||
else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await import('path'); | ||
const { fileURLToPath } = await import('url'); | ||
let _filename; | ||
let _dirname; | ||
let entry; | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} | ||
else { | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath(import.meta.url); | ||
_dirname = path.dirname(_filename); | ||
} | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} | ||
else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
if ((options === null || options === void 0 ? void 0 : options.adapter) === "child_process") { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has a chance to create a new worker. | ||
(_a = workerConsumerQueue.shift()) === null || _a === void 0 ? void 0 : _a(); | ||
} | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} | ||
else if (iterator) { | ||
iterator.emit("close"); | ||
} | ||
else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await import('path'); | ||
const { fileURLToPath } = await import('url'); | ||
let _filename; | ||
let _dirname; | ||
let entry; | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} | ||
else { | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath(import.meta.url); | ||
_dirname = path.dirname(_filename); | ||
} | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} | ||
else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
if ((options === null || options === void 0 ? void 0 : options.adapter) === "child_process") { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await import('child_process'); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await import('child_process'); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
workerId = worker.pid; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
workerId = worker.pid; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
// Fill the worker pool regardless of whether the current call keeps the worker alive or not; | ||
// this will make sure that the total number of workers will not exceed the | ||
// maxWorkerNum. If the call doesn't keep the worker alive, it will be | ||
// cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
// Put the current call in the consumer queue if there are no workers available; | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that later calls will not mess up. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
}); | ||
// Fill the worker pool regardless of whether the current call keeps the worker alive or not; | ||
// this will make sure that the total number of workers will not exceed the | ||
// maxWorkerNum. If the call doesn't keep the worker alive, it will be | ||
// cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
const { Worker } = await import('worker_threads'); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = (_a = worker.threadId) !== null && _a !== void 0 ? _a : workerIdCounter.next().value; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
// Put the current call in the consumer queue if there are no workers available; | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that later calls will not mess up. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} | ||
else { | ||
let worker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
@@ -780,29 +761,19 @@ return item.adapter === "worker_threads" && !item.busy; | ||
else if (workerPool.length < maxWorkerNum) { | ||
let url; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of MIME type. | ||
url = [ | ||
...(import.meta.url.split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} | ||
else { | ||
const _url = (options === null || options === void 0 ? void 0 : options.webWorkerEntry) | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob; | ||
if ((_b = res.headers.get("content-type")) === null || _b === void 0 ? void 0 : _b.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} | ||
else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
url = URL.createObjectURL(blob); | ||
} | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value; | ||
workerPool.push(poolRecord = { | ||
const { Worker } = await import('worker_threads'); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = (_a = worker.threadId) !== null && _a !== void 0 ? _a : workerIdCounter.next().value; | ||
ok = await new Promise((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
@@ -817,65 +788,137 @@ worker, | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleMessage(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} | ||
else if (result) { | ||
resolve(result.value); | ||
} | ||
else { | ||
resolver = { resolve, reject }; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} | ||
else { | ||
let worker; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} | ||
else if (workerPool.length < maxWorkerNum) { | ||
let url; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of MIME type. | ||
url = [ | ||
...(import.meta.url.split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} | ||
else { | ||
const _url = (options === null || options === void 0 ? void 0 : options.webWorkerEntry) | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob; | ||
if ((_b = res.headers.get("content-type")) === null || _b === void 0 ? void 0 : _b.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} | ||
else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
url = URL.createObjectURL(blob); | ||
} | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value; | ||
workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} | ||
else { | ||
return new Promise((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleMessage(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
} | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} | ||
else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
resolve(result.value); | ||
} | ||
const { EventEmitter } = await import('events'); | ||
iterator = new EventEmitter(); | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg; | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
else { | ||
resolver = { resolve, reject }; | ||
} | ||
for await (const msg of jsext.read(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
}; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} | ||
else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
} | ||
const { EventEmitter } = await import('events'); | ||
iterator = new EventEmitter(); | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg; | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
} | ||
for await (const msg of read(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
}; | ||
} | ||
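A sketch of calling run() as defined above; the worker module paths and exported function names are hypothetical, while the handle's shape (workerId, abort, result, iterate) follows the object returned at the end of the function:

// Run the default export of a (hypothetical) worker module with two arguments.
const job = await run("./examples/sum.mjs", [1, 2], { timeout: 5000 });
console.log(await job.result());        // whatever the module's function returns

// For a generator-based module, consume the yielded values via iterate() instead.
const stream = await run("./examples/ticker.mjs", [], { fn: "tick" });
for await (const value of stream.iterate()) {
    console.log(value);
}

// A task that is no longer needed can be aborted, which terminates its worker.
const pending = await run("./examples/slow.mjs");
await pending.abort();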
const jsext = { | ||
try: _try, | ||
func, | ||
wrap, | ||
throttle, | ||
mixins, | ||
isSubclassOf, | ||
read, | ||
run, | ||
}; | ||
export { jsext as default }; | ||
export { AsyncFunction, AsyncGeneratorFunction, _try, jsext as default, func, isSubclassOf, mixins, read, run, throttle, wrap }; | ||
//# sourceMappingURL=index.js.map |
@@ -1,2 +0,2 @@ | ||
import { hasOwn, hasOwnMethod, patch, pick, omit, as } from './index.js'; | ||
import { hasOwn, hasOwnMethod, patch, pick, omit, as, isValid } from './index.js'; | ||
@@ -13,2 +13,3 @@ if (!Object.hasOwn) { | ||
Object.as = as; | ||
Object.isValid = isValid; | ||
//# sourceMappingURL=augment.js.map |
@@ -51,3 +51,3 @@ function hasOwn(obj, key) { | ||
} | ||
function as(obj, type) { | ||
function as(value, type) { | ||
if (typeof type !== "function") { | ||
@@ -64,17 +64,31 @@ throw new TypeError("type must be a valid constructor"); | ||
}; | ||
if (obj instanceof type) { | ||
if (value instanceof type) { | ||
if ([String, Number, Boolean].includes(type)) { | ||
return obj.valueOf(); // make sure the primitives are returned. | ||
return value.valueOf(); // make sure the primitives are returned. | ||
} | ||
else { | ||
return obj; | ||
return value; | ||
} | ||
} | ||
else if ((_type = typeof obj) && primitiveMap[_type] === type) { | ||
return obj; | ||
else if ((_type = typeof value) && primitiveMap[_type] === type) { | ||
return value; | ||
} | ||
return null; | ||
} | ||
/** | ||
* Returns `true` if the given value is valid. The following values are considered invalid: | ||
* | ||
* - `undefined` | ||
* - `null` | ||
* - `NaN` | ||
* - `Invalid Date` | ||
*/ | ||
function isValid(value) { | ||
return value !== undefined | ||
&& value !== null | ||
&& !Object.is(value, NaN) | ||
&& !(value instanceof Date && value.toString() === "Invalid Date"); | ||
} | ||
export { as, hasOwn, hasOwnMethod, omit, patch, pick }; | ||
export { as, hasOwn, hasOwnMethod, isValid, omit, patch, pick }; | ||
//# sourceMappingURL=index.js.map |
1871
index.ts
@@ -6,2 +6,19 @@ import { isAsyncGenerator, isGenerator } from "check-iterable"; | ||
export const AsyncFunction = (async function () { }).constructor as AsyncFunctionConstructor; | ||
export const AsyncGeneratorFunction = (async function* () { }).constructor as AsyncGeneratorFunctionConstructor; | ||
export interface AsyncFunction { | ||
(...args: any[]): Promise<unknown>; | ||
readonly length: number; | ||
readonly name: string; | ||
} | ||
export interface AsyncFunctionConstructor { | ||
new(...args: any[]): AsyncFunction; | ||
(...args: any[]): AsyncFunction; | ||
readonly length: number; | ||
readonly name: string; | ||
readonly prototype: AsyncFunction; | ||
} | ||
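A quick sketch of what the exported AsyncFunction and AsyncGeneratorFunction constructors above enable, namely instanceof checks against async (generator) functions, which have no built-in global constructor binding:

const fn = async () => 1;
async function* gen() { yield 1; }

console.log(fn instanceof AsyncFunction);                // true
console.log(gen instanceof AsyncGeneratorFunction);      // true
console.log((function () { }) instanceof AsyncFunction); // false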
export interface Constructor<T> extends Function { | ||
@@ -21,7 +38,288 @@ new(...args: any[]): T; | ||
export type UnionToIntersection<U> = (U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never; | ||
/** | ||
* Invokes an async generator function and renders its yield value and result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, T = any, A extends any[] = any[], TReturn = any, TNext = unknown>( | ||
fn: (...args: A) => AsyncGenerator<T, TReturn, TNext>, | ||
...args: A | ||
): AsyncGenerator<[E | null, T], [E | null, TReturn], TNext>; | ||
/** | ||
* Invokes a generator function and renders its yield value and result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, T = any, A extends any[] = any[], TReturn = any, TNext = unknown>( | ||
fn: (...args: A) => Generator<T, TReturn, TNext>, | ||
...args: A | ||
): Generator<[E | null, T], [E | null, TReturn], TNext>; | ||
/** | ||
* Invokes an async function and renders its result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, R = any, A extends any[] = any[]>( | ||
fn: (...args: A) => Promise<R>, | ||
...args: A | ||
): Promise<[E | null, R]>; | ||
/** | ||
* Invokes a function and renders its result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, R = any, A extends any[] = any[]>( | ||
fn: (...args: A) => R, | ||
...args: A | ||
): [E | null, R]; | ||
/** | ||
* Resolves an async generator and renders its yield value and result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, T = any, TReturn = any, TNext = unknown>( | ||
gen: AsyncGenerator<T, TReturn, TNext> | ||
): AsyncGenerator<[E | null, T], [E | null, TReturn], TNext>; | ||
/** | ||
* Resolves a generator and renders its yield value and result in a `[err, val]` tuple. | ||
*/ | ||
export function _try<E = Error, T = any, TReturn = any, TNext = unknown>( | ||
gen: Generator<T, TReturn, TNext> | ||
): Generator<[E | null, T], [E | null, TReturn], TNext>; | ||
/** | ||
* Resolves a promise and renders its result in a `[err, res]` tuple. | ||
*/ | ||
export function _try<E = Error, R = any>(job: Promise<R>): Promise<[E | null, R]>; | ||
export function _try(fn: any, ...args: any[]) { | ||
if (typeof fn === "function") { | ||
try { | ||
return _try(fn.apply(void 0, args)); | ||
} catch (err) { | ||
return [err, undefined]; | ||
} | ||
} | ||
const isNode = typeof process === "object" && !!process.versions?.node; | ||
declare var Deno: any; | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
if (isAsyncGenerator(returns)) { | ||
return (async function* () { | ||
let input: unknown; | ||
let result: any; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} else { | ||
// Receive any potential input value passed to the | ||
// outer `next()` call, and pass it to `returns.next()` | ||
// in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating that the | ||
// process was forcibly interrupted. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})() as AsyncGenerator<unknown, any, unknown>; | ||
} else if (isGenerator(returns)) { | ||
return (function* () { | ||
let input: unknown; | ||
let result: any; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} else { | ||
input = yield [null, value]; | ||
} | ||
} catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})() as Generator<unknown, any, unknown>; | ||
} else if (typeof returns?.then === "function") { | ||
returns = (returns as PromiseLike<any>).then((value: any) => [null, value]); | ||
return Promise.resolve(returns).catch((err: unknown) => [err, undefined]) as any; | ||
} else { | ||
return [null, returns]; | ||
} | ||
} | ||
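A brief usage sketch of `_try` (also exposed as `try` on the `JsExt` object further below); the parsed JSON text and URL are only illustrative:

const [err, value] = _try(() => JSON.parse("{ not json"));    // [SyntaxError, undefined]
const [err2, res] = await _try(fetch("https://example.com")); // [null, Response] on success, [TypeError, undefined] on network failure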
/** | ||
* Inspired by Golang, creates a function that receives a `defer` function which can be used | ||
* to carry deferred jobs that will be run after the main function is complete. | ||
* | ||
* Multiple calls of the `defer` function are supported, and the callbacks are called in the | ||
* LIFO order. Callbacks can be async functions if the main function is an async function or | ||
* an async generator function, and all the running procedures will be awaited. | ||
* | ||
* @example | ||
* const getVersion = await jsext.func(async (defer) => { | ||
* const file = await fs.open("./package.json", "r"); | ||
* defer(() => file.close()); | ||
* | ||
* const content = await file.readFile("utf8"); | ||
* const pkg = JSON.parse(content); | ||
* | ||
* return pkg.version as string; | ||
* }); | ||
*/ | ||
export function func<T, R = any, A extends any[] = any[]>( | ||
fn: (this: T, defer: (cb: () => void) => void, ...args: A) => R | ||
): (this: T, ...args: A) => R { | ||
return function (this: T, ...args: A) { | ||
const callbacks: (() => void)[] = []; | ||
const defer = (cb: () => void) => void callbacks.push(cb); | ||
type Result = { value?: Awaited<R>; error: unknown; }; | ||
let result: Result | undefined; | ||
try { | ||
const returns = fn.call(this, defer, ...args) as any; | ||
if (isAsyncGenerator(returns)) { | ||
const gen = (async function* () { | ||
let input: unknown; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} else { | ||
// Receive any potential input value passed to the | ||
// outer `next()` call, and pass it to `returns.next()` | ||
// in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating that the process | ||
// was forcibly interrupted. | ||
result = { value: void 0, error } as Result; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await (callbacks[i] as () => void | Promise<void>)?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
})() as AsyncGenerator<unknown, any, unknown>; | ||
return gen as R; | ||
} else if (isGenerator(returns)) { | ||
const gen = (function* () { | ||
let input: unknown; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} else { | ||
input = yield value; | ||
} | ||
} catch (error) { | ||
result = { value: void 0, error } as Result; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
callbacks[i]?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
})() as Generator<unknown, R, unknown>; | ||
return gen as R; | ||
} else if (typeof returns?.then === "function") { | ||
return Promise.resolve(returns as PromiseLike<R>).then(value => ({ | ||
value, | ||
error: null, | ||
} as Result)).catch((error: unknown) => ({ | ||
value: void 0, | ||
error, | ||
} as Result)).then(async result => { | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await (callbacks[i] as () => void | Promise<void>)?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
}) as R; | ||
} else { | ||
result = { value: returns, error: null } as Result; | ||
} | ||
} catch (error) { | ||
result = { value: void 0, error } as Result; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
callbacks[i]?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value as R; | ||
} | ||
}; | ||
} | ||
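A minimal sketch showing the LIFO order of deferred callbacks with a synchronous function; `greet` and the logged strings are made up for illustration:

const greet = func((defer, name: string) => {
    defer(() => console.log("registered first, runs last"));
    defer(() => console.log("registered last, runs first"));
    return `Hello, ${name}!`;
});
// Both deferred logs print (in reverse registration order) before the return value is delivered.
console.log(greet("World")); // "Hello, World!"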
/** | ||
* Wraps a function inside another function and returns a new function | ||
* that copies the original function's name and properties. | ||
*/ | ||
export function wrap<T, Fn extends (this: T, ...args: any[]) => any>( | ||
fn: Fn, | ||
wrapper: (this: T, fn: Fn, ...args: Parameters<Fn>) => ReturnType<Fn> | ||
): Fn { | ||
const wrapped = function (this: any, ...args: Parameters<Fn>): ReturnType<Fn> { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, | ||
"name", | ||
Object.getOwnPropertyDescriptor(fn, "name") as PropertyDescriptor); | ||
Object.defineProperty(wrapped, | ||
"length", | ||
Object.getOwnPropertyDescriptor(fn, "length") as PropertyDescriptor); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped as Fn; | ||
} | ||
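A usage sketch of `wrap()`; the `sum` function and the log message are hypothetical:

function sum(a: number, b: number) {
    return a + b;
}
const loggedSum = wrap(sum, function (fn, a, b) {
    console.log(`calling ${fn.name} with`, a, b);
    return fn(a, b);
});
loggedSum(2, 3);             // logs, then returns 5
console.log(loggedSum.name); // "sum" (copied from the original function)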
type ThrottleCache = { | ||
@@ -35,25 +333,76 @@ for: any; | ||
/** | ||
* The maximum number of workers allowed to exist at the same time. | ||
* Creates a throttled function that will only be run once in a certain amount of time. | ||
* | ||
* The primary purpose of the workers is not to run tasks in parallel, but to run them separately | ||
* from the main thread, so that aborting a task can be achieved by terminating the worker thread | ||
* without affecting the main thread. | ||
* | ||
* That said, the worker threads can still be used to achieve parallelism, but note that only as | ||
* many tasks as there are CPU cores will be run at the same time. | ||
* If a subsequent call happens within the `duration`, the previous result will be returned and | ||
* the `handler` function will not be invoked. | ||
*/ | ||
const maxWorkerNum = 16; | ||
export function throttle<T, Fn extends (this: T, ...args: any[]) => any>( | ||
handler: Fn, | ||
duration: number | ||
): Fn; | ||
export function throttle<T, Fn extends (this: T, ...args: any[]) => any>(handler: Fn, options: { | ||
duration: number; | ||
/** | ||
* Use the throttle strategy `for` the given key. This keeps the result in a global cache, so | ||
* binding a new `handler` function for the same key will return the same result as the previous | ||
* one until the duration has passed. This mechanism guarantees that both creating the throttled | ||
* function in function scopes and overwriting the handler are possible. | ||
*/ | ||
for?: any; | ||
}): Fn; | ||
export function throttle(handler: (this: any, ...args: any[]) => any, options: number | { | ||
duration: number; | ||
for?: any; | ||
}) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const workerIdCounter = sequence(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool: { | ||
workerId: number; | ||
worker: Worker | NodeWorker | ChildProcess; | ||
adapter: "worker_threads" | "child_process"; | ||
busy: boolean; | ||
}[] = []; | ||
const handleCall = function ( | ||
this: any, | ||
cache: ThrottleCache, | ||
...args: any[] | ||
) { | ||
if (cache.result && Date.now() < (cache.expires ?? 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
} else { | ||
return cache.result.value; | ||
} | ||
} | ||
// The worker consumer queue is nothing but a callback list; once a worker is available, the runner | ||
// pops a consumer and runs the callback, which will retry acquiring the worker and rerun the task. | ||
const workerConsumerQueue: (() => void)[] = []; | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache: ThrottleCache = { for: null }; | ||
return function (this: any, ...args: any[]) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
} else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (this: any, ...args: any[]) { | ||
return handleCall.call(this, cache as ThrottleCache, ...args); | ||
}; | ||
} | ||
} | ||
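A usage sketch of `throttle()`; the 1000 ms duration and the "rand" key are arbitrary:

const cachedNow = throttle(() => Date.now(), 1000);
cachedNow() === cachedNow(); // true, the second call reuses the cached result

// With the `for` key, separately created wrappers share one global cache entry:
const t1 = throttle(() => Math.random(), { duration: 1000, for: "rand" });
const t2 = throttle(() => Math.random(), { duration: 1000, for: "rand" });
t1() === t2(); // true within the duration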
export type UnionToIntersection<U> = ( | ||
U extends any ? (k: U) => void : never) extends ((k: infer I) => void) ? I : never; | ||
/** | ||
@@ -111,130 +460,285 @@ * Merges properties and methods only if they're missing in the class. | ||
export interface JsExt { | ||
/** | ||
* Runs a function and catches any error that happens inside it, returning the error and result | ||
* in a `[err, res]` tuple. | ||
*/ | ||
try<E = Error, T = any, A extends any[] = any[], TReturn = any, TNext = unknown>( | ||
fn: (...args: A) => AsyncGenerator<T, TReturn, TNext>, | ||
...args: A | ||
): AsyncGenerator<[E | null, T], [E | null, TReturn], TNext>; | ||
try<E = Error, T = any, A extends any[] = any[], TReturn = any, TNext = unknown>( | ||
fn: (...args: A) => Generator<T, TReturn, TNext>, | ||
...args: A | ||
): Generator<[E | null, T], [E | null, TReturn], TNext>; | ||
try<E = Error, R = any, A extends any[] = any[]>( | ||
fn: (...args: A) => Promise<R>, | ||
...args: A | ||
): Promise<[E | null, R]>; | ||
try<E = Error, R = any, A extends any[] = any[]>( | ||
fn: (...args: A) => R, | ||
...args: A | ||
): [E | null, R]; | ||
/** Resolves a generator and renders its yield value in a `[err, val]` tuple. */ | ||
try<E = Error, T = any, TReturn = any, TNext = unknown>( | ||
gen: AsyncGenerator<T, TReturn, TNext> | ||
): AsyncGenerator<[E | null, T], [E | null, TReturn], TNext>; | ||
try<E = Error, T = any, TReturn = any, TNext = unknown>( | ||
gen: Generator<T, TReturn, TNext> | ||
): Generator<[E | null, T], [E | null, TReturn], TNext>; | ||
/** Resolves a promise and returns the error and result in a `[err, res]` tuple. */ | ||
try<E = Error, R = any>(job: Promise<R>): Promise<[E | null, R]>; | ||
/** | ||
* Returns an extended class that combines all mixin methods. | ||
* | ||
* This function does not mutate the base class but creates a pivot class | ||
* instead. | ||
*/ | ||
export function mixins<T extends Constructor<any>, M extends any[]>( | ||
base: T, | ||
...mixins: { [X in keyof M]: Constructor<M[X]> } | ||
): T & Constructor<UnionToIntersection<FlatArray<M, 1>>>; | ||
export function mixins<T extends Constructor<any>, M extends any[]>( | ||
base: T, | ||
...mixins: M | ||
): T & Constructor<UnionToIntersection<FlatArray<M, 1>>>; | ||
export function mixins(base: Constructor<any>, ...mixins: any[]) { | ||
const obj = { ctor: null as any as Constructor<any> }; | ||
obj.ctor = class extends (<any>base) { }; // make sure this class has no name | ||
/** | ||
* Inspired by Golang, creates a function that receives a `defer` function which can be used | ||
* to carry deferred jobs that will be run after the main function is complete. | ||
* | ||
* Multiple calls of the `defer` function are supported, and the callbacks are called in the | ||
* LIFO order. Callbacks can be async functions if the main function is an async function or | ||
* an async generator function, and all the running procedures will be awaited. | ||
* | ||
* @example | ||
* const getVersion = await jsext.func(async (defer) => { | ||
* const file = await fs.open("./package.json", "r"); | ||
* defer(() => file.close()); | ||
* | ||
* const content = await file.readFile("utf8"); | ||
* const pkg = JSON.parse(content); | ||
* | ||
* return pkg.version as string; | ||
* }); | ||
*/ | ||
func<T, R = any, A extends any[] = any[]>( | ||
fn: (this: T, defer: (cb: () => void) => void, ...args: A) => R | ||
): (this: T, ...args: A) => R; | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} else { | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
} | ||
} | ||
/** | ||
* Wraps a function inside another function and returns a new function | ||
* that copies the original function's name and properties. | ||
*/ | ||
wrap<T, Fn extends (this: T, ...args: any[]) => any>( | ||
fn: Fn, | ||
wrapper: (this: T, fn: Fn, ...args: Parameters<Fn>) => ReturnType<Fn> | ||
): Fn; | ||
return obj.ctor as Constructor<any>; | ||
} | ||
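A sketch of `mixins()`; the `Base`/`Logger`/`Service` classes are made up for illustration:

class Base {
    greet() { return "hello"; }
}
class Logger {
    log(msg: string) { console.log(`[${this.constructor.name}] ${msg}`); }
}
class Service extends mixins(Base, Logger) { }
const s = new Service();
s.greet();        // inherited from Base
s.log(s.greet()); // copied from Logger, logs "[Service] hello"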
/** | ||
* Creates a throttled function that will only be run once in a certain amount of time. | ||
* | ||
* If a subsequent call happens within the `duration`, the previous result will be returned and | ||
* the `handler` function will not be invoked. | ||
*/ | ||
throttle<T, Fn extends (this: T, ...args: any[]) => any>(handler: Fn, duration: number): Fn; | ||
throttle<T, Fn extends (this: T, ...args: any[]) => any>(handler: Fn, options: { | ||
duration: number; | ||
/** | ||
* Use the throttle strategy `for` the given key. This keeps the result in a global cache, so | ||
* binding a new `handler` function for the same key will return the same result as the previous | ||
* one until the duration has passed. This mechanism guarantees that both creating the throttled | ||
* function in function scopes and overwriting the handler are possible. | ||
*/ | ||
for?: any; | ||
}): Fn; | ||
/** Checks if a class is a subclass of another class. */ | ||
export function isSubclassOf<T, B>(ctor1: Constructor<T>, ctor2: Constructor<B>): boolean { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
} | ||
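A minimal sketch of `isSubclassOf()` with made-up classes:

class Animal { }
class Dog extends Animal { }
isSubclassOf(Dog, Animal); // true
isSubclassOf(Animal, Dog); // false
isSubclassOf(Dog, Dog);    // false, a class does not count as a subclass of itself here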
/** | ||
* Returns an extended class that combines all mixin methods. | ||
* | ||
* This function does not mutate the base class but creates a pivot class | ||
* instead. | ||
*/ | ||
mixins<T extends Constructor<any>, M extends any[]>( | ||
base: T, | ||
...mixins: { [X in keyof M]: Constructor<M[X]> } | ||
): T & Constructor<UnionToIntersection<FlatArray<M, 1>>>; | ||
mixins<T extends Constructor<any>, M extends any[]>( | ||
base: T, | ||
...mixins: M | ||
): T & Constructor<UnionToIntersection<FlatArray<M, 1>>>; | ||
/** | ||
* Wraps a source as an AsyncIterable object that can be used in the `for await...of` loop | ||
* for reading streaming data. | ||
*/ | ||
export function read<I extends AsyncIterable<any>>(iterable: I): I; | ||
export function read(es: EventSource, options?: { event?: string; }): AsyncIterable<string>; | ||
export function read<T extends Uint8Array | string>(ws: WebSocket): AsyncIterable<T>; | ||
export function read<T>(target: EventTarget, eventMap?: { | ||
message?: string; | ||
error?: string; | ||
close?: string; | ||
}): AsyncIterable<T>; | ||
export function read<T>(target: NodeJS.EventEmitter, eventMap?: { | ||
data?: string; | ||
error?: string; | ||
close?: string; | ||
}): AsyncIterable<T>; | ||
export function read<T>(source: any, eventMap: { | ||
event?: string; // for EventSource custom event | ||
message?: string; | ||
data?: string; | ||
error?: string; | ||
close?: string; | ||
} | undefined = undefined): AsyncIterable<T> { | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
} | ||
/** Checks if a class is a subclass of another class. */ | ||
isSubclassOf<T, B>(ctor1: Constructor<T>, ctor2: Constructor<B>): boolean; | ||
const iterable = { | ||
ended: false, | ||
error: null as Error | null, | ||
queue: [] as T[], | ||
consumers: [] as { | ||
resolve: (data: IteratorResult<T>) => void; | ||
reject: (err: any) => void; | ||
}[], | ||
next() { | ||
return new Promise<IteratorResult<T>>((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} else if (this.ended && !this.queue.length) { | ||
// If the iterator is closed, resolve the pending consumer with a void | ||
// value. | ||
resolve({ value: void 0 as T, done: true }); | ||
} else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve the first piece immediately. | ||
resolve({ value: this.queue.shift() as T, done: false }); | ||
} else { | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
}); | ||
} | ||
}; | ||
/** | ||
* Wraps a source as an AsyncIterable object that can be used in the `for await...of` loop | ||
* for reading streaming data. | ||
*/ | ||
read<I extends AsyncIterable<any>>(iterable: I): I; | ||
read(es: EventSource, options?: { event?: string; }): AsyncIterable<string>; | ||
read<T extends Uint8Array | string>(ws: WebSocket): AsyncIterable<T>; | ||
read<T>(target: EventTarget, eventMap?: { | ||
message?: string; | ||
error?: string; | ||
close?: string; | ||
}): AsyncIterable<T>; | ||
read<T>(target: NodeJS.EventEmitter, eventMap?: { | ||
data?: string; | ||
error?: string; | ||
close?: string; | ||
}): AsyncIterable<T>; | ||
const handleMessage = (data: T) => { | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.shift()?.resolve({ value: data, done: false }); | ||
} else { | ||
iterable.queue.push(data); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer: typeof iterable["consumers"][0] | undefined; | ||
/** | ||
* Runs a task in the `script` in a worker thread that can be aborted during runtime. | ||
* | ||
* In Node.js, the `script` can be either a CommonJS module or an ES module, and is relative to | ||
* the current working directory if not absolute. | ||
* | ||
* In browser or Deno, the `script` can only be an ES module, and is relative to the current URL | ||
* (or working directory for Deno) if not absolute. | ||
*/ | ||
run<T, A extends any[] = any[]>(script: string, args?: A, options?: { | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
}; | ||
const handleError = (err: Error) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev: Event) => { | ||
let err: Error; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if (msgDesc?.set && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup: () => void; | ||
if (eventMap?.event && | ||
eventMap?.event !== "message" && | ||
typeof source["addEventListener"] === "function" | ||
) { // for EventSource listening on custom events | ||
const es = source as EventSource; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} else { | ||
msgDesc.set.call(source, (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
msgDesc.set?.call(source, null); | ||
}; | ||
} | ||
errDesc?.set?.call(source, handleBrowserErrorEvent); | ||
if (closeDesc?.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
handleClose(); | ||
closeDesc.set?.call(source, null); | ||
errDesc?.set?.call(source, null); | ||
cleanup?.(); | ||
}); | ||
} else if (!closeDesc?.set && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default; we need to make sure that when | ||
// its close() function is called, the iterator is automatically closed. | ||
const es = source as EventSource; | ||
const _close = es.close; | ||
es.close = function close() { | ||
_close.call(es); | ||
handleClose(); | ||
es.close = _close; | ||
errDesc?.set?.call(source, null); | ||
cleanup?.(); | ||
}; | ||
} | ||
} else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source as WebSocket; | ||
ws.onmessage = (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
}; | ||
} else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source as EventTarget; | ||
const msgEvent = eventMap?.message || "message"; | ||
const errEvent = eventMap?.error || "error"; | ||
const closeEvent = eventMap?.close || "close"; | ||
const msgListener = (ev: Event) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
}; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
}); | ||
} else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source as NodeJS.EventEmitter; | ||
const dataEvent = eventMap?.data || "data"; | ||
const errEvent = eventMap?.error || "error"; | ||
const endEvent = eventMap?.close || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(errEvent, handleError); | ||
}); | ||
} else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
}; | ||
} | ||
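A usage sketch of `read()` with a Node.js EventEmitter, relying on the default "data"/"error"/"close" event names; the emitter and the emitted values are purely illustrative:

import { EventEmitter } from "events";

const source = new EventEmitter();
setTimeout(() => {
    source.emit("data", "hello");
    source.emit("data", "world");
    source.emit("close"); // closes the iterator once the queue drains
});

for await (const chunk of read<string>(source)) {
    console.log(chunk); // "hello", then "world"
}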
const isNode = typeof process === "object" && !!process.versions?.node; | ||
declare var Deno: any; | ||
/** | ||
* The maximum number of workers allowed to exist at the same time. | ||
* | ||
* The primary purpose of the workers is not to run tasks in parallel, but to run them separately | ||
* from the main thread, so that aborting a task can be achieved by terminating the worker thread | ||
* without affecting the main thread. | ||
* | ||
* That said, the worker threads can still be used to achieve parallelism, but note that only as | ||
* many tasks as there are CPU cores will be run at the same time. | ||
*/ | ||
const maxWorkerNum = 16; | ||
const workerIdCounter = sequence(1, Number.MAX_SAFE_INTEGER, 1, true); | ||
let workerPool: { | ||
workerId: number; | ||
worker: Worker | NodeWorker | ChildProcess; | ||
adapter: "worker_threads" | "child_process"; | ||
busy: boolean; | ||
}[] = []; | ||
// The worker consumer queue is nothing but a callback list; once a worker is available, the runner | ||
// pops a consumer and runs the callback, which will retry acquiring the worker and rerun the task. | ||
const workerConsumerQueue: (() => void)[] = []; | ||
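A usage sketch of `run()` based on the API documented below; "./job.mjs" and the exported function names are hypothetical:

// Run the `sum` function exported by ./job.mjs in a worker and await its return value.
const task = await run<number, [number, number]>("./job.mjs", [1, 2], { fn: "sum" });
console.log(await task.result());

// A long-running task can be aborted by terminating its worker.
const longTask = await run("./job.mjs", [], { fn: "longRunning" });
await longTask.abort();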
/** | ||
* Runs a task in the `script` in a worker thread that can be aborted during runtime. | ||
* | ||
* In Node.js, the `script` can be either a CommonJS module or an ES module, and is relative to | ||
* the current working directory if not absolute. | ||
* | ||
* In browser or Deno, the `script` can only be an ES module, and is relative to the current URL | ||
* (or working directory for Deno) if not absolute. | ||
*/ | ||
export async function run<T, A extends any[] = any[]>( | ||
script: string, | ||
args: A | undefined = undefined, | ||
options: { | ||
/** If not set, runs the default function, otherwise runs the specific function. */ | ||
@@ -263,736 +767,209 @@ fn?: string; | ||
webWorkerEntry?: string; | ||
}): Promise<{ | ||
workerId: number; | ||
/** Terminates the worker and aborts the task. */ | ||
abort(): Promise<void>; | ||
/** Retrieves the return value of the function. */ | ||
result(): Promise<T>; | ||
/** Iterates over the yielded values if the function returns a generator. */ | ||
iterate(): AsyncIterable<T>; | ||
}>; | ||
} | ||
} | undefined = undefined | ||
): Promise<{ | ||
workerId: number; | ||
/** Terminates the worker and aborts the task. */ | ||
abort(): Promise<void>; | ||
/** Retrieves the return value of the function. */ | ||
result(): Promise<T>; | ||
/** Iterates over the yielded values if the function returns a generator. */ | ||
iterate(): AsyncIterable<T>; | ||
}> { | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: options?.fn || "default", | ||
args: args ?? [], | ||
}; | ||
const jsext: JsExt = { | ||
try(fn: any, ...args: any[]) { | ||
if (typeof fn === "function") { | ||
try { | ||
return jsext.try(fn.apply(void 0, args)); | ||
} catch (err) { | ||
return [err, undefined]; | ||
} | ||
} | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
} else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
} else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
} | ||
let returns = fn; | ||
// Implementation details should be ordered from complex to simple. | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serves similar purposes for function results. | ||
const buffer: any[] = []; | ||
let error: Error | null = null; | ||
let result: { value: any; } | undefined; | ||
let resolver: { | ||
resolve: (data: any) => void; | ||
reject: (err: unknown) => void; | ||
} | undefined; | ||
let iterator: NodeJS.EventEmitter | undefined; | ||
let workerId: number | undefined; | ||
let poolRecord: typeof workerPool[0] | undefined; | ||
let release: () => void; | ||
let terminate = () => Promise.resolve<void>(void 0); | ||
const timeout = options?.timeout ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (isAsyncGenerator(returns)) { | ||
return (async function* () { | ||
let input: unknown; | ||
let result: any; | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} else { | ||
// Receive any potential input value passed to the | ||
// outer `next()` call, and pass it to `returns.next()` | ||
// in the next call. | ||
input = yield Promise.resolve([null, value]); | ||
} | ||
} catch (err) { | ||
// If any error occurs, yield that error as resolved | ||
// and break the loop immediately, indicating that the | ||
// process was forcibly interrupted. | ||
yield Promise.resolve([err, undefined]); | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})() as AsyncGenerator<unknown, any, unknown>; | ||
} else if (isGenerator(returns)) { | ||
return (function* () { | ||
let input: unknown; | ||
let result: any; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = value; | ||
break; | ||
} else { | ||
input = yield [null, value]; | ||
} | ||
} catch (err) { | ||
yield [err, undefined]; | ||
break; | ||
} | ||
} | ||
return [null, result]; | ||
})() as Generator<unknown, any, unknown>; | ||
} else if (typeof returns?.then === "function") { | ||
returns = (returns as PromiseLike<any>).then((value: any) => [null, value]); | ||
return Promise.resolve(returns).catch((err: unknown) => [err, undefined]) as any; | ||
if (resolver) { | ||
resolver.reject(err); | ||
} else { | ||
return [null, returns]; | ||
error = err; | ||
} | ||
}, | ||
func<T, R = any, A extends any[] = any[]>( | ||
fn: (this: T, defer: (cb: () => void) => void, ...args: A) => R | ||
): (this: T, ...args: A) => R { | ||
return function (this: any, ...args: A) { | ||
const callbacks: (() => void)[] = []; | ||
const defer = (cb: () => void) => void callbacks.push(cb); | ||
type Result = { value?: Awaited<R>; error: unknown; }; | ||
let result: Result | undefined; | ||
try { | ||
const returns = fn.call(this, defer, ...args) as any; | ||
terminate(); | ||
}, options.timeout) : null; | ||
if (isAsyncGenerator(returns)) { | ||
const gen = (async function* () { | ||
let input: unknown; | ||
const handleMessage = (msg: any) => { | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
} else if (msg.type === "return") { | ||
if (options?.keepAlive) { | ||
// Release before resolve. | ||
release?.(); | ||
// Use `while` loop instead of `for...of...` in order to | ||
// retrieve the return value of a generator function. | ||
while (true) { | ||
try { | ||
const { done, value } = await returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} else { | ||
// Receive any potential input value passed to the | ||
// outer `next()` call, and pass it to `returns.next()` | ||
// in the next call. | ||
input = yield Promise.resolve(value); | ||
} | ||
} catch (error) { | ||
// If any error occurs, capture that error and break | ||
// the loop immediately, indicating that the process | ||
// was forcibly interrupted. | ||
result = { value: void 0, error } as Result; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await (callbacks[i] as () => void | Promise<void>)?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
})() as AsyncGenerator<unknown, any, unknown>; | ||
return gen as R; | ||
} else if (isGenerator(returns)) { | ||
const gen = (function* () { | ||
let input: unknown; | ||
while (true) { | ||
try { | ||
const { done, value } = returns.next(input); | ||
if (done) { | ||
result = { value, error: null }; | ||
break; | ||
} else { | ||
input = yield value; | ||
} | ||
} catch (error) { | ||
result = { value: void 0, error } as Result; | ||
break; | ||
} | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
callbacks[i]?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
})() as Generator<unknown, R, unknown>; | ||
return gen as R; | ||
} else if (typeof returns?.then === "function") { | ||
return Promise.resolve(returns as PromiseLike<R>).then(value => ({ | ||
value, | ||
error: null, | ||
} as Result)).catch((error: unknown) => ({ | ||
value: void 0, | ||
error, | ||
} as Result)).then(async result => { | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
await (callbacks[i] as () => void | Promise<void>)?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value; | ||
} | ||
}) as R; | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has chance to gain the worker. | ||
workerConsumerQueue.shift()?.(); | ||
} | ||
} else { | ||
result = { value: returns, error: null } as Result; | ||
terminate(); | ||
} | ||
} catch (error) { | ||
result = { value: void 0, error } as Result; | ||
} | ||
for (let i = callbacks.length - 1; i >= 0; i--) { | ||
callbacks[i]?.(); | ||
} | ||
if (result.error) { | ||
throw result.error; | ||
} else { | ||
return result.value as R; | ||
} | ||
}; | ||
}, | ||
wrap<T, Fn extends (this: T, ...args: any[]) => any>( | ||
fn: Fn, | ||
wrapper: (this: T, fn: Fn, ...args: Parameters<Fn>) => ReturnType<Fn> | ||
): Fn { | ||
const wrapped = function (this: any, ...args: Parameters<Fn>): ReturnType<Fn> { | ||
return wrapper.call(this, fn, ...args); | ||
}; | ||
Object.defineProperty(wrapped, | ||
"name", | ||
Object.getOwnPropertyDescriptor(fn, "name") as PropertyDescriptor); | ||
Object.defineProperty(wrapped, | ||
"length", | ||
Object.getOwnPropertyDescriptor(fn, "length") as PropertyDescriptor); | ||
Object.defineProperty(wrapped, "toString", { | ||
configurable: true, | ||
enumerable: false, | ||
writable: true, | ||
value: fn.toString.bind(fn), | ||
}); | ||
return wrapped as Fn; | ||
}, | ||
throttle(handler, options) { | ||
const key = typeof options === "number" ? null : options.for; | ||
const duration = typeof options === "number" ? options : options.duration; | ||
const handleCall = function ( | ||
this: any, | ||
cache: ThrottleCache, | ||
...args: any[] | ||
) { | ||
if (cache.result && Date.now() < (cache.expires ?? 0)) { | ||
if (cache.result.error) { | ||
throw cache.result.error; | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} else { | ||
return cache.result.value; | ||
result = { value: msg.value }; | ||
} | ||
} else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of the yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} else { | ||
buffer.push(msg.value); | ||
} | ||
} | ||
} | ||
} | ||
}; | ||
try { | ||
const returns = handler.call(this, ...args); | ||
cache.result = { value: returns }; | ||
cache.expires = Date.now() + duration; | ||
return returns; | ||
} catch (error) { | ||
cache.result = { error }; | ||
cache.expires = Date.now() + duration; | ||
throw error; | ||
} | ||
}; | ||
if (!key) { | ||
const cache: ThrottleCache = { for: null }; | ||
return function (this: any, ...args: any[]) { | ||
return handleCall.call(this, cache, ...args); | ||
}; | ||
const handleError = (err: Error | null) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} else if (iterator) { | ||
iterator.emit("error", err); | ||
} else { | ||
let cache = throttleCaches.get(key); | ||
if (!cache) { | ||
cache = { for: key }; | ||
throttleCaches.set(key, cache); | ||
} | ||
return function (this: any, ...args: any[]) { | ||
return handleCall.call(this, cache as ThrottleCache, ...args); | ||
}; | ||
error = err; | ||
} | ||
}, | ||
mixins(base, ...mixins) { | ||
const obj = { ctor: null as any as Constructor<any> }; | ||
obj.ctor = class extends (<any>base) { }; // make sure this class has no name | ||
}; | ||
const handleExit = () => { | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
for (const mixin of mixins) { | ||
if (typeof mixin == "function") { | ||
mergeHierarchy(obj.ctor, mixin); | ||
} else if (mixin && typeof mixin == "object") { | ||
mergeIfNotExists(obj.ctor.prototype, mixin); | ||
} else { | ||
throw new TypeError("mixin must be a constructor or an object"); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has chance to create new worker. | ||
workerConsumerQueue.shift()?.(); | ||
} | ||
} | ||
return obj.ctor as Constructor<any>; | ||
}, | ||
isSubclassOf(ctor1, ctor2) { | ||
return typeof ctor1 === "function" | ||
&& typeof ctor2 === "function" | ||
&& ctor1.prototype instanceof ctor2; | ||
}, | ||
read<T>(source: any, eventMap: { | ||
event?: string; // for EventSource custom event | ||
message?: string; | ||
data?: string; | ||
error?: string; | ||
close?: string; | ||
} | undefined = undefined): AsyncIterable<T> { | ||
if (typeof source[Symbol.asyncIterator] === "function") { | ||
return source; | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} else if (iterator) { | ||
iterator.emit("close"); | ||
} else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
const iterable = { | ||
ended: false, | ||
error: null as Error | null, | ||
queue: [] as T[], | ||
consumers: [] as { | ||
resolve: (data: IteratorResult<T>) => void; | ||
reject: (err: any) => void; | ||
}[], | ||
next() { | ||
return new Promise<IteratorResult<T>>((resolve, reject) => { | ||
if (this.error && !this.ended) { | ||
// If an error occurred during the last transmission and the iterator | ||
// hasn't been closed, reject that error and stop the iterator immediately. | ||
reject(this.error); | ||
this.ended = true; | ||
} else if (this.ended && !this.queue.length) { | ||
// If the iterator is closed, resolve the pending consumer with a void | ||
// value. | ||
resolve({ value: void 0 as T, done: true }); | ||
} else if (this.queue.length > 0) { | ||
// If there is data in the queue, resolve the first piece immediately. | ||
resolve({ value: this.queue.shift() as T, done: false }); | ||
} else { | ||
// If there are no queued data, push the consumer to a waiting queue. | ||
this.consumers.push({ resolve, reject }); | ||
} | ||
}); | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await import("path"); | ||
const { fileURLToPath } = await import("url"); | ||
let _filename: string; | ||
let _dirname: string; | ||
let entry: string; | ||
const handleMessage = (data: T) => { | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.shift()?.resolve({ value: data, done: false }); | ||
} else { | ||
iterable.queue.push(data); | ||
} | ||
}; | ||
const handleClose = () => { | ||
iterable.ended = true; | ||
let consumer: typeof iterable["consumers"][0] | undefined; | ||
while (consumer = iterable.consumers.shift()) { | ||
consumer.resolve({ value: undefined, done: true }); | ||
} | ||
}; | ||
const handleError = (err: Error) => { | ||
iterable.error = err; | ||
if (iterable.consumers.length > 0) { | ||
iterable.consumers.forEach(item => { | ||
item.reject(err); | ||
}); | ||
iterable.consumers = []; | ||
} | ||
}; | ||
const handleBrowserErrorEvent = (ev: Event) => { | ||
let err: Error; | ||
if (ev instanceof ErrorEvent) { | ||
err = ev.error || new Error(ev.message); | ||
} else { | ||
// @ts-ignore | ||
err = new Error("something went wrong", { cause: ev }); | ||
} | ||
handleError(err); | ||
}; | ||
const proto = Object.getPrototypeOf(source); | ||
const msgDesc = Object.getOwnPropertyDescriptor(proto, "onmessage"); | ||
if (msgDesc?.set && typeof source.close === "function") { // WebSocket or EventSource | ||
const errDesc = Object.getOwnPropertyDescriptor(proto, "onerror"); | ||
const closeDesc = Object.getOwnPropertyDescriptor(proto, "onclose"); | ||
let cleanup: () => void; | ||
if (eventMap?.event && | ||
eventMap?.event !== "message" && | ||
typeof source["addEventListener"] === "function" | ||
) { // for EventSource listening on custom events | ||
const es = source as EventSource; | ||
const eventName = eventMap.event; | ||
const msgListener = (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}; | ||
es.addEventListener(eventName, msgListener); | ||
cleanup = () => { | ||
es.removeEventListener(eventName, msgListener); | ||
}; | ||
} else { | ||
msgDesc.set.call(source, (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}); | ||
cleanup = () => { | ||
msgDesc.set?.call(source, null); | ||
}; | ||
} | ||
errDesc?.set?.call(source, handleBrowserErrorEvent); | ||
if (closeDesc?.set) { // WebSocket | ||
closeDesc.set.call(source, () => { | ||
handleClose(); | ||
closeDesc.set?.call(source, null); | ||
errDesc?.set?.call(source, null); | ||
cleanup?.(); | ||
}); | ||
} else if (!closeDesc?.set && typeof source.close === "function") { // EventSource | ||
// EventSource does not trigger a close event by default; we need to make sure that when | ||
// its close() function is called, the iterator is automatically closed. | ||
const es = source as EventSource; | ||
const _close = es.close; | ||
es.close = function close() { | ||
_close.call(es); | ||
handleClose(); | ||
es.close = _close; | ||
errDesc?.set?.call(source, null); | ||
cleanup?.(); | ||
}; | ||
} | ||
} else if (typeof source.send === "function" && typeof source.close === "function") { | ||
// non-standard WebSocket implementation | ||
const ws = source as WebSocket; | ||
ws.onmessage = (ev: MessageEvent<T>) => { | ||
handleMessage(ev.data); | ||
}; | ||
ws.onerror = handleBrowserErrorEvent; | ||
ws.onclose = () => { | ||
handleClose(); | ||
ws.onclose = null; | ||
ws.onerror = null; | ||
ws.onmessage = null; | ||
}; | ||
} else if (typeof source["addEventListener"] === "function") { // EventTarget | ||
const target = source as EventTarget; | ||
const msgEvent = eventMap?.message || "message"; | ||
const errEvent = eventMap?.error || "error"; | ||
const closeEvent = eventMap?.close || "close"; | ||
const msgListener = (ev: Event) => { | ||
if (ev instanceof MessageEvent) { | ||
handleMessage(ev.data); | ||
} | ||
}; | ||
target.addEventListener(msgEvent, msgListener); | ||
target.addEventListener(errEvent, handleBrowserErrorEvent); | ||
target.addEventListener(closeEvent, function closeListener() { | ||
handleClose(); | ||
target.removeEventListener(closeEvent, closeListener); | ||
target.removeEventListener(msgEvent, msgListener); | ||
target.removeEventListener(errEvent, handleBrowserErrorEvent); | ||
}); | ||
} else if (typeof source["on"] === "function") { // EventEmitter | ||
const target = source as NodeJS.EventEmitter; | ||
const dataEvent = eventMap?.data || "data"; | ||
const errEvent = eventMap?.error || "error"; | ||
const endEvent = eventMap?.close || "close"; | ||
target.on(dataEvent, handleMessage); | ||
target.once(errEvent, handleError); | ||
target.once(endEvent, () => { | ||
handleClose(); | ||
target.off(dataEvent, handleMessage); | ||
target.off(errEvent, handleError); | ||
}); | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} else { | ||
throw new TypeError("the input source cannot be read as an AsyncIterable object"); | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath("file://{__filename}"); | ||
_dirname = path.dirname(_filename); | ||
} | ||
return { | ||
[Symbol.asyncIterator]() { | ||
return iterable; | ||
} | ||
}; | ||
}, | ||
async run(script, args = undefined, options = undefined) { | ||
const msg = { | ||
type: "ffi", | ||
script, | ||
baseUrl: "", | ||
fn: options?.fn || "default", | ||
args: args ?? [], | ||
}; | ||
if (typeof Deno === "object") { | ||
msg.baseUrl = "file://" + Deno.cwd() + "/"; | ||
} else if (isNode) { | ||
msg.baseUrl = "file://" + process.cwd() + "/"; | ||
} else if (typeof location === "object") { | ||
msg.baseUrl = location.href; | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
// `buffer` is used to store data pieces yielded by generator functions before they are | ||
// consumed. `error` and `result` serves similar purposes for function results. | ||
const buffer: any[] = []; | ||
let error: Error | null = null; | ||
let result: { value: any; } | undefined; | ||
let resolver: { | ||
resolve: (data: any) => void; | ||
reject: (err: unknown) => void; | ||
} | undefined; | ||
let iterator: NodeJS.EventEmitter | undefined; | ||
let workerId: number | undefined; | ||
let poolRecord: typeof workerPool[0] | undefined; | ||
let release: () => void; | ||
let terminate = () => Promise.resolve<void>(void 0); | ||
const timeout = options?.timeout ? setTimeout(() => { | ||
const err = new Error(`operation timeout after ${options.timeout}ms`); | ||
if (options?.adapter === "child_process") { | ||
let worker: ChildProcess; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
}); | ||
if (resolver) { | ||
resolver.reject(err); | ||
} else { | ||
error = err; | ||
} | ||
terminate(); | ||
}, options.timeout) : null; | ||
const handleMessage = (msg: any) => { | ||
if (msg && typeof msg === "object" && typeof msg.type === "string") { | ||
if (msg.type === "error") { | ||
return handleError(msg.error); | ||
} else if (msg.type === "return") { | ||
if (options?.keepAlive) { | ||
// Release before resolve. | ||
release?.(); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has chance to gain the worker. | ||
workerConsumerQueue.shift()?.(); | ||
} | ||
} else { | ||
terminate(); | ||
} | ||
if (resolver) { | ||
resolver.resolve(msg.value); | ||
} else { | ||
result = { value: msg.value }; | ||
} | ||
} else if (msg.type === "yield") { | ||
if (msg.done) { | ||
// The final message of the yield event is the return value. | ||
handleMessage({ type: "return", value: msg.value }); | ||
} else { | ||
if (iterator) { | ||
iterator.emit("data", msg.value); | ||
} else { | ||
buffer.push(msg.value); | ||
} | ||
} | ||
} | ||
} | ||
}; | ||
const handleError = (err: Error | null) => { | ||
if (resolver) { | ||
resolver.reject(err); | ||
} else if (iterator) { | ||
iterator.emit("error", err); | ||
} else { | ||
error = err; | ||
} | ||
}; | ||
const handleExit = () => { | ||
if (poolRecord) { | ||
// Clean the pool before resolve. | ||
workerPool = workerPool.filter(record => record !== poolRecord); | ||
if (workerConsumerQueue.length) { | ||
// Queued consumer now has chance to create new worker. | ||
workerConsumerQueue.shift()?.(); | ||
} | ||
} | ||
if (resolver) { | ||
resolver.resolve(void 0); | ||
} else if (iterator) { | ||
iterator.emit("close"); | ||
} else if (!error && !result) { | ||
result = { value: void 0 }; | ||
} | ||
}; | ||
if (isNode) { | ||
const path = await import("path"); | ||
const { fileURLToPath } = await import("url"); | ||
let _filename: string; | ||
let _dirname: string; | ||
let entry: string; | ||
if (typeof __filename === "string") { | ||
_filename = __filename; | ||
_dirname = __dirname; | ||
} else { | ||
// This file URL will be replaced with `import.meta.url` by the Rollup plugin. | ||
_filename = fileURLToPath("file://{__filename}"); | ||
_dirname = path.dirname(_filename); | ||
} | ||
if (["cjs", "esm"].includes(path.basename(_dirname))) { // compiled | ||
entry = path.join(path.dirname(_dirname), "worker.mjs"); | ||
} else { | ||
entry = path.join(_dirname, "worker.mjs"); | ||
} | ||
if (options?.adapter === "child_process") { | ||
let worker: ChildProcess; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "child_process" && !item.busy; | ||
worker = poolRecord.worker as ChildProcess; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await import("child_process"); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
}); | ||
if (poolRecord) { | ||
worker = poolRecord.worker as ChildProcess; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} else if (workerPool.length < maxWorkerNum) { | ||
const { fork } = await import("child_process"); | ||
const isPrior14 = parseInt(process.version.slice(1)) < 14; | ||
worker = fork(entry, { | ||
stdio: "inherit", | ||
serialization: isPrior14 ? "advanced" : "json", | ||
workerId = worker.pid as number; | ||
ok = await new Promise<boolean>((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
workerId = worker.pid as number; | ||
ok = await new Promise<boolean>((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The child process took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
worker.once("message", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
// Fill the worker pool regardless of whether the current call keeps the worker alive or not; | ||
// this makes sure that the total number of workers will not exceed maxWorkerNum. If the call | ||
// doesn't keep the worker alive, the worker will be cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} else { | ||
// Put the current call in the consumer queue if there are no workers available, | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise<void>((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that it won't interfere with later calls. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
// Fill the worker pool regardless of whether the current call keeps the worker alive or not; | ||
// this makes sure that the total number of workers will not exceed maxWorkerNum. If the call | ||
// doesn't keep the worker alive, the worker will be cleaned up after the call. | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "child_process", | ||
busy: true, | ||
}); | ||
} else { | ||
let worker: NodeWorker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
// Put the current call in the consumer queue if there are no workers available, | ||
// once an existing call finishes, the queue will pop its head consumer and | ||
// retry. | ||
return new Promise<void>((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
if (poolRecord) { | ||
worker = poolRecord.worker as NodeWorker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} else if (workerPool.length < maxWorkerNum) { | ||
const { Worker } = await import("worker_threads"); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = worker.threadId ?? workerIdCounter.next().value as number; | ||
ok = await new Promise<boolean>((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} else { | ||
return new Promise<void>((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
} | ||
release = () => { | ||
// Remove the event listener so that it won't interfere with later calls. | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = () => Promise.resolve(void worker.kill(1)); | ||
release = () => { | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
if (ok) { | ||
worker.send(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} else { | ||
let worker: Worker; | ||
let worker: NodeWorker; | ||
let ok = true; | ||
poolRecord = workerPool.find(item => { | ||
@@ -1003,35 +980,23 @@ return item.adapter === "worker_threads" && !item.busy; | ||
if (poolRecord) { | ||
worker = poolRecord.worker as Worker; | ||
worker = poolRecord.worker as NodeWorker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} else if (workerPool.length < maxWorkerNum) { | ||
let url: string; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of MIME type. | ||
url = [ | ||
...("file://{__filename}".split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} else { | ||
const _url = options?.webWorkerEntry | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob: Blob; | ||
if (res.headers.get("content-type")?.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
url = URL.createObjectURL(blob); | ||
} | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value as number; | ||
workerPool.push(poolRecord = { | ||
const { Worker } = await import("worker_threads"); | ||
worker = new Worker(entry); | ||
// `threadId` may not exist in Bun. | ||
workerId = worker.threadId ?? workerIdCounter.next().value as number; | ||
ok = await new Promise<boolean>((resolve) => { | ||
worker.once("exit", () => { | ||
if (error) { | ||
// The worker took too long to start and caused a timeout error. | ||
resolve(false); | ||
} | ||
}); | ||
worker.once("online", () => { | ||
worker.removeAllListeners("exit"); | ||
resolve(true); | ||
}); | ||
}); | ||
ok && workerPool.push(poolRecord = { | ||
workerId, | ||
@@ -1045,68 +1010,142 @@ worker, | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => jsext.run(script, args, options)); | ||
}).then(() => run(script, args, options)); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
worker.off("message", handleMessage); | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
terminate = async () => void (await worker.terminate()); | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleMessage(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
if (ok) { | ||
worker.postMessage(msg); | ||
worker.on("message", handleMessage); | ||
worker.once("error", handleError); | ||
worker.once("messageerror", handleError); | ||
worker.once("exit", handleExit); | ||
} | ||
} | ||
} else { | ||
let worker: Worker; | ||
poolRecord = workerPool.find(item => { | ||
return item.adapter === "worker_threads" && !item.busy; | ||
}); | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise<any>((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} else if (result) { | ||
resolve(result.value); | ||
} else { | ||
resolver = { resolve, reject }; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
if (poolRecord) { | ||
worker = poolRecord.worker as Worker; | ||
workerId = poolRecord.workerId; | ||
poolRecord.busy = true; | ||
} else if (workerPool.length < maxWorkerNum) { | ||
let url: string; | ||
if (typeof Deno === "object") { | ||
// Deno can load the module regardless of the MIME type. | ||
url = [ | ||
...("file://{__filename}".split("/").slice(0, -1)), | ||
"worker-web.mjs" | ||
].join("/"); | ||
} else { | ||
const _url = options?.webWorkerEntry | ||
|| "https://raw.githubusercontent.com/ayonli/jsext/main/esm/worker-web.mjs"; | ||
const res = await fetch(_url); | ||
let blob: Blob; | ||
if (res.headers.get("content-type")?.startsWith("application/javascript")) { | ||
blob = await res.blob(); | ||
} else { | ||
const buf = await res.arrayBuffer(); | ||
blob = new Blob([new Uint8Array(buf)], { | ||
type: "application/javascript", | ||
}); | ||
} | ||
const { EventEmitter } = await import("events"); | ||
iterator = new EventEmitter(); | ||
url = URL.createObjectURL(blob); | ||
} | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg: any; | ||
worker = new Worker(url, { type: "module" }); | ||
workerId = workerIdCounter.next().value as number; | ||
workerPool.push(poolRecord = { | ||
workerId, | ||
worker, | ||
adapter: "worker_threads", | ||
busy: true, | ||
}); | ||
} else { | ||
return new Promise<void>((resolve) => { | ||
workerConsumerQueue.push(resolve); | ||
}).then(() => run(script, args, options)); | ||
} | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
} | ||
release = () => { | ||
worker.onmessage = null; | ||
poolRecord && (poolRecord.busy = false); | ||
}; | ||
terminate = async () => { | ||
await Promise.resolve(worker.terminate()); | ||
handleExit(); | ||
}; | ||
for await (const msg of jsext.read<any>(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
worker.postMessage(msg); | ||
worker.onmessage = (ev) => handleMessage(ev.data); | ||
worker.onerror = (ev) => handleError(ev.error || new Error(ev.message)); | ||
worker.onmessageerror = () => { | ||
handleError(new Error("unable to deserialize the message")); | ||
}; | ||
} | ||
return { | ||
workerId, | ||
async abort() { | ||
timeout && clearTimeout(timeout); | ||
await terminate(); | ||
}, | ||
async result() { | ||
return await new Promise<any>((resolve, reject) => { | ||
if (error) { | ||
reject(error); | ||
} else if (result) { | ||
resolve(result.value); | ||
} else { | ||
resolver = { resolve, reject }; | ||
} | ||
}); | ||
}, | ||
async *iterate() { | ||
if (resolver) { | ||
throw new Error("result() has been called"); | ||
} else if (result) { | ||
throw new TypeError("the response is not iterable"); | ||
} | ||
const { EventEmitter } = await import("events"); | ||
iterator = new EventEmitter(); | ||
if (buffer.length) { | ||
(async () => { | ||
await Promise.resolve(null); | ||
let msg: any; | ||
while (msg = buffer.shift()) { | ||
iterator.emit("data", msg); | ||
} | ||
})().catch(console.error); | ||
} | ||
for await (const msg of read<any>(iterator)) { | ||
yield msg; | ||
} | ||
}, | ||
}; | ||
} | ||
const jsext = { | ||
try: _try, | ||
func, | ||
wrap, | ||
throttle, | ||
mixins, | ||
isSubclassOf, | ||
read, | ||
run, | ||
}; | ||
export default jsext; |
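For orientation, here is a minimal usage sketch of the `run()` handle built in the code above. The `workerId`, `abort()`, `result()`, and `iterate()` members come from this diff; the module path, its exported `greet` function, and the `fn` option name are assumptions made purely for illustration.

```ts
import jsext from "@ayonli/jsext";

// Hypothetical worker module: ./examples/greet.mjs exporting `greet(name: string): string`.
const task = await jsext.run("./examples/greet.mjs", ["World"], {
    fn: "greet", // assumed option naming the exported function to call
});

console.log(task.workerId);       // id assigned from the worker pool
console.log(await task.result()); // resolved return value, e.g. "Hello, World!"

// Generator-based scripts can be consumed incrementally instead:
// for await (const msg of task.iterate()) console.log(msg);

// A task that is no longer needed can be terminated early:
// await task.abort();
```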
@@ -1,2 +0,2 @@ | ||
import { hasOwn, hasOwnMethod, omit, patch, pick, as } from "."; | ||
import { hasOwn, hasOwnMethod, omit, patch, pick, as, isValid } from "."; | ||
@@ -34,13 +34,22 @@ declare global { | ||
/** | ||
* Returns the object if it's an instance of the given type, otherwise returns `null`. | ||
* This function is mainly used for the optional chaining syntax. | ||
* Checks if the value is an instance of the given type and returns the value itself if the check passes, | ||
* otherwise returns `null`. This function is mainly used for the optional chaining syntax. | ||
* @example | ||
* Object.as(bar, SomeType)?.doSomething(); | ||
*/ | ||
as(obj: any, type: StringConstructor): string | null; | ||
as(obj: any, type: NumberConstructor): number | null; | ||
as(obj: any, type: BigIntConstructor): bigint | null; | ||
as(obj: any, type: BooleanConstructor): boolean | null; | ||
as(obj: any, type: SymbolConstructor): symbol | null; | ||
as<T>(obj: any, type: Constructor<T>): T | null; | ||
as(value: unknown, type: StringConstructor): string | null; | ||
as(value: unknown, type: NumberConstructor): number | null; | ||
as(value: unknown, type: BigIntConstructor): bigint | null; | ||
as(value: unknown, type: BooleanConstructor): boolean | null; | ||
as(value: unknown, type: SymbolConstructor): symbol | null; | ||
as<T>(value: unknown, type: Constructor<T>): T | null; | ||
/** | ||
* Returns `true` if the given value is valid. The following values are considered invalid: | ||
* | ||
* - `undefined` | ||
* - `null` | ||
* - `NaN` | ||
* - `Invalid Date` | ||
*/ | ||
isValid(value: unknown): boolean; | ||
} | ||
@@ -61,1 +70,2 @@ } | ||
Object.as = as; | ||
Object.isValid = isValid; |
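A brief sketch of the augmented `Object` methods registered above, assuming the augment module is imported for its side effects. The import specifier and the `Point` class are illustrative, not taken from the package.

```ts
import "@ayonli/jsext/object/augment"; // assumed specifier; check the package's export map

class Point {
    constructor(public x: number, public y: number) {}
    toString() { return `(${this.x}, ${this.y})`; }
}

const input: unknown = new Point(1, 2);

// `Object.as` returns the value typed as `Point`, or `null`, so optional
// chaining short-circuits safely when the type does not match.
console.log(Object.as(input, Point)?.toString()); // "(1, 2)"
console.log(Object.as(input, String));            // null

// `Object.isValid` screens out undefined, null, NaN and invalid dates.
console.log(Object.isValid(new Date("nonsense"))); // false
console.log(Object.isValid(0));                    // true
```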
@@ -86,14 +86,14 @@ import type { Constructor } from "../index"; | ||
/** | ||
* Returns the object if it's an instance of the given type, otherwise returns `null`. | ||
* This function is mainly used for the optional chaining syntax. | ||
* Checks if the value is an instance of the given type and returns the value itself if the check passes, | ||
* otherwise returns `null`. This function is mainly used for the optional chaining syntax. | ||
* @example | ||
* as(bar, SomeType)?.doSomething(); | ||
* Object.as(bar, SomeType)?.doSomething(); | ||
*/ | ||
export function as(obj: any, type: StringConstructor): string | null; | ||
export function as(obj: any, type: NumberConstructor): number | null; | ||
export function as(obj: any, type: BigIntConstructor): bigint | null; | ||
export function as(obj: any, type: BooleanConstructor): boolean | null; | ||
export function as(obj: any, type: SymbolConstructor): symbol | null; | ||
export function as<T>(obj: any, type: Constructor<T>): T | null; | ||
export function as(obj: any, type: any): any { | ||
export function as(value: unknown, type: StringConstructor): string | null; | ||
export function as(value: unknown, type: NumberConstructor): number | null; | ||
export function as(value: unknown, type: BigIntConstructor): bigint | null; | ||
export function as(value: unknown, type: BooleanConstructor): boolean | null; | ||
export function as(value: unknown, type: SymbolConstructor): symbol | null; | ||
export function as<T>(value: unknown, type: Constructor<T>): T | null; | ||
export function as(value: any, type: any): any { | ||
if (typeof type !== "function") { | ||
@@ -112,10 +112,10 @@ throw new TypeError("type must be a valid constructor"); | ||
if (obj instanceof type) { | ||
if (value instanceof type) { | ||
if ([String, Number, Boolean].includes(type)) { | ||
return obj.valueOf(); // make sure the primitives are returned. | ||
return value.valueOf(); // make sure the primitives are returned. | ||
} else { | ||
return obj; | ||
return value; | ||
} | ||
} else if ((_type = typeof obj) && primitiveMap[_type] === type) { | ||
return obj; | ||
} else if ((_type = typeof value) && primitiveMap[_type] === type) { | ||
return value; | ||
} | ||
@@ -125,1 +125,16 @@ | ||
} | ||
/** | ||
* Returns `true` if the given value is valid. The following values are considered invalid: | ||
* | ||
* - `undefined` | ||
* - `null` | ||
* - `NaN` | ||
* - `Invalid Date` | ||
*/ | ||
export function isValid(value: unknown): boolean { | ||
return value !== undefined | ||
&& value !== null | ||
&& !Object.is(value, NaN) | ||
&& !(value instanceof Date && value.toString() === "Invalid Date"); | ||
} |
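The standalone functions behave the same way when imported directly. A small sketch based on the implementation above (the `@ayonli/jsext/object` specifier is an assumption about the package's export map):

```ts
import { as, isValid } from "@ayonli/jsext/object"; // assumed specifier

// Boxed primitives are unwrapped via `valueOf()`; matching primitives pass through.
as(new String("foo"), String); // "foo" (a primitive string, not a String object)
as("foo", String);             // "foo"
as("foo", Number);             // null

// Exactly the four values listed in the doc comment are treated as invalid.
isValid(undefined);              // false
isValid(null);                   // false
isValid(NaN);                    // false
isValid(new Date("not a date")); // false (Invalid Date)
isValid("");                     // true (everything else counts as valid)
```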
{ | ||
"name": "@ayonli/jsext", | ||
"version": "0.4.5", | ||
"version": "0.4.6", | ||
"description": "Additional functions for JavaScript programming in practice.", | ||
@@ -5,0 +5,0 @@ "exports": { |
@@ -51,2 +51,14 @@ # JsExt | ||
## Types | ||
- `AsyncFunction` | ||
- `AsyncGeneratorFunction` | ||
- `AsyncFunctionConstructor` | ||
- `Constructor<T>` | ||
- `TypedArray` | ||
- `Optional<T, K extends keyof T>` | ||
- `Ensured<T, K extends keyof T>` | ||
When [augment](./augment.ts)ing, these types will be attached to the global namespace. | ||
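A quick sketch of what this enables, assuming the augment entry also installs the async constructors as runtime globals rather than only ambient types (the import specifier is an assumption):

```ts
import "@ayonli/jsext/augment"; // assumed specifier for the augment entry

async function fetchData() { return 42; }
async function* streamData() { yield 1; }

// With the globals in place, async callables can be distinguished at runtime:
console.log(fetchData instanceof AsyncFunction);           // true
console.log(streamData instanceof AsyncGeneratorFunction); // true
console.log(fetchData instanceof AsyncGeneratorFunction);  // false
```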
## Sub-packages | ||
@@ -189,8 +201,9 @@ | ||
- `omit<T>(obj: T, keys: (string | symbol)[]): Partial<T>` | ||
- `as(obj: any, type: StringConstructor): string | null` | ||
- `as(obj: any, type: NumberConstructor): number | null` | ||
- `as(obj: any, type: BigIntConstructor): bigint | null` | ||
- `as(obj: any, type: BooleanConstructor): boolean | null` | ||
- `as(obj: any, type: SymbolConstructor): symbol | null` | ||
- `as<T>(obj: any, type: Constructor<T>): T | null` | ||
- `as(value: unknown, type: StringConstructor): string | null` | ||
- `as(value: unknown, type: NumberConstructor): number | null` | ||
- `as(value: unknown, type: BigIntConstructor): bigint | null` | ||
- `as(value: unknown, type: BooleanConstructor): boolean | null` | ||
- `as(value: unknown, type: SymbolConstructor): symbol | null` | ||
- `as<T>(value: unknown, type: Constructor<T>): T | null` | ||
- `isValid(value: unknown): boolean` | ||
@@ -197,0 +210,0 @@ *When [augment](./object/augment.ts)ing, these functions will be attached to the `Object` constructor.* |