Socket
Socket
Sign inDemoInstall

http-range-fetcher

Package Overview
Dependencies
12
Maintainers
1
Versions
14
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 1.2.5 to 1.3.0

dist/abortcontroller-ponyfill.d.ts

4

CHANGELOG.md

@@ -0,1 +1,5 @@

# [1.3.0](https://github.com/rbuels/http-range-fetcher/compare/v1.2.5...v1.3.0) (2022-02-15)
<a name="1.2.5"></a>

@@ -2,0 +6,0 @@ ## [1.2.5](https://github.com/rbuels/http-range-fetcher/compare/v1.2.4...v1.2.5) (2021-06-03)

46

dist/abortcontroller-ponyfill.js
"use strict";
//@ts-nocheck
/* eslint-disable */
if (typeof AbortController === 'undefined') {
var _require = require('abortcontroller-polyfill/dist/cjs-ponyfill'),
_AbortController = _require.AbortController,
_AbortSignal = _require.AbortSignal;
module.exports = {
AbortController: _AbortController,
AbortSignal: _AbortSignal
};
} else {
module.exports = {
AbortController: AbortController,
AbortSignal: AbortSignal
};
}
Object.defineProperty(exports, "__esModule", { value: true });
exports.AbortSignal = exports.AbortController = void 0;
var cjs_ponyfill_1 = require("abortcontroller-polyfill/dist/cjs-ponyfill");
// Returns the JavaScript global object in a CSP-safe way.
//
// `Function('return this')()` would be the only fully reliable approach, but
// it causes Content-Security-Policy violations (e.g. in Chrome apps), so we
// probe the well-known global aliases instead. `globalThis` (ES2020) is
// checked first; the older aliases keep this working on runtimes that
// predate it (browser window, web worker self, Node global).
var getGlobal = function () {
    if (typeof globalThis !== 'undefined') {
        return globalThis;
    }
    if (typeof self !== 'undefined') {
        return self;
    }
    if (typeof window !== 'undefined') {
        return window;
    }
    if (typeof global !== 'undefined') {
        return global;
    }
    throw new Error('unable to locate global object');
};
//@ts-ignore
//@ts-ignore
// Prefer the platform's native implementation when present, otherwise use
// the ponyfill. NOTE(review): the AbortSignal selection below also tests
// `getGlobal().AbortController` (not `.AbortSignal`) — presumably deliberate
// so both exports come from the same implementation; confirm before changing.
var AbortController = typeof getGlobal().AbortController !== 'undefined'
    ? getGlobal().AbortController
    : cjs_ponyfill_1.AbortController;
exports.AbortController = AbortController;
//@ts-ignore
var AbortSignal = typeof getGlobal().AbortController !== 'undefined'
    ? getGlobal().AbortSignal
    : cjs_ponyfill_1.AbortSignal;
exports.AbortSignal = AbortSignal;
//# sourceMappingURL=abortcontroller-ponyfill.js.map
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime/helpers/slicedToArray"));
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
var _object = _interopRequireDefault(require("object.entries-ponyfill"));
var _abortcontrollerPonyfill = require("./abortcontroller-ponyfill");
// TypeScript interop helper: ES-module namespaces pass through untouched;
// plain CommonJS exports are wrapped as `{ default: mod }`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
//@ts-nocheck
var object_entries_ponyfill_1 = __importDefault(require("object.entries-ponyfill"));
var abortcontroller_ponyfill_1 = require("./abortcontroller-ponyfill");
/**
* takes fetch requests and aggregates them at a certain time frequency
*/
var AggregatingFetcher =
/*#__PURE__*/
function () {
/**
*
* @param {object} params
* @param {number} [params.frequency] number of milliseconds to wait for requests to aggregate
*/
// Stores the user's fetch callback and the aggregation tuning parameters;
// pending requests accumulate in per-URL queues until dispatched.
function AggregatingFetcher(_ref) {
    var frequency = _ref.frequency === void 0 ? 100 : _ref.frequency,
        fetch = _ref.fetch,
        maxExtraSize = _ref.maxExtraSize === void 0 ? 32000 : _ref.maxExtraSize,
        maxFetchSize = _ref.maxFetchSize === void 0 ? 1000000 : _ref.maxFetchSize;
    (0, _classCallCheck2.default)(this, AggregatingFetcher);
    this.requestQueues = {}; // url => array of requests
    this.fetchCallback = fetch;
    this.frequency = frequency;
    this.maxExtraSize = maxExtraSize;
    this.maxFetchSize = maxFetchSize;
}
(0, _createClass2.default)(AggregatingFetcher, [{
key: "_canAggregate",
value: function _canAggregate(requestGroup, request) {
return (// the fetches overlap, or come close
request.start <= requestGroup.end + this.maxExtraSize && // aggregating would not result in a fetch that is too big
request.end - request.start + requestGroup.end - requestGroup.start < this.maxFetchSize
);
} // returns a promise that only resolves
var AggregatingFetcher = /** @class */ (function () {
/**
*
* @param {object} params
* @param {number} [params.frequency] number of milliseconds to wait for requests to aggregate
*/
function AggregatingFetcher(_a) {
var _b = _a.frequency, frequency = _b === void 0 ? 100 : _b, fetch = _a.fetch, _c = _a.maxExtraSize, maxExtraSize = _c === void 0 ? 32000 : _c, _d = _a.maxFetchSize, maxFetchSize = _d === void 0 ? 1000000 : _d;
this.requestQueues = {}; // url => array of requests
this.fetchCallback = fetch;
this.frequency = frequency;
this.maxExtraSize = maxExtraSize;
this.maxFetchSize = maxFetchSize;
}
// True when `request` can be merged into `requestGroup`: the ranges overlap
// (or come within maxExtraSize bytes of each other) and the merged fetch
// would stay below the maxFetchSize cap.
AggregatingFetcher.prototype._canAggregate = function (requestGroup, request) {
    var closeEnough = request.start <= requestGroup.end + this.maxExtraSize;
    var combinedSpan = request.end - request.start + requestGroup.end - requestGroup.start;
    return closeEnough && combinedSpan < this.maxFetchSize;
};
// returns a promise that only resolves
// when all of the signals in the given array
// have fired their abort signal
}, {
key: "_allSignalsFired",
value: function _allSignalsFired(signals) {
return new Promise(function (resolve) {
var signalsLeft = signals.filter(function (s) {
return !s.aborted;
}).length;
signals.forEach(function (signal) {
signal.addEventListener('abort', function () {
signalsLeft -= 1; // console.log('aggregatingfetcher received an abort')
if (!signalsLeft) {
// console.log('aggregatingfetcher aborting aggregated request')
resolve();
}
});
AggregatingFetcher.prototype._allSignalsFired = function (signals) {
return new Promise(function (resolve) {
var signalsLeft = signals.filter(function (s) { return !s.aborted; }).length;
signals.forEach(function (signal) {
signal.addEventListener('abort', function () {
signalsLeft -= 1;
// console.log('aggregatingfetcher received an abort')
if (!signalsLeft) {
// console.log('aggregatingfetcher aborting aggregated request')
resolve();
}
});
});
}).catch(function (e) {
// eslint-disable-next-line no-console
console.error(e);
});
}).catch(function (e) {
// eslint-disable-next-line no-console
console.error(e);
});
} // dispatch a request group as a single request
};
// dispatch a request group as a single request
// and then slice the result back up to satisfy
// the individual requests
}, {
key: "_dispatch",
value: function _dispatch(_ref2) {
var url = _ref2.url,
start = _ref2.start,
end = _ref2.end,
requests = _ref2.requests;
// if any of the requests have an AbortSignal `signal` in their requestOptions,
// make our aggregating abortcontroller track it, aborting the request if
// all of the abort signals that are aggregated here have fired
var abortWholeRequest = new _abortcontrollerPonyfill.AbortController();
var signals = [];
requests.forEach(function (_ref3) {
var requestOptions = _ref3.requestOptions;
if (requestOptions && requestOptions.signal) signals.push(requestOptions.signal);
});
if (signals.length === requests.length) {
this._allSignalsFired(signals).then(function () {
return abortWholeRequest.abort();
AggregatingFetcher.prototype._dispatch = function (_a) {
// if any of the requests have an AbortSignal `signal` in their requestOptions,
// make our aggregating abortcontroller track it, aborting the request if
// all of the abort signals that are aggregated here have fired
var url = _a.url, start = _a.start, end = _a.end, requests = _a.requests;
var abortWholeRequest = new abortcontroller_ponyfill_1.AbortController();
var signals = [];
requests.forEach(function (_a) {
var requestOptions = _a.requestOptions;
if (requestOptions && requestOptions.signal) {
signals.push(requestOptions.signal);
}
});
}
this.fetchCallback(url, start, end - 1, {
signal: abortWholeRequest.signal
}).then(function (response) {
var data = response.buffer;
requests.forEach(function (_ref4) {
var reqStart = _ref4.start,
reqEnd = _ref4.end,
resolve = _ref4.resolve;
// remember Buffer.slice does not copy, it creates
// an offset child buffer pointing to the same data
resolve({
headers: response.headers,
buffer: data.slice(reqStart - start, reqEnd - start)
});
if (signals.length === requests.length) {
this._allSignalsFired(signals).then(function () { return abortWholeRequest.abort(); });
}
this.fetchCallback(url, start, end - 1, {
signal: abortWholeRequest.signal,
}).then(function (response) {
var data = response.buffer;
requests.forEach(function (_a) {
var reqStart = _a.start, reqEnd = _a.end, resolve = _a.resolve;
// remember Buffer.slice does not copy, it creates
// an offset child buffer pointing to the same data
resolve({
headers: response.headers,
buffer: data.slice(reqStart - start, reqEnd - start),
});
});
}, function (err) {
requests.forEach(function (_a) {
var reject = _a.reject;
return reject(err);
});
});
}, function (err) {
requests.forEach(function (_ref5) {
var reject = _ref5.reject;
return reject(err);
};
// Drains every per-URL request queue: rejects already-aborted requests,
// sorts the rest by start offset, merges adjacent/overlapping ranges into
// groups via _canAggregate, and hands each group to _dispatch.
// NOTE(review): the closing `};` of this method is missing below — this file
// appears to be a corrupted merge of two builds; confirm against the
// published dist.
AggregatingFetcher.prototype._aggregateAndDispatch = function () {
var _this = this;
(0, object_entries_ponyfill_1.default)(this.requestQueues).forEach(function (_a) {
var url = _a[0], requests = _a[1];
if (!requests || !requests.length) {
return;
}
// console.log(url, requests)
// we are now going to aggregate the requests in this url's queue
// into groups of requests that can be dispatched as one
var requestsToDispatch = [];
// look to see if any of the requests are aborted, and if they are, just
// reject them now and forget about them
requests.forEach(function (request) {
var requestOptions = request.requestOptions, reject = request.reject;
if (requestOptions &&
requestOptions.signal &&
requestOptions.signal.aborted) {
reject(Object.assign(new Error('aborted'), { code: 'ERR_ABORTED' }));
}
else {
requestsToDispatch.push(request);
}
});
// sort by start offset so a single left-to-right scan can group ranges
requestsToDispatch.sort(function (a, b) { return a.start - b.start; });
// empty the queue in place so callers holding the array see it drained
// eslint-disable-next-line no-param-reassign
requests.length = 0;
if (!requestsToDispatch.length) {
return;
}
var currentRequestGroup;
for (var i = 0; i < requestsToDispatch.length; i += 1) {
var next = requestsToDispatch[i];
if (currentRequestGroup &&
_this._canAggregate(currentRequestGroup, next)) {
// aggregate it into the current group
currentRequestGroup.requests.push(next);
currentRequestGroup.end = next.end;
}
else {
// out of range, dispatch the current request group
if (currentRequestGroup) {
_this._dispatch(currentRequestGroup);
}
// and start on a new one
currentRequestGroup = {
requests: [next],
url: url,
start: next.start,
end: next.end,
};
}
}
// flush the final group
if (currentRequestGroup) {
_this._dispatch(currentRequestGroup);
}
});
});
}
}, {
key: "_aggregateAndDispatch",
value: function _aggregateAndDispatch() {
var _this = this;
(0, _object.default)(this.requestQueues).forEach(function (_ref6) {
var _ref7 = (0, _slicedToArray2.default)(_ref6, 2),
url = _ref7[0],
requests = _ref7[1];
if (!requests || !requests.length) return; // console.log(url, requests)
// we are now going to aggregate the requests in this url's queue
// into groups of requests that can be dispatched as one
var requestsToDispatch = []; // look to see if any of the requests are aborted, and if they are, just
// reject them now and forget about them
requests.forEach(function (request) {
var requestOptions = request.requestOptions,
reject = request.reject;
if (requestOptions && requestOptions.signal && requestOptions.signal.aborted) {
reject(Object.assign(new Error('aborted'), {
code: 'ERR_ABORTED'
}));
} else {
requestsToDispatch.push(request);
}
});
requestsToDispatch.sort(function (a, b) {
return a.start - b.start;
}); // eslint-disable-next-line no-param-reassign
requests.length = 0;
if (!requestsToDispatch.length) return;
var currentRequestGroup;
for (var i = 0; i < requestsToDispatch.length; i += 1) {
var next = requestsToDispatch[i];
if (currentRequestGroup && _this._canAggregate(currentRequestGroup, next)) {
// aggregate it into the current group
currentRequestGroup.requests.push(next);
currentRequestGroup.end = next.end;
} else {
// out of range, dispatch the current request group
if (currentRequestGroup) _this._dispatch(currentRequestGroup); // and start on a new one
currentRequestGroup = {
requests: [next],
url: url,
start: next.start,
end: next.end
};
}
};
AggregatingFetcher.prototype._enQueue = function (url, request) {
if (!this.requestQueues[url]) {
this.requestQueues[url] = [];
}
if (currentRequestGroup) _this._dispatch(currentRequestGroup);
});
}
}, {
key: "_enQueue",
// Appends a pending range request to the per-URL queue, creating the
// queue array on first use.
value: function _enQueue(url, request) {
if (!this.requestQueues[url]) this.requestQueues[url] = [];
this.requestQueues[url].push(request);
}
this.requestQueues[url].push(request);
};
/**

@@ -197,31 +165,18 @@ *

*/
}, {
key: "fetch",
value: function fetch(url, start, end) {
var _this2 = this;
var requestOptions = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
return new Promise(function (resolve, reject) {
_this2._enQueue(url, {
start: start,
end: end,
resolve: resolve,
reject: reject,
requestOptions: requestOptions
// Queues a range request for `url` and returns a promise for its slice of
// the eventual aggregated response. The first queued request arms a single
// timer (frequency ms, minimum 1) that later fires _aggregateAndDispatch
// for everything queued in the meantime.
// NOTE(review): the closing `};` of this method is missing below — this file
// appears to be a corrupted merge of two builds; confirm against the
// published dist.
AggregatingFetcher.prototype.fetch = function (url, start, end, requestOptions) {
var _this = this;
if (requestOptions === void 0) { requestOptions = {}; }
return new Promise(function (resolve, reject) {
_this._enQueue(url, { start: start, end: end, resolve: resolve, reject: reject, requestOptions: requestOptions });
if (!_this.timeout) {
_this.timeout = setTimeout(function () {
_this.timeout = undefined;
_this._aggregateAndDispatch();
}, _this.frequency || 1);
}
});
if (!_this2.timeout) {
_this2.timeout = setTimeout(function () {
_this2.timeout = undefined;
_this2._aggregateAndDispatch();
}, _this2.frequency || 1);
}
});
}
}]);
return AggregatingFetcher;
}();
module.exports = AggregatingFetcher;
};
return AggregatingFetcher;
}());
exports.default = AggregatingFetcher;
//# sourceMappingURL=aggregatingFetcher.js.map
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheSemantics = exports.parseCacheControl = void 0;
//@ts-nocheck
function parseCacheControl(field) {
if (typeof field !== 'string') {
return {};
}
var parsed = {};
var invalid = field.toLowerCase().replace( // eslint-disable-next-line no-control-regex,no-useless-escape
/(?:^|(?:\s*,\s*))([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)(?:\=(?:([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)|(?:\"((?:[^"\\]|\\.)*)\")))?/g, function (match, fieldName, three, four) {
var value = three || four;
parsed[fieldName] = value ? value.toLowerCase() : true;
return '';
});
if (invalid) return {}; // parse any things that seem to be numbers
Object.keys(parsed).forEach(function (key) {
if (/^[\d]+$/.test(parsed[key])) {
try {
var num = parseInt(parsed[key], 10);
if (!Number.isNaN(num)) {
parsed[key] = num;
if (typeof field !== 'string') {
return {};
}
var parsed = {};
var invalid = field.toLowerCase().replace(
// eslint-disable-next-line no-control-regex,no-useless-escape
/(?:^|(?:\s*,\s*))([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)(?:\=(?:([^\x00-\x20\(\)<>@\,;\:\\"\/\[\]\?\=\{\}\x7F]+)|(?:\"((?:[^"\\]|\\.)*)\")))?/g, function (match, fieldName, three, four) {
var value = three || four;
parsed[fieldName] = value ? value.toLowerCase() : true;
return '';
});
if (invalid) {
return {};
}
// parse any things that seem to be numbers
Object.keys(parsed).forEach(function (key) {
if (/^[\d]+$/.test(parsed[key])) {
try {
var num = parseInt(parsed[key], 10);
if (!Number.isNaN(num)) {
parsed[key] = num;
}
}
catch (e) {
/* ignore */
}
}
} catch (e) {
/* ignore */
}
}
});
return parsed;
});
return parsed;
}
var CacheSemantics =
/*#__PURE__*/
function () {
function CacheSemantics(_ref) {
var minimumTTL = _ref.minimumTTL;
(0, _classCallCheck2.default)(this, CacheSemantics);
this.minimumTTL = minimumTTL;
}
(0, _createClass2.default)(CacheSemantics, [{
key: "calculateChunkExpirationDate",
value: function calculateChunkExpirationDate(chunkResponse) {
var _chunkResponse$header = chunkResponse.headers,
headers = _chunkResponse$header === void 0 ? {} : _chunkResponse$header,
requestDate = chunkResponse.requestDate,
responseDate = chunkResponse.responseDate;
var baselineDate = responseDate || requestDate;
if (!baselineDate) {
if (!headers.date) return undefined;
baselineDate = new Date(headers.date);
}
var basePlus = function basePlus(ttl) {
return new Date(baselineDate.getTime() + ttl);
}; // results that are not really cacheable expire after the minimum time to live
if (/\bno-cache\b/.test(headers.pragma)) return basePlus(this.minimumTTL);
var cacheControl = parseCacheControl(headers['cache-control']);
if (cacheControl['no-cache'] || cacheControl['no-store'] || cacheControl['must-revalidate']) return basePlus(this.minimumTTL);
if (cacheControl['max-age'] !== undefined) {
var ttl = cacheControl['max-age'] * 1000; // max-age is in seconds
return basePlus(Math.max(ttl, this.minimumTTL));
} else if (this._coerceToDate(headers.expires)) {
return this._coerceToDate(headers.expires);
} else if (this._coerceToDate(headers['last-modified'])) {
var lastModified = this._coerceToDate(headers['last-modified']);
var _ttl = (baselineDate.getTime() - lastModified.getTime()) / 10;
return basePlus(_ttl);
} // otherwise, we just cache forever
return undefined;
exports.parseCacheControl = parseCacheControl;
var CacheSemantics = /** @class */ (function () {
function CacheSemantics(_a) {
var minimumTTL = _a.minimumTTL;
this.minimumTTL = minimumTTL;
}
}, {
key: "_coerceToDate",
value: function _coerceToDate(thing) {
if (thing) {
if (thing instanceof Date) return thing;
if (typeof thing === 'string' || typeof thing === 'number') return new Date(thing);
}
return undefined;
}
// Computes when a cached chunk response should expire, from its HTTP
// caching headers. Returns a Date, or undefined to mean "cache forever"
// (also returned when no baseline date can be established at all).
// Precedence: pragma no-cache and Cache-Control no-cache/no-store/
// must-revalidate get the minimum TTL; then max-age; then Expires; then a
// heuristic based on Last-Modified.
CacheSemantics.prototype.calculateChunkExpirationDate = function (chunkResponse) {
var _a = chunkResponse.headers, headers = _a === void 0 ? {} : _a, requestDate = chunkResponse.requestDate, responseDate = chunkResponse.responseDate;
// prefer the response timestamp, then the request timestamp, then the
// server's own Date header
var baselineDate = responseDate || requestDate;
if (!baselineDate) {
if (!headers.date) {
return undefined;
}
baselineDate = new Date(headers.date);
}
var basePlus = function (ttl) { return new Date(baselineDate.getTime() + ttl); };
// results that are not really cacheable expire after the minimum time to live
if (/\bno-cache\b/.test(headers.pragma)) {
return basePlus(this.minimumTTL);
}
var cacheControl = parseCacheControl(headers['cache-control']);
if (cacheControl['no-cache'] ||
cacheControl['no-store'] ||
cacheControl['must-revalidate']) {
return basePlus(this.minimumTTL);
}
if (cacheControl['max-age'] !== undefined) {
var ttl = cacheControl['max-age'] * 1000; // max-age is in seconds
return basePlus(Math.max(ttl, this.minimumTTL));
}
else if (this._coerceToDate(headers.expires)) {
return this._coerceToDate(headers.expires);
}
else if (this._coerceToDate(headers['last-modified'])) {
var lastModified = this._coerceToDate(headers['last-modified']);
// heuristic TTL: 10% of the document's age at fetch time
var ttl = (baselineDate.getTime() - lastModified.getTime()) / 10;
return basePlus(ttl);
}
// otherwise, we just cache forever
return undefined;
};
// Best-effort conversion of a header value to a Date: Date instances pass
// through, strings and numbers go through the Date constructor, anything
// else (including all falsy values) yields undefined.
CacheSemantics.prototype._coerceToDate = function (thing) {
    if (!thing) {
        return undefined;
    }
    if (thing instanceof Date) {
        return thing;
    }
    var kind = typeof thing;
    if (kind === 'string' || kind === 'number') {
        return new Date(thing);
    }
    return undefined;
};
/**

@@ -103,9 +93,6 @@ * check whether a cached chunk response is still valid and can be used

*/
}, {
key: "cachedChunkIsValid",
value: function cachedChunkIsValid(chunkResponse) {
var expiration = this.calculateChunkExpirationDate(chunkResponse);
return !expiration || new Date() <= expiration;
}
// A cached chunk is usable when it has no expiration date at all, or when
// that date has not yet passed.
CacheSemantics.prototype.cachedChunkIsValid = function (chunkResponse) {
    var expiration = this.calculateChunkExpirationDate(chunkResponse);
    if (!expiration) {
        return true;
    }
    return new Date() <= expiration;
};
/**

@@ -116,17 +103,10 @@ * check whether the response for this chunk fetch can be cached

*/
}, {
key: "chunkIsCacheable",
value: function chunkIsCacheable() {
// right now, we are caching everything, we just give it a very short
// time to live if it's not supposed to be cached
return true;
}
}]);
return CacheSemantics;
}();
module.exports = {
CacheSemantics: CacheSemantics,
parseCacheControl: parseCacheControl
};
// Everything is currently considered cacheable; responses that should not be
// cached simply receive a very short time-to-live instead of being excluded.
CacheSemantics.prototype.chunkIsCacheable = function () {
    return true;
};
return CacheSemantics;
}());
exports.CacheSemantics = CacheSemantics;
//# sourceMappingURL=cacheSemantics.js.map
"use strict";
var crossFetch = require('cross-fetch');
function crossFetchBinaryRange(url, start, end) {
var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
var requestDate = new Date();
var fetchOptions = Object.assign({
method: 'GET',
headers: {
range: "bytes=".concat(start, "-").concat(end)
}
}, options);
return crossFetch(url, fetchOptions).then(function (res) {
var responseDate = new Date();
if (res.status !== 206 && res.status !== 200) throw new Error("HTTP ".concat(res.status, " when fetching ").concat(url, " bytes ").concat(start, "-").concat(end));
if (res.status === 200) {
// TODO: check that the response satisfies the byte range,
// and is not too big (check maximum size),
// because we actually ended up getting served the whole file
throw new Error("HTTP ".concat(res.status, " when fetching ").concat(url, " bytes ").concat(start, "-").concat(end));
}
var bufPromise = res.buffer ? res.buffer() : res.arrayBuffer().then(function (arrayBuffer) {
return Buffer.from(arrayBuffer);
}); // return the response headers, and the data buffer
return bufPromise.then(function (buffer) {
return {
headers: res.headers.map,
requestDate: requestDate,
responseDate: responseDate,
buffer: buffer
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
//@ts-nocheck
var cross_fetch_1 = __importDefault(require("cross-fetch"));
/**
 * Fetch a byte range of `url` via cross-fetch and resolve with the response
 * headers plus a Buffer of the body.
 *
 * Defect fixed: the original text contained a stray extra `});` before the
 * closing brace (an artifact of two builds being merged into this file),
 * which left the function unbalanced.
 *
 * @param {string} url resource to fetch
 * @param {number} start first byte of the range (inclusive)
 * @param {number} end last byte of the range (inclusive)
 * @param {object} [options] extra fetch options merged over the defaults
 * @returns {Promise<{headers, requestDate, responseDate, buffer}>}
 * @throws {Error} for any status other than 206 — including 200, which means
 *   the server ignored the Range header and served the whole file
 */
function crossFetchBinaryRange(url, start, end, options) {
    if (options === void 0) { options = {}; }
    var requestDate = new Date();
    var fetchOptions = Object.assign({
        method: 'GET',
        headers: { range: "bytes=".concat(start, "-").concat(end) },
    }, options);
    return (0, cross_fetch_1.default)(url, fetchOptions).then(function (res) {
        var responseDate = new Date();
        if (res.status !== 206 && res.status !== 200) {
            throw new Error("HTTP ".concat(res.status, " when fetching ").concat(url, " bytes ").concat(start, "-").concat(end));
        }
        if (res.status === 200) {
            // TODO: check that the response satisfies the byte range,
            // and is not too big (check maximum size),
            // because we actually ended up getting served the whole file
            throw new Error("HTTP ".concat(res.status, " when fetching ").concat(url, " bytes ").concat(start, "-").concat(end));
        }
        // node-fetch responses expose buffer(); standard fetch needs arrayBuffer()
        var bufPromise = res.buffer
            ? res.buffer()
            : res.arrayBuffer().then(function (arrayBuffer) { return Buffer.from(arrayBuffer); });
        // return the response headers, and the data buffer
        // NOTE(review): res.headers.map relies on node-fetch internals — confirm
        return bufPromise.then(function (buffer) { return ({
            headers: res.headers.map,
            requestDate: requestDate,
            responseDate: responseDate,
            buffer: buffer,
        }); });
    });
}
module.exports = crossFetchBinaryRange;
exports.default = crossFetchBinaryRange;
//# sourceMappingURL=crossFetchBinaryRange.js.map
"use strict";
var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime/helpers/toConsumableArray"));
var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
var LRU = require('quick-lru');
var _require = require('./cacheSemantics'),
CacheSemantics = _require.CacheSemantics;
var AggregatingFetcher = require('./aggregatingFetcher');
var crossFetchBinaryRange = require('./crossFetchBinaryRange');
// TypeScript async/await downlevel helper: drives a generator to completion,
// awaiting every yielded value, and settles the returned promise with the
// generator's return value (or its first uncaught error).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    if (!P) {
        P = Promise;
    }
    // Wrap plain values so everything yielded can be awaited uniformly.
    function adopt(value) {
        return value instanceof P ? value : new P(function (resolve) { resolve(value); });
    }
    return new P(function (resolve, reject) {
        function step(result) {
            if (result.done) {
                resolve(result.value);
            } else {
                adopt(result.value).then(fulfilled, rejected);
            }
        }
        function fulfilled(value) {
            try { step(generator.next(value)); } catch (e) { reject(e); }
        }
        function rejected(value) {
            try { step(generator["throw"](value)); } catch (e) { reject(e); }
        }
        generator = generator.apply(thisArg, _arguments || []);
        step(generator.next());
    });
};
// TypeScript generator downlevel helper: builds an iterator object whose
// next/throw/return calls re-enter the compiled state-machine `body` with a
// shared state record `_` (label = resume point, sent = last awaited value,
// trys = active try-region stack, ops = pending finalizers). The opcode
// protocol ([op, value] pairs) is the standard tslib one; this dense state
// machine is intentionally left byte-identical.
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
// verb(n) produces the iterator method that feeds opcode n into step()
function verb(n) { return function (v) { return step([n, v]); }; }
// step() advances the state machine by one resumption; f guards against
// reentrancy, y holds a delegated inner iterator when yield* is active
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
// TypeScript spread helper: returns `to` concatenated with the elements of
// `from`. In "pack" mode (or when called with exactly two arguments) sparse
// holes in `from` are materialized as explicit `undefined` slots before
// concatenation.
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
    var copied;
    if (pack || arguments.length === 2) {
        for (var idx = 0, len = from.length; idx < len; idx++) {
            if (copied || !(idx in from)) {
                if (!copied) {
                    copied = Array.prototype.slice.call(from, 0, idx);
                }
                copied[idx] = from[idx];
            }
        }
    }
    return to.concat(copied || Array.prototype.slice.call(from));
};
// Interop shim for default-importing CommonJS modules: ES-module namespaces
// are returned unchanged, everything else is wrapped under a `default` key.
var __importDefault = (this && this.__importDefault) || function (mod) {
    var isEsModule = Boolean(mod && mod.__esModule);
    return isEsModule ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
//@ts-nocheck
var quick_lru_1 = __importDefault(require("quick-lru"));
var cacheSemantics_1 = require("./cacheSemantics");
var aggregatingFetcher_1 = __importDefault(require("./aggregatingFetcher"));
var crossFetchBinaryRange_1 = __importDefault(require("./crossFetchBinaryRange"));
/**

@@ -28,17 +61,17 @@ * check if the given exception was caused by an operation being intentionally aborted

*/
/**
 * Check if the given exception was caused by an operation being
 * intentionally aborted, in any of the shapes seen in practice:
 * a DOMException AbortError, a Node-style ERR_ABORTED code, or an error
 * whose message merely mentions an abort (e.g. bubbled through RPC).
 */
function isAbortException(exception) {
    // DOMException
    if (exception.name === 'AbortError') {
        return true;
    }
    // standard-ish non-DOM abort exception
    // @ts-ignore
    if (exception.code === 'ERR_ABORTED') {
        return true;
    }
    // message contains aborted for bubbling through RPC
    // things we have seen that we want to catch here
    // Error: aborted
    // AbortError: aborted
    // AbortError: The user aborted a request.
    return !!exception.message.match(/\b(aborted|AbortError)\b/i);
} // TODO: fire events when a remote file is detected as having been changed
return (
// DOMException
exception.name === 'AbortError' ||
// standard-ish non-DOM abort exception
// @ts-ignore
exception.code === 'ERR_ABORTED' ||
// message contains aborted for bubbling through RPC
// things we have seen that we want to catch here
// Error: aborted
// AbortError: aborted
// AbortError: The user aborted a request.
!!exception.message.match(/\b(aborted|AbortError)\b/i));
}
// TODO: fire events when a remote file is detected as having been changed
/**

@@ -48,203 +81,109 @@ * smart cache that fetches chunks of remote files.

*/
var HttpRangeFetcher =
/*#__PURE__*/
function () {
/**
* @param {object} args the arguments object
* @param {number} [args.fetch] callback with signature `(key, start, end) => Promise({ headers, buffer })`
* @param {number} [args.size] size in bytes of cache to keep
* @param {number} [args.chunkSize] size in bytes of cached chunks
* @param {number} [args.aggregationTime] time in ms over which to pool requests before dispatching them
* @param {number} [args.minimumTTL] time in ms a non-cacheable response will be cached
* @param {number} [args.maxFetchSize] maximum size of an aggregated request
* @param {number} [args.maxExtraFetch] max number of additional bytes to fetch when aggregating requests
* that don't actually overlap
*/
// Wires together the chunk LRU cache, the aggregating fetcher, and the
// cache-semantics helper from the user's configuration (all defaults as
// documented in the JSDoc above).
function HttpRangeFetcher(_ref) {
    var fetch = _ref.fetch === void 0 ? crossFetchBinaryRange : _ref.fetch,
        size = _ref.size === void 0 ? 10000000 : _ref.size,
        chunkSize = _ref.chunkSize === void 0 ? 32768 : _ref.chunkSize,
        aggregationTime = _ref.aggregationTime === void 0 ? 100 : _ref.aggregationTime,
        minimumTTL = _ref.minimumTTL === void 0 ? 1000 : _ref.minimumTTL,
        maxFetchSize = _ref.maxFetchSize === void 0 ? chunkSize * 4 : _ref.maxFetchSize,
        maxExtraFetch = _ref.maxExtraFetch === void 0 ? chunkSize : _ref.maxExtraFetch;
    (0, _classCallCheck2.default)(this, HttpRangeFetcher);
    // pools nearby range requests into aggregated fetches
    this.aggregator = new AggregatingFetcher({
        fetch: fetch,
        frequency: aggregationTime,
        maxFetchSize: maxFetchSize,
        maxExtraSize: maxExtraFetch
    });
    this.chunkSize = chunkSize;
    // LRU of cached chunks, sized by how many chunks fit in `size` bytes
    this.chunkCache = new LRU({
        maxSize: Math.floor(size / chunkSize) || 1
    });
    this.cacheSemantics = new CacheSemantics({
        minimumTTL: minimumTTL
    });
    // small LRU — presumably caches per-key stat results used by getRange;
    // confirm against the stat() method elsewhere in the file
    this.stats = new LRU({
        maxSize: 20
    });
}
/**
* Fetch a range of a remote resource.
* @param {string} key the resource's unique identifier, this would usually be a URL.
* This is passed along to the fetch callback.
* @param {number} [position] offset in the file at which to start fetching
* @param {number} [length] number of bytes to fetch, defaults to the remainder of the file
* @param {object} [options] request options
* @param {AbortSignal} [options.signal] AbortSignal object that can be used to abort the fetch
*/
(0, _createClass2.default)(HttpRangeFetcher, [{
key: "getRange",
value: function () {
var _getRange = (0, _asyncToGenerator2.default)(
/*#__PURE__*/
_regenerator.default.mark(function _callee(key) {
var _this = this;
var position,
requestedLength,
options,
length,
stat,
firstChunk,
lastChunk,
fetches,
_loop,
chunk,
chunkResponses,
chunksOffset,
_args = arguments;
return _regenerator.default.wrap(function _callee$(_context) {
while (1) {
switch (_context.prev = _context.next) {
case 0:
position = _args.length > 1 && _args[1] !== undefined ? _args[1] : 0;
requestedLength = _args.length > 2 ? _args[2] : undefined;
options = _args.length > 3 && _args[3] !== undefined ? _args[3] : {};
length = requestedLength;
if (!(length === undefined)) {
_context.next = 11;
break;
}
_context.next = 7;
return this.stat(key);
case 7:
stat = _context.sent;
if (!(stat.size === undefined)) {
_context.next = 10;
break;
}
throw new Error("length not specified, and could not determine size of the remote file");
case 10:
length = stat.size - position;
case 11:
// calculate the list of chunks involved in this fetch
firstChunk = Math.floor(position / this.chunkSize);
lastChunk = Math.floor((position + length - 1) / this.chunkSize); // fetch them all as necessary
fetches = new Array(lastChunk - firstChunk + 1);
_loop = function _loop(chunk) {
fetches[chunk - firstChunk] = _this._getChunk(key, chunk, options).then(function (response) {
return response && {
headers: response.headers,
buffer: response.buffer,
chunkNumber: chunk
};
});
};
for (chunk = firstChunk; chunk <= lastChunk; chunk += 1) {
_loop(chunk);
} // return a "composite buffer" that lets the array of chunks be accessed like a flat buffer
_context.next = 18;
return Promise.all(fetches);
case 18:
chunkResponses = _context.sent;
chunkResponses = chunkResponses.filter(function (r) {
return !!r;
}); // filter out any undefined (out of range) responses
if (chunkResponses.length) {
_context.next = 22;
break;
}
return _context.abrupt("return", {
headers: {},
buffer: Buffer.allocUnsafe(0)
});
case 22:
chunksOffset = position - chunkResponses[0].chunkNumber * this.chunkSize;
return _context.abrupt("return", {
headers: this._makeHeaders(chunkResponses[0].headers, position, position + length - 1),
buffer: this._makeBuffer(chunkResponses, chunksOffset, length)
});
case 24:
case "end":
return _context.stop();
}
}
}, _callee, this);
}));
function getRange(_x) {
return _getRange.apply(this, arguments);
}
return getRange;
}()
}, {
key: "_makeBuffer",
value: function _makeBuffer(chunkResponses, chunksOffset, length) {
if (chunkResponses.length === 1) {
return chunkResponses[0].buffer.slice(chunksOffset, chunksOffset + length);
} else if (chunkResponses.length === 0) {
return Buffer.allocUnsafe(0);
} // 2 or more buffers
var buffers = chunkResponses.map(function (r) {
return r.buffer;
});
var first = buffers.shift().slice(chunksOffset);
var last = buffers.pop();
var trimEnd = first.length + buffers.reduce(function (sum, buf) {
return sum + buf.length;
}, 0) + last.length - length;
if (trimEnd < 0) {
trimEnd = 0;
}
last = last.slice(0, last.length - trimEnd);
return Buffer.concat([first].concat((0, _toConsumableArray2.default)(buffers), [last]));
var HttpRangeFetcher = /** @class */ (function () {
/**
* @param {object} args the arguments object
* @param {number} [args.fetch] callback with signature `(key, start, end) => Promise({ headers, buffer })`
* @param {number} [args.size] size in bytes of cache to keep
* @param {number} [args.chunkSize] size in bytes of cached chunks
* @param {number} [args.aggregationTime] time in ms over which to pool requests before dispatching them
* @param {number} [args.minimumTTL] time in ms a non-cacheable response will be cached
* @param {number} [args.maxFetchSize] maximum size of an aggregated request
* @param {number} [args.maxExtraFetch] max number of additional bytes to fetch when aggregating requests
* that don't actually overlap
*/
/**
 * Construct an HttpRangeFetcher (compiled ES5 constructor body).
 * Applies the documented defaults for any option the caller omitted, then
 * wires up the aggregating fetcher, the LRU chunk cache, the cache-semantics
 * helper, and a small LRU of per-resource stat objects.
 */
function HttpRangeFetcher(_a) {
    // Read each option exactly once, falling back to its default when the
    // caller left it undefined. (Later defaults may depend on earlier
    // resolved values, e.g. maxFetchSize depends on chunkSize.)
    var fetch = _a.fetch;
    if (fetch === void 0) fetch = crossFetchBinaryRange_1.default;
    var size = _a.size;
    if (size === void 0) size = 10000000;
    var chunkSize = _a.chunkSize;
    if (chunkSize === void 0) chunkSize = 32768;
    var aggregationTime = _a.aggregationTime;
    if (aggregationTime === void 0) aggregationTime = 100;
    var minimumTTL = _a.minimumTTL;
    if (minimumTTL === void 0) minimumTTL = 1000;
    var maxFetchSize = _a.maxFetchSize;
    if (maxFetchSize === void 0) maxFetchSize = chunkSize * 4;
    var maxExtraFetch = _a.maxExtraFetch;
    if (maxExtraFetch === void 0) maxExtraFetch = chunkSize;
    // Pools temporally-close range requests into larger HTTP fetches.
    this.aggregator = new aggregatingFetcher_1.default({
        fetch: fetch,
        frequency: aggregationTime,
        maxFetchSize: maxFetchSize,
        maxExtraSize: maxExtraFetch,
    });
    this.chunkSize = chunkSize;
    // LRU of chunk promises, sized in whole chunks (at least one slot).
    this.chunkCache = new quick_lru_1.default({ maxSize: Math.floor(size / chunkSize) || 1 });
    this.cacheSemantics = new cacheSemantics_1.CacheSemantics({ minimumTTL: minimumTTL });
    // Small cache of per-resource stat objects derived from response headers.
    this.stats = new quick_lru_1.default({ maxSize: 20 });
}
/**
* Fetch a range of a remote resource.
* @param {string} key the resource's unique identifier, this would usually be a URL.
* This is passed along to the fetch callback.
* @param {number} [position] offset in the file at which to start fetching
* @param {number} [length] number of bytes to fetch, defaults to the remainder of the file
* @param {object} [options] request options
* @param {AbortSignal} [options.signal] AbortSignal object that can be used to abort the fetch
*/
/**
 * Compiled (tsc -> ES5) body of HttpRangeFetcher#getRange.
 * Fetches bytes [position, position+length) of resource `key` by fanning
 * out to the fixed-size chunk cache, then reassembling the chunk buffers.
 * The numbered `case` labels below are the compiled await points of the
 * original async function (__awaiter/__generator are the TypeScript
 * helpers emitted at the top of this file).
 */
HttpRangeFetcher.prototype.getRange = function (key, position, requestedLength, options) {
    if (position === void 0) { position = 0; }
    if (options === void 0) { options = {}; }
    return __awaiter(this, void 0, void 0, function () {
        var length, stat, firstChunk, lastChunk, fetches, _loop_1, this_1, chunk, chunkResponses, chunksOffset;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    // Default the length to "rest of the file", which
                    // requires a stat() round-trip to learn the file size.
                    length = requestedLength;
                    if (!(length === undefined)) return [3 /*break*/, 2];
                    return [4 /*yield*/, this.stat(key)];
                case 1:
                    stat = _a.sent();
                    if (stat.size === undefined) {
                        throw new Error("length not specified, and could not determine size of the remote file");
                    }
                    length = stat.size - position;
                    _a.label = 2;
                case 2:
                    // Calculate the inclusive range of chunk numbers covering
                    // the byte range, then dispatch one _getChunk per chunk.
                    firstChunk = Math.floor(position / this.chunkSize);
                    lastChunk = Math.floor((position + length - 1) / this.chunkSize);
                    fetches = new Array(lastChunk - firstChunk + 1);
                    _loop_1 = function (chunk) {
                        fetches[chunk - firstChunk] = this_1._getChunk(key, chunk, options).then(function (response) {
                            // Tag each response with its chunk number so the
                            // offset math below can use the first one present.
                            return response && {
                                headers: response.headers,
                                buffer: response.buffer,
                                chunkNumber: chunk,
                            };
                        });
                    };
                    this_1 = this;
                    for (chunk = firstChunk; chunk <= lastChunk; chunk += 1) {
                        _loop_1(chunk);
                    }
                    return [4 /*yield*/, Promise.all(fetches)];
                case 3:
                    chunkResponses = _a.sent();
                    chunkResponses = chunkResponses.filter(function (r) { return !!r; }); // filter out any undefined (out of range) responses
                    if (!chunkResponses.length) {
                        // Entire requested range was past the end of the file.
                        return [2 /*return*/, { headers: {}, buffer: Buffer.allocUnsafe(0) }];
                    }
                    chunksOffset = position - chunkResponses[0].chunkNumber * this.chunkSize;
                    return [2 /*return*/, {
                            headers: this._makeHeaders(chunkResponses[0].headers, position, position + length - 1),
                            buffer: this._makeBuffer(chunkResponses, chunksOffset, length),
                        }];
            }
        });
    });
};
/**
 * Concatenates the ordered chunk responses into one Buffer that covers
 * exactly `length` bytes starting `chunksOffset` bytes into the first chunk.
 */
HttpRangeFetcher.prototype._makeBuffer = function (chunkResponses, chunksOffset, length) {
    // Fast paths for zero or one chunk.
    if (chunkResponses.length === 0) {
        return Buffer.allocUnsafe(0);
    }
    if (chunkResponses.length === 1) {
        return chunkResponses[0].buffer.slice(chunksOffset, chunksOffset + length);
    }
    // Two or more chunks: trim the head of the first piece, keep the middle
    // pieces whole, and trim any excess off the tail of the last piece.
    var pieces = chunkResponses.map(function (r) { return r.buffer; });
    var head = pieces.shift().slice(chunksOffset);
    var tail = pieces.pop();
    var middleBytes = 0;
    for (var i = 0; i < pieces.length; i += 1) {
        middleBytes += pieces[i].length;
    }
    var excess = head.length + middleBytes + tail.length - length;
    if (excess < 0) {
        excess = 0;
    }
    tail = tail.slice(0, tail.length - excess);
    return Buffer.concat([head].concat(pieces, [tail]));
};
/**
 * Fetches the first few bytes of the remote file (if necessary) and uses
 * the returned headers to populate a `fs`-like stat object.
 */
}, {
key: "stat",
value: function () {
var _stat = (0, _asyncToGenerator2.default)(
/*#__PURE__*/
_regenerator.default.mark(function _callee2(key) {
var stat, chunk;
return _regenerator.default.wrap(function _callee2$(_context2) {
while (1) {
switch (_context2.prev = _context2.next) {
case 0:
stat = this.stats.get(key);
if (stat) {
_context2.next = 9;
break;
/**
 * Compiled (tsc -> ES5) async method: returns the cached `fs`-like stat
 * object ({ size, mtime, mtimeMs }) for `key`, fetching chunk 0 first (if
 * necessary) so its response headers can populate the stats.
 * Throws when no size can be determined from the headers.
 * Fix: removed interleaved residue from the old Babel/regenerator build
 * (`_context2` lines) that split this function and broke its syntax.
 */
HttpRangeFetcher.prototype.stat = function (key) {
    return __awaiter(this, void 0, void 0, function () {
        var stat, chunk;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    stat = this.stats.get(key);
                    // Cache hit: skip straight to the return.
                    if (!!stat) return [3 /*break*/, 2];
                    // Cache miss: fetch chunk 0 so its headers
                    // (content-range, last-modified) can be recorded.
                    return [4 /*yield*/, this._getChunk(key, 0)];
                case 1:
                    chunk = _a.sent();
                    this._recordStatsIfNecessary(key, chunk);
                    stat = this.stats.get(key);
                    if (!stat) {
                        throw new Error("failed to retrieve file size for ".concat(key));
                    }
                    _a.label = 2;
                case 2: return [2 /*return*/, stat];
            }
        });
    });
};
/**
 * Builds a `fs`-like stat object ({ size, mtime, mtimeMs }) from a chunk
 * response's HTTP headers: `size` from the total in `content-range`
 * ("start-end/total"), `mtime`/`mtimeMs` from `last-modified`. Fields that
 * fail to parse are omitted rather than left as NaN / Invalid Date.
 * Fix: removed interleaved residue from the old Babel/regenerator build
 * (lines of the old `stat` and `_headersToStats`) that split this function
 * and broke its syntax; restored the two closing braces of the
 * content-range branch.
 */
HttpRangeFetcher.prototype._headersToStats = function (chunkResponse) {
    var headers = chunkResponse.headers;
    var stat = {};
    if (headers['content-range']) {
        var match = headers['content-range'].match(/\d+-\d+\/(\d+)/);
        if (match) {
            stat.size = parseInt(match[1], 10);
            if (Number.isNaN(stat.size)) {
                delete stat.size;
            }
        }
    }
    if (headers['last-modified']) {
        stat.mtime = new Date(headers['last-modified']);
        if (stat.mtime.toString() === 'Invalid Date') {
            delete stat.mtime;
        }
        if (stat.mtime) {
            stat.mtimeMs = stat.mtime.getTime();
        }
    }
    return stat;
};
/**
 * Derives the response headers for a reassembled range: sets
 * `content-length` to the new span, rewrites `content-range` for the
 * [newStart, newEnd] window (keeping the original total), and exposes the
 * total as `x-resource-length`. Does not mutate the input headers object.
 * NOTE(review): getRange passes the inclusive last byte as newEnd, so
 * `content-length` here ends up length-1 — looks off by one; confirm
 * upstream before changing.
 * Fix: removed interleaved residue from the old Babel build (old
 * `_headersToStats` lines) that split this function and broke its syntax.
 */
HttpRangeFetcher.prototype._makeHeaders = function (originalHeaders, newStart, newEnd) {
    var newHeaders = Object.assign({}, originalHeaders || {});
    newHeaders['content-length'] = newEnd - newStart;
    var oldContentRange = newHeaders['content-range'] || '';
    var match = oldContentRange.match(/\d+-\d+\/(\d+)/);
    if (match) {
        newHeaders['content-range'] = "".concat(newStart, "-").concat(newEnd - 1, "/").concat(match[1]);
        // eslint-disable-next-line prefer-destructuring
        newHeaders['x-resource-length'] = match[1];
    }
    return newHeaders;
};
/**
 * Compiled (tsc -> ES5) private async method. Returns the chunk response
 * for chunk `chunkNumber` of `key`, reusing a cached promise when a valid
 * one exists, otherwise dispatching a fresh ranged fetch through the
 * aggregator and caching its promise. Returns undefined for chunks known
 * (from cached stats) to lie entirely past the end of the file.
 * Cases 1-4 below are the compiled form of a try/catch around awaiting
 * the cached promise (see the `_a.trys.push` bookkeeping).
 */
HttpRangeFetcher.prototype._getChunk = function (key, chunkNumber, requestOptions) {
    return __awaiter(this, void 0, void 0, function () {
        var chunkKey, cachedPromise, chunk, chunkAborted, err_1, fetchStart, fetchEnd, stat, alreadyRejected, freshPromise, freshChunk;
        var _this = this;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    chunkKey = "".concat(key, "/").concat(chunkNumber);
                    cachedPromise = this.chunkCache.get(chunkKey);
                    // No cached promise: jump to the cache-miss path (case 5).
                    if (!cachedPromise) return [3 /*break*/, 5];
                    chunk = void 0;
                    chunkAborted = void 0;
                    _a.label = 1;
                case 1:
                    // Compiled `try {` — catch handler is case 3, resume at case 4.
                    _a.trys.push([1, 3, , 4]);
                    return [4 /*yield*/, cachedPromise];
                case 2:
                    chunk = _a.sent();
                    return [3 /*break*/, 4];
                case 3:
                    err_1 = _a.sent();
                    if (isAbortException(err_1)) {
                        // fetch was aborted
                        chunkAborted = true;
                    }
                    else {
                        throw err_1;
                    }
                    return [3 /*break*/, 4];
                case 4:
                    // when the cached chunk is resolved, validate it before returning it.
                    // if invalid or aborted, delete it from the cache and redispatch the request
                    if (chunkAborted || !this.cacheSemantics.cachedChunkIsValid(chunk)) {
                        this._uncacheIfSame(chunkKey, cachedPromise);
                        return [2 /*return*/, this._getChunk(key, chunkNumber, requestOptions)];
                    }
                    // gather the stats for the file from the headers
                    this._recordStatsIfNecessary(key, chunk);
                    return [2 /*return*/, chunk];
                case 5:
                    // Cache miss: compute the byte range for this chunk and,
                    // if we already know the file size, clamp or skip it.
                    fetchStart = chunkNumber * this.chunkSize;
                    fetchEnd = fetchStart + this.chunkSize;
                    stat = this.stats.get(key);
                    if (stat && stat.size) {
                        if (fetchStart >= stat.size) {
                            return [2 /*return*/, undefined];
                        }
                        if (fetchEnd >= stat.size) {
                            fetchEnd = stat.size;
                        }
                    }
                    alreadyRejected = false;
                    freshPromise = this.aggregator
                        .fetch(key, fetchStart, fetchEnd, requestOptions)
                        .catch(function (err) {
                        // if the request fails, remove its promise
                        // from the cache and keep the error
                        alreadyRejected = true;
                        _this._uncacheIfSame(chunkKey, freshPromise);
                        throw err;
                    });
                    if (!alreadyRejected) {
                        this.chunkCache.set(chunkKey, freshPromise);
                    }
                    return [4 /*yield*/, freshPromise
                        // gather the stats for the file from the headers
                    ];
                case 6:
                    freshChunk = _a.sent();
                    // gather the stats for the file from the headers
                    this._recordStatsIfNecessary(key, freshChunk);
                    // remove the promise from the cache
                    // if it turns out not to be cacheable. this is
                    // done after the fact because we want multiple requests
                    // for the same chunk to reuse the same cached promise
                    if (!this.cacheSemantics.chunkIsCacheable(freshChunk)) {
                        this._uncacheIfSame(chunkKey, freshPromise);
                    }
                    return [2 /*return*/, freshChunk];
            }
        });
    });
};
// if the stats for a resource haven't been recorded yet, record them
// (stats are derived once from the first chunk response's headers).
HttpRangeFetcher.prototype._recordStatsIfNecessary = function (key, chunk) {
    if (!this.stats.has(key)) {
        this.stats.set(key, this._headersToStats(chunk));
    }
};
return stat;
}
}, {
key: "_makeHeaders",
value: function _makeHeaders(originalHeaders, newStart, newEnd) {
var newHeaders = Object.assign({}, originalHeaders || {});
newHeaders['content-length'] = newEnd - newStart;
var oldContentRange = newHeaders['content-range'] || '';
var match = oldContentRange.match(/\d+-\d+\/(\d+)/);
if (match) {
newHeaders['content-range'] = "".concat(newStart, "-").concat(newEnd - 1, "/").concat(match[1]); // eslint-disable-next-line prefer-destructuring
newHeaders['x-resource-length'] = match[1];
}
return newHeaders;
}
}, {
key: "_getChunk",
value: function () {
var _getChunk2 = (0, _asyncToGenerator2.default)(
/*#__PURE__*/
_regenerator.default.mark(function _callee3(key, chunkNumber, requestOptions) {
var _this2 = this;
var chunkKey, cachedPromise, chunk, chunkAborted, fetchStart, fetchEnd, stat, alreadyRejected, freshPromise, freshChunk;
return _regenerator.default.wrap(function _callee3$(_context3) {
while (1) {
switch (_context3.prev = _context3.next) {
case 0:
chunkKey = "".concat(key, "/").concat(chunkNumber);
cachedPromise = this.chunkCache.get(chunkKey);
if (!cachedPromise) {
_context3.next = 21;
break;
}
_context3.prev = 3;
_context3.next = 6;
return cachedPromise;
case 6:
chunk = _context3.sent;
_context3.next = 16;
break;
case 9:
_context3.prev = 9;
_context3.t0 = _context3["catch"](3);
if (!isAbortException(_context3.t0)) {
_context3.next = 15;
break;
}
// fetch was aborted
chunkAborted = true;
_context3.next = 16;
break;
case 15:
throw _context3.t0;
case 16:
if (!(chunkAborted || !this.cacheSemantics.cachedChunkIsValid(chunk))) {
_context3.next = 19;
break;
}
this._uncacheIfSame(chunkKey, cachedPromise);
return _context3.abrupt("return", this._getChunk(key, chunkNumber, requestOptions));
case 19:
// gather the stats for the file from the headers
this._recordStatsIfNecessary(key, chunk);
return _context3.abrupt("return", chunk);
case 21:
fetchStart = chunkNumber * this.chunkSize;
fetchEnd = fetchStart + this.chunkSize; // clamp the end of the fetch to the size if we have a cached size for the file
stat = this.stats.get(key);
if (!(stat && stat.size)) {
_context3.next = 28;
break;
}
if (!(fetchStart >= stat.size)) {
_context3.next = 27;
break;
}
return _context3.abrupt("return", undefined);
case 27:
if (fetchEnd >= stat.size) fetchEnd = stat.size;
case 28:
alreadyRejected = false;
freshPromise = this.aggregator.fetch(key, fetchStart, fetchEnd, requestOptions).catch(function (err) {
// if the request fails, remove its promise
// from the cache and keep the error
alreadyRejected = true;
_this2._uncacheIfSame(chunkKey, freshPromise);
throw err;
});
if (!alreadyRejected) this.chunkCache.set(chunkKey, freshPromise);
_context3.next = 33;
return freshPromise;
case 33:
freshChunk = _context3.sent;
// gather the stats for the file from the headers
this._recordStatsIfNecessary(key, freshChunk); // remove the promise from the cache
// if it turns out not to be cacheable. this is
// done after the fact because we want multiple requests
// for the same chunk to reuse the same cached promise
if (!this.cacheSemantics.chunkIsCacheable(freshChunk)) {
this._uncacheIfSame(chunkKey, freshPromise);
}
return _context3.abrupt("return", freshChunk);
case 37:
case "end":
return _context3.stop();
}
}
}, _callee3, this, [[3, 9]]);
}));
function _getChunk(_x3, _x4, _x5) {
return _getChunk2.apply(this, arguments);
}
return _getChunk;
}() // if the stats for a resource haven't been recorded yet, record them
}, {
key: "_recordStatsIfNecessary",
value: function _recordStatsIfNecessary(key, chunk) {
if (!this.stats.has(key)) this.stats.set(key, this._headersToStats(chunk));
} // delete a promise from the cache if it is still in there.
};
// delete a promise from the cache if it is still in there.
// need to check if it is still the same because it might
// have been overwritten sometime while the promise was in flight
}, {
key: "_uncacheIfSame",
value: function _uncacheIfSame(key, cachedPromise) {
if (this.chunkCache.get(key) === cachedPromise) {
this.chunkCache.delete(key);
}
}
HttpRangeFetcher.prototype._uncacheIfSame = function (key, cachedPromise) {
    // Only evict when the cache still holds this exact promise; it may have
    // been replaced by a newer fetch while this one was in flight.
    var current = this.chunkCache.get(key);
    if (current !== cachedPromise) {
        return;
    }
    this.chunkCache.delete(key);
};
/**
* Throw away all cached data, resetting the cache.
*/
}, {
key: "reset",
value: function reset() {
this.stats.clear();
this.chunkCache.clear();
}
}]);
return HttpRangeFetcher;
}();
module.exports = HttpRangeFetcher;
/**
 * Throw away all cached data, resetting the fetcher to its initial state.
 */
HttpRangeFetcher.prototype.reset = function () {
    // The two LRU caches are independent; clear both.
    this.chunkCache.clear();
    this.stats.clear();
};
return HttpRangeFetcher;
}());
exports.default = HttpRangeFetcher;
//# sourceMappingURL=httpRangeFetcher.js.map
"use strict";
var HttpRangeFetcher = require('./httpRangeFetcher');
module.exports = {
HttpRangeFetcher: HttpRangeFetcher
};
// Compiled (tsc -> CommonJS) entry module: re-exports the default export of
// ./httpRangeFetcher under the named binding `HttpRangeFetcher`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HttpRangeFetcher = void 0;
//@ts-nocheck
var httpRangeFetcher_1 = __importDefault(require("./httpRangeFetcher"));
exports.HttpRangeFetcher = httpRangeFetcher_1.default;
//# sourceMappingURL=index.js.map
{
"name": "http-range-fetcher",
"version": "1.2.5",
"version": "1.3.0",
"description": "caching, aggregating fetch manager for doing lots of HTTP range requests",

@@ -8,2 +8,3 @@ "license": "MIT",

"main": "dist/index.js",
"module": "esm/index.js",
"author": {

@@ -18,3 +19,4 @@ "name": "Robert Buels",

"files": [
"dist"
"dist",
"esm"
],

@@ -24,9 +26,9 @@ "scripts": {

"coverage": "npm test -- --coverage",
"postcoverage": "opn coverage/lcov-report/index.html",
"lint": "eslint src test",
"docs": "documentation readme --shallow src/HttpRangeFetcher.js --section=API",
"clean": "rimraf dist",
"clean": "rimraf dist esm",
"build:esm": "tsc --target es2018 --outDir esm",
"build:es5": "tsc --target es5 --outDir dist",
"build": "npm run build:esm && npm run build:es5",
"prebuild": "npm run clean",
"build": "babel src -d dist",
"watch": "npm-watch",
"prepublishOnly": "npm run lint && npm test && npm run build",

@@ -36,13 +38,4 @@ "postpublish": "git push origin master --follow-tags",

},
"watch": {
"test": "{src,test}/*.js",
"lint": "{src,test}/*.js",
"build": "src"
},
"jest": {
"testEnvironment": "node"
},
"keywords": [],
"dependencies": {
"@babel/runtime": "^7.4.4",
"abortcontroller-polyfill": "^1.2.9",

@@ -54,21 +47,16 @@ "cross-fetch": "^2.2.2",

"devDependencies": {
"@babel/cli": "^7.4.4",
"@babel/core": "^7.4.4",
"@babel/plugin-transform-runtime": "^7.4.4",
"@babel/preset-env": "^7.4.4",
"babel-eslint": "^8.2.6",
"babel-jest": "^24.8.0",
"documentation": "^6.1.0",
"eslint": "^4.19.1",
"eslint-config-airbnb-base": "^12.1.0",
"eslint-config-prettier": "^2.9.0",
"eslint-plugin-import": "^2.10.0",
"eslint-plugin-prettier": "^2.6.0",
"jest": "^24.8.0",
"lodash": "^4.17.10",
"npm-watch": "^0.5.0",
"opn-cli": "^3.1.0",
"prettier": "^1.11.1",
"rimraf": "^2.6.2",
"standard-changelog": "^1.0.0"
"@types/jest": "^27.4.0",
"@typescript-eslint/eslint-plugin": "^5.10.2",
"@typescript-eslint/parser": "^5.10.2",
"documentation": "^13.2.5",
"eslint": "^8.8.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-import": "^2.25.4",
"eslint-plugin-prettier": "^4.0.0",
"jest": "^27.4.7",
"prettier": "^2.5.1",
"rimraf": "^3.0.2",
"standard-changelog": "^2.0.27",
"ts-jest": "^27.1.3",
"typescript": "^4.5.5"
},

@@ -75,0 +63,0 @@ "publishConfig": {

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc