@gmod/bbi - npm Package Compare versions

Comparing version 2.0.4 to 2.0.5

CHANGELOG.md (6)

@@ -0,1 +1,7 @@

## [2.0.5](https://github.com/GMOD/bbi-js/compare/v2.0.4...v2.0.5) (2022-12-17)
- Cleanup package.json and README
## [2.0.4](https://github.com/GMOD/bbi-js/compare/v2.0.3...v2.0.4) (2022-10-15)

@@ -2,0 +8,0 @@
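
The remainder of the diff covers the compiled files under dist/: bbi.js, bigbed.js, the 64-bit DataView helpers, bigwig.js, blockView.js, and the Range helper. The consistent change is in the emit style: the old output is ES5-flavored (var, /** @class */ IIFEs, __extends and __generator state machines), while the new output uses native class and const syntax, template literals, and computed property names, with async methods still downleveled through __awaiter over generator functions. A build-setting change along these lines would be consistent with that output; the actual tsconfig is not part of this diff, so treat the excerpt below as an illustrative guess.

// Hypothetical tsconfig.json excerpt (assumption; not shown in this diff).
// Raising "target" above ES5 keeps `class`/`const`/template literals native
// while async methods are still rewritten through the __awaiter helper.
{
  "compilerOptions": {
    "target": "ES2016",   // previously something like "ES5"
    "module": "CommonJS"
  }
}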

dist/bbi.js (423)
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {

@@ -22,38 +11,11 @@ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }

};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BBI = void 0;
var binary_parser_1 = require("binary-parser");
var generic_filehandle_1 = require("generic-filehandle");
var rxjs_1 = require("rxjs");
var operators_1 = require("rxjs/operators");
var blockView_1 = require("./blockView");
var BIG_WIG_MAGIC = -2003829722;
var BIG_BED_MAGIC = -2021002517;
const binary_parser_1 = require("binary-parser");
const generic_filehandle_1 = require("generic-filehandle");
const rxjs_1 = require("rxjs");
const operators_1 = require("rxjs/operators");
const blockView_1 = require("./blockView");
const BIG_WIG_MAGIC = -2003829722;
const BIG_BED_MAGIC = -2021002517;
function toString(arr) {

@@ -68,4 +30,4 @@ return new TextDecoder().decode(arr);

function getParsers(isBE) {
var le = isBE ? 'big' : 'little';
var headerParser = new binary_parser_1.Parser()
const le = isBE ? 'big' : 'little';
const headerParser = new binary_parser_1.Parser()
.endianess(le)

@@ -93,3 +55,3 @@ .int32('magic')

});
var totalSummaryParser = new binary_parser_1.Parser()
const totalSummaryParser = new binary_parser_1.Parser()
.endianess(le)

@@ -101,3 +63,3 @@ .uint64('basesCovered')

.doublele('scoreSumSquares');
var chromTreeParser = new binary_parser_1.Parser()
const chromTreeParser = new binary_parser_1.Parser()
.endianess(le)

@@ -109,3 +71,3 @@ .uint32('magic')

.uint64('itemCount');
var isLeafNode = new binary_parser_1.Parser()
const isLeafNode = new binary_parser_1.Parser()
.endianess(le)

@@ -117,9 +79,9 @@ .uint8('isLeafNode')

return {
chromTreeParser: chromTreeParser,
totalSummaryParser: totalSummaryParser,
headerParser: headerParser,
isLeafNode: isLeafNode,
chromTreeParser,
totalSummaryParser,
headerParser,
isLeafNode,
};
}
var BBI = /** @class */ (function () {
class BBI {
/*

@@ -131,5 +93,4 @@ * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API

*/
function BBI(options) {
if (options === void 0) { options = {}; }
var filehandle = options.filehandle, _a = options.renameRefSeqs, renameRefSeqs = _a === void 0 ? function (s) { return s; } : _a, path = options.path, url = options.url;
constructor(options = {}) {
const { filehandle, renameRefSeqs = s => s, path, url } = options;
this.renameRefSeqs = renameRefSeqs;

@@ -153,9 +114,7 @@ if (filehandle) {

*/
BBI.prototype.getHeader = function (opts) {
var _this = this;
if (opts === void 0) { opts = {}; }
var options = 'aborted' in opts ? { signal: opts } : opts;
getHeader(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
if (!this.headerP) {
this.headerP = this._getHeader(options).catch(function (e) {
_this.headerP = undefined;
this.headerP = this._getHeader(options).catch(e => {
this.headerP = undefined;
throw e;

@@ -165,55 +124,38 @@ });

return this.headerP;
};
BBI.prototype._getHeader = function (opts) {
return __awaiter(this, void 0, void 0, function () {
var header, chroms;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this._getMainHeader(opts)];
case 1:
header = _a.sent();
return [4 /*yield*/, this._readChromTree(header, opts)];
case 2:
chroms = _a.sent();
return [2 /*return*/, __assign(__assign({}, header), chroms)];
}
});
}
_getHeader(opts) {
return __awaiter(this, void 0, void 0, function* () {
const header = yield this._getMainHeader(opts);
const chroms = yield this._readChromTree(header, opts);
return Object.assign(Object.assign({}, header), chroms);
});
};
BBI.prototype._getMainHeader = function (opts, requestSize) {
if (requestSize === void 0) { requestSize = 2000; }
return __awaiter(this, void 0, void 0, function () {
var buffer, isBigEndian, ret, header, magic, asOffset, totalSummaryOffset, off, tail, sum;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.bbi.read(Buffer.alloc(requestSize), 0, requestSize, 0, opts)];
case 1:
buffer = (_a.sent()).buffer;
isBigEndian = this._isBigEndian(buffer);
ret = getParsers(isBigEndian);
header = ret.headerParser.parse(buffer);
magic = header.magic, asOffset = header.asOffset, totalSummaryOffset = header.totalSummaryOffset;
header.fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig';
if (asOffset > requestSize || totalSummaryOffset > requestSize) {
return [2 /*return*/, this._getMainHeader(opts, requestSize * 2)];
}
if (asOffset) {
off = Number(header.asOffset);
header.autoSql = toString(buffer.subarray(off, buffer.indexOf(0, off)));
}
if (header.totalSummaryOffset > requestSize) {
return [2 /*return*/, this._getMainHeader(opts, requestSize * 2)];
}
if (header.totalSummaryOffset) {
tail = buffer.subarray(Number(header.totalSummaryOffset));
sum = ret.totalSummaryParser.parse(tail);
header.totalSummary = __assign(__assign({}, sum), { basesCovered: Number(sum.basesCovered) });
}
return [2 /*return*/, __assign(__assign({}, header), { isBigEndian: isBigEndian })];
}
});
}
_getMainHeader(opts, requestSize = 2000) {
return __awaiter(this, void 0, void 0, function* () {
const { buffer } = yield this.bbi.read(Buffer.alloc(requestSize), 0, requestSize, 0, opts);
const isBigEndian = this._isBigEndian(buffer);
const ret = getParsers(isBigEndian);
const header = ret.headerParser.parse(buffer);
const { magic, asOffset, totalSummaryOffset } = header;
header.fileType = magic === BIG_BED_MAGIC ? 'bigbed' : 'bigwig';
if (asOffset > requestSize || totalSummaryOffset > requestSize) {
return this._getMainHeader(opts, requestSize * 2);
}
if (asOffset) {
const off = Number(header.asOffset);
header.autoSql = toString(buffer.subarray(off, buffer.indexOf(0, off)));
}
if (header.totalSummaryOffset > requestSize) {
return this._getMainHeader(opts, requestSize * 2);
}
if (header.totalSummaryOffset) {
const tail = buffer.subarray(Number(header.totalSummaryOffset));
const sum = ret.totalSummaryParser.parse(tail);
header.totalSummary = Object.assign(Object.assign({}, sum), { basesCovered: Number(sum.basesCovered) });
}
return Object.assign(Object.assign({}, header), { isBigEndian });
});
};
BBI.prototype._isBigEndian = function (buffer) {
var ret = buffer.readInt32LE(0);
}
_isBigEndian(buffer) {
let ret = buffer.readInt32LE(0);
if (ret === BIG_WIG_MAGIC || ret === BIG_BED_MAGIC) {

@@ -227,87 +169,68 @@ return false;

throw new Error('not a BigWig/BigBed file');
};
}
// todo: add progress if long running
BBI.prototype._readChromTree = function (header, opts) {
return __awaiter(this, void 0, void 0, function () {
var isBE, le, refsByNumber, refsByName, unzoomedDataOffset, chromTreeOffset, off, buffer, p, keySize, leafNodeParser, nonleafNodeParser, rootNodeOffset, bptReadNode;
var _this = this;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
isBE = header.isBigEndian;
le = isBE ? 'big' : 'little';
refsByNumber = [];
refsByName = {};
unzoomedDataOffset = Number(header.unzoomedDataOffset);
chromTreeOffset = Number(header.chromTreeOffset);
while (unzoomedDataOffset % 4 !== 0) {
unzoomedDataOffset += 1;
}
off = unzoomedDataOffset - chromTreeOffset;
return [4 /*yield*/, this.bbi.read(Buffer.alloc(off), 0, off, Number(chromTreeOffset), opts)];
case 1:
buffer = (_a.sent()).buffer;
p = getParsers(isBE);
keySize = p.chromTreeParser.parse(buffer).keySize;
leafNodeParser = new binary_parser_1.Parser()
.endianess(le)
.string('key', { stripNull: true, length: keySize })
.uint32('refId')
.uint32('refSize')
.saveOffset('offset');
nonleafNodeParser = new binary_parser_1.Parser()
.endianess(le)
.skip(keySize)
.uint64('childOffset')
.saveOffset('offset');
rootNodeOffset = 32;
bptReadNode = function (currentOffset) { return __awaiter(_this, void 0, void 0, function () {
var offset, ret, isLeafNode, cnt, n, leafRet, key, refId, refSize, refRec, nextNodes, n, nonleafRet, childOffset;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
offset = currentOffset;
if (offset >= buffer.length) {
throw new Error('reading beyond end of buffer');
}
ret = p.isLeafNode.parse(buffer.subarray(offset));
isLeafNode = ret.isLeafNode, cnt = ret.cnt;
offset += ret.offset;
if (!isLeafNode) return [3 /*break*/, 1];
for (n = 0; n < cnt; n += 1) {
leafRet = leafNodeParser.parse(buffer.subarray(offset));
offset += leafRet.offset;
key = leafRet.key, refId = leafRet.refId, refSize = leafRet.refSize;
refRec = { name: key, id: refId, length: refSize };
refsByName[this.renameRefSeqs(key)] = refId;
refsByNumber[refId] = refRec;
}
return [3 /*break*/, 3];
case 1:
nextNodes = [];
for (n = 0; n < cnt; n += 1) {
nonleafRet = nonleafNodeParser.parse(buffer.subarray(offset));
childOffset = nonleafRet.childOffset;
offset += nonleafRet.offset;
nextNodes.push(bptReadNode(Number(childOffset) - Number(chromTreeOffset)));
}
return [4 /*yield*/, Promise.all(nextNodes)];
case 2:
_a.sent();
_a.label = 3;
case 3: return [2 /*return*/];
}
});
}); };
return [4 /*yield*/, bptReadNode(rootNodeOffset)];
case 2:
_a.sent();
return [2 /*return*/, {
refsByName: refsByName,
refsByNumber: refsByNumber,
}];
_readChromTree(header, opts) {
return __awaiter(this, void 0, void 0, function* () {
const isBE = header.isBigEndian;
const le = isBE ? 'big' : 'little';
const refsByNumber = [];
const refsByName = {};
let unzoomedDataOffset = Number(header.unzoomedDataOffset);
const chromTreeOffset = Number(header.chromTreeOffset);
while (unzoomedDataOffset % 4 !== 0) {
unzoomedDataOffset += 1;
}
const off = unzoomedDataOffset - chromTreeOffset;
const { buffer } = yield this.bbi.read(Buffer.alloc(off), 0, off, Number(chromTreeOffset), opts);
const p = getParsers(isBE);
const { keySize } = p.chromTreeParser.parse(buffer);
const leafNodeParser = new binary_parser_1.Parser()
.endianess(le)
.string('key', { stripNull: true, length: keySize })
.uint32('refId')
.uint32('refSize')
.saveOffset('offset');
const nonleafNodeParser = new binary_parser_1.Parser()
.endianess(le)
.skip(keySize)
.uint64('childOffset')
.saveOffset('offset');
const rootNodeOffset = 32;
const bptReadNode = (currentOffset) => __awaiter(this, void 0, void 0, function* () {
let offset = currentOffset;
if (offset >= buffer.length) {
throw new Error('reading beyond end of buffer');
}
const ret = p.isLeafNode.parse(buffer.subarray(offset));
const { isLeafNode, cnt } = ret;
offset += ret.offset;
if (isLeafNode) {
for (let n = 0; n < cnt; n += 1) {
const leafRet = leafNodeParser.parse(buffer.subarray(offset));
offset += leafRet.offset;
const { key, refId, refSize } = leafRet;
const refRec = { name: key, id: refId, length: refSize };
refsByName[this.renameRefSeqs(key)] = refId;
refsByNumber[refId] = refRec;
}
}
else {
// parse index node
const nextNodes = [];
for (let n = 0; n < cnt; n += 1) {
const nonleafRet = nonleafNodeParser.parse(buffer.subarray(offset));
const { childOffset } = nonleafRet;
offset += nonleafRet.offset;
nextNodes.push(bptReadNode(Number(childOffset) - Number(chromTreeOffset)));
}
yield Promise.all(nextNodes);
}
});
yield bptReadNode(rootNodeOffset);
return {
refsByName,
refsByNumber,
};
});
};
}
/*

@@ -317,15 +240,8 @@ * fetches the "unzoomed" view of the bigwig data. this is the default for bigbed

*/
BBI.prototype.getUnzoomedView = function (opts) {
return __awaiter(this, void 0, void 0, function () {
var _a, unzoomedIndexOffset, refsByName, uncompressBufSize, isBigEndian, fileType;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, this.getHeader(opts)];
case 1:
_a = _b.sent(), unzoomedIndexOffset = _a.unzoomedIndexOffset, refsByName = _a.refsByName, uncompressBufSize = _a.uncompressBufSize, isBigEndian = _a.isBigEndian, fileType = _a.fileType;
return [2 /*return*/, new blockView_1.BlockView(this.bbi, refsByName, unzoomedIndexOffset, isBigEndian, uncompressBufSize > 0, fileType)];
}
});
getUnzoomedView(opts) {
return __awaiter(this, void 0, void 0, function* () {
const { unzoomedIndexOffset, refsByName, uncompressBufSize, isBigEndian, fileType, } = yield this.getHeader(opts);
return new blockView_1.BlockView(this.bbi, refsByName, unzoomedIndexOffset, isBigEndian, uncompressBufSize > 0, fileType);
});
};
}
/**

@@ -339,64 +255,39 @@ * Gets features from a BigWig file

*/
BBI.prototype.getFeatureStream = function (refName, start, end, opts) {
if (opts === void 0) { opts = {
scale: 1,
}; }
return __awaiter(this, void 0, void 0, function () {
var chrName, view;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.getHeader(opts)];
case 1:
_a.sent();
chrName = this.renameRefSeqs(refName);
if (!opts.basesPerSpan) return [3 /*break*/, 3];
return [4 /*yield*/, this.getView(1 / opts.basesPerSpan, opts)];
case 2:
view = _a.sent();
return [3 /*break*/, 7];
case 3:
if (!opts.scale) return [3 /*break*/, 5];
return [4 /*yield*/, this.getView(opts.scale, opts)];
case 4:
view = _a.sent();
return [3 /*break*/, 7];
case 5: return [4 /*yield*/, this.getView(1, opts)];
case 6:
view = _a.sent();
_a.label = 7;
case 7:
if (!view) {
throw new Error('unable to get block view for data');
}
return [2 /*return*/, new rxjs_1.Observable(function (observer) {
view.readWigData(chrName, start, end, observer, opts);
})];
}
getFeatureStream(refName, start, end, opts = {
scale: 1,
}) {
return __awaiter(this, void 0, void 0, function* () {
yield this.getHeader(opts);
const chrName = this.renameRefSeqs(refName);
let view;
if (opts.basesPerSpan) {
view = yield this.getView(1 / opts.basesPerSpan, opts);
}
else if (opts.scale) {
view = yield this.getView(opts.scale, opts);
}
else {
view = yield this.getView(1, opts);
}
if (!view) {
throw new Error('unable to get block view for data');
}
return new rxjs_1.Observable((observer) => {
view.readWigData(chrName, start, end, observer, opts);
});
});
};
BBI.prototype.getFeatures = function (refName, start, end, opts) {
if (opts === void 0) { opts = {
scale: 1,
}; }
return __awaiter(this, void 0, void 0, function () {
var ob, ret;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.getFeatureStream(refName, start, end, opts)];
case 1:
ob = _a.sent();
return [4 /*yield*/, ob
.pipe((0, operators_1.reduce)(function (acc, curr) { return acc.concat(curr); }))
.toPromise()];
case 2:
ret = _a.sent();
return [2 /*return*/, ret || []];
}
});
}
getFeatures(refName, start, end, opts = {
scale: 1,
}) {
return __awaiter(this, void 0, void 0, function* () {
const ob = yield this.getFeatureStream(refName, start, end, opts);
const ret = yield ob
.pipe((0, operators_1.reduce)((acc, curr) => acc.concat(curr)))
.toPromise();
return ret || [];
});
};
return BBI;
}());
}
}
exports.BBI = BBI;
//# sourceMappingURL=bbi.js.map
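
That ends dist/bbi.js; dist/bigbed.js follows. The BBI base class above exposes getHeader(), getFeatureStream() (an rxjs Observable), and getFeatures() (a Promise resolving to an array), and its constructor destructures filehandle, path, url, and renameRefSeqs options. Below is a minimal usage sketch against the BigWig subclass that appears later in this diff, assuming the package entry point re-exports BigWig and that the path option opens a local file; that wiring is not visible in this diff, and the filename is a placeholder.

// Sketch only: 'volvox.bw' is a placeholder, and option handling beyond the
// destructured names shown in the constructor is not visible in this diff.
const { BigWig } = require('@gmod/bbi')

async function main() {
  const bw = new BigWig({ path: 'volvox.bw' })
  const header = await bw.getHeader()   // fileType, refsByName, totalSummary, ...
  const feats = await bw.getFeatures('chr1', 0, 1000, { scale: 1 })
  console.log(header.fileType, feats.length)
}

main().catch(console.error)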
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {

@@ -37,29 +11,2 @@ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }

};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -70,31 +17,26 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.BigBed = exports.filterUndef = void 0;
var binary_parser_1 = require("binary-parser");
var rxjs_1 = require("rxjs");
var operators_1 = require("rxjs/operators");
var abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
var quick_lru_1 = __importDefault(require("quick-lru"));
var bbi_1 = require("./bbi");
const binary_parser_1 = require("binary-parser");
const rxjs_1 = require("rxjs");
const operators_1 = require("rxjs/operators");
const abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
const quick_lru_1 = __importDefault(require("quick-lru"));
const bbi_1 = require("./bbi");
function filterUndef(ts) {
return ts.filter(function (t) { return !!t; });
return ts.filter((t) => !!t);
}
exports.filterUndef = filterUndef;
var BigBed = /** @class */ (function (_super) {
__extends(BigBed, _super);
function BigBed() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.readIndicesCache = new abortable_promise_cache_1.default({
class BigBed extends bbi_1.BBI {
constructor() {
super(...arguments);
this.readIndicesCache = new abortable_promise_cache_1.default({
cache: new quick_lru_1.default({ maxSize: 1 }),
fill: function (args, signal) { return __awaiter(_this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, this._readIndices(__assign(__assign({}, args), { signal: signal }))];
});
}); },
fill: (args, signal) => __awaiter(this, void 0, void 0, function* () {
return this._readIndices(Object.assign(Object.assign({}, args), { signal }));
}),
});
return _this;
}
BigBed.prototype.readIndices = function (opts) {
if (opts === void 0) { opts = {}; }
var options = 'aborted' in opts ? { signal: opts } : opts;
readIndices(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
return this.readIndicesCache.get(JSON.stringify(options), options, options.signal);
};
}
/*

@@ -106,9 +48,7 @@ * retrieve unzoomed view for any scale

*/
BigBed.prototype.getView = function (_scale, opts) {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
return [2 /*return*/, this.getUnzoomedView(opts)];
});
getView(_scale, opts) {
return __awaiter(this, void 0, void 0, function* () {
return this.getUnzoomedView(opts);
});
};
}
/*

@@ -119,46 +59,35 @@ * parse the bigbed extraIndex fields

*/
BigBed.prototype._readIndices = function (opts) {
return __awaiter(this, void 0, void 0, function () {
var _a, extHeaderOffset, isBigEndian, data, le, ret, count, offset, blocklen, len, buffer, extParser, indices, i;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, this.getHeader(opts)];
case 1:
_a = _b.sent(), extHeaderOffset = _a.extHeaderOffset, isBigEndian = _a.isBigEndian;
return [4 /*yield*/, this.bbi.read(Buffer.alloc(64), 0, 64, Number(extHeaderOffset))];
case 2:
data = (_b.sent()).buffer;
le = isBigEndian ? 'big' : 'little';
ret = new binary_parser_1.Parser()
.endianess(le)
.uint16('size')
.uint16('count')
.uint64('offset')
.parse(data);
count = ret.count, offset = ret.offset;
// no extra index is defined if count==0
if (count === 0) {
return [2 /*return*/, []];
}
blocklen = 20;
len = blocklen * count;
return [4 /*yield*/, this.bbi.read(Buffer.alloc(len), 0, len, Number(offset))];
case 3:
buffer = (_b.sent()).buffer;
extParser = new binary_parser_1.Parser()
.endianess(le)
.int16('type')
.int16('fieldcount')
.uint64('offset')
.skip(4)
.int16('field');
indices = [];
for (i = 0; i < count; i += 1) {
indices.push(extParser.parse(buffer.subarray(i * blocklen)));
}
return [2 /*return*/, indices];
}
});
_readIndices(opts) {
return __awaiter(this, void 0, void 0, function* () {
const { extHeaderOffset, isBigEndian } = yield this.getHeader(opts);
const { buffer: data } = yield this.bbi.read(Buffer.alloc(64), 0, 64, Number(extHeaderOffset));
const le = isBigEndian ? 'big' : 'little';
const ret = new binary_parser_1.Parser()
.endianess(le)
.uint16('size')
.uint16('count')
.uint64('offset')
.parse(data);
const { count, offset } = ret;
// no extra index is defined if count==0
if (count === 0) {
return [];
}
const blocklen = 20;
const len = blocklen * count;
const { buffer } = yield this.bbi.read(Buffer.alloc(len), 0, len, Number(offset));
const extParser = new binary_parser_1.Parser()
.endianess(le)
.int16('type')
.int16('fieldcount')
.uint64('offset')
.skip(4)
.int16('field');
const indices = [];
for (let i = 0; i < count; i += 1) {
indices.push(extParser.parse(buffer.subarray(i * blocklen)));
}
return indices;
});
};
}
/*

@@ -172,106 +101,77 @@ * perform a search in the bigbed extraIndex to find which blocks in the bigbed data to look for the

*/
BigBed.prototype.searchExtraIndexBlocks = function (name, opts) {
if (opts === void 0) { opts = {}; }
return __awaiter(this, void 0, void 0, function () {
var isBigEndian, indices, locs, _a;
var _this = this;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, this.getHeader(opts)];
case 1:
isBigEndian = (_b.sent()).isBigEndian;
return [4 /*yield*/, this.readIndices(opts)];
case 2:
indices = _b.sent();
if (!indices.length) {
return [2 /*return*/, []];
searchExtraIndexBlocks(name, opts = {}) {
return __awaiter(this, void 0, void 0, function* () {
const { isBigEndian } = yield this.getHeader(opts);
const indices = yield this.readIndices(opts);
if (!indices.length) {
return [];
}
const locs = indices.map((index) => __awaiter(this, void 0, void 0, function* () {
const { offset, field } = index;
const { buffer: data } = yield this.bbi.read(Buffer.alloc(32), 0, 32, Number(offset), opts);
const le = isBigEndian ? 'big' : 'little';
const p = new binary_parser_1.Parser()
.endianess(le)
.int32('magic')
.int32('blockSize')
.int32('keySize')
.int32('valSize')
.uint64('itemCount');
const { blockSize, keySize, valSize } = p.parse(data);
// console.log({blockSize,keySize,valSize})
const bpt = new binary_parser_1.Parser()
.endianess(le)
.int8('nodeType')
.skip(1)
.int16('cnt')
.choice({
tag: 'nodeType',
choices: {
0: new binary_parser_1.Parser().array('leafkeys', {
length: 'cnt',
type: new binary_parser_1.Parser()
.endianess(le)
.string('key', { length: keySize, stripNull: true })
.uint64('offset'),
}),
1: new binary_parser_1.Parser().array('keys', {
length: 'cnt',
type: new binary_parser_1.Parser()
.endianess(le)
.string('key', { length: keySize, stripNull: true })
.uint64('offset')
.uint32('length')
.uint32('reserved'),
}),
},
});
const bptReadNode = (nodeOffset) => __awaiter(this, void 0, void 0, function* () {
const val = Number(nodeOffset);
const len = 4 + blockSize * (keySize + valSize);
const { buffer } = yield this.bbi.read(Buffer.alloc(len), 0, len, val, opts);
const node = bpt.parse(buffer);
if (node.leafkeys) {
let lastOffset;
for (let i = 0; i < node.leafkeys.length; i += 1) {
const { key } = node.leafkeys[i];
if (name.localeCompare(key) < 0 && lastOffset) {
return bptReadNode(lastOffset);
}
lastOffset = node.leafkeys[i].offset;
}
locs = indices.map(function (index) { return __awaiter(_this, void 0, void 0, function () {
var offset, field, data, le, p, _a, blockSize, keySize, valSize, bpt, bptReadNode, rootNodeOffset;
var _this = this;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
offset = index.offset, field = index.field;
return [4 /*yield*/, this.bbi.read(Buffer.alloc(32), 0, 32, Number(offset), opts)];
case 1:
data = (_b.sent()).buffer;
le = isBigEndian ? 'big' : 'little';
p = new binary_parser_1.Parser()
.endianess(le)
.int32('magic')
.int32('blockSize')
.int32('keySize')
.int32('valSize')
.uint64('itemCount');
_a = p.parse(data), blockSize = _a.blockSize, keySize = _a.keySize, valSize = _a.valSize;
bpt = new binary_parser_1.Parser()
.endianess(le)
.int8('nodeType')
.skip(1)
.int16('cnt')
.choice({
tag: 'nodeType',
choices: {
0: new binary_parser_1.Parser().array('leafkeys', {
length: 'cnt',
type: new binary_parser_1.Parser()
.endianess(le)
.string('key', { length: keySize, stripNull: true })
.uint64('offset'),
}),
1: new binary_parser_1.Parser().array('keys', {
length: 'cnt',
type: new binary_parser_1.Parser()
.endianess(le)
.string('key', { length: keySize, stripNull: true })
.uint64('offset')
.uint32('length')
.uint32('reserved'),
}),
},
});
bptReadNode = function (nodeOffset) { return __awaiter(_this, void 0, void 0, function () {
var val, len, buffer, node, lastOffset, i, key, i;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
val = Number(nodeOffset);
len = 4 + blockSize * (keySize + valSize);
return [4 /*yield*/, this.bbi.read(Buffer.alloc(len), 0, len, val, opts)];
case 1:
buffer = (_a.sent()).buffer;
node = bpt.parse(buffer);
if (node.leafkeys) {
lastOffset = void 0;
for (i = 0; i < node.leafkeys.length; i += 1) {
key = node.leafkeys[i].key;
if (name.localeCompare(key) < 0 && lastOffset) {
return [2 /*return*/, bptReadNode(lastOffset)];
}
lastOffset = node.leafkeys[i].offset;
}
return [2 /*return*/, bptReadNode(lastOffset)];
}
for (i = 0; i < node.keys.length; i += 1) {
if (node.keys[i].key === name) {
return [2 /*return*/, __assign(__assign({}, node.keys[i]), { field: field })];
}
}
return [2 /*return*/, undefined];
}
});
}); };
rootNodeOffset = 32;
return [2 /*return*/, bptReadNode(Number(offset) + rootNodeOffset)];
}
});
}); });
_a = filterUndef;
return [4 /*yield*/, Promise.all(locs)];
case 3: return [2 /*return*/, _a.apply(void 0, [_b.sent()])];
}
});
return bptReadNode(lastOffset);
}
for (let i = 0; i < node.keys.length; i += 1) {
if (node.keys[i].key === name) {
return Object.assign(Object.assign({}, node.keys[i]), { field });
}
}
return undefined;
});
const rootNodeOffset = 32;
return bptReadNode(Number(offset) + rootNodeOffset);
}));
return filterUndef(yield Promise.all(locs));
});
};
}
/*

@@ -285,38 +185,25 @@ * retrieve the features from the bigbed data that were found through the lookup of the extraIndex

*/
BigBed.prototype.searchExtraIndex = function (name, opts) {
if (opts === void 0) { opts = {}; }
return __awaiter(this, void 0, void 0, function () {
var blocks, view, res, ret;
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, this.searchExtraIndexBlocks(name, opts)];
case 1:
blocks = _a.sent();
if (!blocks.length) {
return [2 /*return*/, []];
}
return [4 /*yield*/, this.getUnzoomedView(opts)];
case 2:
view = _a.sent();
res = blocks.map(function (block) {
return new rxjs_1.Observable(function (observer) {
view.readFeatures(observer, [block], opts);
}).pipe((0, operators_1.reduce)(function (acc, curr) { return acc.concat(curr); }), (0, operators_1.map)(function (x) {
for (var i = 0; i < x.length; i += 1) {
x[i].field = block.field;
}
return x;
}));
});
return [4 /*yield*/, rxjs_1.merge.apply(void 0, res).toPromise()];
case 3:
ret = _a.sent();
return [2 /*return*/, ret.filter(function (f) { var _a; return ((_a = f.rest) === null || _a === void 0 ? void 0 : _a.split('\t')[(f.field || 0) - 3]) === name; })];
}
searchExtraIndex(name, opts = {}) {
return __awaiter(this, void 0, void 0, function* () {
const blocks = yield this.searchExtraIndexBlocks(name, opts);
if (!blocks.length) {
return [];
}
const view = yield this.getUnzoomedView(opts);
const res = blocks.map(block => {
return new rxjs_1.Observable(observer => {
view.readFeatures(observer, [block], opts);
}).pipe((0, operators_1.reduce)((acc, curr) => acc.concat(curr)), (0, operators_1.map)(x => {
for (let i = 0; i < x.length; i += 1) {
x[i].field = block.field;
}
return x;
}));
});
const ret = yield (0, rxjs_1.merge)(...res).toPromise();
return ret.filter(f => { var _a; return ((_a = f.rest) === null || _a === void 0 ? void 0 : _a.split('\t')[(f.field || 0) - 3]) === name; });
});
};
return BigBed;
}(bbi_1.BBI));
}
}
exports.BigBed = BigBed;
//# sourceMappingURL=bigbed.js.map
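
That ends dist/bigbed.js; the 64-bit DataView helper files follow. BigBed adds readIndices() and searchExtraIndex(name, opts): the former parses the extra-index header (returning an empty array when no extra index is defined), and the latter walks the index B+ tree shown above, re-reads the matching data blocks, and keeps only features whose indexed column equals the query name. A hedged usage sketch; the filename and query string are placeholders, and the exact feature shape beyond start, end, rest, and field is not spelled out in this diff.

// Sketch only: 'annotations.bb' and 'ENSG00000123456' are placeholders.
const { BigBed } = require('@gmod/bbi')

async function lookup() {
  const bb = new BigBed({ path: 'annotations.bb' })
  const indices = await bb.readIndices()   // [] when the file defines no extra index
  if (indices.length) {
    const hits = await bb.searchExtraIndex('ENSG00000123456')
    console.log(hits.map(f => [f.start, f.end, f.field]))
  }
}

lookup().catch(console.error)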
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var pure_1 = require("./pure");
const pure_1 = require("./pure");
if (!('getBigInt64' in DataView)) {

@@ -5,0 +5,0 @@ DataView.prototype.getBigInt64 = function (byteOffset, littleEndian) {

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.setBigUint64 = exports.setBigInt64 = exports.getBigUint64 = exports.getBigInt64 = void 0;
var BigInt32 = BigInt(32);
const BigInt32 = BigInt(32);
function getBigInt64(dataView, byteOffset, littleEndian) {
var littleEndianMask = Number(!!littleEndian);
var bigEndianMask = Number(!littleEndian);
const littleEndianMask = Number(!!littleEndian);
const bigEndianMask = Number(!littleEndian);
return ((BigInt(dataView.getInt32(byteOffset, littleEndian) * bigEndianMask +

@@ -16,6 +16,6 @@ dataView.getInt32(byteOffset + 4, littleEndian) * littleEndianMask) <<

function getBigUint64(dataView, byteOffset, littleEndian) {
var a = dataView.getUint32(byteOffset, littleEndian);
var b = dataView.getUint32(byteOffset + 4, littleEndian);
var littleEndianMask = Number(!!littleEndian);
var bigEndianMask = Number(!littleEndian);
const a = dataView.getUint32(byteOffset, littleEndian);
const b = dataView.getUint32(byteOffset + 4, littleEndian);
const littleEndianMask = Number(!!littleEndian);
const bigEndianMask = Number(!littleEndian);
// This branch-less optimization is 77x faster than normal ternary operator.

@@ -29,4 +29,4 @@ // and only 3% slower than native implementation

function setBigInt64(dataView, byteOffset, value, littleEndian) {
var hi = Number(value >> BigInt32);
var lo = Number(value & BigInt(0xffffffff));
const hi = Number(value >> BigInt32);
const lo = Number(value & BigInt(0xffffffff));
if (littleEndian) {

@@ -43,4 +43,4 @@ dataView.setInt32(byteOffset + 4, hi, littleEndian);

function setBigUint64(dataView, byteOffset, value, littleEndian) {
var hi = Number(value >> BigInt32);
var lo = Number(value & BigInt(0xffffffff));
const hi = Number(value >> BigInt32);
const lo = Number(value & BigInt(0xffffffff));
if (littleEndian) {

@@ -47,0 +47,0 @@ dataView.setUint32(byteOffset + 4, hi, littleEndian);
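
The two small files above (the DataView patch that pulls in ./pure, and the pure getBigInt64/getBigUint64/setBigInt64/setBigUint64 helpers) supply 64-bit reads and writes where DataView lacks the BigInt methods; dist/bigwig.js follows. Each 64-bit value is composed from, or split into, two 32-bit words, and the shipped code picks the high and low words with branch-less masks (its own comment claims roughly 77x faster than a ternary). A plainer equivalent of the read path, for illustration only:

// Illustration of the same word composition using an explicit ternary rather
// than the branch-less masks in the shipped helpers.
const BigInt32 = BigInt(32)

function getBigUint64Sketch(dataView, byteOffset, littleEndian) {
  const a = dataView.getUint32(byteOffset, littleEndian)      // first 32-bit word
  const b = dataView.getUint32(byteOffset + 4, littleEndian)  // second 32-bit word
  const [hi, lo] = littleEndian ? [b, a] : [a, b]             // word order depends on endianness
  return (BigInt(hi) << BigInt32) | BigInt(lo)
}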

"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {

@@ -26,38 +11,7 @@ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }

};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BigWig = void 0;
var blockView_1 = require("./blockView");
var bbi_1 = require("./bbi");
var BigWig = /** @class */ (function (_super) {
__extends(BigWig, _super);
function BigWig() {
return _super !== null && _super.apply(this, arguments) || this;
}
const blockView_1 = require("./blockView");
const bbi_1 = require("./bbi");
class BigWig extends bbi_1.BBI {
/**

@@ -69,31 +23,23 @@ * Retrieves a BlockView of a specific zoomLevel

*/
BigWig.prototype.getView = function (scale, opts) {
return __awaiter(this, void 0, void 0, function () {
var _a, zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize, basesPerPx, maxLevel, i, zh, indexOffset;
return __generator(this, function (_b) {
switch (_b.label) {
case 0: return [4 /*yield*/, this.getHeader(opts)];
case 1:
_a = _b.sent(), zoomLevels = _a.zoomLevels, refsByName = _a.refsByName, fileSize = _a.fileSize, isBigEndian = _a.isBigEndian, uncompressBufSize = _a.uncompressBufSize;
basesPerPx = 1 / scale;
maxLevel = zoomLevels.length;
if (!fileSize) {
// if we don't know the file size, we can't fetch the highest zoom level :-(
maxLevel -= 1;
}
for (i = maxLevel; i >= 0; i -= 1) {
zh = zoomLevels[i];
if (zh && zh.reductionLevel <= 2 * basesPerPx) {
indexOffset = Number(zh.indexOffset);
return [2 /*return*/, new blockView_1.BlockView(this.bbi, refsByName, indexOffset, isBigEndian, uncompressBufSize > 0, 'summary')];
}
}
return [2 /*return*/, this.getUnzoomedView(opts)];
getView(scale, opts) {
return __awaiter(this, void 0, void 0, function* () {
const { zoomLevels, refsByName, fileSize, isBigEndian, uncompressBufSize } = yield this.getHeader(opts);
const basesPerPx = 1 / scale;
let maxLevel = zoomLevels.length;
if (!fileSize) {
// if we don't know the file size, we can't fetch the highest zoom level :-(
maxLevel -= 1;
}
for (let i = maxLevel; i >= 0; i -= 1) {
const zh = zoomLevels[i];
if (zh && zh.reductionLevel <= 2 * basesPerPx) {
const indexOffset = Number(zh.indexOffset);
return new blockView_1.BlockView(this.bbi, refsByName, indexOffset, isBigEndian, uncompressBufSize > 0, 'summary');
}
});
}
return this.getUnzoomedView(opts);
});
};
return BigWig;
}(bbi_1.BBI));
}
}
exports.BigWig = BigWig;
//# sourceMappingURL=bigwig.js.map
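
That ends dist/bigwig.js; dist/blockView.js follows. BigWig.getView(scale, opts) converts scale to basesPerPx = 1 / scale, walks the zoom levels from the last entry down to the first (skipping the highest level when the file size is unknown), and returns a summary-level BlockView for the first level whose reductionLevel is at most 2 * basesPerPx, falling back to the unzoomed view otherwise. From getFeatureStream, the same selection can be driven either by opts.scale or by opts.basesPerSpan, which is treated as 1 / scale. A small sketch of the two equivalent calls; the filename is a placeholder.

// Sketch only: both calls request features at roughly 100 bases per pixel,
// so they resolve to the same zoom-level choice inside getView().
const { BigWig } = require('@gmod/bbi')

async function zoomExample() {
  const bw = new BigWig({ path: 'coverage.bw' })
  const byScale = await bw.getFeatures('chr1', 0, 1000000, { scale: 1 / 100 })
  const bySpan = await bw.getFeatures('chr1', 0, 1000000, { basesPerSpan: 100 })
  console.log(byScale.length, bySpan.length)
}

zoomExample().catch(console.error)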
"use strict";
var __assign = (this && this.__assign) || function () {
__assign = Object.assign || function(t) {
for (var s, i = 1, n = arguments.length; i < n; i++) {
s = arguments[i];
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
t[p] = s[p];
}
return t;
};
return __assign.apply(this, arguments);
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {

@@ -22,29 +11,2 @@ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }

};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
var __importDefault = (this && this.__importDefault) || function (mod) {

@@ -55,12 +17,12 @@ return (mod && mod.__esModule) ? mod : { "default": mod };

exports.BlockView = void 0;
var binary_parser_1 = require("binary-parser");
var abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
var quick_lru_1 = __importDefault(require("quick-lru"));
const binary_parser_1 = require("binary-parser");
const abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache"));
const quick_lru_1 = __importDefault(require("quick-lru"));
// locals
var range_1 = __importDefault(require("./range"));
var unzip_1 = require("./unzip");
var util_1 = require("./util");
var BIG_WIG_TYPE_GRAPH = 1;
var BIG_WIG_TYPE_VSTEP = 2;
var BIG_WIG_TYPE_FSTEP = 3;
const range_1 = __importDefault(require("./range"));
const unzip_1 = require("./unzip");
const util_1 = require("./util");
const BIG_WIG_TYPE_GRAPH = 1;
const BIG_WIG_TYPE_VSTEP = 2;
const BIG_WIG_TYPE_FSTEP = 3;
function coordFilter(s1, e1, s2, e2) {

@@ -70,5 +32,4 @@ return s1 < e2 && e1 >= s2;

function getParsers(isBigEndian) {
var _a;
var le = isBigEndian ? 'big' : 'little';
var summaryParser = new binary_parser_1.Parser()
const le = isBigEndian ? 'big' : 'little';
const summaryParser = new binary_parser_1.Parser()
.endianess(le)

@@ -84,3 +45,3 @@ .uint32('chromId')

.saveOffset('offset');
var leafParser = new binary_parser_1.Parser()
const leafParser = new binary_parser_1.Parser()
.endianess(le)

@@ -118,3 +79,3 @@ .uint8('isLeaf')

});
var bigBedParser = new binary_parser_1.Parser()
const bigBedParser = new binary_parser_1.Parser()
.endianess(le)

@@ -128,3 +89,3 @@ .uint32('chromId')

.saveOffset('offset');
var bigWigParser = new binary_parser_1.Parser()
const bigWigParser = new binary_parser_1.Parser()
.endianess(le)

@@ -141,12 +102,12 @@ .skip(4)

tag: 'blockType',
choices: (_a = {},
_a[BIG_WIG_TYPE_FSTEP] = new binary_parser_1.Parser().array('items', {
choices: {
[BIG_WIG_TYPE_FSTEP]: new binary_parser_1.Parser().array('items', {
length: 'itemCount',
type: new binary_parser_1.Parser().floatle('score'),
}),
_a[BIG_WIG_TYPE_VSTEP] = new binary_parser_1.Parser().array('items', {
[BIG_WIG_TYPE_VSTEP]: new binary_parser_1.Parser().array('items', {
length: 'itemCount',
type: new binary_parser_1.Parser().endianess(le).int32('start').floatle('score'),
}),
_a[BIG_WIG_TYPE_GRAPH] = new binary_parser_1.Parser().array('items', {
[BIG_WIG_TYPE_GRAPH]: new binary_parser_1.Parser().array('items', {
length: 'itemCount',

@@ -159,9 +120,9 @@ type: new binary_parser_1.Parser()

}),
_a),
},
});
return {
bigWigParser: bigWigParser,
bigBedParser: bigBedParser,
summaryParser: summaryParser,
leafParser: leafParser,
bigWigParser,
bigBedParser,
summaryParser,
leafParser,
};

@@ -176,5 +137,4 @@ }

*/
var BlockView = /** @class */ (function () {
function BlockView(bbi, refsByName, cirTreeOffset, isBigEndian, isCompressed, blockType) {
var _this = this;
class BlockView {
constructor(bbi, refsByName, cirTreeOffset, isBigEndian, isCompressed, blockType) {
this.bbi = bbi;

@@ -188,18 +148,10 @@ this.refsByName = refsByName;

cache: new quick_lru_1.default({ maxSize: 1000 }),
fill: function (requestData, signal) { return __awaiter(_this, void 0, void 0, function () {
var len, off, buffer;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
len = Number(requestData.length);
off = Number(requestData.offset);
return [4 /*yield*/, this.bbi.read(Buffer.alloc(len), 0, len, off, {
signal: signal,
})];
case 1:
buffer = (_a.sent()).buffer;
return [2 /*return*/, buffer];
}
fill: (requestData, signal) => __awaiter(this, void 0, void 0, function* () {
const len = Number(requestData.length);
const off = Number(requestData.offset);
const { buffer } = yield this.bbi.read(Buffer.alloc(len), 0, len, off, {
signal,
});
}); },
return buffer;
}),
});

@@ -209,135 +161,115 @@ if (!(cirTreeOffset >= 0)) {

}
var parsers = getParsers(isBigEndian);
const parsers = getParsers(isBigEndian);
this.leafParser = parsers.leafParser;
this.bigBedParser = parsers.bigBedParser;
}
BlockView.prototype.readWigData = function (chrName, start, end, observer, opts) {
return __awaiter(this, void 0, void 0, function () {
var _a, refsByName, bbi, cirTreeOffset, isBigEndian, chrId_1, request_1, buffer, cirBlockSize_1, blocksToFetch_1, outstanding_1, cirFobRecur2_1, filterFeats_1, cirFobStartFetch_1, cirFobRecur_1, e_1;
var _this = this;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
_b.trys.push([0, 2, , 3]);
_a = this, refsByName = _a.refsByName, bbi = _a.bbi, cirTreeOffset = _a.cirTreeOffset, isBigEndian = _a.isBigEndian;
chrId_1 = refsByName[chrName];
if (chrId_1 === undefined) {
observer.complete();
readWigData(chrName, start, end, observer, opts) {
return __awaiter(this, void 0, void 0, function* () {
try {
const { refsByName, bbi, cirTreeOffset, isBigEndian } = this;
const chrId = refsByName[chrName];
if (chrId === undefined) {
observer.complete();
}
const request = { chrId, start, end };
if (!this.cirTreePromise) {
this.cirTreePromise = bbi.read(Buffer.alloc(48), 0, 48, Number(cirTreeOffset), opts);
}
const { buffer } = yield this.cirTreePromise;
const cirBlockSize = isBigEndian
? buffer.readUInt32BE(4)
: buffer.readUInt32LE(4);
let blocksToFetch = [];
let outstanding = 0;
const cirFobRecur2 = (cirBlockData, offset, level) => {
try {
const data = cirBlockData.subarray(offset);
const p = this.leafParser.parse(data);
if (p.blocksToFetch) {
blocksToFetch = blocksToFetch.concat(p.blocksToFetch
.filter(filterFeats)
.map((l) => ({
offset: l.blockOffset,
length: l.blockSize,
})));
}
request_1 = { chrId: chrId_1, start: start, end: end };
if (!this.cirTreePromise) {
this.cirTreePromise = bbi.read(Buffer.alloc(48), 0, 48, Number(cirTreeOffset), opts);
if (p.recurOffsets) {
const recurOffsets = p.recurOffsets
.filter(filterFeats)
.map(l => Number(l.blockOffset));
if (recurOffsets.length > 0) {
cirFobRecur(recurOffsets, level + 1);
}
}
return [4 /*yield*/, this.cirTreePromise];
case 1:
buffer = (_b.sent()).buffer;
cirBlockSize_1 = isBigEndian
? buffer.readUInt32BE(4)
: buffer.readUInt32LE(4);
blocksToFetch_1 = [];
outstanding_1 = 0;
cirFobRecur2_1 = function (cirBlockData, offset, level) {
try {
var data = cirBlockData.subarray(offset);
var p = _this.leafParser.parse(data);
if (p.blocksToFetch) {
blocksToFetch_1 = blocksToFetch_1.concat(p.blocksToFetch
.filter(filterFeats_1)
.map(function (l) { return ({
offset: l.blockOffset,
length: l.blockSize,
}); }));
}
catch (e) {
observer.error(e);
}
};
const filterFeats = (b) => {
const { startChrom, startBase, endChrom, endBase } = b;
return ((startChrom < chrId || (startChrom === chrId && startBase <= end)) &&
(endChrom > chrId || (endChrom === chrId && endBase >= start)));
};
const cirFobStartFetch = (off, fr, level) => __awaiter(this, void 0, void 0, function* () {
try {
const length = fr.max() - fr.min();
const offset = fr.min();
const resultBuffer = yield this.featureCache.get(`${length}_${offset}`, { length, offset }, opts.signal);
for (let i = 0; i < off.length; i += 1) {
if (fr.contains(off[i])) {
cirFobRecur2(resultBuffer, off[i] - offset, level);
outstanding -= 1;
if (outstanding === 0) {
this.readFeatures(observer, blocksToFetch, Object.assign(Object.assign({}, opts), { request }));
}
if (p.recurOffsets) {
var recurOffsets = p.recurOffsets
.filter(filterFeats_1)
.map(function (l) { return Number(l.blockOffset); });
if (recurOffsets.length > 0) {
cirFobRecur_1(recurOffsets, level + 1);
}
}
}
catch (e) {
observer.error(e);
}
};
filterFeats_1 = function (b) {
var startChrom = b.startChrom, startBase = b.startBase, endChrom = b.endChrom, endBase = b.endBase;
return ((startChrom < chrId_1 || (startChrom === chrId_1 && startBase <= end)) &&
(endChrom > chrId_1 || (endChrom === chrId_1 && endBase >= start)));
};
cirFobStartFetch_1 = function (off, fr, level) { return __awaiter(_this, void 0, void 0, function () {
var length_1, offset, resultBuffer, i, e_2;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
_a.trys.push([0, 2, , 3]);
length_1 = fr.max() - fr.min();
offset = fr.min();
return [4 /*yield*/, this.featureCache.get("".concat(length_1, "_").concat(offset), { length: length_1, offset: offset }, opts.signal)];
case 1:
resultBuffer = _a.sent();
for (i = 0; i < off.length; i += 1) {
if (fr.contains(off[i])) {
cirFobRecur2_1(resultBuffer, off[i] - offset, level);
outstanding_1 -= 1;
if (outstanding_1 === 0) {
this.readFeatures(observer, blocksToFetch_1, __assign(__assign({}, opts), { request: request_1 }));
}
}
}
return [3 /*break*/, 3];
case 2:
e_2 = _a.sent();
observer.error(e_2);
return [3 /*break*/, 3];
case 3: return [2 /*return*/];
}
});
}); };
cirFobRecur_1 = function (offset, level) {
try {
outstanding_1 += offset.length;
var maxCirBlockSpan = 4 + Number(cirBlockSize_1) * 32; // Upper bound on size, based on a completely full leaf node.
var spans = new range_1.default(offset[0], offset[0] + maxCirBlockSpan);
for (var i = 1; i < offset.length; i += 1) {
var blockSpan = new range_1.default(offset[i], offset[i] + maxCirBlockSpan);
spans = spans.union(blockSpan);
}
spans.getRanges().map(function (fr) { return cirFobStartFetch_1(offset, fr, level); });
}
catch (e) {
observer.error(e);
}
};
return [2 /*return*/, cirFobRecur_1([Number(cirTreeOffset) + 48], 1)];
case 2:
e_1 = _b.sent();
observer.error(e_1);
return [3 /*break*/, 3];
case 3: return [2 /*return*/];
}
});
}
}
catch (e) {
observer.error(e);
}
});
const cirFobRecur = (offset, level) => {
try {
outstanding += offset.length;
const maxCirBlockSpan = 4 + Number(cirBlockSize) * 32; // Upper bound on size, based on a completely full leaf node.
let spans = new range_1.default(offset[0], offset[0] + maxCirBlockSpan);
for (let i = 1; i < offset.length; i += 1) {
const blockSpan = new range_1.default(offset[i], offset[i] + maxCirBlockSpan);
spans = spans.union(blockSpan);
}
spans.getRanges().map(fr => cirFobStartFetch(offset, fr, level));
}
catch (e) {
observer.error(e);
}
};
return cirFobRecur([Number(cirTreeOffset) + 48], 1);
}
catch (e) {
observer.error(e);
}
});
};
BlockView.prototype.parseSummaryBlock = function (buffer, startOffset, request) {
var features = [];
var offset = startOffset;
var dataView = new DataView(buffer.buffer, buffer.byteOffset, buffer.length);
}
parseSummaryBlock(buffer, startOffset, request) {
const features = [];
let offset = startOffset;
const dataView = new DataView(buffer.buffer, buffer.byteOffset, buffer.length);
while (offset < buffer.byteLength) {
// this was extracted from looking at the runtime code generated by
// binary-parser
var chromId = dataView.getUint32(offset, true);
const chromId = dataView.getUint32(offset, true);
offset += 4;
var start = dataView.getUint32(offset, true);
const start = dataView.getUint32(offset, true);
offset += 4;
var end = dataView.getUint32(offset, true);
const end = dataView.getUint32(offset, true);
offset += 4;
var validCnt = dataView.getUint32(offset, true);
const validCnt = dataView.getUint32(offset, true);
offset += 4;
var minScore = dataView.getFloat32(offset, true);
const minScore = dataView.getFloat32(offset, true);
offset += 4;
var maxScore = dataView.getFloat32(offset, true);
const maxScore = dataView.getFloat32(offset, true);
offset += 4;
var sumData = dataView.getFloat32(offset, true);
const sumData = dataView.getFloat32(offset, true);
offset += 4;

@@ -352,6 +284,6 @@ // unused

features.push({
start: start,
end: end,
maxScore: maxScore,
minScore: minScore,
start,
end,
maxScore,
minScore,
summary: true,

@@ -363,60 +295,58 @@ score: sumData / (validCnt || 1),

return features;
};
BlockView.prototype.parseBigBedBlock = function (data, startOffset, offset, request) {
var items = [];
var currOffset = startOffset;
}
parseBigBedBlock(data, startOffset, offset, request) {
const items = [];
let currOffset = startOffset;
while (currOffset < data.byteLength) {
var res = this.bigBedParser.parse(data.subarray(currOffset));
items.push(__assign(__assign({}, res), { uniqueId: "bb-".concat(offset + currOffset) }));
const res = this.bigBedParser.parse(data.subarray(currOffset));
items.push(Object.assign(Object.assign({}, res), { uniqueId: `bb-${offset + currOffset}` }));
currOffset += res.offset;
}
return request
? items.filter(function (f) {
return coordFilter(f.start, f.end, request.start, request.end);
})
? items.filter((f) => coordFilter(f.start, f.end, request.start, request.end))
: items;
};
BlockView.prototype.parseBigWigBlock = function (buffer, startOffset, request) {
var b = buffer.subarray(startOffset);
var dataView = new DataView(b.buffer, b.byteOffset, b.length);
var offset = 0;
}
parseBigWigBlock(buffer, startOffset, request) {
const b = buffer.subarray(startOffset);
const dataView = new DataView(b.buffer, b.byteOffset, b.length);
let offset = 0;
offset += 4;
var blockStart = dataView.getInt32(offset, true);
const blockStart = dataView.getInt32(offset, true);
offset += 8;
var itemStep = dataView.getUint32(offset, true);
const itemStep = dataView.getUint32(offset, true);
offset += 4;
var itemSpan = dataView.getUint32(offset, true);
const itemSpan = dataView.getUint32(offset, true);
offset += 4;
var blockType = dataView.getUint8(offset);
const blockType = dataView.getUint8(offset);
offset += 2;
var itemCount = dataView.getUint16(offset, true);
const itemCount = dataView.getUint16(offset, true);
offset += 2;
var items = new Array(itemCount);
const items = new Array(itemCount);
switch (blockType) {
case 1:
for (var i = 0; i < itemCount; i++) {
var start = dataView.getInt32(offset, true);
for (let i = 0; i < itemCount; i++) {
const start = dataView.getInt32(offset, true);
offset += 4;
var end = dataView.getInt32(offset, true);
const end = dataView.getInt32(offset, true);
offset += 4;
var score = dataView.getFloat32(offset, true);
const score = dataView.getFloat32(offset, true);
offset += 4;
items[i] = { start: start, end: end, score: score };
items[i] = { start, end, score };
}
break;
case 2:
for (var i = 0; i < itemCount; i++) {
var start = dataView.getInt32(offset, true);
for (let i = 0; i < itemCount; i++) {
const start = dataView.getInt32(offset, true);
offset += 4;
var score = dataView.getFloat32(offset, true);
const score = dataView.getFloat32(offset, true);
offset += 4;
items[i] = { score: score, start: start, end: start + itemSpan };
items[i] = { score, start, end: start + itemSpan };
}
break;
case 3:
for (var i = 0; i < itemCount; i++) {
var score = dataView.getFloat32(offset, true);
for (let i = 0; i < itemCount; i++) {
const score = dataView.getFloat32(offset, true);
offset += 4;
var start = blockStart + i * itemStep;
items[i] = { score: score, start: start, end: start + itemSpan };
const start = blockStart + i * itemStep;
items[i] = { score, start, end: start + itemSpan };
}

@@ -426,74 +356,49 @@ break;

return request
? items.filter(function (f) {
return coordFilter(f.start, f.end, request.start, request.end);
})
? items.filter((f) => coordFilter(f.start, f.end, request.start, request.end))
: items;
};
BlockView.prototype.readFeatures = function (observer, blocks, opts) {
if (opts === void 0) { opts = {}; }
return __awaiter(this, void 0, void 0, function () {
var _a, blockType_1, isCompressed_1, signal_1, request_2, blockGroupsToFetch, e_3;
var _this = this;
return __generator(this, function (_b) {
switch (_b.label) {
case 0:
_b.trys.push([0, 2, , 3]);
_a = this, blockType_1 = _a.blockType, isCompressed_1 = _a.isCompressed;
signal_1 = opts.signal, request_2 = opts.request;
blockGroupsToFetch = (0, util_1.groupBlocks)(blocks);
(0, util_1.checkAbortSignal)(signal_1);
return [4 /*yield*/, Promise.all(blockGroupsToFetch.map(function (blockGroup) { return __awaiter(_this, void 0, void 0, function () {
var length, offset, data;
var _this = this;
return __generator(this, function (_a) {
switch (_a.label) {
case 0:
(0, util_1.checkAbortSignal)(signal_1);
length = blockGroup.length, offset = blockGroup.offset;
return [4 /*yield*/, this.featureCache.get("".concat(length, "_").concat(offset), blockGroup, signal_1)];
case 1:
data = _a.sent();
blockGroup.blocks.forEach(function (block) {
(0, util_1.checkAbortSignal)(signal_1);
var blockOffset = Number(block.offset) - Number(blockGroup.offset);
var resultData = data;
if (isCompressed_1) {
resultData = (0, unzip_1.unzip)(data.subarray(blockOffset));
blockOffset = 0;
}
(0, util_1.checkAbortSignal)(signal_1);
switch (blockType_1) {
case 'summary':
observer.next(_this.parseSummaryBlock(resultData, blockOffset, request_2));
break;
case 'bigwig':
observer.next(_this.parseBigWigBlock(resultData, blockOffset, request_2));
break;
case 'bigbed':
observer.next(_this.parseBigBedBlock(resultData, blockOffset, Number(block.offset) * (1 << 8), request_2));
break;
default:
console.warn("Don't know what to do with ".concat(blockType_1));
}
});
return [2 /*return*/];
}
});
}); }))];
case 1:
_b.sent();
observer.complete();
return [3 /*break*/, 3];
case 2:
e_3 = _b.sent();
observer.error(e_3);
return [3 /*break*/, 3];
case 3: return [2 /*return*/];
}
});
}
readFeatures(observer, blocks, opts = {}) {
return __awaiter(this, void 0, void 0, function* () {
try {
const { blockType, isCompressed } = this;
const { signal, request } = opts;
const blockGroupsToFetch = (0, util_1.groupBlocks)(blocks);
(0, util_1.checkAbortSignal)(signal);
yield Promise.all(blockGroupsToFetch.map((blockGroup) => __awaiter(this, void 0, void 0, function* () {
(0, util_1.checkAbortSignal)(signal);
const { length, offset } = blockGroup;
const data = yield this.featureCache.get(`${length}_${offset}`, blockGroup, signal);
blockGroup.blocks.forEach(block => {
(0, util_1.checkAbortSignal)(signal);
let blockOffset = Number(block.offset) - Number(blockGroup.offset);
let resultData = data;
if (isCompressed) {
resultData = (0, unzip_1.unzip)(data.subarray(blockOffset));
blockOffset = 0;
}
(0, util_1.checkAbortSignal)(signal);
switch (blockType) {
case 'summary':
observer.next(this.parseSummaryBlock(resultData, blockOffset, request));
break;
case 'bigwig':
observer.next(this.parseBigWigBlock(resultData, blockOffset, request));
break;
case 'bigbed':
observer.next(this.parseBigBedBlock(resultData, blockOffset, Number(block.offset) * (1 << 8), request));
break;
default:
console.warn(`Don't know what to do with ${blockType}`);
}
});
})));
observer.complete();
}
catch (e) {
observer.error(e);
}
});
};
return BlockView;
}());
}
}
exports.BlockView = BlockView;
//# sourceMappingURL=blockView.js.map

@@ -8,4 +8,4 @@ "use strict";

*/
var Range = /** @class */ (function () {
function Range(arg1, arg2) {
class Range {
constructor(arg1, arg2) {
this.ranges =

@@ -18,11 +18,11 @@ arguments.length === 2

}
Range.prototype.min = function () {
min() {
return this.ranges[0].min;
};
Range.prototype.max = function () {
}
max() {
return this.ranges[this.ranges.length - 1].max;
};
Range.prototype.contains = function (pos) {
for (var s = 0; s < this.ranges.length; s += 1) {
var r = this.ranges[s];
}
contains(pos) {
for (let s = 0; s < this.ranges.length; s += 1) {
const r = this.ranges[s];
if (r.min <= pos && r.max >= pos) {

@@ -33,18 +33,18 @@ return true;

return false;
};
Range.prototype.isContiguous = function () {
}
isContiguous() {
return this.ranges.length > 1;
};
Range.prototype.getRanges = function () {
return this.ranges.map(function (r) { return new Range(r.min, r.max); });
};
Range.prototype.toString = function () {
return this.ranges.map(function (r) { return "[".concat(r.min, "-").concat(r.max, "]"); }).join(',');
};
Range.prototype.union = function (s1) {
var ranges = this.getRanges().concat(s1.getRanges()).sort(this.rangeOrder);
var oranges = [];
var current = ranges[0];
for (var i = 1; i < ranges.length; i += 1) {
var nxt = ranges[i];
}
getRanges() {
return this.ranges.map((r) => new Range(r.min, r.max));
}
toString() {
return this.ranges.map((r) => `[${r.min}-${r.max}]`).join(',');
}
union(s1) {
const ranges = this.getRanges().concat(s1.getRanges()).sort(this.rangeOrder);
const oranges = [];
let current = ranges[0];
for (let i = 1; i < ranges.length; i += 1) {
const nxt = ranges[i];
if (nxt.min() > current.max() + 1) {

@@ -63,19 +63,19 @@ oranges.push(current);

return new Range(oranges);
};
Range.prototype.intersection = function (arg) {
}
intersection(arg) {
// eslint-disable-next-line @typescript-eslint/no-this-alias
var s0 = this;
var s1 = arg;
var r0 = this.ranges();
var r1 = s1.ranges();
var l0 = r0.length;
var l1 = r1.length;
var i0 = 0;
var i1 = 0;
var or = [];
let s0 = this;
let s1 = arg;
const r0 = this.ranges();
const r1 = s1.ranges();
const l0 = r0.length;
const l1 = r1.length;
let i0 = 0;
let i1 = 0;
const or = [];
while (i0 < l0 && i1 < l1) {
s0 = r0[i0];
s1 = r1[i1];
var lapMin = Math.max(s0.min(), s1.min());
var lapMax = Math.min(s0.max(), s1.max());
const lapMin = Math.max(s0.min(), s1.min());
const lapMax = Math.min(s0.max(), s1.max());
if (lapMax >= lapMin) {

@@ -98,15 +98,15 @@ or.push(new Range(lapMin, lapMax));

return new Range(or);
};
Range.prototype.coverage = function () {
var tot = 0;
var rl = this.ranges();
for (var ri = 0; ri < rl.length; ri += 1) {
var r = rl[ri];
}
coverage() {
let tot = 0;
const rl = this.ranges();
for (let ri = 0; ri < rl.length; ri += 1) {
const r = rl[ri];
tot += r.max() - r.min() + 1;
}
return tot;
};
Range.prototype.rangeOrder = function (tmpa, tmpb) {
var a = tmpa;
var b = tmpb;
}
rangeOrder(tmpa, tmpb) {
let a = tmpa;
let b = tmpb;
if (arguments.length < 2) {

@@ -130,6 +130,5 @@ b = a;

return 0;
};
return Range;
}());
}
}
exports.default = Range;
//# sourceMappingURL=range.js.map
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.unzip = void 0;
var pako_1 = require("pako");
const pako_1 = require("pako");
function unzip(input) {

@@ -6,0 +6,0 @@ return (0, pako_1.inflateRaw)(input.subarray(2));

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.unzip = void 0;
var zlib_1 = require("zlib");
const zlib_1 = require("zlib");
Object.defineProperty(exports, "unzip", { enumerable: true, get: function () { return zlib_1.inflateSync; } });
//# sourceMappingURL=unzip.js.map
"use strict";
var __extends = (this && this.__extends) || (function () {
var extendStatics = function (d, b) {
extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
return extendStatics(d, b);
};
return function (d, b) {
if (typeof b !== "function" && b !== null)
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {

@@ -26,41 +11,11 @@ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }

};
var __generator = (this && this.__generator) || function (thisArg, body) {
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
function verb(n) { return function (v) { return step([n, v]); }; }
function step(op) {
if (f) throw new TypeError("Generator is already executing.");
while (_) try {
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
if (y = 0, t) op = [op[0] & 2, t.value];
switch (op[0]) {
case 0: case 1: t = op; break;
case 4: _.label++; return { value: op[1], done: false };
case 5: _.label++; y = op[1]; op = [0]; continue;
case 7: op = _.ops.pop(); _.trys.pop(); continue;
default:
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
if (t[2]) _.ops.pop();
_.trys.pop(); continue;
}
op = body.call(thisArg, _);
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
}
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.abortBreakPoint = exports.checkAbortSignal = exports.groupBlocks = exports.AbortError = void 0;
/* eslint no-bitwise: ["error", { "allow": ["|"] }] */
var AbortError = /** @class */ (function (_super) {
__extends(AbortError, _super);
function AbortError(message) {
var _this = _super.call(this, message) || this;
_this.code = 'ERR_ABORTED';
return _this;
class AbortError extends Error {
constructor(message) {
super(message);
this.code = 'ERR_ABORTED';
}
return AbortError;
}(Error));
}
exports.AbortError = AbortError;

@@ -70,7 +25,7 @@ // sort blocks by file offset and

function groupBlocks(blocks) {
blocks.sort(function (b0, b1) { return Number(b0.offset) - Number(b1.offset); });
var blockGroups = [];
var lastBlock;
var lastBlockEnd;
for (var i = 0; i < blocks.length; i += 1) {
blocks.sort((b0, b1) => Number(b0.offset) - Number(b1.offset));
const blockGroups = [];
let lastBlock;
let lastBlockEnd;
for (let i = 0; i < blocks.length; i += 1) {
if (lastBlock &&

@@ -118,3 +73,3 @@ lastBlockEnd &&

else {
var e = new AbortError('aborted');
const e = new AbortError('aborted');
e.code = 'ERR_ABORTED';

@@ -133,12 +88,5 @@ throw e;

function abortBreakPoint(signal) {
return __awaiter(this, void 0, void 0, function () {
return __generator(this, function (_a) {
switch (_a.label) {
case 0: return [4 /*yield*/, Promise.resolve()];
case 1:
_a.sent();
checkAbortSignal(signal);
return [2 /*return*/];
}
});
return __awaiter(this, void 0, void 0, function* () {
yield Promise.resolve();
checkAbortSignal(signal);
});

@@ -145,0 +93,0 @@ }

{
"name": "@gmod/bbi",
"version": "2.0.4",
"version": "2.0.5",
"description": "Parser for BigWig/BigBed files",

@@ -29,3 +29,3 @@ "license": "MIT",

"build:esm": "tsc --target es2018 --outDir esm --module es2020",
"build:es5": "tsc --target es5 --outDir dist --module commonjs",
"build:es5": "tsc --target es2015 --module commonjs --outDir dist",
"build": "npm run build:esm && npm run build:es5",

@@ -32,0 +32,0 @@ "preversion": "npm run lint && npm test && npm run build",

@@ -5,3 +5,3 @@ # bbi-js

[![Coverage Status](https://img.shields.io/codecov/c/github/GMOD/bbi-js/master.svg?style=flat-square)](https://codecov.io/gh/GMOD/bbi-js/branch/master)
[![Build Status](https://img.shields.io/github/workflow/status/GMOD/bbi-js/Push/master?logo=github&style=flat-query)](https://github.com/GMOD/bbi-js/actions?query=branch%3Amaster+workflow%3APush+)
[![Build Status](https://img.shields.io/github/actions/workflow/status/GMOD/bbi-js/push.yml?branch=master)](https://github.com/GMOD/bbi-js/actions?query=branch%3Amaster+workflow%3APush+)

@@ -8,0 +8,0 @@ A parser for bigwig and bigbed file formats
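For orientation, a minimal usage sketch of the published API. The BigWig export, getHeader(), and getFeatures() are documented in the package README; the filename and the filehandle-based constructor option below are illustrative, not taken from this diff.

// Minimal sketch: read score values from a BigWig file with @gmod/bbi.
// Assumes a local file named 'volvox.bw'; swap in your own path or filehandle.
const { BigWig } = require('@gmod/bbi')
const { LocalFile } = require('generic-filehandle')

async function main() {
  // Construct the parser over a filehandle (generic-filehandle is already a dependency of this package)
  const bw = new BigWig({ filehandle: new LocalFile('volvox.bw') })
  // Parse the file header and chromosome list
  await bw.getHeader()
  // Fetch features overlapping chr1:0-100
  const features = await bw.getFeatures('chr1', 0, 100)
  for (const { start, end, score } of features) {
    console.log(start, end, score)
  }
}

main().catch(console.error)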
