@gmod/cram
Comparing version 1.5.9 to 1.6.0
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _sort = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/sort")); | ||
var _entries = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/entries")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _parseInt = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/number/parse-int")); | ||
var _bind = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/bind")); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _some = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/some")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _assign = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/assign")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var AbortablePromiseCache = require('abortable-promise-cache').default; | ||
var QuickLRU = require('quick-lru'); | ||
var _require = require('es6-promisify'), | ||
promisify = _require.promisify; | ||
var zlib = require('zlib'); | ||
var gunzip = promisify(zlib.gunzip); | ||
var _require2 = require('./io'), | ||
open = _require2.open; | ||
var _require3 = require('./errors'), | ||
CramMalformedError = _require3.CramMalformedError; | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var abortable_promise_cache_1 = __importDefault(require("abortable-promise-cache")); | ||
var quick_lru_1 = __importDefault(require("quick-lru")); | ||
var es6_promisify_1 = require("es6-promisify"); | ||
var zlib_1 = __importDefault(require("zlib")); | ||
var io_1 = require("./io"); | ||
var errors_1 = require("./errors"); | ||
var gunzip = (0, es6_promisify_1.promisify)(zlib_1.default.gunzip); | ||
var BAI_MAGIC = 21578050; // BAI\1 | ||
var Slice = | ||
/*#__PURE__*/ | ||
function () { | ||
function Slice(args) { | ||
(0, _classCallCheck2.default)(this, Slice); | ||
(0, _assign.default)(this, args); | ||
} | ||
(0, _createClass2.default)(Slice, [{ | ||
key: "toString", | ||
value: function toString() { | ||
var _context, _context2, _context3, _context4; | ||
      return (0, _concat.default)(_context = (0, _concat.default)(_context2 = (0, _concat.default)(_context3 = (0, _concat.default)(_context4 = "".concat(this.start, ":")).call(_context4, this.span, ":")).call(_context3, this.containerStart, ":")).call(_context2, this.sliceStart, ":")).call(_context, this.sliceBytes);
    }
  }]);
  return Slice;
}();
var Slice = /** @class */ (function () {
    function Slice(args) {
        Object.assign(this, args);
    }
    Slice.prototype.toString = function () {
        return "".concat(this.start, ":").concat(this.span, ":").concat(this.containerStart, ":").concat(this.sliceStart, ":").concat(this.sliceBytes);
    };
    return Slice;
}());
function addRecordToIndex(index, record) {
  if ((0, _some.default)(record).call(record, function (el) {
    return el === undefined;
  })) {
    throw new CramMalformedError('invalid .crai index file');
  }
  var _record = (0, _slicedToArray2.default)(record, 6),
    seqId = _record[0],
    start = _record[1],
    span = _record[2],
    containerStart = _record[3],
    sliceStart = _record[4],
    sliceBytes = _record[5];
  if (!index[seqId]) index[seqId] = [];
  index[seqId].push(new Slice({
    start: start,
    span: span,
    containerStart: containerStart,
    sliceStart: sliceStart,
    sliceBytes: sliceBytes
  }));
}
function addRecordToIndex(index, record) {
    if (record.some(function (el) { return el === undefined; })) {
        throw new errors_1.CramMalformedError('invalid .crai index file');
    }
    var seqId = record[0], start = record[1], span = record[2], containerStart = record[3], sliceStart = record[4], sliceBytes = record[5];
    if (!index[seqId]) {
        index[seqId] = [];
    }
    index[seqId].push(new Slice({
        start: start,
        span: span,
        containerStart: containerStart,
        sliceStart: sliceStart,
        sliceBytes: sliceBytes,
    }));
}
var CraiIndex = | ||
/*#__PURE__*/ | ||
function () { | ||
// A CRAM index (.crai) is a gzipped tab delimited file containing the following columns: | ||
// 1. Sequence id | ||
// 2. Alignment start | ||
// 3. Alignment span | ||
// 4. Container start byte position in the file | ||
// 5. Slice start byte position in the container data (‘blocks’) | ||
// 6. Slice size in bytes | ||
// Each line represents a slice in the CRAM file. Please note that all slices must be listed in index file. | ||
/** | ||
* | ||
* @param {object} args | ||
* @param {string} [args.path] | ||
* @param {string} [args.url] | ||
* @param {FileHandle} [args.filehandle] | ||
*/ | ||
  function CraiIndex(args) {
    var _this = this,
      _context5;
    (0, _classCallCheck2.default)(this, CraiIndex);
    var filehandle = open(args.url, args.path, args.filehandle);
    this._parseCache = new AbortablePromiseCache({
      cache: new QuickLRU({
        maxSize: 1
      }),
      fill: function fill(data, signal) {
        return _this.parseIndex({
          signal: signal
        });
      }
    });
    this.readFile = (0, _bind.default)(_context5 = filehandle.readFile).call(_context5, filehandle);
  }
var CraiIndex = /** @class */ (function () {
    // A CRAM index (.crai) is a gzipped tab delimited file containing the following columns:
    // 1. Sequence id
    // 2. Alignment start
    // 3. Alignment span
    // 4. Container start byte position in the file
    // 5. Slice start byte position in the container data (‘blocks’)
    // 6. Slice size in bytes
    // Each line represents a slice in the CRAM file. Please note that all slices must be listed in index file.
    /**
     *
     * @param {object} args
     * @param {string} [args.path]
     * @param {string} [args.url]
     * @param {FileHandle} [args.filehandle]
     */
    function CraiIndex(args) {
        var _this = this;
        var filehandle = (0, io_1.open)(args.url, args.path, args.filehandle);
        this._parseCache = new abortable_promise_cache_1.default({
            cache: new quick_lru_1.default({ maxSize: 1 }),
            fill: function (data, signal) { return _this.parseIndex({ signal: signal }); },
        });
        this.readFile = filehandle.readFile.bind(filehandle);
    }
(0, _createClass2.default)(CraiIndex, [{ | ||
key: "parseIndex", | ||
value: function parseIndex() { | ||
var index = {}; | ||
return this.readFile().then(function (data) { | ||
if (data[0] === 31 && data[1] === 139) return gunzip(data); | ||
return data; | ||
}).then(function (uncompressedBuffer) { | ||
var _context6; | ||
if (uncompressedBuffer.length > 4 && uncompressedBuffer.readUInt32LE(0) === BAI_MAGIC) { | ||
throw new CramMalformedError('invalid .crai index file. note: file appears to be a .bai index. this is technically legal but please open a github issue if you need support'); | ||
} // interpret the text as regular ascii, since it is | ||
// supposed to be only digits and whitespace characters | ||
// this is written in a deliberately low-level fashion for performance, | ||
// because some .crai files can be pretty large. | ||
var currentRecord = []; | ||
var currentString = ''; | ||
for (var i = 0; i < uncompressedBuffer.length; i += 1) { | ||
var charCode = uncompressedBuffer[i]; | ||
if (charCode >= 48 && charCode <= 57 || | ||
/* 0-9 */ | ||
!currentString && charCode === 45 | ||
/* leading - */ | ||
) { | ||
currentString += String.fromCharCode(charCode); | ||
} else if (charCode === 9 | ||
/* \t */ | ||
) { | ||
currentRecord.push((0, _parseInt.default)(currentString, 10)); | ||
currentString = ''; | ||
} else if (charCode === 10 | ||
/* \n */ | ||
) { | ||
currentRecord.push((0, _parseInt.default)(currentString, 10)); | ||
currentString = ''; | ||
addRecordToIndex(index, currentRecord); | ||
currentRecord = []; | ||
} else if (charCode !== 13 | ||
/* \r */ | ||
&& charCode !== 32 | ||
/* space */ | ||
) { | ||
// if there are other characters in the file besides | ||
// space and \r, something is wrong. | ||
throw new CramMalformedError('invalid .crai index file'); | ||
CraiIndex.prototype.parseIndex = function () { | ||
var index = {}; | ||
return this.readFile() | ||
.then(function (data) { | ||
if (data[0] === 31 && data[1] === 139) { | ||
return gunzip(data); | ||
} | ||
} // if the file ends without a \n, we need to flush our buffers | ||
if (currentString) { | ||
currentRecord.push((0, _parseInt.default)(currentString, 10)); | ||
} | ||
if (currentRecord.length === 6) { | ||
addRecordToIndex(index, currentRecord); | ||
} // sort each of them by start | ||
(0, _forEach.default)(_context6 = (0, _entries.default)(index)).call(_context6, function (_ref) { | ||
var _ref2 = (0, _slicedToArray2.default)(_ref, 2), | ||
seqId = _ref2[0], | ||
ent = _ref2[1]; | ||
index[seqId] = (0, _sort.default)(ent).call(ent, function (a, b) { | ||
return a.start - b.start || a.span - b.span; | ||
}); | ||
return data; | ||
}) | ||
.then(function (uncompressedBuffer) { | ||
if (uncompressedBuffer.length > 4 && | ||
uncompressedBuffer.readUInt32LE(0) === BAI_MAGIC) { | ||
throw new errors_1.CramMalformedError('invalid .crai index file. note: file appears to be a .bai index. this is technically legal but please open a github issue if you need support'); | ||
} | ||
// interpret the text as regular ascii, since it is | ||
// supposed to be only digits and whitespace characters | ||
// this is written in a deliberately low-level fashion for performance, | ||
// because some .crai files can be pretty large. | ||
var currentRecord = []; | ||
var currentString = ''; | ||
for (var i = 0; i < uncompressedBuffer.length; i += 1) { | ||
var charCode = uncompressedBuffer[i]; | ||
if ((charCode >= 48 && charCode <= 57) /* 0-9 */ || | ||
(!currentString && charCode === 45) /* leading - */) { | ||
currentString += String.fromCharCode(charCode); | ||
} | ||
else if (charCode === 9 /* \t */) { | ||
currentRecord.push(Number.parseInt(currentString, 10)); | ||
currentString = ''; | ||
} | ||
else if (charCode === 10 /* \n */) { | ||
currentRecord.push(Number.parseInt(currentString, 10)); | ||
currentString = ''; | ||
addRecordToIndex(index, currentRecord); | ||
currentRecord = []; | ||
} | ||
else if (charCode !== 13 /* \r */ && charCode !== 32 /* space */) { | ||
// if there are other characters in the file besides | ||
// space and \r, something is wrong. | ||
throw new errors_1.CramMalformedError('invalid .crai index file'); | ||
} | ||
} | ||
// if the file ends without a \n, we need to flush our buffers | ||
if (currentString) { | ||
currentRecord.push(Number.parseInt(currentString, 10)); | ||
} | ||
if (currentRecord.length === 6) { | ||
addRecordToIndex(index, currentRecord); | ||
} | ||
// sort each of them by start | ||
Object.entries(index).forEach(function (_a) { | ||
var seqId = _a[0], ent = _a[1]; | ||
index[seqId] = ent.sort(function (a, b) { return a.start - b.start || a.span - b.span; }); | ||
}); | ||
return index; | ||
}); | ||
return index; | ||
}); | ||
} | ||
}, { | ||
key: "getIndex", | ||
value: function getIndex() { | ||
var opts = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
return this._parseCache.get('index', null, opts.signal); | ||
} | ||
}; | ||
CraiIndex.prototype.getIndex = function (opts) { | ||
if (opts === void 0) { opts = {}; } | ||
return this._parseCache.get('index', null, opts.signal); | ||
}; | ||
  /**
@@ -218,34 +166,12 @@
   * @param {number} seqId
   */
}, { | ||
key: "hasDataForReferenceSequence", | ||
value: function () { | ||
var _hasDataForReferenceSequence = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee(seqId) { | ||
return _regenerator.default.wrap(function _callee$(_context7) { | ||
while (1) { | ||
switch (_context7.prev = _context7.next) { | ||
case 0: | ||
_context7.next = 2; | ||
return this.getIndex(); | ||
case 2: | ||
_context7.t0 = seqId; | ||
return _context7.abrupt("return", !!_context7.sent[_context7.t0]); | ||
case 4: | ||
case "end": | ||
return _context7.stop(); | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function hasDataForReferenceSequence(_x) { | ||
return _hasDataForReferenceSequence.apply(this, arguments); | ||
} | ||
return hasDataForReferenceSequence; | ||
}() | ||
CraiIndex.prototype.hasDataForReferenceSequence = function (seqId) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getIndex()]; | ||
case 1: return [2 /*return*/, !!(_a.sent())[seqId]]; | ||
} | ||
}); | ||
}); | ||
}; | ||
  /**
@@ -262,67 +188,38 @@
   * fetch index entries for the given range
   */
}, { | ||
key: "getEntriesForRange", | ||
value: function () { | ||
var _getEntriesForRange = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2(seqId, queryStart, queryEnd) { | ||
var seqEntries, compare, bins, i; | ||
return _regenerator.default.wrap(function _callee2$(_context8) { | ||
while (1) { | ||
switch (_context8.prev = _context8.next) { | ||
case 0: | ||
_context8.next = 2; | ||
return this.getIndex(); | ||
case 2: | ||
_context8.t0 = seqId; | ||
seqEntries = _context8.sent[_context8.t0]; | ||
if (seqEntries) { | ||
_context8.next = 6; | ||
break; | ||
CraiIndex.prototype.getEntriesForRange = function (seqId, queryStart, queryEnd) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var seqEntries, compare, bins, i; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getIndex()]; | ||
case 1: | ||
seqEntries = (_a.sent())[seqId]; | ||
if (!seqEntries) { | ||
return [2 /*return*/, []]; | ||
} | ||
compare = function (entry) { | ||
var entryStart = entry.start; | ||
var entryEnd = entry.start + entry.span; | ||
if (entryStart >= queryEnd) { | ||
return -1; | ||
} // entry is ahead of query | ||
if (entryEnd <= queryStart) { | ||
return 1; | ||
} // entry is behind query | ||
return 0; // entry overlaps query | ||
}; | ||
bins = []; | ||
for (i = 0; i < seqEntries.length; i += 1) { | ||
if (compare(seqEntries[i]) === 0) { | ||
bins.push(seqEntries[i]); | ||
} | ||
} | ||
return [2 /*return*/, bins]; | ||
} | ||
return _context8.abrupt("return", []); | ||
case 6: | ||
compare = function compare(entry) { | ||
var entryStart = entry.start; | ||
var entryEnd = entry.start + entry.span; | ||
if (entryStart >= queryEnd) return -1; // entry is ahead of query | ||
if (entryEnd <= queryStart) return 1; // entry is behind query | ||
return 0; // entry overlaps query | ||
}; | ||
bins = []; | ||
for (i = 0; i < seqEntries.length; i += 1) { | ||
if (compare(seqEntries[i]) === 0) { | ||
bins.push(seqEntries[i]); | ||
} | ||
} | ||
return _context8.abrupt("return", bins); | ||
case 10: | ||
case "end": | ||
return _context8.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function getEntriesForRange(_x2, _x3, _x4) { | ||
return _getEntriesForRange.apply(this, arguments); | ||
} | ||
return getEntriesForRange; | ||
}() | ||
}]); | ||
return CraiIndex; | ||
}(); | ||
module.exports = CraiIndex; | ||
}); | ||
}); | ||
}; | ||
return CraiIndex; | ||
}()); | ||
exports.default = CraiIndex; | ||
//# sourceMappingURL=craiIndex.js.map |
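The comments in CraiIndex above describe the .crai layout: a gzipped, tab-delimited text file with six numeric columns per slice (sequence id, alignment start, alignment span, container start byte, slice start byte within the container, slice size in bytes). Below is a minimal standalone sketch of that parsing, using hypothetical parseCraiText/readCraiSync helpers rather than the package's own API, to show how one index line maps onto the Slice fields.

const zlib = require('zlib')
const fs = require('fs')

// parse uncompressed .crai text into { seqId: [ { start, span, containerStart, sliceStart, sliceBytes } ] }
function parseCraiText(text) {
  const index = {}
  for (const line of text.split('\n')) {
    if (!line.trim()) continue
    const fields = line.split('\t').map(Number)
    if (fields.length !== 6 || fields.some(Number.isNaN)) {
      throw new Error('invalid .crai line: ' + line)
    }
    const [seqId, start, span, containerStart, sliceStart, sliceBytes] = fields
    if (!index[seqId]) index[seqId] = []
    index[seqId].push({ start, span, containerStart, sliceStart, sliceBytes })
  }
  // like parseIndex above, sort each reference's slices by start, then span
  for (const entries of Object.values(index)) {
    entries.sort((a, b) => a.start - b.start || a.span - b.span)
  }
  return index
}

function readCraiSync(path) {
  let buf = fs.readFileSync(path)
  // .crai files are gzipped; 0x1f 0x8b is the gzip magic that parseIndex above checks for
  if (buf[0] === 31 && buf[1] === 139) buf = zlib.gunzipSync(buf)
  return parseCraiText(buf.toString('ascii'))
}

// const index = readCraiSync('my.cram.crai')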
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _require = require('../../errors'), | ||
CramBufferOverrunError = _require.CramBufferOverrunError; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var validDataTypes = { | ||
int: true, | ||
byte: true, | ||
long: true, | ||
byteArray: true, | ||
byteArrayBlock: true | ||
}; // codec base class | ||
var CramCodec = | ||
/*#__PURE__*/ | ||
function () { | ||
function CramCodec() { | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, CramCodec); | ||
this.parameters = parameters; | ||
this.dataType = dataType; | ||
if (!dataType) throw new TypeError('must provide a data type to codec constructor'); | ||
if (!validDataTypes[dataType]) throw new TypeError("invalid data type ".concat(dataType)); | ||
} // decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
// } | ||
(0, _createClass2.default)(CramCodec, [{ | ||
key: "_getBits", | ||
value: function _getBits(data, cursor, numBits) { | ||
var val = 0; | ||
if (cursor.bytePosition + (7 - cursor.bitPosition + numBits) / 8 > data.length) throw new CramBufferOverrunError('read error during decoding. the file seems to be truncated.'); | ||
for (var dlen = numBits; dlen; dlen -= 1) { | ||
// get the next `dlen` bits in the input, put them in val | ||
val <<= 1; | ||
val |= data[cursor.bytePosition] >> cursor.bitPosition & 1; | ||
cursor.bitPosition -= 1; | ||
if (cursor.bitPosition < 0) cursor.bytePosition += 1; | ||
cursor.bitPosition &= 7; | ||
} | ||
      return val;
    }
  }]);
  return CramCodec;
}();
module.exports = CramCodec;
    int: true,
    byte: true,
    long: true,
    byteArray: true,
    byteArrayBlock: true,
};
// codec base class
var CramCodec = /** @class */ (function () {
    function CramCodec(parameters, dataType) {
        if (parameters === void 0) { parameters = {}; }
        this.parameters = parameters;
        this.dataType = dataType;
        if (!dataType) {
            throw new TypeError('must provide a data type to codec constructor');
        }
        if (!validDataTypes[dataType]) {
            throw new TypeError("invalid data type ".concat(dataType));
        }
    }
// decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
// } | ||
CramCodec.prototype._getBits = function (data, cursor, numBits) { | ||
var val = 0; | ||
if (cursor.bytePosition + (7 - cursor.bitPosition + numBits) / 8 > | ||
data.length) { | ||
throw new errors_1.CramBufferOverrunError('read error during decoding. the file seems to be truncated.'); | ||
} | ||
for (var dlen = numBits; dlen; dlen -= 1) { | ||
// get the next `dlen` bits in the input, put them in val | ||
val <<= 1; | ||
val |= (data[cursor.bytePosition] >> cursor.bitPosition) & 1; | ||
cursor.bitPosition -= 1; | ||
if (cursor.bitPosition < 0) { | ||
cursor.bytePosition += 1; | ||
} | ||
cursor.bitPosition &= 7; | ||
} | ||
return val; | ||
}; | ||
return CramCodec; | ||
}()); | ||
exports.default = CramCodec; | ||
//# sourceMappingURL=_base.js.map |
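The _getBits method above reads from the core data block through a cursor of the form { bytePosition, bitPosition }, consuming bits most-significant-first with bitPosition counting down from 7 to 0 before the byte position advances. A small standalone sketch of that convention (readBits is a hypothetical helper, not part of the package):

function readBits(data, cursor, numBits) {
  let val = 0
  for (let i = 0; i < numBits; i += 1) {
    // take the bit at bitPosition (7 = most significant) of the current byte
    val = (val << 1) | ((data[cursor.bytePosition] >> cursor.bitPosition) & 1)
    cursor.bitPosition -= 1
    if (cursor.bitPosition < 0) {
      cursor.bytePosition += 1
      cursor.bitPosition = 7
    }
  }
  return val
}

// reading 4 bits and then 8 bits from the bytes 10110110 01000000
const cursor = { bytePosition: 0, bitPosition: 7 }
const data = Buffer.from([0b10110110, 0b01000000])
console.log(readBits(data, cursor, 4)) // 11  (binary 1011)
console.log(readBits(data, cursor, 8)) // 100 (binary 01100100)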
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError; | ||
var CramCodec = require('./_base'); | ||
var BetaCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(BetaCodec, _CramCodec); | ||
function BetaCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, BetaCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(BetaCodec).call(this, parameters, dataType)); | ||
if (_this.dataType !== 'int') { | ||
throw new CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by BETA codec")); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var BetaCodec = /** @class */ (function (_super) { | ||
__extends(BetaCodec, _super); | ||
function BetaCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (_this.dataType !== 'int') { | ||
throw new errors_1.CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by BETA codec")); | ||
} | ||
return _this; | ||
} | ||
return _this; | ||
} | ||
(0, _createClass2.default)(BetaCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var data = this._getBits(coreDataBlock.content, cursors.coreBlock, this.parameters.length) - this.parameters.offset; | ||
return data; | ||
} | ||
}]); | ||
return BetaCodec; | ||
}(CramCodec); | ||
module.exports = BetaCodec; | ||
BetaCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
var data = this._getBits(coreDataBlock.content, cursors.coreBlock, this.parameters.length) - this.parameters.offset; | ||
return data; | ||
}; | ||
return BetaCodec; | ||
}(_base_1.default)); | ||
exports.default = BetaCodec; | ||
//# sourceMappingURL=beta.js.map |
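The BETA codec above decodes an integer by reading a fixed number of bits (parameters.length) from the core block and subtracting parameters.offset. A self-contained sketch under that reading, with a hypothetical readBits helper following the same cursor convention as _getBits:

// same most-significant-first cursor convention as _getBits above
function readBits(data, cursor, numBits) {
  let val = 0
  for (let i = 0; i < numBits; i += 1) {
    val = (val << 1) | ((data[cursor.bytePosition] >> cursor.bitPosition) & 1)
    cursor.bitPosition -= 1
    if (cursor.bitPosition < 0) { cursor.bytePosition += 1; cursor.bitPosition = 7 }
  }
  return val
}

function betaDecode(parameters, coreData, cursor) {
  // read a fixed-width value, then shift it back by the offset
  return readBits(coreData, cursor, parameters.length) - parameters.offset
}

// with { length: 5, offset: 1 }, the bit string 00111 decodes to 7 - 1 = 6
const cursor = { bytePosition: 0, bitPosition: 7 }
console.log(betaDecode({ length: 5, offset: 1 }, Buffer.from([0b00111000]), cursor)) // 6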
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _context; | ||
var _require = require('../util'), | ||
tinyMemoize = _require.tinyMemoize; | ||
var CramCodec = require('./_base'); | ||
var ByteArrayStopCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(ByteArrayStopCodec, _CramCodec); | ||
function ByteArrayStopCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
var instantiateCodec = arguments.length > 2 ? arguments[2] : undefined; | ||
(0, _classCallCheck2.default)(this, ByteArrayStopCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(ByteArrayStopCodec).call(this, parameters, dataType)); | ||
_this.instantiateCodec = instantiateCodec; | ||
if (dataType !== 'byteArray') throw new TypeError("byteArrayLength does not support data type ".concat(dataType)); | ||
return _this; | ||
} | ||
(0, _createClass2.default)(ByteArrayStopCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var lengthCodec = this._getLengthCodec(); | ||
var arrayLength = lengthCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
var dataCodec = this._getDataCodec(); | ||
var data = new Array(arrayLength); | ||
for (var i = 0; i < arrayLength; i += 1) { | ||
data[i] = dataCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
} | ||
return data; | ||
} // memoize | ||
}, { | ||
key: "_getLengthCodec", | ||
value: function _getLengthCodec() { | ||
var encodingParams = this.parameters.lengthsEncoding; | ||
return this.instantiateCodec(encodingParams, 'int'); | ||
} // memoize | ||
}, { | ||
key: "_getDataCodec", | ||
value: function _getDataCodec() { | ||
var encodingParams = this.parameters.valuesEncoding; | ||
return this.instantiateCodec(encodingParams, 'byte'); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var util_1 = require("../util"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var ByteArrayStopCodec = /** @class */ (function (_super) { | ||
__extends(ByteArrayStopCodec, _super); | ||
function ByteArrayStopCodec(parameters, dataType, instantiateCodec) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
_this.instantiateCodec = instantiateCodec; | ||
if (dataType !== 'byteArray') { | ||
throw new TypeError("byteArrayLength does not support data type ".concat(dataType)); | ||
} | ||
return _this; | ||
} | ||
}]); | ||
return ByteArrayStopCodec; | ||
}(CramCodec); | ||
(0, _forEach.default)(_context = '_getLengthCodec _getDataCodec'.split(' ')).call(_context, function (method) { | ||
return tinyMemoize(ByteArrayStopCodec, method); | ||
}); | ||
module.exports = ByteArrayStopCodec; | ||
ByteArrayStopCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
var lengthCodec = this._getLengthCodec(); | ||
var arrayLength = lengthCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
var dataCodec = this._getDataCodec(); | ||
var data = new Array(arrayLength); | ||
for (var i = 0; i < arrayLength; i += 1) { | ||
data[i] = dataCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
} | ||
return data; | ||
}; | ||
// memoize | ||
ByteArrayStopCodec.prototype._getLengthCodec = function () { | ||
var encodingParams = this.parameters.lengthsEncoding; | ||
return this.instantiateCodec(encodingParams, 'int'); | ||
}; | ||
// memoize | ||
ByteArrayStopCodec.prototype._getDataCodec = function () { | ||
var encodingParams = this.parameters.valuesEncoding; | ||
return this.instantiateCodec(encodingParams, 'byte'); | ||
}; | ||
return ByteArrayStopCodec; | ||
}(_base_1.default)); | ||
exports.default = ByteArrayStopCodec; | ||
'_getLengthCodec _getDataCodec' | ||
.split(' ') | ||
.forEach(function (method) { return (0, util_1.tinyMemoize)(ByteArrayStopCodec, method); }); | ||
//# sourceMappingURL=byteArrayLength.js.map |
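The byteArrayLength codec above composes two sub-codecs: one built from parameters.lengthsEncoding decodes the array length, the other built from parameters.valuesEncoding then decodes that many bytes. A simplified sketch where the sub-codecs are plain functions rather than instantiated codec objects:

// lengthCodec and dataCodec stand in for the codecs built from
// parameters.lengthsEncoding and parameters.valuesEncoding
function decodeByteArray(lengthCodec, dataCodec, state) {
  const arrayLength = lengthCodec(state)
  const data = new Array(arrayLength)
  for (let i = 0; i < arrayLength; i += 1) {
    data[i] = dataCodec(state)
  }
  return data
}

// usage: a "length" codec that reads one byte, and a "value" codec that reads bytes
const state = { buf: Buffer.from([3, 65, 66, 67]), pos: 0 }
const readByte = s => s.buf[s.pos++]
console.log(decodeByteArray(readByte, readByte, state)) // [ 65, 66, 67 ]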
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _slice = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/slice")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramBufferOverrunError = _require.CramBufferOverrunError, | ||
CramMalformedError = _require.CramMalformedError; | ||
var CramCodec = require('./_base'); | ||
var ByteArrayStopCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(ByteArrayStopCodec, _CramCodec); | ||
function ByteArrayStopCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, ByteArrayStopCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(ByteArrayStopCodec).call(this, parameters, dataType)); | ||
if (dataType === 'byteArray') { | ||
_this._decode = _this._decodeByteArray; | ||
} else { | ||
throw new TypeError("byteArrayStop codec does not support data type ".concat(dataType)); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var ByteArrayStopCodec = /** @class */ (function (_super) { | ||
__extends(ByteArrayStopCodec, _super); | ||
function ByteArrayStopCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (dataType === 'byteArray') { | ||
_this._decode = _this._decodeByteArray; | ||
} | ||
else { | ||
throw new TypeError("byteArrayStop codec does not support data type ".concat(dataType)); | ||
} | ||
return _this; | ||
} | ||
return _this; | ||
} | ||
(0, _createClass2.default)(ByteArrayStopCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var blockContentId = this.parameters.blockContentId; | ||
var contentBlock = blocksByContentId[blockContentId]; | ||
if (!contentBlock) throw new CramMalformedError("no block found with content ID ".concat(blockContentId)); | ||
var cursor = cursors.externalBlocks.getCursor(blockContentId); | ||
return this._decode(contentBlock, cursor); | ||
} | ||
}, { | ||
key: "_decodeByteArray", | ||
value: function _decodeByteArray(contentBlock, cursor) { | ||
var dataBuffer = contentBlock.content; | ||
var stopByte = this.parameters.stopByte; // scan to the next stop byte | ||
var startPosition = cursor.bytePosition; | ||
var stopPosition = cursor.bytePosition; | ||
while (dataBuffer[stopPosition] !== stopByte && stopPosition < dataBuffer.length) { | ||
        if (stopPosition === dataBuffer.length) {
          throw new CramBufferOverrunError("byteArrayStop reading beyond length of data buffer?");
        }
        stopPosition += 1;
      }
      cursor.bytePosition = stopPosition + 1;
      var data = (0, _slice.default)(dataBuffer).call(dataBuffer, startPosition, stopPosition);
      return data;
    }
  }]);
  return ByteArrayStopCodec;
}(CramCodec);
module.exports = ByteArrayStopCodec;
    ByteArrayStopCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) {
        var blockContentId = this.parameters.blockContentId;
        var contentBlock = blocksByContentId[blockContentId];
        if (!contentBlock) {
            throw new errors_1.CramMalformedError("no block found with content ID ".concat(blockContentId));
        }
        var cursor = cursors.externalBlocks.getCursor(blockContentId);
        return this._decode(contentBlock, cursor);
    };
ByteArrayStopCodec.prototype._decodeByteArray = function (contentBlock, cursor) { | ||
var dataBuffer = contentBlock.content; | ||
var stopByte = this.parameters.stopByte; | ||
// scan to the next stop byte | ||
var startPosition = cursor.bytePosition; | ||
var stopPosition = cursor.bytePosition; | ||
while (dataBuffer[stopPosition] !== stopByte && | ||
stopPosition < dataBuffer.length) { | ||
if (stopPosition === dataBuffer.length) { | ||
throw new errors_1.CramBufferOverrunError("byteArrayStop reading beyond length of data buffer?"); | ||
} | ||
stopPosition += 1; | ||
} | ||
cursor.bytePosition = stopPosition + 1; | ||
var data = dataBuffer.slice(startPosition, stopPosition); | ||
return data; | ||
}; | ||
return ByteArrayStopCodec; | ||
}(_base_1.default)); | ||
exports.default = ByteArrayStopCodec; | ||
//# sourceMappingURL=byteArrayStop.js.map |
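The byteArrayStop codec above reads bytes from an external block identified by parameters.blockContentId until it reaches parameters.stopByte, then advances the cursor past the stop byte. A standalone sketch of that scan (decodeByteArrayStop is a hypothetical name, not the package API):

function decodeByteArrayStop(dataBuffer, cursor, stopByte) {
  const startPosition = cursor.bytePosition
  let stopPosition = startPosition
  // scan forward to the stop byte (or the end of the block)
  while (stopPosition < dataBuffer.length && dataBuffer[stopPosition] !== stopByte) {
    stopPosition += 1
  }
  // leave the cursor just past the stop byte, return the bytes before it
  cursor.bytePosition = stopPosition + 1
  return dataBuffer.slice(startPosition, stopPosition)
}

const cur = { bytePosition: 0 }
console.log(decodeByteArrayStop(Buffer.from('ACGT\0AA'), cur, 0).toString()) // 'ACGT'
console.log(cur.bytePosition) // 5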
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError, | ||
CramMalformedError = _require.CramMalformedError, | ||
CramBufferOverrunError = _require.CramBufferOverrunError; | ||
var CramCodec = require('./_base'); | ||
var _require2 = require('../util'), | ||
parseItf8 = _require2.parseItf8; | ||
var ExternalCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(ExternalCodec, _CramCodec); | ||
function ExternalCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, ExternalCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(ExternalCodec).call(this, parameters, dataType)); | ||
if (_this.dataType === 'int') { | ||
_this._decodeData = _this._decodeInt; | ||
} else if (_this.dataType === 'byte') { | ||
_this._decodeData = _this._decodeByte; | ||
} else { | ||
throw new CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by EXTERNAL codec")); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var util_1 = require("../util"); | ||
var ExternalCodec = /** @class */ (function (_super) { | ||
__extends(ExternalCodec, _super); | ||
function ExternalCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (_this.dataType === 'int') { | ||
_this._decodeData = _this._decodeInt; | ||
} | ||
else if (_this.dataType === 'byte') { | ||
_this._decodeData = _this._decodeByte; | ||
} | ||
else { | ||
throw new errors_1.CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by EXTERNAL codec")); | ||
} | ||
return _this; | ||
} | ||
return _this; | ||
} | ||
(0, _createClass2.default)(ExternalCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var blockContentId = this.parameters.blockContentId; | ||
var contentBlock = blocksByContentId[blockContentId]; | ||
if (!contentBlock) throw new CramMalformedError("no block found with content ID ".concat(blockContentId)); | ||
var cursor = cursors.externalBlocks.getCursor(blockContentId); | ||
return this._decodeData(contentBlock, cursor); | ||
} | ||
}, { | ||
key: "_decodeInt", | ||
value: function _decodeInt(contentBlock, cursor) { | ||
var _parseItf = parseItf8(contentBlock.content, cursor.bytePosition), | ||
_parseItf2 = (0, _slicedToArray2.default)(_parseItf, 2), | ||
result = _parseItf2[0], | ||
bytesRead = _parseItf2[1]; | ||
cursor.bytePosition += bytesRead; | ||
return result; | ||
} | ||
}, { | ||
key: "_decodeByte", | ||
value: function _decodeByte(contentBlock, cursor) { | ||
if (cursor.bytePosition >= contentBlock.content.length) throw new CramBufferOverrunError('attempted to read beyond end of block. this file seems truncated.'); | ||
var result = contentBlock.content[cursor.bytePosition]; | ||
cursor.bytePosition += 1; | ||
return result; | ||
} | ||
}]); | ||
return ExternalCodec; | ||
}(CramCodec); | ||
module.exports = ExternalCodec; | ||
ExternalCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
var blockContentId = this.parameters.blockContentId; | ||
var contentBlock = blocksByContentId[blockContentId]; | ||
if (!contentBlock) { | ||
throw new errors_1.CramMalformedError("no block found with content ID ".concat(blockContentId)); | ||
} | ||
var cursor = cursors.externalBlocks.getCursor(blockContentId); | ||
return this._decodeData(contentBlock, cursor); | ||
}; | ||
ExternalCodec.prototype._decodeInt = function (contentBlock, cursor) { | ||
var _a = (0, util_1.parseItf8)(contentBlock.content, cursor.bytePosition), result = _a[0], bytesRead = _a[1]; | ||
cursor.bytePosition += bytesRead; | ||
return result; | ||
}; | ||
ExternalCodec.prototype._decodeByte = function (contentBlock, cursor) { | ||
if (cursor.bytePosition >= contentBlock.content.length) { | ||
throw new errors_1.CramBufferOverrunError('attempted to read beyond end of block. this file seems truncated.'); | ||
} | ||
var result = contentBlock.content[cursor.bytePosition]; | ||
cursor.bytePosition += 1; | ||
return result; | ||
}; | ||
return ExternalCodec; | ||
}(_base_1.default)); | ||
exports.default = ExternalCodec; | ||
//# sourceMappingURL=external.js.map |
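The EXTERNAL codec above keeps one cursor per external block (keyed by blockContentId) and, for the 'byte' data type, returns the byte at the cursor and advances it; the 'int' path delegates to the ITF8 variable-length integer parser in ../util, which is not reimplemented here. A sketch of the byte path, with a plain object standing in for cursors.externalBlocks:

function decodeExternalByte(blocksByContentId, externalCursors, blockContentId) {
  const contentBlock = blocksByContentId[blockContentId]
  if (!contentBlock) {
    throw new Error('no block found with content ID ' + blockContentId)
  }
  // one independent cursor per external block, keyed by content ID
  let cursor = externalCursors[blockContentId]
  if (!cursor) {
    cursor = externalCursors[blockContentId] = { bytePosition: 0 }
  }
  if (cursor.bytePosition >= contentBlock.content.length) {
    throw new Error('attempted to read beyond end of block')
  }
  const result = contentBlock.content[cursor.bytePosition]
  cursor.bytePosition += 1
  return result
}

// usage with a fake external block under content ID 5
const blocks = { 5: { content: Buffer.from([7, 8, 9]) } }
const cursors = {}
console.log(decodeExternalByte(blocks, cursors, 5)) // 7
console.log(decodeExternalByte(blocks, cursors, 5)) // 8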
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError; | ||
var CramCodec = require('./_base'); | ||
var GammaCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(GammaCodec, _CramCodec); | ||
function GammaCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, GammaCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(GammaCodec).call(this, parameters, dataType)); | ||
if (_this.dataType !== 'int') { | ||
throw new CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by GAMMA codec")); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var GammaCodec = /** @class */ (function (_super) { | ||
__extends(GammaCodec, _super); | ||
function GammaCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (_this.dataType !== 'int') { | ||
throw new errors_1.CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by GAMMA codec")); | ||
} | ||
return _this; | ||
} | ||
return _this; | ||
} | ||
(0, _createClass2.default)(GammaCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var length = 1; | ||
while (this._getBits(coreDataBlock.content, cursors.coreBlock, 1) === 0) { | ||
length += 1; | ||
} | ||
var readBits = this._getBits(coreDataBlock.content, cursors.coreBlock, length - 1); | ||
var value = readBits | 1 << length - 1; | ||
return value - this.parameters.offset; | ||
} | ||
}]); | ||
return GammaCodec; | ||
}(CramCodec); | ||
module.exports = GammaCodec; | ||
GammaCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
var length = 1; | ||
while (this._getBits(coreDataBlock.content, cursors.coreBlock, 1) === 0) { | ||
length += 1; | ||
} | ||
var readBits = this._getBits(coreDataBlock.content, cursors.coreBlock, length - 1); | ||
var value = readBits | (1 << (length - 1)); | ||
return value - this.parameters.offset; | ||
}; | ||
return GammaCodec; | ||
}(_base_1.default)); | ||
exports.default = GammaCodec; | ||
//# sourceMappingURL=gamma.js.map |
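The GAMMA codec above is Elias gamma decoding: count zero bits up to the first 1 to get the code length, read length - 1 further bits, OR in the implicit leading 1, and subtract parameters.offset. A self-contained sketch (readBit and gammaDecode are hypothetical helpers, not the package API):

function readBit(data, cursor) {
  const bit = (data[cursor.bytePosition] >> cursor.bitPosition) & 1
  cursor.bitPosition -= 1
  if (cursor.bitPosition < 0) { cursor.bytePosition += 1; cursor.bitPosition = 7 }
  return bit
}

function gammaDecode(parameters, coreData, cursor) {
  // count zeros until the first 1 bit: that is the code length
  let length = 1
  while (readBit(coreData, cursor) === 0) length += 1
  // read the remaining length - 1 bits and OR in the implicit leading 1
  let rest = 0
  for (let i = 0; i < length - 1; i += 1) rest = (rest << 1) | readBit(coreData, cursor)
  return (rest | (1 << (length - 1))) - parameters.offset
}

// the bit string 00101 is the Elias gamma code for 5; with offset 0 it decodes to 5
const cursor = { bytePosition: 0, bitPosition: 7 }
console.log(gammaDecode({ offset: 0 }, Buffer.from([0b00101000]), cursor)) // 5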
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _fill = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/fill")); | ||
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toConsumableArray")); | ||
var _map = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/map")); | ||
var _values = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/values")); | ||
var _parseInt2 = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/parse-int")); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _entries = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/entries")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _sort = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/sort")); | ||
var _includes = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/includes")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var CramCodec = require('./_base'); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
function numberOfSetBits(ii) { | ||
var i = ii - (ii >> 1) & 0x55555555; | ||
i = (i & 0x33333333) + (i >> 2 & 0x33333333); | ||
return (i + (i >> 4) & 0x0f0f0f0f) * 0x01010101 >> 24; | ||
var i = (ii - (ii >> 1)) & 0x55555555; | ||
i = (i & 0x33333333) + ((i >> 2) & 0x33333333); | ||
return (((i + (i >> 4)) & 0x0f0f0f0f) * 0x01010101) >> 24; | ||
} | ||
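numberOfSetBits above is the standard SWAR popcount for a 32-bit value; buildCodes below uses it to detect out-of-range Huffman code values. A quick self-contained cross-check against naive bit-by-bit counting (naivePopcount is a hypothetical helper; numberOfSetBits is copied verbatim from above so the snippet runs on its own):

function numberOfSetBits(ii) {
  var i = (ii - (ii >> 1)) & 0x55555555
  i = (i & 0x33333333) + ((i >> 2) & 0x33333333)
  return (((i + (i >> 4)) & 0x0f0f0f0f) * 0x01010101) >> 24
}

// naive bit-by-bit count for comparison
function naivePopcount(n) {
  let count = 0
  for (let b = 0; b < 32; b += 1) count += (n >> b) & 1
  return count
}

for (const n of [0, 1, 0b1011, 0xff, 0x12345678]) {
  console.log(n, numberOfSetBits(n), naivePopcount(n)) // the two counts agree
}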
var HuffmanIntCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(HuffmanIntCodec, _CramCodec); | ||
function HuffmanIntCodec() { | ||
var _context; | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, HuffmanIntCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(HuffmanIntCodec).call(this, parameters, dataType)); | ||
if (!(0, _includes.default)(_context = ['byte', 'int']).call(_context, _this.dataType)) { | ||
throw new TypeError("".concat(_this.dataType, " decoding not yet implemented by HUFFMAN_INT codec")); | ||
var HuffmanIntCodec = /** @class */ (function (_super) { | ||
__extends(HuffmanIntCodec, _super); | ||
function HuffmanIntCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (!['byte', 'int'].includes(_this.dataType)) { | ||
throw new TypeError("".concat(_this.dataType, " decoding not yet implemented by HUFFMAN_INT codec")); | ||
} | ||
_this.buildCodeBook(); | ||
_this.buildCodes(); | ||
_this.buildCaches(); | ||
// if this is a degenerate zero-length huffman code, special-case the decoding | ||
if (_this.sortedCodes[0].bitLength === 0) { | ||
_this._decode = _this._decodeZeroLengthCode; | ||
} | ||
return _this; | ||
} | ||
_this.buildCodeBook(); | ||
_this.buildCodes(); | ||
_this.buildCaches(); // if this is a degenerate zero-length huffman code, special-case the decoding | ||
if (_this.sortedCodes[0].bitLength === 0) _this._decode = _this._decodeZeroLengthCode; | ||
return _this; | ||
} | ||
(0, _createClass2.default)(HuffmanIntCodec, [{ | ||
key: "buildCodeBook", | ||
value: function buildCodeBook() { | ||
var _this2 = this; | ||
// parse the parameters together into a `codes` data structure | ||
var codes = new Array(this.parameters.numCodes); | ||
for (var i = 0; i < this.parameters.numCodes; i += 1) { | ||
codes[i] = { | ||
symbol: this.parameters.symbols[i], | ||
bitLength: this.parameters.bitLengths[i] | ||
}; | ||
} // sort the codes by bit length and symbol value | ||
codes = (0, _sort.default)(codes).call(codes, function (a, b) { | ||
return a.bitLength - b.bitLength || a.symbol - b.symbol; | ||
}); | ||
this.codeBook = {}; | ||
(0, _forEach.default)(codes).call(codes, function (code) { | ||
if (!_this2.codeBook[code.bitLength]) _this2.codeBook[code.bitLength] = []; | ||
_this2.codeBook[code.bitLength].push(code.symbol); | ||
}); | ||
} | ||
}, { | ||
key: "buildCodes", | ||
value: function buildCodes() { | ||
var _context2, | ||
_this3 = this; | ||
this.codes = {}; | ||
/* new TreeMap<Integer, HuffmanBitCode>(); */ | ||
var codeLength = 0; | ||
var codeValue = -1; | ||
(0, _forEach.default)(_context2 = (0, _entries.default)(this.codeBook)).call(_context2, function (_ref) { | ||
var _ref2 = (0, _slicedToArray2.default)(_ref, 2), | ||
bitLength = _ref2[0], | ||
symbols = _ref2[1]; | ||
bitLength = (0, _parseInt2.default)(bitLength, 10); | ||
(0, _forEach.default)(symbols).call(symbols, function (symbol) { | ||
var code = { | ||
bitLength: bitLength, | ||
value: symbol | ||
}; | ||
codeValue += 1; | ||
var delta = bitLength - codeLength; // new length? | ||
codeValue <<= delta; // pad with 0's | ||
code.bitCode = codeValue; // calculated: huffman code | ||
codeLength += delta; // adjust current code length | ||
if (numberOfSetBits(codeValue) > bitLength) throw new CramMalformedError('Symbol out of range'); | ||
_this3.codes[symbol] = code; | ||
HuffmanIntCodec.prototype.buildCodeBook = function () { | ||
var _this = this; | ||
// parse the parameters together into a `codes` data structure | ||
var codes = new Array(this.parameters.numCodes); | ||
for (var i = 0; i < this.parameters.numCodes; i += 1) { | ||
codes[i] = { | ||
symbol: this.parameters.symbols[i], | ||
bitLength: this.parameters.bitLengths[i], | ||
}; | ||
} | ||
// sort the codes by bit length and symbol value | ||
codes = codes.sort(function (a, b) { return a.bitLength - b.bitLength || a.symbol - b.symbol; }); | ||
this.codeBook = {}; | ||
codes.forEach(function (code) { | ||
if (!_this.codeBook[code.bitLength]) { | ||
_this.codeBook[code.bitLength] = []; | ||
} | ||
_this.codeBook[code.bitLength].push(code.symbol); | ||
}); | ||
}); | ||
} | ||
}, { | ||
key: "buildCaches", | ||
value: function buildCaches() { | ||
var _context3, _context4, _context5, _context6, _context7, _context8; | ||
this.sortedCodes = (0, _sort.default)(_context3 = (0, _values.default)(this.codes)).call(_context3, function (a, b) { | ||
return a.bitLength - b.bitLength || a.bitCode - b.bitCode; | ||
}); // this.sortedValues = this.parameters.values.sort((a,b) => a-b) | ||
this.sortedByValue = (0, _sort.default)(_context4 = (0, _values.default)(this.codes)).call(_context4, function (a, b) { | ||
return a.value - b.value; | ||
}); | ||
this.sortedValuesByBitCode = (0, _map.default)(_context5 = this.sortedCodes).call(_context5, function (c) { | ||
return c.value; | ||
}); | ||
this.sortedBitCodes = (0, _map.default)(_context6 = this.sortedCodes).call(_context6, function (c) { | ||
return c.bitCode; | ||
}); | ||
this.sortedBitLengthsByBitCode = (0, _map.default)(_context7 = this.sortedCodes).call(_context7, function (c) { | ||
return c.bitLength; | ||
}); | ||
var maxBitCode = Math.max.apply(Math, (0, _toConsumableArray2.default)(this.sortedBitCodes)); | ||
this.bitCodeToValue = (0, _fill.default)(_context8 = new Array(maxBitCode + 1)).call(_context8, -1); | ||
for (var i = 0; i < this.sortedBitCodes.length; i += 1) { | ||
this.bitCodeToValue[this.sortedCodes[i].bitCode] = i; | ||
} | ||
} | ||
}, { | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
return this._decode(slice, coreDataBlock, cursors.coreBlock); | ||
} // _decodeNull() { | ||
}; | ||
HuffmanIntCodec.prototype.buildCodes = function () { | ||
var _this = this; | ||
this.codes = {}; /* new TreeMap<Integer, HuffmanBitCode>(); */ | ||
var codeLength = 0; | ||
var codeValue = -1; | ||
Object.entries(this.codeBook).forEach(function (_a) { | ||
var bitLength = _a[0], symbols = _a[1]; | ||
bitLength = parseInt(bitLength, 10); | ||
symbols.forEach(function (symbol) { | ||
var code = { bitLength: bitLength, value: symbol }; | ||
codeValue += 1; | ||
var delta = bitLength - codeLength; // new length? | ||
codeValue <<= delta; // pad with 0's | ||
code.bitCode = codeValue; // calculated: huffman code | ||
codeLength += delta; // adjust current code length | ||
if (numberOfSetBits(codeValue) > bitLength) { | ||
throw new errors_1.CramMalformedError('Symbol out of range'); | ||
} | ||
_this.codes[symbol] = code; | ||
}); | ||
}); | ||
}; | ||
HuffmanIntCodec.prototype.buildCaches = function () { | ||
this.sortedCodes = Object.values(this.codes).sort(function (a, b) { return a.bitLength - b.bitLength || a.bitCode - b.bitCode; }); | ||
// this.sortedValues = this.parameters.values.sort((a,b) => a-b) | ||
this.sortedByValue = Object.values(this.codes).sort(function (a, b) { return a.value - b.value; }); | ||
this.sortedValuesByBitCode = this.sortedCodes.map(function (c) { return c.value; }); | ||
this.sortedBitCodes = this.sortedCodes.map(function (c) { return c.bitCode; }); | ||
this.sortedBitLengthsByBitCode = this.sortedCodes.map(function (c) { return c.bitLength; }); | ||
var maxBitCode = Math.max.apply(Math, this.sortedBitCodes); | ||
this.bitCodeToValue = new Array(maxBitCode + 1).fill(-1); | ||
for (var i = 0; i < this.sortedBitCodes.length; i += 1) { | ||
this.bitCodeToValue[this.sortedCodes[i].bitCode] = i; | ||
} | ||
}; | ||
HuffmanIntCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
return this._decode(slice, coreDataBlock, cursors.coreBlock); | ||
}; | ||
// _decodeNull() { | ||
// return -1 | ||
// } | ||
// the special case for zero-length codes | ||
}, { | ||
key: "_decodeZeroLengthCode", | ||
value: function _decodeZeroLengthCode() { | ||
return this.sortedCodes[0].value; | ||
} | ||
}, { | ||
key: "_decode", | ||
value: function _decode(slice, coreDataBlock, coreCursor) { | ||
var input = coreDataBlock.content; | ||
var prevLen = 0; | ||
var bits = 0; | ||
for (var i = 0; i < this.sortedCodes.length; i += 1) { | ||
var length = this.sortedCodes[i].bitLength; | ||
bits <<= length - prevLen; | ||
bits |= this._getBits(input, coreCursor, length - prevLen); | ||
prevLen = length; | ||
{ | ||
var index = this.bitCodeToValue[bits]; | ||
if (index > -1 && this.sortedBitLengthsByBitCode[index] === length) return this.sortedValuesByBitCode[index]; | ||
for (var j = i; this.sortedCodes[j + 1].bitLength === length && j < this.sortedCodes.length; j += 1) { | ||
i += 1; | ||
} | ||
HuffmanIntCodec.prototype._decodeZeroLengthCode = function () { | ||
return this.sortedCodes[0].value; | ||
}; | ||
HuffmanIntCodec.prototype._decode = function (slice, coreDataBlock, coreCursor) { | ||
var input = coreDataBlock.content; | ||
var prevLen = 0; | ||
var bits = 0; | ||
for (var i = 0; i < this.sortedCodes.length; i += 1) { | ||
var length_1 = this.sortedCodes[i].bitLength; | ||
bits <<= length_1 - prevLen; | ||
bits |= this._getBits(input, coreCursor, length_1 - prevLen); | ||
prevLen = length_1; | ||
{ | ||
var index = this.bitCodeToValue[bits]; | ||
if (index > -1 && this.sortedBitLengthsByBitCode[index] === length_1) { | ||
return this.sortedValuesByBitCode[index]; | ||
} | ||
for (var j = i; this.sortedCodes[j + 1].bitLength === length_1 && | ||
j < this.sortedCodes.length; j += 1) { | ||
i += 1; | ||
} | ||
} | ||
} | ||
} | ||
throw new CramMalformedError('Huffman symbol not found.'); | ||
} | ||
}]); | ||
return HuffmanIntCodec; | ||
}(CramCodec); | ||
module.exports = HuffmanIntCodec; | ||
throw new errors_1.CramMalformedError('Huffman symbol not found.'); | ||
}; | ||
return HuffmanIntCodec; | ||
}(_base_1.default)); | ||
exports.default = HuffmanIntCodec; | ||
//# sourceMappingURL=huffman.js.map |
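Both builds of huffman.js shown in this hunk (the 1.5.9 Babel/core-js output and the 1.6.0 tsc output) implement the same canonical-Huffman construction: buildCodeBook groups the symbols by bit length, and buildCodes then walks them in (bitLength, symbol) order, incrementing a running code value and left-shifting it by the growth in bit length. A minimal standalone sketch of that assignment step, using made-up symbols and bit lengths rather than real CRAM encoding parameters:

// Assign canonical Huffman codes the way buildCodes does: walk the symbols in
// (bitLength, symbol) order, bump the running code value, and pad with zero
// bits whenever the bit length increases.
function assignCanonicalCodes(symbols, bitLengths) {
  const entries = symbols
    .map((symbol, i) => ({ symbol, bitLength: bitLengths[i] }))
    .sort((a, b) => a.bitLength - b.bitLength || a.symbol - b.symbol)

  const codes = {}
  let codeLength = 0
  let codeValue = -1
  for (const { symbol, bitLength } of entries) {
    codeValue += 1
    codeValue <<= bitLength - codeLength // pad with 0's up to the new length
    codeLength = bitLength
    codes[symbol] = { bitLength, bitCode: codeValue }
  }
  return codes
}

// e.g. three symbols with bit lengths 1, 2 and 2 come out as codes 0, 10, 11
// assignCanonicalCodes([65, 67, 71], [1, 2, 2])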
"use strict"; | ||
var _require = require('../../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError; | ||
var HuffmanIntCodec = require('./huffman'); | ||
var ExternalCodec = require('./external'); | ||
var ByteArrayStopCodec = require('./byteArrayStop'); | ||
var ByteArrayLengthCodec = require('./byteArrayLength'); | ||
var BetaCodec = require('./beta'); | ||
var GammaCodec = require('./gamma'); | ||
var SubexpCodec = require('./subexp'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.instantiateCodec = exports.getCodecClassWithId = void 0; | ||
var errors_1 = require("../../errors"); | ||
var huffman_1 = __importDefault(require("./huffman")); | ||
var external_1 = __importDefault(require("./external")); | ||
var byteArrayStop_1 = __importDefault(require("./byteArrayStop")); | ||
var byteArrayLength_1 = __importDefault(require("./byteArrayLength")); | ||
var beta_1 = __importDefault(require("./beta")); | ||
var gamma_1 = __importDefault(require("./gamma")); | ||
var subexp_1 = __importDefault(require("./subexp")); | ||
var codecClasses = { | ||
1: ExternalCodec, | ||
// 2: GolombCodec, | ||
3: HuffmanIntCodec, | ||
4: ByteArrayLengthCodec, | ||
5: ByteArrayStopCodec, | ||
6: BetaCodec, | ||
7: SubexpCodec, | ||
// 8: GolombRiceCodec, | ||
9: GammaCodec | ||
1: external_1.default, | ||
// 2: GolombCodec, | ||
3: huffman_1.default, | ||
4: byteArrayLength_1.default, | ||
5: byteArrayStop_1.default, | ||
6: beta_1.default, | ||
7: subexp_1.default, | ||
// 8: GolombRiceCodec, | ||
9: gamma_1.default, | ||
}; | ||
function getCodecClassWithId(id) { | ||
return codecClasses[id]; | ||
return codecClasses[id]; | ||
} | ||
exports.getCodecClassWithId = getCodecClassWithId; | ||
function instantiateCodec(encodingData, dataType) { | ||
var CodecClass = getCodecClassWithId(dataType === 'ignore' ? 0 : encodingData.codecId); | ||
if (!CodecClass) throw new CramUnimplementedError("no codec implemented for codec ID ".concat(encodingData.codecId)); | ||
return new CodecClass(encodingData.parameters, dataType, instantiateCodec); | ||
var CodecClass = getCodecClassWithId(dataType === 'ignore' ? 0 : encodingData.codecId); | ||
if (!CodecClass) { | ||
throw new errors_1.CramUnimplementedError("no codec implemented for codec ID ".concat(encodingData.codecId)); | ||
} | ||
return new CodecClass(encodingData.parameters, dataType, instantiateCodec); | ||
} | ||
module.exports = { | ||
getCodecClassWithId: getCodecClassWithId, | ||
instantiateCodec: instantiateCodec | ||
}; | ||
exports.instantiateCodec = instantiateCodec; | ||
//# sourceMappingURL=index.js.map |
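codecs/index.js is the dispatch table for the CRAM encodings: IDs 1, 3, 4, 5, 6, 7 and 9 map to EXTERNAL, HUFFMAN, BYTE_ARRAY_LEN, BYTE_ARRAY_STOP, BETA, SUBEXP and GAMMA, the GOLOMB codecs (2 and 8) stay commented out, and instantiateCodec passes itself into each constructor so composite codecs can build their sub-codecs. A rough caller-side sketch against the functions defined above; the encodingData literal is invented for illustration and only mirrors the { codecId, parameters } shape produced by the compression-header parser:

// getCodecClassWithId / instantiateCodec are the exports of the file above;
// the sample parameters use the field names the Huffman codec reads
// (numCodes, symbols, bitLengths) but the values themselves are made up.
function describeEncoding(encodingData, dataType) {
  const CodecClass = getCodecClassWithId(encodingData.codecId)
  if (!CodecClass) {
    return `codec ID ${encodingData.codecId} is not implemented`
  }
  const codec = instantiateCodec(encodingData, dataType)
  return `codec ID ${encodingData.codecId} -> ${CodecClass.name} (${codec ? 'instantiated' : 'failed'})`
}

// describeEncoding({ codecId: 3, parameters: { numCodes: 1, symbols: [65], bitLengths: [0] } }, 'int')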
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _require = require('../../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError; | ||
var CramCodec = require('./_base'); | ||
var SubexpCodec = | ||
/*#__PURE__*/ | ||
function (_CramCodec) { | ||
(0, _inherits2.default)(SubexpCodec, _CramCodec); | ||
function SubexpCodec() { | ||
var _this; | ||
var parameters = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {}; | ||
var dataType = arguments.length > 1 ? arguments[1] : undefined; | ||
(0, _classCallCheck2.default)(this, SubexpCodec); | ||
_this = (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(SubexpCodec).call(this, parameters, dataType)); | ||
if (_this.dataType !== 'int') { | ||
throw new CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by SUBEXP codec")); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var _base_1 = __importDefault(require("./_base")); | ||
var SubexpCodec = /** @class */ (function (_super) { | ||
__extends(SubexpCodec, _super); | ||
function SubexpCodec(parameters, dataType) { | ||
if (parameters === void 0) { parameters = {}; } | ||
var _this = _super.call(this, parameters, dataType) || this; | ||
if (_this.dataType !== 'int') { | ||
throw new errors_1.CramUnimplementedError("".concat(_this.dataType, " decoding not yet implemented by SUBEXP codec")); | ||
} | ||
return _this; | ||
} | ||
return _this; | ||
} | ||
(0, _createClass2.default)(SubexpCodec, [{ | ||
key: "decode", | ||
value: function decode(slice, coreDataBlock, blocksByContentId, cursors) { | ||
var numLeadingOnes = 0; | ||
while (this._getBits(coreDataBlock.content, cursors.coreBlock, 1)) { | ||
numLeadingOnes += 1; | ||
} | ||
var b; | ||
var n; | ||
if (numLeadingOnes === 0) { | ||
b = this.parameters.K; | ||
n = this._getBits(coreDataBlock.content, cursors.coreBlock, b); | ||
} else { | ||
b = numLeadingOnes + this.parameters.K - 1; | ||
n = 1 << b | this._getBits(coreDataBlock.content, cursors.coreBlock, b); | ||
} | ||
return n - this.parameters.offset; | ||
} | ||
}]); | ||
return SubexpCodec; | ||
}(CramCodec); | ||
module.exports = SubexpCodec; | ||
SubexpCodec.prototype.decode = function (slice, coreDataBlock, blocksByContentId, cursors) { | ||
var numLeadingOnes = 0; | ||
while (this._getBits(coreDataBlock.content, cursors.coreBlock, 1)) { | ||
numLeadingOnes += 1; | ||
} | ||
var b; | ||
var n; | ||
if (numLeadingOnes === 0) { | ||
b = this.parameters.K; | ||
n = this._getBits(coreDataBlock.content, cursors.coreBlock, b); | ||
} | ||
else { | ||
b = numLeadingOnes + this.parameters.K - 1; | ||
n = (1 << b) | this._getBits(coreDataBlock.content, cursors.coreBlock, b); | ||
} | ||
return n - this.parameters.offset; | ||
}; | ||
return SubexpCodec; | ||
}(_base_1.default)); | ||
exports.default = SubexpCodec; | ||
//# sourceMappingURL=subexp.js.map |
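Both builds of subexp.js decode the SUBEXP encoding identically: count the leading 1-bits in the core data block, then either read K bits directly (no leading ones) or read numLeadingOnes + K - 1 bits and OR in the bit just above them, and finally subtract the configured offset. A self-contained sketch of that arithmetic over a plain 0/1 array, with a tiny bit reader standing in for the codec's _getBits helper (hypothetical, for illustration only):

// Decode one subexponential-coded value from an array of 0/1 bits.
// k and offset play the roles of this.parameters.K and this.parameters.offset.
function decodeSubexp(bits, k, offset) {
  let pos = 0
  const getBits = n => {
    let val = 0
    for (let i = 0; i < n; i += 1) {
      val = (val << 1) | bits[pos]
      pos += 1
    }
    return val
  }

  let numLeadingOnes = 0
  while (getBits(1)) {
    numLeadingOnes += 1
  }

  let n
  if (numLeadingOnes === 0) {
    n = getBits(k)
  } else {
    const b = numLeadingOnes + k - 1
    n = (1 << b) | getBits(b)
  }
  return n - offset
}

// with k = 2, the bits 1 0 1 1 decode to (1 << 2) | 0b11 = 7
// decodeSubexp([1, 0, 1, 1], 2, 0)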
"use strict"; | ||
module.exports = { | ||
CRAM_FLAG_PRESERVE_QUAL_SCORES: 1 << 0, | ||
CRAM_FLAG_DETACHED: 1 << 1, | ||
CRAM_FLAG_MATE_DOWNSTREAM: 1 << 2, | ||
CRAM_FLAG_NO_SEQ: 1 << 3, | ||
CRAM_FLAG_MASK: (1 << 4) - 1, | ||
// mate read is reversed | ||
CRAM_M_REVERSE: 1, | ||
// mated read is unmapped | ||
CRAM_M_UNMAP: 2, | ||
// the read is paired in sequencing, no matter whether it is mapped in a pair | ||
BAM_FPAIRED: 1, | ||
// the read is mapped in a proper pair | ||
BAM_FPROPER_PAIR: 2, | ||
// the read itself is unmapped; conflictive with BAM_FPROPER_PAIR | ||
BAM_FUNMAP: 4, | ||
// the mate is unmapped | ||
BAM_FMUNMAP: 8, | ||
// the read is mapped to the reverse strand | ||
BAM_FREVERSE: 16, | ||
// the mate is mapped to the reverse strand | ||
BAM_FMREVERSE: 32, | ||
// this is read1 | ||
BAM_FREAD1: 64, | ||
// this is read2 | ||
BAM_FREAD2: 128, | ||
// not primary alignment | ||
BAM_FSECONDARY: 256, | ||
// QC failure | ||
BAM_FQCFAIL: 512, | ||
// optical or PCR duplicate | ||
BAM_FDUP: 1024, | ||
// supplementary alignment | ||
BAM_FSUPPLEMENTARY: 2048, | ||
BAM_CMATCH: 0, | ||
BAM_CINS: 1, | ||
BAM_CDEL: 2, | ||
BAM_CREF_SKIP: 3, | ||
BAM_CSOFT_CLIP: 4, | ||
BAM_CHARD_CLIP: 5, | ||
BAM_CPAD: 6, | ||
BAM_CEQUAL: 7, | ||
BAM_CDIFF: 8, | ||
BAM_CBACK: 9, | ||
BAM_CIGAR_STR: 'MIDNSHP:XB', | ||
BAM_CIGAR_SHIFT: 4, | ||
BAM_CIGAR_MASK: 0xf, | ||
BAM_CIGAR_TYPE: 0x3c1a7 | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var Constants = { | ||
CRAM_FLAG_PRESERVE_QUAL_SCORES: 1 << 0, | ||
CRAM_FLAG_DETACHED: 1 << 1, | ||
CRAM_FLAG_MATE_DOWNSTREAM: 1 << 2, | ||
CRAM_FLAG_NO_SEQ: 1 << 3, | ||
CRAM_FLAG_MASK: (1 << 4) - 1, | ||
// mate read is reversed | ||
CRAM_M_REVERSE: 1, | ||
// mated read is unmapped | ||
CRAM_M_UNMAP: 2, | ||
// the read is paired in sequencing, no matter whether it is mapped in a pair | ||
BAM_FPAIRED: 1, | ||
// the read is mapped in a proper pair | ||
BAM_FPROPER_PAIR: 2, | ||
// the read itself is unmapped; conflictive with BAM_FPROPER_PAIR | ||
BAM_FUNMAP: 4, | ||
// the mate is unmapped | ||
BAM_FMUNMAP: 8, | ||
// the read is mapped to the reverse strand | ||
BAM_FREVERSE: 16, | ||
// the mate is mapped to the reverse strand | ||
BAM_FMREVERSE: 32, | ||
// this is read1 | ||
BAM_FREAD1: 64, | ||
// this is read2 | ||
BAM_FREAD2: 128, | ||
// not primary alignment | ||
BAM_FSECONDARY: 256, | ||
// QC failure | ||
BAM_FQCFAIL: 512, | ||
// optical or PCR duplicate | ||
BAM_FDUP: 1024, | ||
// supplementary alignment | ||
BAM_FSUPPLEMENTARY: 2048, | ||
BAM_CMATCH: 0, | ||
BAM_CINS: 1, | ||
BAM_CDEL: 2, | ||
BAM_CREF_SKIP: 3, | ||
BAM_CSOFT_CLIP: 4, | ||
BAM_CHARD_CLIP: 5, | ||
BAM_CPAD: 6, | ||
BAM_CEQUAL: 7, | ||
BAM_CDIFF: 8, | ||
BAM_CBACK: 9, | ||
BAM_CIGAR_STR: 'MIDNSHP:XB', | ||
BAM_CIGAR_SHIFT: 4, | ||
BAM_CIGAR_MASK: 0xf, | ||
BAM_CIGAR_TYPE: 0x3c1a7, | ||
}; | ||
exports.default = Constants; | ||
//# sourceMappingURL=constants.js.map |
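constants.js only changes its export shape in 1.6.0 (a default-exported Constants object instead of module.exports); the values are untouched bit masks and enum codes from the BAM/CRAM conventions. Downstream record code tests them with bitwise AND, along these lines (the helper and the sample flags value are invented for illustration):

// Flag values copied from the table above; describeFlags itself is hypothetical.
const BAM_FPAIRED = 1
const BAM_FREVERSE = 16
const BAM_FREAD1 = 64
const BAM_FSECONDARY = 256

function describeFlags(flags) {
  return {
    paired: !!(flags & BAM_FPAIRED),
    reverseStrand: !!(flags & BAM_FREVERSE),
    firstOfPair: !!(flags & BAM_FREAD1),
    secondary: !!(flags & BAM_FSECONDARY),
  }
}

// 83 = 64 + 16 + 2 + 1, i.e. paired, proper pair, reverse strand, read1
// describeFlags(83) // { paired: true, reverseStrand: true, firstOfPair: true, secondary: false }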
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _keys = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/keys")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _assign = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/assign")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _require = require('../../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var _require2 = require('../codecs'), | ||
instantiateCodec = _require2.instantiateCodec; // the hardcoded data type to be decoded for each core | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var codecs_1 = require("../codecs"); | ||
// the hardcoded data type to be decoded for each core | ||
// data field | ||
var dataSeriesTypes = { | ||
BF: 'int', | ||
CF: 'int', | ||
RI: 'int', | ||
RL: 'int', | ||
AP: 'int', | ||
RG: 'int', | ||
MF: 'int', | ||
NS: 'int', | ||
NP: 'int', | ||
TS: 'int', | ||
NF: 'int', | ||
TC: 'byte', | ||
TN: 'int', | ||
FN: 'int', | ||
FC: 'byte', | ||
FP: 'int', | ||
BS: 'byte', | ||
IN: 'byteArray', | ||
SC: 'byteArray', | ||
DL: 'int', | ||
BA: 'byte', | ||
BB: 'byteArray', | ||
RS: 'int', | ||
PD: 'int', | ||
HC: 'int', | ||
MQ: 'int', | ||
RN: 'byteArray', | ||
QS: 'byte', | ||
QQ: 'byteArray', | ||
TL: 'int', | ||
TM: 'ignore', | ||
TV: 'ignore' | ||
BF: 'int', | ||
CF: 'int', | ||
RI: 'int', | ||
RL: 'int', | ||
AP: 'int', | ||
RG: 'int', | ||
MF: 'int', | ||
NS: 'int', | ||
NP: 'int', | ||
TS: 'int', | ||
NF: 'int', | ||
TC: 'byte', | ||
TN: 'int', | ||
FN: 'int', | ||
FC: 'byte', | ||
FP: 'int', | ||
BS: 'byte', | ||
IN: 'byteArray', | ||
SC: 'byteArray', | ||
DL: 'int', | ||
BA: 'byte', | ||
BB: 'byteArray', | ||
RS: 'int', | ||
PD: 'int', | ||
HC: 'int', | ||
MQ: 'int', | ||
RN: 'byteArray', | ||
QS: 'byte', | ||
QQ: 'byteArray', | ||
TL: 'int', | ||
TM: 'ignore', | ||
TV: 'ignore', | ||
}; | ||
function parseSubstitutionMatrix(byteArray) { | ||
var matrix = new Array(5); | ||
for (var i = 0; i < 5; i += 1) { | ||
matrix[i] = new Array(4); | ||
} | ||
matrix[0][byteArray[0] >> 6 & 3] = 'C'; | ||
matrix[0][byteArray[0] >> 4 & 3] = 'G'; | ||
matrix[0][byteArray[0] >> 2 & 3] = 'T'; | ||
matrix[0][byteArray[0] >> 0 & 3] = 'N'; | ||
matrix[1][byteArray[1] >> 6 & 3] = 'A'; | ||
matrix[1][byteArray[1] >> 4 & 3] = 'G'; | ||
matrix[1][byteArray[1] >> 2 & 3] = 'T'; | ||
matrix[1][byteArray[1] >> 0 & 3] = 'N'; | ||
matrix[2][byteArray[2] >> 6 & 3] = 'A'; | ||
matrix[2][byteArray[2] >> 4 & 3] = 'C'; | ||
matrix[2][byteArray[2] >> 2 & 3] = 'T'; | ||
matrix[2][byteArray[2] >> 0 & 3] = 'N'; | ||
matrix[3][byteArray[3] >> 6 & 3] = 'A'; | ||
matrix[3][byteArray[3] >> 4 & 3] = 'C'; | ||
matrix[3][byteArray[3] >> 2 & 3] = 'G'; | ||
matrix[3][byteArray[3] >> 0 & 3] = 'N'; | ||
matrix[4][byteArray[4] >> 6 & 3] = 'A'; | ||
matrix[4][byteArray[4] >> 4 & 3] = 'C'; | ||
matrix[4][byteArray[4] >> 2 & 3] = 'G'; | ||
matrix[4][byteArray[4] >> 0 & 3] = 'T'; | ||
return matrix; | ||
var matrix = new Array(5); | ||
for (var i = 0; i < 5; i += 1) { | ||
matrix[i] = new Array(4); | ||
} | ||
matrix[0][(byteArray[0] >> 6) & 3] = 'C'; | ||
matrix[0][(byteArray[0] >> 4) & 3] = 'G'; | ||
matrix[0][(byteArray[0] >> 2) & 3] = 'T'; | ||
matrix[0][(byteArray[0] >> 0) & 3] = 'N'; | ||
matrix[1][(byteArray[1] >> 6) & 3] = 'A'; | ||
matrix[1][(byteArray[1] >> 4) & 3] = 'G'; | ||
matrix[1][(byteArray[1] >> 2) & 3] = 'T'; | ||
matrix[1][(byteArray[1] >> 0) & 3] = 'N'; | ||
matrix[2][(byteArray[2] >> 6) & 3] = 'A'; | ||
matrix[2][(byteArray[2] >> 4) & 3] = 'C'; | ||
matrix[2][(byteArray[2] >> 2) & 3] = 'T'; | ||
matrix[2][(byteArray[2] >> 0) & 3] = 'N'; | ||
matrix[3][(byteArray[3] >> 6) & 3] = 'A'; | ||
matrix[3][(byteArray[3] >> 4) & 3] = 'C'; | ||
matrix[3][(byteArray[3] >> 2) & 3] = 'G'; | ||
matrix[3][(byteArray[3] >> 0) & 3] = 'N'; | ||
matrix[4][(byteArray[4] >> 6) & 3] = 'A'; | ||
matrix[4][(byteArray[4] >> 4) & 3] = 'C'; | ||
matrix[4][(byteArray[4] >> 2) & 3] = 'G'; | ||
matrix[4][(byteArray[4] >> 0) & 3] = 'T'; | ||
return matrix; | ||
} | ||
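// Note on parseSubstitutionMatrix above (the logic is identical in both
// versions): each of the five bytes packs the 2-bit ranks of the four possible
// substitute bases for one reference base (rows A, C, G, T, N) at bit offsets
// 6, 4, 2 and 0. For example, a hypothetical first byte of 0b00011011 holds
// ranks 0, 1, 2, 3, so row 0 decodes to ['C', 'G', 'T', 'N']:
//
//   parseSubstitutionMatrix([0b00011011, 0, 0, 0, 0])[0] // -> ['C', 'G', 'T', 'N']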
var CramContainerCompressionScheme = | ||
/*#__PURE__*/ | ||
function () { | ||
function CramContainerCompressionScheme(content) { | ||
(0, _classCallCheck2.default)(this, CramContainerCompressionScheme); | ||
(0, _assign.default)(this, content); // interpret some of the preservation map tags for convenient use | ||
this.readNamesIncluded = content.preservation.RN; | ||
this.APdelta = content.preservation.AP; | ||
this.referenceRequired = !!content.preservation.RR; | ||
this.tagIdsDictionary = content.preservation.TD; | ||
this.substitutionMatrix = parseSubstitutionMatrix(content.preservation.SM); | ||
this.dataSeriesCodecCache = {}; | ||
this.tagCodecCache = {}; | ||
} | ||
/** | ||
* @param {string} tagName three-character tag name | ||
* @private | ||
*/ | ||
(0, _createClass2.default)(CramContainerCompressionScheme, [{ | ||
key: "getCodecForTag", | ||
value: function getCodecForTag(tagName) { | ||
if (!this.tagCodecCache[tagName]) { | ||
var encodingData = this.tagEncoding[tagName]; | ||
if (encodingData) { | ||
this.tagCodecCache[tagName] = instantiateCodec(encodingData, 'byteArray' // all tags are byte array data | ||
); | ||
} | ||
} | ||
return this.tagCodecCache[tagName]; | ||
var CramContainerCompressionScheme = /** @class */ (function () { | ||
function CramContainerCompressionScheme(content) { | ||
Object.assign(this, content); | ||
// interpret some of the preservation map tags for convenient use | ||
this.readNamesIncluded = content.preservation.RN; | ||
this.APdelta = content.preservation.AP; | ||
this.referenceRequired = !!content.preservation.RR; | ||
this.tagIdsDictionary = content.preservation.TD; | ||
this.substitutionMatrix = parseSubstitutionMatrix(content.preservation.SM); | ||
this.dataSeriesCodecCache = {}; | ||
this.tagCodecCache = {}; | ||
} | ||
/** | ||
* @param {string} tagName three-character tag name | ||
* @private | ||
*/ | ||
CramContainerCompressionScheme.prototype.getCodecForTag = function (tagName) { | ||
if (!this.tagCodecCache[tagName]) { | ||
var encodingData = this.tagEncoding[tagName]; | ||
if (encodingData) { | ||
this.tagCodecCache[tagName] = (0, codecs_1.instantiateCodec)(encodingData, 'byteArray'); | ||
} | ||
} | ||
return this.tagCodecCache[tagName]; | ||
}; | ||
/** | ||
* | ||
@@ -128,40 +98,32 @@
* @param {number} tagListId ID of the tag list to fetch from the tag dictionary | ||
*/ | ||
}, { | ||
key: "getTagNames", | ||
value: function getTagNames(tagListId) { | ||
return this.tagIdsDictionary[tagListId]; | ||
} | ||
}, { | ||
key: "getCodecForDataSeries", | ||
value: function getCodecForDataSeries(dataSeriesName) { | ||
if (!this.dataSeriesCodecCache[dataSeriesName]) { | ||
var encodingData = this.dataSeriesEncoding[dataSeriesName]; | ||
if (encodingData) { | ||
var dataType = dataSeriesTypes[dataSeriesName]; | ||
if (!dataType) throw new CramMalformedError("data series name ".concat(dataSeriesName, " not defined in file compression header")); | ||
this.dataSeriesCodecCache[dataSeriesName] = instantiateCodec(encodingData, dataType); | ||
CramContainerCompressionScheme.prototype.getTagNames = function (tagListId) { | ||
return this.tagIdsDictionary[tagListId]; | ||
}; | ||
CramContainerCompressionScheme.prototype.getCodecForDataSeries = function (dataSeriesName) { | ||
if (!this.dataSeriesCodecCache[dataSeriesName]) { | ||
var encodingData = this.dataSeriesEncoding[dataSeriesName]; | ||
if (encodingData) { | ||
var dataType = dataSeriesTypes[dataSeriesName]; | ||
if (!dataType) { | ||
throw new errors_1.CramMalformedError("data series name ".concat(dataSeriesName, " not defined in file compression header")); | ||
} | ||
this.dataSeriesCodecCache[dataSeriesName] = (0, codecs_1.instantiateCodec)(encodingData, dataType); | ||
} | ||
} | ||
} | ||
return this.dataSeriesCodecCache[dataSeriesName]; | ||
} | ||
}, { | ||
key: "toJSON", | ||
value: function toJSON() { | ||
var _context, | ||
_this = this; | ||
var data = {}; | ||
(0, _forEach.default)(_context = (0, _keys.default)(this)).call(_context, function (k) { | ||
if (/Cache$/.test(k)) return; | ||
data[k] = _this[k]; | ||
}); | ||
return data; | ||
} | ||
}]); | ||
return CramContainerCompressionScheme; | ||
}(); | ||
module.exports = CramContainerCompressionScheme; | ||
return this.dataSeriesCodecCache[dataSeriesName]; | ||
}; | ||
CramContainerCompressionScheme.prototype.toJSON = function () { | ||
var _this = this; | ||
var data = {}; | ||
Object.keys(this).forEach(function (k) { | ||
if (/Cache$/.test(k)) { | ||
return; | ||
} | ||
data[k] = _this[k]; | ||
}); | ||
return data; | ||
}; | ||
return CramContainerCompressionScheme; | ||
}()); | ||
exports.default = CramContainerCompressionScheme; | ||
//# sourceMappingURL=compressionScheme.js.map |
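compressionScheme.js keeps two lazy caches on each CramContainerCompressionScheme instance, tagCodecCache and dataSeriesCodecCache, so a codec is only instantiated the first time its tag or data series is requested, and toJSON filters any key ending in Cache back out of the serialized form. A minimal standalone sketch of that memoization pattern, with a hypothetical buildCodec factory standing in for instantiateCodec:

// Lazy per-key codec cache in the style of getCodecForTag / getCodecForDataSeries;
// CodecCacheExample and buildCodec are invented names for illustration.
class CodecCacheExample {
  constructor(encodings, buildCodec) {
    this.encodings = encodings
    this.buildCodec = buildCodec
    this.codecCache = {}
  }

  getCodec(name) {
    if (!this.codecCache[name]) {
      const encodingData = this.encodings[name]
      if (encodingData) {
        this.codecCache[name] = this.buildCodec(encodingData)
      }
    }
    return this.codecCache[name]
  }

  // like CramContainerCompressionScheme#toJSON: drop the *Cache members
  toJSON() {
    const data = {}
    Object.keys(this).forEach(k => {
      if (!/Cache$/.test(k)) {
        data[k] = this[k]
      }
    })
    return data
  }
}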
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _assign = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/assign")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _context7; | ||
var _require = require('../../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var _require2 = require('../util'), | ||
itf8Size = _require2.itf8Size, | ||
parseItem = _require2.parseItem, | ||
tinyMemoize = _require2.tinyMemoize; | ||
var CramSlice = require('../slice'); | ||
var CramContainerCompressionScheme = require('./compressionScheme'); | ||
var CramContainer = | ||
/*#__PURE__*/ | ||
function () { | ||
function CramContainer(cramFile, position) { | ||
(0, _classCallCheck2.default)(this, CramContainer); | ||
// cram file this container comes from | ||
this.file = cramFile; // position of this container in the file | ||
this.filePosition = position; // console.log(`container: ${this.filePosition}`) | ||
} // memoize | ||
(0, _createClass2.default)(CramContainer, [{ | ||
key: "getHeader", | ||
value: function getHeader() { | ||
return this._readContainerHeader(this.filePosition); | ||
} // memoize | ||
}, { | ||
key: "getCompressionHeaderBlock", | ||
value: function () { | ||
var _getCompressionHeaderBlock = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee() { | ||
var containerHeader, sectionParsers, block, content; | ||
return _regenerator.default.wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
_context.next = 2; | ||
return this.getHeader(); | ||
case 2: | ||
containerHeader = _context.sent; | ||
if (containerHeader.numRecords) { | ||
_context.next = 5; | ||
break; | ||
} | ||
return _context.abrupt("return", null); | ||
case 5: | ||
_context.next = 7; | ||
return this.file.getSectionParsers(); | ||
case 7: | ||
sectionParsers = _context.sent; | ||
_context.next = 10; | ||
return this.getFirstBlock(); | ||
case 10: | ||
block = _context.sent; | ||
if (!(block.contentType !== 'COMPRESSION_HEADER')) { | ||
_context.next = 13; | ||
break; | ||
} | ||
throw new CramMalformedError("invalid content type ".concat(block.contentType, " in what is supposed to be the compression header block")); | ||
case 13: | ||
content = parseItem(block.content, sectionParsers.cramCompressionHeader.parser, 0, block.contentPosition); | ||
block.content = content; | ||
return _context.abrupt("return", block); | ||
case 16: | ||
case "end": | ||
return _context.stop(); | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function getCompressionHeaderBlock() { | ||
return _getCompressionHeaderBlock.apply(this, arguments); | ||
} | ||
return getCompressionHeaderBlock; | ||
}() | ||
}, { | ||
key: "getFirstBlock", | ||
value: function () { | ||
var _getFirstBlock = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2() { | ||
var containerHeader; | ||
return _regenerator.default.wrap(function _callee2$(_context2) { | ||
while (1) { | ||
switch (_context2.prev = _context2.next) { | ||
case 0: | ||
_context2.next = 2; | ||
return this.getHeader(); | ||
case 2: | ||
containerHeader = _context2.sent; | ||
return _context2.abrupt("return", this.file.readBlock(containerHeader._endPosition)); | ||
case 4: | ||
case "end": | ||
return _context2.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function getFirstBlock() { | ||
return _getFirstBlock.apply(this, arguments); | ||
} | ||
return getFirstBlock; | ||
}() // parses the compression header data into a CramContainerCompressionScheme object | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var util_1 = require("../util"); | ||
var slice_1 = __importDefault(require("../slice")); | ||
var compressionScheme_1 = __importDefault(require("./compressionScheme")); | ||
var CramContainer = /** @class */ (function () { | ||
function CramContainer(cramFile, position) { | ||
// cram file this container comes from | ||
this.file = cramFile; | ||
// position of this container in the file | ||
this.filePosition = position; | ||
// console.log(`container: ${this.filePosition}`) | ||
} | ||
// memoize | ||
}, { | ||
key: "getCompressionScheme", | ||
value: function () { | ||
var _getCompressionScheme = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee3() { | ||
var header; | ||
return _regenerator.default.wrap(function _callee3$(_context3) { | ||
while (1) { | ||
switch (_context3.prev = _context3.next) { | ||
case 0: | ||
_context3.next = 2; | ||
return this.getCompressionHeaderBlock(); | ||
case 2: | ||
header = _context3.sent; | ||
if (header) { | ||
_context3.next = 5; | ||
break; | ||
CramContainer.prototype.getHeader = function () { | ||
return this._readContainerHeader(this.filePosition); | ||
}; | ||
// memoize | ||
CramContainer.prototype.getCompressionHeaderBlock = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var containerHeader, sectionParsers, block, content; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getHeader() | ||
// if there are no records in the container, there will be no compression header | ||
]; | ||
case 1: | ||
containerHeader = _a.sent(); | ||
// if there are no records in the container, there will be no compression header | ||
if (!containerHeader.numRecords) { | ||
return [2 /*return*/, null]; | ||
} | ||
return [4 /*yield*/, this.file.getSectionParsers()]; | ||
case 2: | ||
sectionParsers = _a.sent(); | ||
return [4 /*yield*/, this.getFirstBlock()]; | ||
case 3: | ||
block = _a.sent(); | ||
if (block.contentType !== 'COMPRESSION_HEADER') { | ||
throw new errors_1.CramMalformedError("invalid content type ".concat(block.contentType, " in what is supposed to be the compression header block")); | ||
} | ||
content = (0, util_1.parseItem)(block.content, sectionParsers.cramCompressionHeader.parser, 0, block.contentPosition); | ||
block.content = content; | ||
return [2 /*return*/, block]; | ||
} | ||
return _context3.abrupt("return", undefined); | ||
case 5: | ||
return _context3.abrupt("return", new CramContainerCompressionScheme(header.content)); | ||
case 6: | ||
case "end": | ||
return _context3.stop(); | ||
} | ||
} | ||
}, _callee3, this); | ||
})); | ||
function getCompressionScheme() { | ||
return _getCompressionScheme.apply(this, arguments); | ||
} | ||
return getCompressionScheme; | ||
}() | ||
}, { | ||
key: "getSlice", | ||
value: function getSlice(slicePosition, sliceSize) { | ||
// note: slicePosition is relative to the end of the container header | ||
// TODO: perhaps we should cache slices? | ||
return new CramSlice(this, slicePosition, sliceSize); | ||
} | ||
}, { | ||
key: "_readContainerHeader", | ||
value: function () { | ||
var _readContainerHeader2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee4(position) { | ||
var sectionParsers, cramContainerHeader1, cramContainerHeader2, _ref, fileSize, bytes1, header1, numLandmarksSize, _context4, _context5, bytes2, header2, completeHeader; | ||
return _regenerator.default.wrap(function _callee4$(_context6) { | ||
while (1) { | ||
switch (_context6.prev = _context6.next) { | ||
case 0: | ||
_context6.next = 2; | ||
return this.file.getSectionParsers(); | ||
case 2: | ||
sectionParsers = _context6.sent; | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1, cramContainerHeader2 = sectionParsers.cramContainerHeader2; | ||
_context6.next = 6; | ||
return this.file.stat(); | ||
case 6: | ||
_ref = _context6.sent; | ||
fileSize = _ref.size; | ||
if (!(position >= fileSize)) { | ||
_context6.next = 10; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramContainer.prototype.getFirstBlock = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var containerHeader; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getHeader()]; | ||
case 1: | ||
containerHeader = _a.sent(); | ||
return [2 /*return*/, this.file.readBlock(containerHeader._endPosition)]; | ||
} | ||
return _context6.abrupt("return", undefined); | ||
case 10: | ||
// parse the container header. do it in 2 pieces because you cannot tell | ||
// how much to buffer until you read numLandmarks | ||
bytes1 = Buffer.allocUnsafe(cramContainerHeader1.maxLength); | ||
_context6.next = 13; | ||
return this.file.read(bytes1, 0, cramContainerHeader1.maxLength, position); | ||
case 13: | ||
header1 = parseItem(bytes1, cramContainerHeader1.parser); | ||
numLandmarksSize = itf8Size(header1.numLandmarks); | ||
if (!(position + header1.length >= fileSize)) { | ||
_context6.next = 18; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// parses the compression header data into a CramContainerCompressionScheme object | ||
// memoize | ||
CramContainer.prototype.getCompressionScheme = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var header; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getCompressionHeaderBlock()]; | ||
case 1: | ||
header = _a.sent(); | ||
if (!header) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
return [2 /*return*/, new compressionScheme_1.default(header.content)]; | ||
} | ||
console.warn((0, _concat.default)(_context4 = (0, _concat.default)(_context5 = "".concat(this.file, ": container header at ")).call(_context5, position, " indicates that the container has length ")).call(_context4, header1.length, ", which extends beyond the length of the file. Skipping this container.")); | ||
return _context6.abrupt("return", undefined); | ||
case 18: | ||
bytes2 = Buffer.allocUnsafe(cramContainerHeader2.maxLength(header1.numLandmarks)); | ||
_context6.next = 21; | ||
return this.file.read(bytes2, 0, cramContainerHeader2.maxLength(header1.numLandmarks), position + header1._size - numLandmarksSize); | ||
case 21: | ||
header2 = parseItem(bytes2, cramContainerHeader2.parser); | ||
if (!(this.file.validateChecksums && header2.crc32 !== undefined)) { | ||
_context6.next = 25; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramContainer.prototype.getSlice = function (slicePosition, sliceSize) { | ||
// note: slicePosition is relative to the end of the container header | ||
// TODO: perhaps we should cache slices? | ||
return new slice_1.default(this, slicePosition, sliceSize); | ||
}; | ||
CramContainer.prototype._readContainerHeader = function (position) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sectionParsers, cramContainerHeader1, cramContainerHeader2, fileSize, bytes1, header1, numLandmarksSize, bytes2, header2, completeHeader; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.file.getSectionParsers()]; | ||
case 1: | ||
sectionParsers = _a.sent(); | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1, cramContainerHeader2 = sectionParsers.cramContainerHeader2; | ||
return [4 /*yield*/, this.file.stat()]; | ||
case 2: | ||
fileSize = (_a.sent()).size; | ||
if (position >= fileSize) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
bytes1 = Buffer.allocUnsafe(cramContainerHeader1.maxLength); | ||
return [4 /*yield*/, this.file.read(bytes1, 0, cramContainerHeader1.maxLength, position)]; | ||
case 3: | ||
_a.sent(); | ||
header1 = (0, util_1.parseItem)(bytes1, cramContainerHeader1.parser); | ||
numLandmarksSize = (0, util_1.itf8Size)(header1.numLandmarks); | ||
if (position + header1.length >= fileSize) { | ||
console.warn("".concat(this.file, ": container header at ").concat(position, " indicates that the container has length ").concat(header1.length, ", which extends beyond the length of the file. Skipping this container.")); | ||
return [2 /*return*/, undefined]; | ||
} | ||
bytes2 = Buffer.allocUnsafe(cramContainerHeader2.maxLength(header1.numLandmarks)); | ||
return [4 /*yield*/, this.file.read(bytes2, 0, cramContainerHeader2.maxLength(header1.numLandmarks), position + header1._size - numLandmarksSize)]; | ||
case 4: | ||
_a.sent(); | ||
header2 = (0, util_1.parseItem)(bytes2, cramContainerHeader2.parser); | ||
if (!(this.file.validateChecksums && header2.crc32 !== undefined)) return [3 /*break*/, 6]; | ||
return [4 /*yield*/, this.file.checkCrc32(position, header1._size + header2._size - numLandmarksSize - 4, header2.crc32, "container header beginning at position ".concat(position))]; | ||
case 5: | ||
_a.sent(); | ||
_a.label = 6; | ||
case 6: | ||
completeHeader = Object.assign(header1, header2, { | ||
_size: header1._size + header2._size - numLandmarksSize, | ||
_endPosition: header1._size + header2._size - numLandmarksSize + position, | ||
}); | ||
return [2 /*return*/, completeHeader]; | ||
} | ||
_context6.next = 25; | ||
return this.file.checkCrc32(position, header1._size + header2._size - numLandmarksSize - 4, header2.crc32, "container header beginning at position ".concat(position)); | ||
case 25: | ||
completeHeader = (0, _assign.default)(header1, header2, { | ||
_size: header1._size + header2._size - numLandmarksSize, | ||
_endPosition: header1._size + header2._size - numLandmarksSize + position | ||
}); | ||
return _context6.abrupt("return", completeHeader); | ||
case 27: | ||
case "end": | ||
return _context6.stop(); | ||
} | ||
} | ||
}, _callee4, this); | ||
})); | ||
function _readContainerHeader(_x) { | ||
return _readContainerHeader2.apply(this, arguments); | ||
} | ||
return _readContainerHeader; | ||
}() | ||
}]); | ||
return CramContainer; | ||
}(); | ||
(0, _forEach.default)(_context7 = 'getHeader getCompressionHeaderBlock getCompressionScheme'.split(' ')).call(_context7, function (method) { | ||
return tinyMemoize(CramContainer, method); | ||
}); | ||
module.exports = CramContainer; | ||
}); | ||
}); | ||
}; | ||
return CramContainer; | ||
}()); | ||
exports.default = CramContainer; | ||
'getHeader getCompressionHeaderBlock getCompressionScheme' | ||
.split(' ') | ||
.forEach(function (method) { return (0, util_1.tinyMemoize)(CramContainer, method); }); | ||
//# sourceMappingURL=index.js.map |
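container/index.js reads a container header in two passes in both builds, because the header is variable-length: the first read parses header1 up to numLandmarks, and only then can the code size the landmark section, read header2 starting at position + header1._size minus the ITF-8 size of numLandmarks, and merge the two with a combined _size and _endPosition. A compact sketch of just that offset arithmetic, with made-up byte counts in place of the real parseItem/itf8Size results:

// Hypothetical helper showing how _readContainerHeader above derives
// _size and _endPosition once both partial headers have been parsed.
function combineContainerHeaders(position, header1, header2, numLandmarksSize) {
  const size = header1._size + header2._size - numLandmarksSize
  return Object.assign({}, header1, header2, {
    _size: size,
    _endPosition: size + position,
  })
}

// a 20-byte header1, a 12-byte header2 and a 1-byte numLandmarks field give
// a 31-byte header ending at position + 31
// combineContainerHeaders(1000, { _size: 20 }, { _size: 12 }, 1)
// // -> { _size: 31, _endPosition: 1031 }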
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _context14; | ||
var zlib = require('zlib'); | ||
var crc32 = require('buffer-crc32'); | ||
var LRU = require('quick-lru'); | ||
var _require = require('../errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError, | ||
CramMalformedError = _require.CramMalformedError; | ||
var rans = require('../rans'); | ||
var _require2 = require('./sectionParsers'), | ||
cramFileDefinitionParser = _require2.cramFileDefinition, | ||
_getSectionParsers = _require2.getSectionParsers; | ||
var CramContainer = require('./container'); | ||
var _require3 = require('../io'), | ||
open = _require3.open; | ||
var _require4 = require('./util'), | ||
parseItem = _require4.parseItem, | ||
tinyMemoize = _require4.tinyMemoize; | ||
var _require5 = require('../sam'), | ||
parseHeaderText = _require5.parseHeaderText; | ||
var CramFile = | ||
/*#__PURE__*/ | ||
function () { | ||
/** | ||
* @param {object} args | ||
* @param {object} [args.filehandle] - a filehandle that implements the stat() and | ||
* read() methods of the Node filehandle API https://nodejs.org/api/fs.html#fs_class_filehandle | ||
* @param {object} [args.path] - path to the cram file | ||
* @param {object} [args.url] - url for the cram file. also supports file:// urls for local files | ||
* @param {function} [args.seqFetch] - a function with signature | ||
* `(seqId, startCoordinate, endCoordinate)` that returns a promise for a string of sequence bases | ||
* @param {number} [args.cacheSize] optional maximum number of CRAM records to cache. default 20,000 | ||
* @param {boolean} [args.checkSequenceMD5] - default true. if false, disables verifying the MD5 | ||
* checksum of the reference sequence underlying a slice. In some applications, this check can cause an inconvenient amount (many megabases) of sequences to be fetched. | ||
*/ | ||
function CramFile(args) { | ||
(0, _classCallCheck2.default)(this, CramFile); | ||
this.file = open(args.url, args.path, args.filehandle); | ||
this.validateChecksums = true; | ||
this.fetchReferenceSequenceCallback = args.seqFetch; | ||
this.options = { | ||
checkSequenceMD5: args.checkSequenceMD5 !== false, | ||
cacheSize: args.cacheSize !== undefined ? args.cacheSize : 20000 | ||
}; // cache of features in a slice, keyed by the | ||
// slice offset. caches all of the features in a slice, or none. | ||
// the cache is actually used by the slice object, it's just | ||
// kept here at the level of the file | ||
this.featureCache = new LRU({ | ||
maxSize: this.options.cacheSize | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
} | ||
(0, _createClass2.default)(CramFile, [{ | ||
key: "toString", | ||
value: function toString() { | ||
if (this.file.filename) return this.file.filename; | ||
if (this.file.url) return this.file.url; | ||
return '(cram file)'; | ||
} // can just read this object like a filehandle | ||
}, { | ||
key: "read", | ||
value: function read(buffer, offset, length, position) { | ||
return this.file.read(buffer, offset, length, position); | ||
} // can just stat this object like a filehandle | ||
}, { | ||
key: "stat", | ||
value: function stat() { | ||
return this.file.stat(); | ||
} // memoized | ||
}, { | ||
key: "getDefinition", | ||
value: function () { | ||
var _getDefinition = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee() { | ||
var headbytes, definition; | ||
return _regenerator.default.wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
headbytes = Buffer.allocUnsafe(cramFileDefinitionParser.maxLength); | ||
_context.next = 3; | ||
return this.file.read(headbytes, 0, cramFileDefinitionParser.maxLength, 0); | ||
case 3: | ||
definition = cramFileDefinitionParser.parser.parse(headbytes).result; | ||
if (!(definition.majorVersion !== 2 && definition.majorVersion !== 3)) { | ||
_context.next = 6; | ||
break; | ||
} | ||
throw new CramUnimplementedError("CRAM version ".concat(definition.majorVersion, " not supported")); | ||
case 6: | ||
return _context.abrupt("return", definition); | ||
case 7: | ||
case "end": | ||
return _context.stop(); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function getDefinition() { | ||
return _getDefinition.apply(this, arguments); | ||
} | ||
return getDefinition; | ||
}() // memoize | ||
}, { | ||
key: "getSamHeader", | ||
value: function () { | ||
var _getSamHeader = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2() { | ||
var firstContainer, _ref, content, headerLength, textStart, text; | ||
return _regenerator.default.wrap(function _callee2$(_context2) { | ||
while (1) { | ||
switch (_context2.prev = _context2.next) { | ||
case 0: | ||
_context2.next = 2; | ||
return this.getContainerById(0); | ||
case 2: | ||
firstContainer = _context2.sent; | ||
if (firstContainer) { | ||
_context2.next = 5; | ||
break; | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var zlib_1 = __importDefault(require("zlib")); | ||
var buffer_crc32_1 = __importDefault(require("buffer-crc32")); | ||
var quick_lru_1 = __importDefault(require("quick-lru")); | ||
var errors_1 = require("../errors"); | ||
var rans_1 = __importDefault(require("../rans")); | ||
var sectionParsers_1 = require("./sectionParsers"); | ||
var htscodecs_1 = __importDefault(require("@jkbonfield/htscodecs")); | ||
var container_1 = __importDefault(require("./container")); | ||
var io_1 = require("../io"); | ||
var util_1 = require("./util"); | ||
var sam_1 = require("../sam"); | ||
var CramFile = /** @class */ (function () { | ||
/** | ||
* @param {object} args | ||
* @param {object} [args.filehandle] - a filehandle that implements the stat() and | ||
* read() methods of the Node filehandle API https://nodejs.org/api/fs.html#fs_class_filehandle | ||
* @param {object} [args.path] - path to the cram file | ||
* @param {object} [args.url] - url for the cram file. also supports file:// urls for local files | ||
* @param {function} [args.seqFetch] - a function with signature | ||
* `(seqId, startCoordinate, endCoordinate)` that returns a promise for a string of sequence bases | ||
* @param {number} [args.cacheSize] optional maximum number of CRAM records to cache. default 20,000 | ||
* @param {boolean} [args.checkSequenceMD5] - default true. if false, disables verifying the MD5 | ||
* checksum of the reference sequence underlying a slice. In some applications, this check can cause an inconvenient amount (many megabases) of sequences to be fetched. | ||
*/ | ||
function CramFile(args) { | ||
this.file = (0, io_1.open)(args.url, args.path, args.filehandle); | ||
this.validateChecksums = true; | ||
this.fetchReferenceSequenceCallback = args.seqFetch; | ||
this.options = { | ||
checkSequenceMD5: args.checkSequenceMD5 !== false, | ||
cacheSize: args.cacheSize !== undefined ? args.cacheSize : 20000, | ||
}; | ||
// cache of features in a slice, keyed by the | ||
// slice offset. caches all of the features in a slice, or none. | ||
// the cache is actually used by the slice object, it's just | ||
// kept here at the level of the file | ||
this.featureCache = new quick_lru_1.default({ | ||
maxSize: this.options.cacheSize, | ||
}); | ||
} | ||
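// Construction is functionally the same in both builds: the constructor above
// takes one of filehandle / path / url, an optional seqFetch callback, and the
// cacheSize / checkSequenceMD5 options described in the JSDoc. A hypothetical
// caller (the path and fetchBases are placeholders, not part of the package):
//
//   const file = new CramFile({
//     path: '/tmp/example.cram',
//     seqFetch: (seqId, start, end) => fetchBases(seqId, start, end),
//     cacheSize: 50000,        // overrides the 20,000-record default
//     checkSequenceMD5: false, // skip reference MD5 verification of slices
//   })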
CramFile.prototype.toString = function () { | ||
if (this.file.filename) { | ||
return this.file.filename; | ||
} | ||
if (this.file.url) { | ||
return this.file.url; | ||
} | ||
return '(cram file)'; | ||
}; | ||
// can just read this object like a filehandle | ||
CramFile.prototype.read = function (buffer, offset, length, position) { | ||
return this.file.read(buffer, offset, length, position); | ||
}; | ||
// can just stat this object like a filehandle | ||
CramFile.prototype.stat = function () { | ||
return this.file.stat(); | ||
}; | ||
// memoized | ||
CramFile.prototype.getDefinition = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var headbytes, definition; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
headbytes = Buffer.allocUnsafe(sectionParsers_1.cramFileDefinition.maxLength); | ||
return [4 /*yield*/, this.file.read(headbytes, 0, sectionParsers_1.cramFileDefinition.maxLength, 0)]; | ||
case 1: | ||
_a.sent(); | ||
definition = sectionParsers_1.cramFileDefinition.parser.parse(headbytes).result; | ||
if (definition.majorVersion !== 2 && definition.majorVersion !== 3) { | ||
throw new errors_1.CramUnimplementedError("CRAM version ".concat(definition.majorVersion, " not supported")); | ||
} | ||
return [2 /*return*/, definition]; | ||
} | ||
throw new CramMalformedError('file contains no containers'); | ||
case 5: | ||
_context2.next = 7; | ||
return firstContainer.getFirstBlock(); | ||
case 7: | ||
_ref = _context2.sent; | ||
content = _ref.content; | ||
// find the end of the trailing zeros in the header text | ||
headerLength = content.readInt32LE(0); | ||
textStart = 4; // let textEnd = content.length - 1 | ||
// while (textEnd >= textStart && !content[textEnd]) textEnd -= 1 | ||
// trim off the trailing zeros | ||
text = content.toString('utf8', textStart, textStart + headerLength); | ||
this.header = text; | ||
return _context2.abrupt("return", parseHeaderText(text)); | ||
case 14: | ||
case "end": | ||
return _context2.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function getSamHeader() { | ||
return _getSamHeader.apply(this, arguments); | ||
} | ||
return getSamHeader; | ||
}() | ||
}, { | ||
key: "getHeaderText", | ||
value: function () { | ||
var _getHeaderText = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee3() { | ||
return _regenerator.default.wrap(function _callee3$(_context3) { | ||
while (1) { | ||
switch (_context3.prev = _context3.next) { | ||
case 0: | ||
_context3.next = 2; | ||
return this.getSamHeader(); | ||
case 2: | ||
return _context3.abrupt("return", this.header); | ||
case 3: | ||
case "end": | ||
return _context3.stop(); | ||
} | ||
} | ||
}, _callee3, this); | ||
})); | ||
function getHeaderText() { | ||
return _getHeaderText.apply(this, arguments); | ||
} | ||
return getHeaderText; | ||
}() // memoize | ||
}, { | ||
key: "getSectionParsers", | ||
value: function () { | ||
var _getSectionParsers2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee4() { | ||
var _ref2, majorVersion; | ||
return _regenerator.default.wrap(function _callee4$(_context4) { | ||
while (1) { | ||
switch (_context4.prev = _context4.next) { | ||
case 0: | ||
_context4.next = 2; | ||
return this.getDefinition(); | ||
case 2: | ||
_ref2 = _context4.sent; | ||
majorVersion = _ref2.majorVersion; | ||
return _context4.abrupt("return", _getSectionParsers(majorVersion)); | ||
case 5: | ||
case "end": | ||
return _context4.stop(); | ||
} | ||
} | ||
}, _callee4, this); | ||
})); | ||
function getSectionParsers() { | ||
return _getSectionParsers2.apply(this, arguments); | ||
} | ||
return getSectionParsers; | ||
}() | ||
}, { | ||
key: "getContainerById", | ||
value: function () { | ||
var _getContainerById = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee5(containerNumber) { | ||
var sectionParsers, position, _ref3, fileSize, cramContainerHeader1, currentContainer, i, currentHeader, j, block; | ||
return _regenerator.default.wrap(function _callee5$(_context5) { | ||
while (1) { | ||
switch (_context5.prev = _context5.next) { | ||
case 0: | ||
_context5.next = 2; | ||
return this.getSectionParsers(); | ||
case 2: | ||
sectionParsers = _context5.sent; | ||
position = sectionParsers.cramFileDefinition.maxLength; | ||
_context5.next = 6; | ||
return this.file.stat(); | ||
case 6: | ||
_ref3 = _context5.sent; | ||
fileSize = _ref3.size; | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1; // skip with a series of reads to the proper container | ||
i = 0; | ||
case 10: | ||
if (!(i <= containerNumber)) { | ||
_context5.next = 36; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// memoize | ||
CramFile.prototype.getSamHeader = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var firstContainer, content, headerLength, textStart, text; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getContainerById(0)]; | ||
case 1: | ||
firstContainer = _a.sent(); | ||
if (!firstContainer) { | ||
throw new errors_1.CramMalformedError('file contains no containers'); | ||
} | ||
return [4 /*yield*/, firstContainer.getFirstBlock() | ||
// find the end of the trailing zeros in the header text | ||
]; | ||
case 2: | ||
content = (_a.sent()).content; | ||
headerLength = content.readInt32LE(0); | ||
textStart = 4; | ||
text = content.toString('utf8', textStart, textStart + headerLength); | ||
this.header = text; | ||
return [2 /*return*/, (0, sam_1.parseHeaderText)(text)]; | ||
} | ||
if (!(position + cramContainerHeader1.maxLength + 8 >= fileSize)) { | ||
_context5.next = 13; | ||
break; | ||
}); | ||
}); | ||
}; | ||
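// A minimal sketch (not part of the package) of the header extraction implemented by | ||
// getSamHeader above, assuming `content` is the uncompressed content buffer of the | ||
// first block of the first container: a little-endian int32 length prefix, then the | ||
// SAM header text, then zero padding. | ||
function extractSamHeaderText(content) { | ||
  var headerLength = content.readInt32LE(0) // 4-byte length prefix | ||
  var textStart = 4 // the text begins immediately after the prefix | ||
  // anything past textStart + headerLength is trailing padding and is dropped | ||
  return content.toString('utf8', textStart, textStart + headerLength) | ||
} | ||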
CramFile.prototype.getHeaderText = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getSamHeader()]; | ||
case 1: | ||
_a.sent(); | ||
return [2 /*return*/, this.header]; | ||
} | ||
return _context5.abrupt("return", undefined); | ||
case 13: | ||
currentContainer = this.getContainerAtPosition(position); | ||
_context5.next = 16; | ||
return currentContainer.getHeader(); | ||
case 16: | ||
currentHeader = _context5.sent; | ||
if (currentHeader) { | ||
_context5.next = 19; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// memoize | ||
CramFile.prototype.getSectionParsers = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var majorVersion; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getDefinition()]; | ||
case 1: | ||
majorVersion = (_a.sent()).majorVersion; | ||
return [2 /*return*/, (0, sectionParsers_1.getSectionParsers)(majorVersion)]; | ||
} | ||
throw new CramMalformedError("container ".concat(containerNumber, " not found in file")); | ||
case 19: | ||
if (!(i === 0)) { | ||
_context5.next = 32; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramFile.prototype.getContainerById = function (containerNumber) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sectionParsers, position, fileSize, cramContainerHeader1, currentContainer, i, currentHeader, j, block; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getSectionParsers()]; | ||
case 1: | ||
sectionParsers = _a.sent(); | ||
position = sectionParsers.cramFileDefinition.maxLength; | ||
return [4 /*yield*/, this.file.stat()]; | ||
case 2: | ||
fileSize = (_a.sent()).size; | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1; | ||
i = 0; | ||
_a.label = 3; | ||
case 3: | ||
if (!(i <= containerNumber)) return [3 /*break*/, 11]; | ||
// if we are about to go off the end of the file | ||
// and have not found that container, it does not exist | ||
if (position + cramContainerHeader1.maxLength + 8 >= fileSize) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
currentContainer = this.getContainerAtPosition(position); | ||
return [4 /*yield*/, currentContainer.getHeader()]; | ||
case 4: | ||
currentHeader = _a.sent(); | ||
if (!currentHeader) { | ||
throw new errors_1.CramMalformedError("container ".concat(containerNumber, " not found in file")); | ||
} | ||
if (!(i === 0)) return [3 /*break*/, 9]; | ||
position = currentHeader._endPosition; | ||
j = 0; | ||
_a.label = 5; | ||
case 5: | ||
if (!(j < currentHeader.numBlocks)) return [3 /*break*/, 8]; | ||
return [4 /*yield*/, this.readBlock(position)]; | ||
case 6: | ||
block = _a.sent(); | ||
position = block._endPosition; | ||
_a.label = 7; | ||
case 7: | ||
j += 1; | ||
return [3 /*break*/, 5]; | ||
case 8: return [3 /*break*/, 10]; | ||
case 9: | ||
// otherwise, just traverse to the next container using the container's length | ||
position += currentHeader._size + currentHeader.length; | ||
_a.label = 10; | ||
case 10: | ||
i += 1; | ||
return [3 /*break*/, 3]; | ||
case 11: return [2 /*return*/, currentContainer]; | ||
} | ||
position = currentHeader._endPosition; | ||
j = 0; | ||
case 22: | ||
if (!(j < currentHeader.numBlocks)) { | ||
_context5.next = 30; | ||
break; | ||
}); | ||
}); | ||
}; | ||
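// A minimal sketch (not part of the package) of the container walk in getContainerById | ||
// above: each container's byte span is its parsed header size plus the header's | ||
// `length` field, so reaching container N is a series of header reads. The method above | ||
// additionally walks container 0 block-by-block instead of using its length. | ||
async function seekToContainer(cramFile, containerNumber, startPosition) { | ||
  var position = startPosition // e.g. just past the file definition section | ||
  for (var i = 0; i < containerNumber; i += 1) { | ||
    var header = await cramFile.getContainerAtPosition(position).getHeader() | ||
    if (!header) { | ||
      return undefined // ran off the end of the file before reaching the container | ||
    } | ||
    position += header._size + header.length | ||
  } | ||
  return position | ||
} | ||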
CramFile.prototype.checkCrc32 = function (position, length, recordedCrc32, description) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var b, calculatedCrc32; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
b = Buffer.allocUnsafe(length); | ||
return [4 /*yield*/, this.file.read(b, 0, length, position)]; | ||
case 1: | ||
_a.sent(); | ||
calculatedCrc32 = buffer_crc32_1.default.unsigned(b); | ||
if (calculatedCrc32 !== recordedCrc32) { | ||
throw new errors_1.CramMalformedError("crc mismatch in ".concat(description, ": recorded CRC32 = ").concat(recordedCrc32, ", but calculated CRC32 = ").concat(calculatedCrc32)); | ||
} | ||
return [2 /*return*/]; | ||
} | ||
_context5.next = 25; | ||
return this.readBlock(position); | ||
case 25: | ||
block = _context5.sent; | ||
position = block._endPosition; | ||
case 27: | ||
j += 1; | ||
_context5.next = 22; | ||
break; | ||
case 30: | ||
_context5.next = 33; | ||
break; | ||
case 32: | ||
// otherwise, just traverse to the next container using the container's length | ||
position += currentHeader._size + currentHeader.length; | ||
case 33: | ||
i += 1; | ||
_context5.next = 10; | ||
break; | ||
case 36: | ||
return _context5.abrupt("return", currentContainer); | ||
case 37: | ||
case "end": | ||
return _context5.stop(); | ||
} | ||
} | ||
}, _callee5, this); | ||
})); | ||
function getContainerById(_x) { | ||
return _getContainerById.apply(this, arguments); | ||
} | ||
return getContainerById; | ||
}() | ||
}, { | ||
key: "checkCrc32", | ||
value: function () { | ||
var _checkCrc = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee6(position, length, recordedCrc32, description) { | ||
var b, calculatedCrc32, _context6, _context7; | ||
return _regenerator.default.wrap(function _callee6$(_context8) { | ||
while (1) { | ||
switch (_context8.prev = _context8.next) { | ||
case 0: | ||
b = Buffer.allocUnsafe(length); | ||
_context8.next = 3; | ||
return this.file.read(b, 0, length, position); | ||
case 3: | ||
calculatedCrc32 = crc32.unsigned(b); | ||
if (!(calculatedCrc32 !== recordedCrc32)) { | ||
_context8.next = 6; | ||
break; | ||
} | ||
throw new CramMalformedError((0, _concat.default)(_context6 = (0, _concat.default)(_context7 = "crc mismatch in ".concat(description, ": recorded CRC32 = ")).call(_context7, recordedCrc32, ", but calculated CRC32 = ")).call(_context6, calculatedCrc32)); | ||
case 6: | ||
case "end": | ||
return _context8.stop(); | ||
} | ||
} | ||
}, _callee6, this); | ||
})); | ||
function checkCrc32(_x2, _x3, _x4, _x5) { | ||
return _checkCrc.apply(this, arguments); | ||
} | ||
return checkCrc32; | ||
}() | ||
}); | ||
}); | ||
}; | ||
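// A minimal sketch (not part of the package) of the checkCrc32 logic above, using the | ||
// same buffer-crc32 dependency; `filehandle` is assumed to expose the read(buffer, | ||
// offset, length, position) and stat() interface used throughout this file. | ||
var bufferCrc32 = require('buffer-crc32') | ||
async function verifyCrc32(filehandle, position, length, recordedCrc32, description) { | ||
  var b = Buffer.alloc(length) | ||
  await filehandle.read(b, 0, length, position) | ||
  var calculated = bufferCrc32.unsigned(b) | ||
  if (calculated !== recordedCrc32) { | ||
    throw new Error( | ||
      'crc mismatch in ' + description + ': recorded CRC32 = ' + recordedCrc32 + | ||
      ', but calculated CRC32 = ' + calculated, | ||
    ) | ||
  } | ||
} | ||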
/** | ||
* @returns {Promise<number>} the number of containers in the file | ||
*/ | ||
}, { | ||
key: "containerCount", | ||
value: function () { | ||
var _containerCount = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee7() { | ||
var sectionParsers, _ref4, fileSize, cramContainerHeader1, containerCount, position, currentHeader, j, block; | ||
return _regenerator.default.wrap(function _callee7$(_context9) { | ||
while (1) { | ||
switch (_context9.prev = _context9.next) { | ||
case 0: | ||
_context9.next = 2; | ||
return this.getSectionParsers(); | ||
case 2: | ||
sectionParsers = _context9.sent; | ||
_context9.next = 5; | ||
return this.file.stat(); | ||
case 5: | ||
_ref4 = _context9.sent; | ||
fileSize = _ref4.size; | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1; | ||
containerCount = 0; | ||
position = sectionParsers.cramFileDefinition.maxLength; | ||
case 10: | ||
if (!(position + cramContainerHeader1.maxLength + 8 < fileSize)) { | ||
_context9.next = 33; | ||
break; | ||
CramFile.prototype.containerCount = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sectionParsers, fileSize, cramContainerHeader1, containerCount, position, currentHeader, j, block; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getSectionParsers()]; | ||
case 1: | ||
sectionParsers = _a.sent(); | ||
return [4 /*yield*/, this.file.stat()]; | ||
case 2: | ||
fileSize = (_a.sent()).size; | ||
cramContainerHeader1 = sectionParsers.cramContainerHeader1; | ||
containerCount = 0; | ||
position = sectionParsers.cramFileDefinition.maxLength; | ||
_a.label = 3; | ||
case 3: | ||
if (!(position + cramContainerHeader1.maxLength + 8 < fileSize)) return [3 /*break*/, 11]; | ||
return [4 /*yield*/, this.getContainerAtPosition(position).getHeader()]; | ||
case 4: | ||
currentHeader = _a.sent(); | ||
if (!currentHeader) { | ||
return [3 /*break*/, 11]; | ||
} | ||
if (!(containerCount === 0)) return [3 /*break*/, 9]; | ||
position = currentHeader._endPosition; | ||
j = 0; | ||
_a.label = 5; | ||
case 5: | ||
if (!(j < currentHeader.numBlocks)) return [3 /*break*/, 8]; | ||
return [4 /*yield*/, this.readBlock(position)]; | ||
case 6: | ||
block = _a.sent(); | ||
position = block._endPosition; | ||
_a.label = 7; | ||
case 7: | ||
j += 1; | ||
return [3 /*break*/, 5]; | ||
case 8: return [3 /*break*/, 10]; | ||
case 9: | ||
// otherwise, just traverse to the next container using the container's length | ||
position += currentHeader._size + currentHeader.length; | ||
_a.label = 10; | ||
case 10: | ||
containerCount += 1; | ||
return [3 /*break*/, 3]; | ||
case 11: return [2 /*return*/, containerCount]; | ||
} | ||
_context9.next = 13; | ||
return this.getContainerAtPosition(position).getHeader(); | ||
case 13: | ||
currentHeader = _context9.sent; | ||
if (currentHeader) { | ||
_context9.next = 16; | ||
break; | ||
}); | ||
}); | ||
}; | ||
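// Usage sketch (hypothetical caller, not part of the package) for the scanning methods | ||
// above, assuming `cram` is an already-constructed CramFile instance: | ||
async function describeCramFile(cram) { | ||
  var count = await cram.containerCount() // scans header-to-header until near EOF | ||
  var first = await cram.getContainerById(0) | ||
  var firstHeader = first ? await first.getHeader() : undefined | ||
  return { containerCount: count, firstContainerHeader: firstHeader } | ||
} | ||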
CramFile.prototype.getContainerAtPosition = function (position) { | ||
return new container_1.default(this, position); | ||
}; | ||
CramFile.prototype.readBlockHeader = function (position) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sectionParsers, cramBlockHeader, fileSize, buffer; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getSectionParsers()]; | ||
case 1: | ||
sectionParsers = _a.sent(); | ||
cramBlockHeader = sectionParsers.cramBlockHeader; | ||
return [4 /*yield*/, this.file.stat()]; | ||
case 2: | ||
fileSize = (_a.sent()).size; | ||
if (position + cramBlockHeader.maxLength >= fileSize) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
buffer = Buffer.allocUnsafe(cramBlockHeader.maxLength); | ||
return [4 /*yield*/, this.file.read(buffer, 0, cramBlockHeader.maxLength, position)]; | ||
case 3: | ||
_a.sent(); | ||
return [2 /*return*/, (0, util_1.parseItem)(buffer, cramBlockHeader.parser, 0, position)]; | ||
} | ||
return _context9.abrupt("break", 33); | ||
case 16: | ||
if (!(containerCount === 0)) { | ||
_context9.next = 29; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramFile.prototype._parseSection = function (section, position, size, preReadBuffer) { | ||
if (size === void 0) { size = section.maxLength; } | ||
return __awaiter(this, void 0, void 0, function () { | ||
var buffer, fileSize, data; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
if (!preReadBuffer) return [3 /*break*/, 1]; | ||
buffer = preReadBuffer; | ||
return [3 /*break*/, 4]; | ||
case 1: return [4 /*yield*/, this.file.stat()]; | ||
case 2: | ||
fileSize = (_a.sent()).size; | ||
if (position + size >= fileSize) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
buffer = Buffer.allocUnsafe(size); | ||
return [4 /*yield*/, this.file.read(buffer, 0, size, position)]; | ||
case 3: | ||
_a.sent(); | ||
_a.label = 4; | ||
case 4: | ||
data = (0, util_1.parseItem)(buffer, section.parser, 0, position); | ||
if (data._size !== size) { | ||
throw new errors_1.CramMalformedError("section read error: requested size ".concat(size, " does not equal parsed size ").concat(data._size)); | ||
} | ||
return [2 /*return*/, data]; | ||
} | ||
position = currentHeader._endPosition; | ||
j = 0; | ||
case 19: | ||
if (!(j < currentHeader.numBlocks)) { | ||
_context9.next = 27; | ||
break; | ||
}); | ||
}); | ||
}; | ||
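// A minimal sketch (not part of the package) of the guard shared by readBlockHeader and | ||
// _parseSection above: never read past the end of the file, and let the caller verify | ||
// that the parser consumed exactly `size` bytes (the _size check above). | ||
async function readSectionBytes(file, position, size) { | ||
  var fileSize = (await file.stat()).size | ||
  if (position + size >= fileSize) { | ||
    return undefined // too close to EOF for a full section | ||
  } | ||
  var buffer = Buffer.allocUnsafe(size) | ||
  await file.read(buffer, 0, size, position) | ||
  return buffer | ||
} | ||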
CramFile.prototype._uncompress = function (compressionMethod, inputBuffer, outputBuffer) { | ||
if (compressionMethod === 'gzip') { | ||
var result = zlib_1.default.gunzipSync(inputBuffer); | ||
result.copy(outputBuffer); | ||
} | ||
else if (compressionMethod === 'bzip2') { | ||
var bits = bzip2.array(inputBuffer); | ||
var size = bzip2.header(bits); | ||
var j = 0; | ||
do { | ||
var chunk = bzip2.decompress(bits, size); | ||
if (chunk != -1) { | ||
Buffer.from(chunk).copy(outputBuffer, j); | ||
j += chunk.length; | ||
size -= chunk.length; | ||
} | ||
_context9.next = 22; | ||
return this.readBlock(position); | ||
case 22: | ||
block = _context9.sent; | ||
position = block._endPosition; | ||
case 24: | ||
j += 1; | ||
_context9.next = 19; | ||
break; | ||
case 27: | ||
_context9.next = 30; | ||
break; | ||
case 29: | ||
// otherwise, just traverse to the next container using the container's length | ||
position += currentHeader._size + currentHeader.length; | ||
case 30: | ||
containerCount += 1; | ||
_context9.next = 10; | ||
break; | ||
case 33: | ||
return _context9.abrupt("return", containerCount); | ||
case 34: | ||
case "end": | ||
return _context9.stop(); | ||
} | ||
} | ||
}, _callee7, this); | ||
})); | ||
function containerCount() { | ||
return _containerCount.apply(this, arguments); | ||
} | ||
return containerCount; | ||
}() | ||
}, { | ||
key: "getContainerAtPosition", | ||
value: function getContainerAtPosition(position) { | ||
return new CramContainer(this, position); | ||
} | ||
}, { | ||
key: "readBlockHeader", | ||
value: function () { | ||
var _readBlockHeader = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee8(position) { | ||
var sectionParsers, cramBlockHeader, _ref5, fileSize, buffer; | ||
return _regenerator.default.wrap(function _callee8$(_context10) { | ||
while (1) { | ||
switch (_context10.prev = _context10.next) { | ||
case 0: | ||
_context10.next = 2; | ||
return this.getSectionParsers(); | ||
case 2: | ||
sectionParsers = _context10.sent; | ||
cramBlockHeader = sectionParsers.cramBlockHeader; | ||
_context10.next = 6; | ||
return this.file.stat(); | ||
case 6: | ||
_ref5 = _context10.sent; | ||
fileSize = _ref5.size; | ||
if (!(position + cramBlockHeader.maxLength >= fileSize)) { | ||
_context10.next = 10; | ||
break; | ||
} while (chunk != -1); | ||
} | ||
else if (compressionMethod === 'rans') { | ||
(0, rans_1.default)(inputBuffer, outputBuffer); | ||
// htscodecs r4x8 is slower, but compatible. | ||
// htscodecs.r4x8_uncompress(inputBuffer, outputBuffer); | ||
} | ||
else if (compressionMethod === 'rans4x16') { | ||
htscodecs_1.default.r4x16_uncompress(inputBuffer, outputBuffer); | ||
} | ||
else if (compressionMethod === 'arith') { | ||
htscodecs_1.default.arith_uncompress(inputBuffer, outputBuffer); | ||
} | ||
else if (compressionMethod === 'fqzcomp') { | ||
htscodecs_1.default.fqzcomp_uncompress(inputBuffer, outputBuffer); | ||
} | ||
else if (compressionMethod === 'tok3') { | ||
htscodecs_1.default.tok3_uncompress(inputBuffer, outputBuffer); | ||
} | ||
else { | ||
throw new errors_1.CramUnimplementedError("".concat(compressionMethod, " decompression not yet implemented")); | ||
} | ||
}; | ||
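// A minimal sketch (not part of the package) of the 'gzip' branch of _uncompress above; | ||
// the other methods ('bzip2', 'rans', 'rans4x16', 'arith', 'fqzcomp', 'tok3') dispatch | ||
// to the bundled bzip2/rans/htscodecs decoders and are not reproduced here. | ||
var nodeZlib = require('zlib') | ||
function gunzipInto(inputBuffer, outputBuffer) { | ||
  var result = nodeZlib.gunzipSync(inputBuffer) // decompress the whole block payload | ||
  result.copy(outputBuffer) // copy into the caller's preallocated buffer | ||
  return outputBuffer | ||
} | ||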
CramFile.prototype.readBlock = function (position) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var majorVersion, sectionParsers, block, blockContentPosition, uncompressedData, compressedData, crc; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getDefinition()]; | ||
case 1: | ||
majorVersion = (_a.sent()).majorVersion; | ||
return [4 /*yield*/, this.getSectionParsers()]; | ||
case 2: | ||
sectionParsers = _a.sent(); | ||
return [4 /*yield*/, this.readBlockHeader(position)]; | ||
case 3: | ||
block = _a.sent(); | ||
blockContentPosition = block._endPosition; | ||
block.contentPosition = block._endPosition; | ||
uncompressedData = Buffer.allocUnsafe(block.uncompressedSize); | ||
if (!(block.compressionMethod !== 'raw')) return [3 /*break*/, 5]; | ||
compressedData = Buffer.allocUnsafe(block.compressedSize); | ||
return [4 /*yield*/, this.read(compressedData, 0, block.compressedSize, blockContentPosition)]; | ||
case 4: | ||
_a.sent(); | ||
this._uncompress(block.compressionMethod, compressedData, uncompressedData); | ||
return [3 /*break*/, 7]; | ||
case 5: return [4 /*yield*/, this.read(uncompressedData, 0, block.uncompressedSize, blockContentPosition)]; | ||
case 6: | ||
_a.sent(); | ||
_a.label = 7; | ||
case 7: | ||
block.content = uncompressedData; | ||
if (!(majorVersion >= 3)) return [3 /*break*/, 11]; | ||
return [4 /*yield*/, this._parseSection(sectionParsers.cramBlockCrc32, blockContentPosition + block.compressedSize)]; | ||
case 8: | ||
crc = _a.sent(); | ||
block.crc32 = crc.crc32; | ||
if (!this.validateChecksums) return [3 /*break*/, 10]; | ||
return [4 /*yield*/, this.checkCrc32(position, block._size + block.compressedSize, block.crc32, 'block data')]; | ||
case 9: | ||
_a.sent(); | ||
_a.label = 10; | ||
case 10: | ||
// make the end position and size reflect the whole block | ||
block._endPosition = crc._endPosition; | ||
block._size = | ||
block.compressedSize + sectionParsers.cramBlockCrc32.maxLength; | ||
return [3 /*break*/, 12]; | ||
case 11: | ||
block._endPosition = blockContentPosition + block.compressedSize; | ||
block._size = block.compressedSize; | ||
_a.label = 12; | ||
case 12: return [2 /*return*/, block]; | ||
} | ||
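// Byte-span bookkeeping sketch (not part of the package) for readBlock above: a block is | ||
// its header, then compressedSize bytes of content, then, for CRAM major version >= 3 | ||
// only, a 4-byte CRC32 section, which is why _endPosition is computed differently in the | ||
// two branches. | ||
function blockEndPosition(blockContentPosition, compressedSize, majorVersion) { | ||
  var crcLength = majorVersion >= 3 ? 4 : 0 // cramBlockCrc32.maxLength | ||
  return blockContentPosition + compressedSize + crcLength | ||
} | ||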
return _context10.abrupt("return", undefined); | ||
case 10: | ||
buffer = Buffer.allocUnsafe(cramBlockHeader.maxLength); | ||
_context10.next = 13; | ||
return this.file.read(buffer, 0, cramBlockHeader.maxLength, position); | ||
case 13: | ||
return _context10.abrupt("return", parseItem(buffer, cramBlockHeader.parser, 0, position)); | ||
case 14: | ||
case "end": | ||
return _context10.stop(); | ||
} | ||
} | ||
}, _callee8, this); | ||
})); | ||
function readBlockHeader(_x6) { | ||
return _readBlockHeader.apply(this, arguments); | ||
} | ||
return readBlockHeader; | ||
}() | ||
}, { | ||
key: "_parseSection", | ||
value: function () { | ||
var _parseSection2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee9(section, position) { | ||
var _context11; | ||
var size, | ||
preReadBuffer, | ||
buffer, | ||
_ref6, | ||
fileSize, | ||
data, | ||
_args9 = arguments; | ||
return _regenerator.default.wrap(function _callee9$(_context12) { | ||
while (1) { | ||
switch (_context12.prev = _context12.next) { | ||
case 0: | ||
size = _args9.length > 2 && _args9[2] !== undefined ? _args9[2] : section.maxLength; | ||
preReadBuffer = _args9.length > 3 ? _args9[3] : undefined; | ||
if (!preReadBuffer) { | ||
_context12.next = 6; | ||
break; | ||
} | ||
buffer = preReadBuffer; | ||
_context12.next = 15; | ||
break; | ||
case 6: | ||
_context12.next = 8; | ||
return this.file.stat(); | ||
case 8: | ||
_ref6 = _context12.sent; | ||
fileSize = _ref6.size; | ||
if (!(position + size >= fileSize)) { | ||
_context12.next = 12; | ||
break; | ||
} | ||
return _context12.abrupt("return", undefined); | ||
case 12: | ||
buffer = Buffer.allocUnsafe(size); | ||
_context12.next = 15; | ||
return this.file.read(buffer, 0, size, position); | ||
case 15: | ||
data = parseItem(buffer, section.parser, 0, position); | ||
if (!(data._size !== size)) { | ||
_context12.next = 18; | ||
break; | ||
} | ||
throw new CramMalformedError((0, _concat.default)(_context11 = "section read error: requested size ".concat(size, " does not equal parsed size ")).call(_context11, data._size)); | ||
case 18: | ||
return _context12.abrupt("return", data); | ||
case 19: | ||
case "end": | ||
return _context12.stop(); | ||
} | ||
} | ||
}, _callee9, this); | ||
})); | ||
function _parseSection(_x7, _x8) { | ||
return _parseSection2.apply(this, arguments); | ||
} | ||
return _parseSection; | ||
}() | ||
}, { | ||
key: "_uncompress", | ||
value: function _uncompress(compressionMethod, inputBuffer, outputBuffer) { | ||
if (compressionMethod === 'gzip') { | ||
var result = zlib.gunzipSync(inputBuffer); | ||
result.copy(outputBuffer); | ||
} else if (compressionMethod === 'rans') { | ||
rans.uncompress(inputBuffer, outputBuffer); | ||
} else { | ||
throw new CramUnimplementedError("".concat(compressionMethod, " decompression not yet implemented")); | ||
} | ||
} | ||
}, { | ||
key: "readBlock", | ||
value: function () { | ||
var _readBlock = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee10(position) { | ||
var _ref7, majorVersion, sectionParsers, block, blockContentPosition, uncompressedData, compressedData, crc; | ||
return _regenerator.default.wrap(function _callee10$(_context13) { | ||
while (1) { | ||
switch (_context13.prev = _context13.next) { | ||
case 0: | ||
_context13.next = 2; | ||
return this.getDefinition(); | ||
case 2: | ||
_ref7 = _context13.sent; | ||
majorVersion = _ref7.majorVersion; | ||
_context13.next = 6; | ||
return this.getSectionParsers(); | ||
case 6: | ||
sectionParsers = _context13.sent; | ||
_context13.next = 9; | ||
return this.readBlockHeader(position); | ||
case 9: | ||
block = _context13.sent; | ||
blockContentPosition = block._endPosition; | ||
block.contentPosition = block._endPosition; | ||
uncompressedData = Buffer.allocUnsafe(block.uncompressedSize); | ||
if (!(block.compressionMethod !== 'raw')) { | ||
_context13.next = 20; | ||
break; | ||
} | ||
compressedData = Buffer.allocUnsafe(block.compressedSize); | ||
_context13.next = 17; | ||
return this.read(compressedData, 0, block.compressedSize, blockContentPosition); | ||
case 17: | ||
this._uncompress(block.compressionMethod, compressedData, uncompressedData); | ||
_context13.next = 22; | ||
break; | ||
case 20: | ||
_context13.next = 22; | ||
return this.read(uncompressedData, 0, block.uncompressedSize, blockContentPosition); | ||
case 22: | ||
block.content = uncompressedData; | ||
if (!(majorVersion >= 3)) { | ||
_context13.next = 35; | ||
break; | ||
} | ||
_context13.next = 26; | ||
return this._parseSection(sectionParsers.cramBlockCrc32, blockContentPosition + block.compressedSize); | ||
case 26: | ||
crc = _context13.sent; | ||
block.crc32 = crc.crc32; // check the block data crc32 | ||
if (!this.validateChecksums) { | ||
_context13.next = 31; | ||
break; | ||
} | ||
_context13.next = 31; | ||
return this.checkCrc32(position, block._size + block.compressedSize, block.crc32, 'block data'); | ||
case 31: | ||
// make the end position and size reflect the whole block | ||
block._endPosition = crc._endPosition; | ||
block._size = block.compressedSize + sectionParsers.cramBlockCrc32.maxLength; | ||
_context13.next = 37; | ||
break; | ||
case 35: | ||
block._endPosition = blockContentPosition + block.compressedSize; | ||
block._size = block.compressedSize; | ||
case 37: | ||
return _context13.abrupt("return", block); | ||
case 38: | ||
case "end": | ||
return _context13.stop(); | ||
} | ||
} | ||
}, _callee10, this); | ||
})); | ||
function readBlock(_x9) { | ||
return _readBlock.apply(this, arguments); | ||
} | ||
return readBlock; | ||
}() | ||
}]); | ||
return CramFile; | ||
}(); | ||
(0, _forEach.default)(_context14 = 'getDefinition getSectionParsers getSamHeader'.split(' ')).call(_context14, function (method) { | ||
return tinyMemoize(CramFile, method); | ||
}); | ||
module.exports = CramFile; | ||
}); | ||
}); | ||
}; | ||
return CramFile; | ||
}()); | ||
exports.default = CramFile; | ||
'getDefinition getSectionParsers getSamHeader' | ||
.split(' ') | ||
.forEach(function (method) { return (0, util_1.tinyMemoize)(CramFile, method); }); | ||
//# sourceMappingURL=file.js.map |
"use strict"; | ||
var CramFile = require('./file'); | ||
module.exports = CramFile; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var file_1 = __importDefault(require("./file")); | ||
exports.default = file_1.default; | ||
//# sourceMappingURL=index.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _keys = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/keys")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _flags = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/flags")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toConsumableArray")); | ||
var Constants = require('./constants'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var constants_1 = __importDefault(require("./constants")); | ||
function decodeReadSequence(cramRecord, refRegion) { | ||
// if it has no length, it has no sequence | ||
if (!cramRecord.lengthOnRef && !cramRecord.readLength) return undefined; | ||
if (cramRecord.isUnknownBases()) return undefined; // remember: all coordinates are 1-based closed | ||
var regionSeqOffset = cramRecord.alignmentStart - refRegion.start; | ||
if (!cramRecord.readFeatures) return refRegion.seq.substr(regionSeqOffset, cramRecord.lengthOnRef).toUpperCase(); | ||
var bases = ''; | ||
var regionPos = regionSeqOffset; | ||
var currentReadFeature = 0; | ||
while (bases.length < cramRecord.readLength) { | ||
if (currentReadFeature < cramRecord.readFeatures.length) { | ||
var feature = cramRecord.readFeatures[currentReadFeature]; | ||
if (feature.code === 'Q' || feature.code === 'q') { | ||
currentReadFeature += 1; | ||
} else if (feature.pos === bases.length + 1) { | ||
// process the read feature | ||
currentReadFeature += 1; | ||
if (feature.code === 'b') { | ||
// 'b': a run of read bases given as comma-separated character codes | ||
var ret = feature.data.split(','); | ||
var added = String.fromCharCode.apply(String, (0, _toConsumableArray2.default)(ret)); | ||
bases += added; | ||
regionPos += added.length; | ||
} else if (feature.code === 'B') { | ||
// base pair and associated quality | ||
// TODO: do we need to set the quality in the qual scores? | ||
bases += feature.data[0]; | ||
regionPos += 1; | ||
} else if (feature.code === 'X') { | ||
// base substitution | ||
bases += feature.sub; | ||
regionPos += 1; | ||
} else if (feature.code === 'I') { | ||
// insertion | ||
bases += feature.data; | ||
} else if (feature.code === 'D') { | ||
// deletion | ||
regionPos += feature.data; | ||
} else if (feature.code === 'i') { | ||
// insert single base | ||
bases += feature.data; | ||
} else if (feature.code === 'N') { | ||
// reference skip: advance the reference position without adding read bases | ||
// seqBases.splice(feature.pos - 1, feature.data) | ||
regionPos += feature.data; | ||
} else if (feature.code === 'S') { | ||
// soft clipped bases that should be present in the read seq | ||
// seqBases.splice(feature.pos - 1, 0, ...feature.data.split('')) | ||
bases += feature.data; | ||
} else if (feature.code === 'P') {// padding, do nothing | ||
} else if (feature.code === 'H') {// hard clip, do nothing | ||
// if it has no length, it has no sequence | ||
if (!cramRecord.lengthOnRef && !cramRecord.readLength) { | ||
return undefined; | ||
} | ||
if (cramRecord.isUnknownBases()) { | ||
return undefined; | ||
} | ||
// remember: all coordinates are 1-based closed | ||
var regionSeqOffset = cramRecord.alignmentStart - refRegion.start; | ||
if (!cramRecord.readFeatures) { | ||
return refRegion.seq | ||
.substr(regionSeqOffset, cramRecord.lengthOnRef) | ||
.toUpperCase(); | ||
} | ||
var bases = ''; | ||
var regionPos = regionSeqOffset; | ||
var currentReadFeature = 0; | ||
while (bases.length < cramRecord.readLength) { | ||
if (currentReadFeature < cramRecord.readFeatures.length) { | ||
var feature = cramRecord.readFeatures[currentReadFeature]; | ||
if (feature.code === 'Q' || feature.code === 'q') { | ||
currentReadFeature += 1; | ||
} | ||
else if (feature.pos === bases.length + 1) { | ||
// process the read feature | ||
currentReadFeature += 1; | ||
if (feature.code === 'b') { | ||
// 'b': a run of read bases given as comma-separated character codes | ||
var ret = feature.data.split(','); | ||
var added = String.fromCharCode.apply(String, ret); | ||
bases += added; | ||
regionPos += added.length; | ||
} | ||
else if (feature.code === 'B') { | ||
// base pair and associated quality | ||
// TODO: do we need to set the quality in the qual scores? | ||
bases += feature.data[0]; | ||
regionPos += 1; | ||
} | ||
else if (feature.code === 'X') { | ||
// base substitution | ||
bases += feature.sub; | ||
regionPos += 1; | ||
} | ||
else if (feature.code === 'I') { | ||
// insertion | ||
bases += feature.data; | ||
} | ||
else if (feature.code === 'D') { | ||
// deletion | ||
regionPos += feature.data; | ||
} | ||
else if (feature.code === 'i') { | ||
// insert single base | ||
bases += feature.data; | ||
} | ||
else if (feature.code === 'N') { | ||
// reference skip: advance the reference position without adding read bases | ||
// seqBases.splice(feature.pos - 1, feature.data) | ||
regionPos += feature.data; | ||
} | ||
else if (feature.code === 'S') { | ||
// soft clipped bases that should be present in the read seq | ||
// seqBases.splice(feature.pos - 1, 0, ...feature.data.split('')) | ||
bases += feature.data; | ||
} | ||
else if (feature.code === 'P') { | ||
// padding, do nothing | ||
} | ||
else if (feature.code === 'H') { | ||
// hard clip, do nothing | ||
} | ||
} | ||
else if (currentReadFeature < cramRecord.readFeatures.length) { | ||
// put down a chunk of sequence up to the next read feature | ||
var chunk = refRegion.seq.substr(regionPos, cramRecord.readFeatures[currentReadFeature].pos - bases.length - 1); | ||
bases += chunk; | ||
regionPos += chunk.length; | ||
} | ||
} | ||
} else if (currentReadFeature < cramRecord.readFeatures.length) { | ||
// put down a chunk of sequence up to the next read feature | ||
var chunk = refRegion.seq.substr(regionPos, cramRecord.readFeatures[currentReadFeature].pos - bases.length - 1); | ||
bases += chunk; | ||
regionPos += chunk.length; | ||
} | ||
} else { | ||
// put down a chunk of reference up to the full read length | ||
var _chunk = refRegion.seq.substr(regionPos, cramRecord.readLength - bases.length); | ||
bases += _chunk; | ||
regionPos += _chunk.length; | ||
else { | ||
// put down a chunk of reference up to the full read length | ||
var chunk = refRegion.seq.substr(regionPos, cramRecord.readLength - bases.length); | ||
bases += chunk; | ||
regionPos += chunk.length; | ||
} | ||
} | ||
} | ||
return bases.toUpperCase(); | ||
} | ||
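// Summary sketch (not part of the package) of how each read-feature code handled in | ||
// decodeReadSequence above moves the two cursors: the growing read sequence and the | ||
// position in the reference region. | ||
function featureCursorEffect(feature) { | ||
  switch (feature.code) { | ||
    case 'X': // base substitution | ||
    case 'B': // base plus associated quality score | ||
      return { readBases: 1, refBases: 1 } | ||
    case 'I': // insertion | ||
    case 'i': // single inserted base | ||
    case 'S': // soft clip, present in the read | ||
      return { readBases: feature.data.length, refBases: 0 } | ||
    case 'b': { // run of bases given as comma-separated character codes | ||
      var n = feature.data.split(',').length | ||
      return { readBases: n, refBases: n } | ||
    } | ||
    case 'D': // deletion | ||
    case 'N': // reference skip | ||
      return { readBases: 0, refBases: feature.data } | ||
    default: // 'P', 'H', 'Q', 'q': neither cursor moves | ||
      return { readBases: 0, refBases: 0 } | ||
  } | ||
} | ||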
var baseNumbers = { | ||
a: 0, | ||
A: 0, | ||
c: 1, | ||
C: 1, | ||
g: 2, | ||
G: 2, | ||
t: 3, | ||
T: 3, | ||
n: 4, | ||
N: 4, | ||
}; | ||
function decodeBaseSubstitution(cramRecord, refRegion, compressionScheme, readFeature) { | ||
if (!refRegion) return; // decode base substitution code using the substitution matrix | ||
var refCoord = readFeature.refPos - refRegion.start; | ||
var refBase = refRegion.seq.charAt(refCoord); | ||
if (refBase) readFeature.ref = refBase; | ||
var baseNumber = baseNumbers[refBase]; | ||
if (baseNumber === undefined) baseNumber = 4; | ||
var substitutionScheme = compressionScheme.substitutionMatrix[baseNumber]; | ||
var base = substitutionScheme[readFeature.data]; | ||
if (base) readFeature.sub = base; | ||
if (!refRegion) { | ||
return; | ||
} | ||
// decode base substitution code using the substitution matrix | ||
var refCoord = readFeature.refPos - refRegion.start; | ||
var refBase = refRegion.seq.charAt(refCoord); | ||
if (refBase) { | ||
readFeature.ref = refBase; | ||
} | ||
var baseNumber = baseNumbers[refBase]; | ||
if (baseNumber === undefined) { | ||
baseNumber = 4; | ||
} | ||
var substitutionScheme = compressionScheme.substitutionMatrix[baseNumber]; | ||
var base = substitutionScheme[readFeature.data]; | ||
if (base) { | ||
readFeature.sub = base; | ||
} | ||
} | ||
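// A minimal sketch (not part of the package) of the substitution-matrix lookup in | ||
// decodeBaseSubstitution above, with a hypothetical matrix row: the reference base picks | ||
// a row (A=0, C=1, G=2, T=3, everything else 4) and the feature's numeric data indexes | ||
// into that row to give the substituted read base. | ||
function lookupSubstitution(substitutionMatrix, refBase, substitutionCode) { | ||
  var rowIndex = { A: 0, C: 1, G: 2, T: 3 }[refBase.toUpperCase()] | ||
  if (rowIndex === undefined) { | ||
    rowIndex = 4 // N or unknown reference base | ||
  } | ||
  // e.g. if substitutionMatrix[0] were 'CGTN' (hypothetical), code 2 would decode to 'T' | ||
  return substitutionMatrix[rowIndex][substitutionCode] | ||
} | ||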
@@ -122,128 +133,74 @@ /** | ||
*/ | ||
var CramRecord = | ||
/*#__PURE__*/ | ||
function () { | ||
function CramRecord() { | ||
(0, _classCallCheck2.default)(this, CramRecord); | ||
this.tags = {}; | ||
} | ||
/** | ||
* @returns {boolean} true if the read is paired, regardless of whether both segments are mapped | ||
*/ | ||
(0, _createClass2.default)(CramRecord, [{ | ||
key: "isPaired", | ||
value: function isPaired() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FPAIRED); | ||
var CramRecord = /** @class */ (function () { | ||
function CramRecord() { | ||
this.tags = {}; | ||
} | ||
/** | ||
* @returns {boolean} true if the read is paired, regardless of whether both segments are mapped | ||
*/ | ||
CramRecord.prototype.isPaired = function () { | ||
return !!(this.flags & constants_1.default.BAM_FPAIRED); | ||
}; | ||
/** @returns {boolean} true if the read is paired, and both segments are mapped */ | ||
}, { | ||
key: "isProperlyPaired", | ||
value: function isProperlyPaired() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FPROPER_PAIR); | ||
} | ||
CramRecord.prototype.isProperlyPaired = function () { | ||
return !!(this.flags & constants_1.default.BAM_FPROPER_PAIR); | ||
}; | ||
/** @returns {boolean} true if the read itself is unmapped; conflicts with isProperlyPaired */ | ||
}, { | ||
key: "isSegmentUnmapped", | ||
value: function isSegmentUnmapped() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FUNMAP); | ||
} | ||
CramRecord.prototype.isSegmentUnmapped = function () { | ||
return !!(this.flags & constants_1.default.BAM_FUNMAP); | ||
}; | ||
/** @returns {boolean} true if the read's mate is unmapped */ | ||
}, { | ||
key: "isMateUnmapped", | ||
value: function isMateUnmapped() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FMUNMAP); | ||
} | ||
CramRecord.prototype.isMateUnmapped = function () { | ||
return !!(this.flags & constants_1.default.BAM_FMUNMAP); | ||
}; | ||
/** @returns {boolean} true if the read is mapped to the reverse strand */ | ||
}, { | ||
key: "isReverseComplemented", | ||
value: function isReverseComplemented() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FREVERSE); | ||
} | ||
CramRecord.prototype.isReverseComplemented = function () { | ||
return !!(this.flags & constants_1.default.BAM_FREVERSE); | ||
}; | ||
/** @returns {boolean} true if the mate is mapped to the reverse strand */ | ||
}, { | ||
key: "isMateReverseComplemented", | ||
value: function isMateReverseComplemented() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FMREVERSE); | ||
} | ||
CramRecord.prototype.isMateReverseComplemented = function () { | ||
return !!(this.flags & constants_1.default.BAM_FMREVERSE); | ||
}; | ||
/** @returns {boolean} true if this is read number 1 in a pair */ | ||
}, { | ||
key: "isRead1", | ||
value: function isRead1() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FREAD1); | ||
} | ||
CramRecord.prototype.isRead1 = function () { | ||
return !!(this.flags & constants_1.default.BAM_FREAD1); | ||
}; | ||
/** @returns {boolean} true if this is read number 2 in a pair */ | ||
}, { | ||
key: "isRead2", | ||
value: function isRead2() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FREAD2); | ||
} | ||
CramRecord.prototype.isRead2 = function () { | ||
return !!(this.flags & constants_1.default.BAM_FREAD2); | ||
}; | ||
/** @returns {boolean} true if this is a secondary alignment */ | ||
}, { | ||
key: "isSecondary", | ||
value: function isSecondary() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FSECONDARY); | ||
} | ||
CramRecord.prototype.isSecondary = function () { | ||
return !!(this.flags & constants_1.default.BAM_FSECONDARY); | ||
}; | ||
/** @returns {boolean} true if this read has failed QC checks */ | ||
}, { | ||
key: "isFailedQc", | ||
value: function isFailedQc() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FQCFAIL); | ||
} | ||
CramRecord.prototype.isFailedQc = function () { | ||
return !!(this.flags & constants_1.default.BAM_FQCFAIL); | ||
}; | ||
/** @returns {boolean} true if the read is an optical or PCR duplicate */ | ||
}, { | ||
key: "isDuplicate", | ||
value: function isDuplicate() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FDUP); | ||
} | ||
CramRecord.prototype.isDuplicate = function () { | ||
return !!(this.flags & constants_1.default.BAM_FDUP); | ||
}; | ||
/** @returns {boolean} true if this is a supplementary alignment */ | ||
}, { | ||
key: "isSupplementary", | ||
value: function isSupplementary() { | ||
return !!((0, _flags.default)(this) & Constants.BAM_FSUPPLEMENTARY); | ||
} | ||
CramRecord.prototype.isSupplementary = function () { | ||
return !!(this.flags & constants_1.default.BAM_FSUPPLEMENTARY); | ||
}; | ||
/** | ||
* @returns {boolean} true if the read is detached | ||
*/ | ||
}, { | ||
key: "isDetached", | ||
value: function isDetached() { | ||
return !!(this.cramFlags & Constants.CRAM_FLAG_DETACHED); | ||
} | ||
CramRecord.prototype.isDetached = function () { | ||
return !!(this.cramFlags & constants_1.default.CRAM_FLAG_DETACHED); | ||
}; | ||
/** @returns {boolean} true if the read has a mate in this same CRAM segment */ | ||
}, { | ||
key: "hasMateDownStream", | ||
value: function hasMateDownStream() { | ||
return !!(this.cramFlags & Constants.CRAM_FLAG_MATE_DOWNSTREAM); | ||
} | ||
CramRecord.prototype.hasMateDownStream = function () { | ||
return !!(this.cramFlags & constants_1.default.CRAM_FLAG_MATE_DOWNSTREAM); | ||
}; | ||
/** @returns {boolean} true if the read contains qual scores */ | ||
}, { | ||
key: "isPreservingQualityScores", | ||
value: function isPreservingQualityScores() { | ||
return !!(this.cramFlags & Constants.CRAM_FLAG_PRESERVE_QUAL_SCORES); | ||
} | ||
CramRecord.prototype.isPreservingQualityScores = function () { | ||
return !!(this.cramFlags & constants_1.default.CRAM_FLAG_PRESERVE_QUAL_SCORES); | ||
}; | ||
/** @returns {boolean} true if the read has no sequence bases */ | ||
}, { | ||
key: "isUnknownBases", | ||
value: function isUnknownBases() { | ||
return !!(this.cramFlags & Constants.CRAM_FLAG_NO_SEQ); | ||
} | ||
CramRecord.prototype.isUnknownBases = function () { | ||
return !!(this.cramFlags & constants_1.default.CRAM_FLAG_NO_SEQ); | ||
}; | ||
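// The flag getters above each test one bit of the standard SAM/BAM FLAG field; the | ||
// package's Constants module is assumed to carry the spec-defined values listed in this | ||
// sketch (not part of the package). | ||
var BAM_FLAG_BITS = { | ||
  BAM_FPAIRED: 0x1, BAM_FPROPER_PAIR: 0x2, BAM_FUNMAP: 0x4, BAM_FMUNMAP: 0x8, | ||
  BAM_FREVERSE: 0x10, BAM_FMREVERSE: 0x20, BAM_FREAD1: 0x40, BAM_FREAD2: 0x80, | ||
  BAM_FSECONDARY: 0x100, BAM_FQCFAIL: 0x200, BAM_FDUP: 0x400, BAM_FSUPPLEMENTARY: 0x800, | ||
} | ||
function hasFlag(flags, bit) { | ||
  return !!(flags & bit) // e.g. hasFlag(record.flags, BAM_FLAG_BITS.BAM_FREVERSE) | ||
} | ||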
/** | ||
@@ -253,12 +210,8 @@ * Get the original sequence of this read. | ||
*/ | ||
}, { | ||
key: "getReadBases", | ||
value: function getReadBases() { | ||
if (!this.readBases && this._refRegion) { | ||
this.readBases = decodeReadSequence(this, this._refRegion); | ||
} | ||
return this.readBases; | ||
} | ||
CramRecord.prototype.getReadBases = function () { | ||
if (!this.readBases && this._refRegion) { | ||
this.readBases = decodeReadSequence(this, this._refRegion); | ||
} | ||
return this.readBases; | ||
}; | ||
/** | ||
@@ -268,44 +221,41 @@ * Get the pair orientation of a paired read. Adapted from igv.js | ||
*/ | ||
}, { | ||
key: "getPairOrientation", | ||
value: function getPairOrientation() { | ||
if (!this.isSegmentUnmapped() && this.isPaired() && !this.isMateUnmapped() && this.mate && this.sequenceId === this.mate.sequenceId) { | ||
var s1 = this.isReverseComplemented() ? 'R' : 'F'; | ||
var s2 = this.isMateReverseComplemented() ? 'R' : 'F'; | ||
var o1 = ' '; | ||
var o2 = ' '; | ||
if (this.isRead1()) { | ||
o1 = '1'; | ||
o2 = '2'; | ||
} else if (this.isRead2()) { | ||
o1 = '2'; | ||
o2 = '1'; | ||
CramRecord.prototype.getPairOrientation = function () { | ||
if (!this.isSegmentUnmapped() && | ||
this.isPaired() && | ||
!this.isMateUnmapped() && | ||
this.mate && | ||
this.sequenceId === this.mate.sequenceId) { | ||
var s1 = this.isReverseComplemented() ? 'R' : 'F'; | ||
var s2 = this.isMateReverseComplemented() ? 'R' : 'F'; | ||
var o1 = ' '; | ||
var o2 = ' '; | ||
if (this.isRead1()) { | ||
o1 = '1'; | ||
o2 = '2'; | ||
} | ||
else if (this.isRead2()) { | ||
o1 = '2'; | ||
o2 = '1'; | ||
} | ||
var tmp = []; | ||
var isize = this.templateLength || this.templateSize; | ||
if (this.alignmentStart > this.mate.alignmentStart && isize > 0) { | ||
isize = -isize; | ||
} | ||
if (isize > 0) { | ||
tmp[0] = s1; | ||
tmp[1] = o1; | ||
tmp[2] = s2; | ||
tmp[3] = o2; | ||
} | ||
else { | ||
tmp[2] = s1; | ||
tmp[3] = o1; | ||
tmp[0] = s2; | ||
tmp[1] = o2; | ||
} | ||
return tmp.join(''); | ||
} | ||
var tmp = []; | ||
var isize = this.templateLength || this.templateSize; | ||
if (this.alignmentStart > this.mate.alignmentStart && isize > 0) { | ||
isize = -isize; | ||
} | ||
if (isize > 0) { | ||
tmp[0] = s1; | ||
tmp[1] = o1; | ||
tmp[2] = s2; | ||
tmp[3] = o2; | ||
} else { | ||
tmp[2] = s1; | ||
tmp[3] = o1; | ||
tmp[0] = s2; | ||
tmp[1] = o2; | ||
} | ||
return tmp.join(''); | ||
} | ||
return null; | ||
} | ||
return null; | ||
}; | ||
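// Worked example (hypothetical records) for getPairOrientation above: a forward-strand | ||
// read 1 whose mate maps downstream on the reverse strand has a positive template | ||
// length, so the string is assembled as s1 + o1 + s2 + o2 = 'F1R2'; viewed from the | ||
// mate, the template length is negated and the halves swap, which again yields 'F1R2'. | ||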
/** | ||
@@ -324,42 +274,37 @@ * Annotates this feature with the given reference sequence basepair | ||
*/ | ||
}, { | ||
key: "addReferenceSequence", | ||
value: function addReferenceSequence(refRegion, compressionScheme) { | ||
var _this = this; | ||
if (this.readFeatures) { | ||
var _context; | ||
// use the reference bases to decode the bases | ||
// substituted in each base substitution | ||
(0, _forEach.default)(_context = this.readFeatures).call(_context, function (readFeature) { | ||
if (readFeature.code === 'X') decodeBaseSubstitution(_this, refRegion, compressionScheme, readFeature); | ||
CramRecord.prototype.addReferenceSequence = function (refRegion, compressionScheme) { | ||
var _this = this; | ||
if (this.readFeatures) { | ||
// use the reference bases to decode the bases | ||
// substituted in each base substitution | ||
this.readFeatures.forEach(function (readFeature) { | ||
if (readFeature.code === 'X') { | ||
decodeBaseSubstitution(_this, refRegion, compressionScheme, readFeature); | ||
} | ||
}); | ||
} | ||
// if this region completely covers this read, | ||
// keep a reference to it | ||
if (!this.readBases && | ||
refRegion.start <= this.alignmentStart && | ||
refRegion.end >= | ||
this.alignmentStart + (this.lengthOnRef || this.readLength) - 1) { | ||
this._refRegion = refRegion; | ||
} | ||
}; | ||
CramRecord.prototype.toJSON = function () { | ||
var _this = this; | ||
var data = {}; | ||
Object.keys(this).forEach(function (k) { | ||
if (k.charAt(0) === '_') { | ||
return; | ||
} | ||
data[k] = _this[k]; | ||
}); | ||
} // if this region completely covers this read, | ||
// keep a reference to it | ||
if (!this.readBases && refRegion.start <= this.alignmentStart && refRegion.end >= this.alignmentStart + (this.lengthOnRef || this.readLength) - 1) { | ||
this._refRegion = refRegion; | ||
} | ||
} | ||
}, { | ||
key: "toJSON", | ||
value: function toJSON() { | ||
var _context2, | ||
_this2 = this; | ||
var data = {}; | ||
(0, _forEach.default)(_context2 = (0, _keys.default)(this)).call(_context2, function (k) { | ||
if (k.charAt(0) === '_') return; | ||
data[k] = _this2[k]; | ||
}); | ||
data.readBases = this.getReadBases(); | ||
return data; | ||
} | ||
}]); | ||
return CramRecord; | ||
}(); | ||
module.exports = CramRecord; | ||
data.readBases = this.getReadBases(); | ||
return data; | ||
}; | ||
return CramRecord; | ||
}()); | ||
exports.default = CramRecord; | ||
//# sourceMappingURL=record.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _keys = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/keys")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _assign = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/assign")); | ||
var _require = require('@gmod/binary-parser'), | ||
Parser = _require.Parser; | ||
var singleItf8 = new Parser().itf8(); | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.getSectionParsers = exports.cramFileDefinition = void 0; | ||
var binary_parser_1 = require("@gmod/binary-parser"); | ||
var singleItf8 = new binary_parser_1.Parser().itf8(); | ||
var cramFileDefinition = { | ||
parser: new Parser().string('magic', { | ||
length: 4 | ||
}).uint8('majorVersion').uint8('minorVersion').string('fileId', { | ||
length: 20, | ||
stripNull: true | ||
}), | ||
maxLength: 26 | ||
parser: new binary_parser_1.Parser() | ||
.string('magic', { length: 4 }) | ||
.uint8('majorVersion') | ||
.uint8('minorVersion') | ||
.string('fileId', { length: 20, stripNull: true }), | ||
maxLength: 26, | ||
}; | ||
exports.cramFileDefinition = cramFileDefinition; | ||
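// A minimal sketch (not part of the package) of the 26-byte cramFileDefinition section | ||
// above parsed with plain Buffer reads: 4 magic bytes (the CRAM spec uses 'CRAM'), one | ||
// byte each for major and minor version, then a 20-byte NUL-padded file id. | ||
function parseCramFileDefinition(buf) { | ||
  return { | ||
    magic: buf.toString('ascii', 0, 4), | ||
    majorVersion: buf.readUInt8(4), | ||
    minorVersion: buf.readUInt8(5), | ||
    fileId: buf.toString('ascii', 6, 26).replace(/\0+$/, ''), | ||
  } | ||
} | ||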
var cramBlockHeader = { | ||
parser: new Parser().uint8('compressionMethod', { | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(b) { | ||
var method = ['raw', 'gzip', 'bzip2', 'lzma', 'rans'][b]; | ||
if (!method) throw new Error("compression method number ".concat(b, " not implemented")); | ||
return method; | ||
} | ||
}).uint8('contentType', { | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(b) { | ||
var type = ['FILE_HEADER', 'COMPRESSION_HEADER', 'MAPPED_SLICE_HEADER', 'UNMAPPED_SLICE_HEADER', // < only used in cram v1 | ||
'EXTERNAL_DATA', 'CORE_DATA'][b]; | ||
if (!type) throw new Error("invalid block content type id ".concat(b)); | ||
return type; | ||
} | ||
}).itf8('contentId').itf8('compressedSize').itf8('uncompressedSize'), | ||
maxLength: 17 | ||
parser: new binary_parser_1.Parser() | ||
.uint8('compressionMethod', { | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ b) { | ||
var method = [ | ||
'raw', | ||
'gzip', | ||
'bzip2', | ||
'lzma', | ||
'rans', | ||
'rans4x16', | ||
'arith', | ||
'fqzcomp', | ||
'tok3', | ||
][b]; | ||
if (!method) { | ||
throw new Error("compression method number ".concat(b, " not implemented")); | ||
} | ||
return method; | ||
}, | ||
}) | ||
.uint8('contentType', { | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ b) { | ||
var type = [ | ||
'FILE_HEADER', | ||
'COMPRESSION_HEADER', | ||
'MAPPED_SLICE_HEADER', | ||
'UNMAPPED_SLICE_HEADER', | ||
'EXTERNAL_DATA', | ||
'CORE_DATA', | ||
][b]; | ||
if (!type) { | ||
throw new Error("invalid block content type id ".concat(b)); | ||
} | ||
return type; | ||
}, | ||
}) | ||
.itf8('contentId') | ||
.itf8('compressedSize') | ||
.itf8('uncompressedSize'), | ||
maxLength: 17, | ||
}; | ||
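// Size note for cramBlockHeader above: maxLength 17 is the worst case of 1 byte for | ||
// compressionMethod + 1 byte for contentType + three ITF8 integers at up to 5 bytes | ||
// each (1 + 1 + 3 * 5 = 17). | ||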
var cramBlockCrc32 = { | ||
parser: new Parser().uint32('crc32'), | ||
maxLength: 4 | ||
}; // const ENCODING_NAMES = [ | ||
parser: new binary_parser_1.Parser().uint32('crc32'), | ||
maxLength: 4, | ||
}; | ||
// const ENCODING_NAMES = [ | ||
// 'NULL', // 0 | ||
@@ -60,332 +73,281 @@ // 'EXTERNAL', // 1 | ||
// ] | ||
var cramTagDictionary = new Parser().itf8('size').buffer('ents', { | ||
length: 'size', | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(buffer) { | ||
function makeTagSet(stringStart, stringEnd) { | ||
var str = buffer.toString('utf8', stringStart, stringEnd); | ||
var tags = []; | ||
for (var _i = 0; _i < str.length; _i += 3) { | ||
tags.push(str.substr(_i, 3)); | ||
} | ||
return tags; | ||
} | ||
/* eslint-disable */ | ||
var tagSets = []; | ||
var stringStart = 0; | ||
var i; | ||
/* eslint-enable */ | ||
for (i = 0; i < buffer.length; i += 1) { | ||
if (!buffer[i]) { | ||
tagSets.push(makeTagSet(stringStart, i)); | ||
stringStart = i + 1; | ||
} | ||
} | ||
if (i > stringStart) tagSets.push(makeTagSet(stringStart, i)); | ||
return tagSets; | ||
} | ||
}); // const cramPreservationMapKeys = 'XX RN AP RR SM TD'.split(' ') | ||
var parseByteAsBool = new Parser().uint8(null, { | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(val) { | ||
return !!val; | ||
} | ||
}); | ||
var cramPreservationMap = new Parser().itf8('mapSize').itf8('mapCount').array('ents', { | ||
length: 'mapCount', | ||
type: new Parser().string('key', { | ||
length: 2, | ||
stripNull: false // formatter: val => cramPreservationMapKeys[val] || 0, | ||
}).choice('value', { | ||
tag: 'key', | ||
choices: { | ||
MI: parseByteAsBool, | ||
UI: parseByteAsBool, | ||
PI: parseByteAsBool, | ||
RN: parseByteAsBool, | ||
AP: parseByteAsBool, | ||
RR: parseByteAsBool, | ||
SM: new Parser().array(null, { | ||
type: 'uint8', | ||
length: 5 | ||
}), | ||
TD: new Parser().nest(null, { | ||
type: cramTagDictionary, | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(data) { | ||
return data.ents; | ||
var cramTagDictionary = new binary_parser_1.Parser().itf8('size').buffer('ents', { | ||
length: 'size', | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ buffer) { | ||
function makeTagSet(stringStart, stringEnd) { | ||
var str = buffer.toString('utf8', stringStart, stringEnd); | ||
var tags = []; | ||
for (var i_1 = 0; i_1 < str.length; i_1 += 3) { | ||
tags.push(str.substr(i_1, 3)); | ||
} | ||
return tags; | ||
} | ||
}) | ||
} | ||
}) | ||
/* eslint-disable */ | ||
var tagSets = []; | ||
var stringStart = 0; | ||
var i; | ||
/* eslint-enable */ | ||
for (i = 0; i < buffer.length; i += 1) { | ||
if (!buffer[i]) { | ||
tagSets.push(makeTagSet(stringStart, i)); | ||
stringStart = i + 1; | ||
} | ||
} | ||
if (i > stringStart) { | ||
tagSets.push(makeTagSet(stringStart, i)); | ||
} | ||
return tagSets; | ||
}, | ||
}); | ||
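// A minimal sketch (not part of the package) of the tag-dictionary formatter above: the | ||
// buffer holds NUL-separated strings, each a concatenation of 3-character entries | ||
// (two-letter tag name plus one-letter type); empty sets are dropped here for brevity. | ||
function decodeTagDictionary(buffer) { | ||
  return buffer | ||
    .toString('utf8') | ||
    .split('\0') | ||
    .filter(function (s) { return s.length > 0 }) | ||
    .map(function (s) { | ||
      var tags = [] | ||
      for (var i = 0; i < s.length; i += 3) { | ||
        tags.push(s.substr(i, 3)) | ||
      } | ||
      return tags | ||
    }) | ||
} | ||
// e.g. Buffer.from('MDZNMC\0OQZ\0') decodes to [['MDZ', 'NMC'], ['OQZ']] (hypothetical tags) | ||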
// const cramPreservationMapKeys = 'XX RN AP RR SM TD'.split(' ') | ||
var parseByteAsBool = new binary_parser_1.Parser().uint8(null, { | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ val) { return !!val; }, | ||
}); | ||
var cramPreservationMap = new binary_parser_1.Parser() | ||
.itf8('mapSize') | ||
.itf8('mapCount') | ||
.array('ents', { | ||
length: 'mapCount', | ||
type: new binary_parser_1.Parser() | ||
.string('key', { | ||
length: 2, | ||
stripNull: false, | ||
// formatter: val => cramPreservationMapKeys[val] || 0, | ||
}) | ||
.choice('value', { | ||
tag: 'key', | ||
choices: { | ||
MI: parseByteAsBool, | ||
UI: parseByteAsBool, | ||
PI: parseByteAsBool, | ||
RN: parseByteAsBool, | ||
AP: parseByteAsBool, | ||
RR: parseByteAsBool, | ||
SM: new binary_parser_1.Parser().array(null, { type: 'uint8', length: 5 }), | ||
TD: new binary_parser_1.Parser().nest(null, { | ||
type: cramTagDictionary, | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ data) { return data.ents; }, | ||
}), | ||
}, | ||
}), | ||
}); | ||
/* istanbul ignore next */ | ||
function formatMap(data) { | ||
var map = {}; | ||
for (var i = 0; i < data.ents.length; i += 1) { | ||
var _data$ents$i = data.ents[i], | ||
key = _data$ents$i.key, | ||
value = _data$ents$i.value; | ||
if (map[key]) console.warn("duplicate key ".concat(key, " in map")); | ||
map[key] = value; | ||
} | ||
return map; | ||
var map = {}; | ||
for (var i = 0; i < data.ents.length; i += 1) { | ||
var _a = data.ents[i], key = _a.key, value = _a.value; | ||
if (map[key]) { | ||
console.warn("duplicate key ".concat(key, " in map")); | ||
} | ||
map[key] = value; | ||
} | ||
return map; | ||
} | ||
var unversionedParsers = { | ||
cramFileDefinition: cramFileDefinition, | ||
cramBlockHeader: cramBlockHeader, | ||
cramBlockCrc32: cramBlockCrc32 | ||
}; // each of these is a function of the major and minor version | ||
cramFileDefinition: cramFileDefinition, | ||
cramBlockHeader: cramBlockHeader, | ||
cramBlockCrc32: cramBlockCrc32, | ||
}; | ||
// each of these is a function of the major and minor version | ||
var versionedParsers = { | ||
// assemble a section parser for the unmapped slice header, with slight | ||
// variations depending on the major version of the cram file | ||
cramUnmappedSliceHeader: function cramUnmappedSliceHeader(majorVersion) { | ||
var maxLength = 0; | ||
var parser = new Parser().itf8('numRecords'); | ||
maxLength += 5; // recordCounter is itf8 in a CRAM v2 file, absent in CRAM v1 | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); | ||
maxLength += 9; | ||
} else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
parser = parser.itf8('numBlocks').itf8('numContentIds').array('contentIds', { | ||
type: singleItf8, | ||
length: 'numContentIds' | ||
}); | ||
maxLength += 5 * 2; // + numContentIds*5 | ||
// the md5 sum is missing in cram v1 | ||
if (majorVersion >= 2) { | ||
parser = parser.array('md5', { | ||
type: 'uint8', | ||
length: 16 | ||
}); | ||
maxLength += 16; | ||
} | ||
var maxLengthFunc = function maxLengthFunc(numContentIds) { | ||
return maxLength + numContentIds * 5; | ||
}; | ||
return { | ||
parser: parser, | ||
maxLength: maxLengthFunc | ||
}; // : p, maxLength: numContentIds => 5 + 9 + 5 * 2 + 5 * numContentIds + 16 } | ||
}, | ||
// assembles a section parser for the unmapped slice header, with slight | ||
// variations depending on the major version of the cram file | ||
cramMappedSliceHeader: function cramMappedSliceHeader(majorVersion) { | ||
var parser = new Parser().itf8('refSeqId').itf8('refSeqStart').itf8('refSeqSpan').itf8('numRecords'); | ||
var maxLength = 5 * 4; | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); | ||
maxLength += 9; | ||
} else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
parser = parser.itf8('numBlocks').itf8('numContentIds').array('contentIds', { | ||
type: singleItf8, | ||
length: 'numContentIds' | ||
}).itf8('refBaseBlockId'); | ||
maxLength += 5 * 3; // the md5 sum is missing in cram v1 | ||
if (majorVersion >= 2) { | ||
parser = parser.array('md5', { | ||
type: 'uint8', | ||
length: 16 | ||
}); | ||
maxLength += 16; | ||
} | ||
var maxLengthFunc = function maxLengthFunc(numContentIds) { | ||
return maxLength + numContentIds * 5; | ||
}; | ||
return { | ||
parser: parser, | ||
maxLength: maxLengthFunc | ||
}; | ||
}, | ||
cramEncoding: function cramEncoding(majorVersion) { | ||
var parser = new Parser().namely('cramEncoding').itf8('codecId').itf8('parametersBytes').choice('parameters', { | ||
tag: 'codecId', | ||
choices: { | ||
0: new Parser(), | ||
// NULL | ||
1: new Parser().itf8('blockContentId'), | ||
// EXTERNAL | ||
2: new Parser().itf8('offset').itf8('M'), | ||
// GOLOMB, | ||
// HUFFMAN_INT | ||
3: Parser.start().itf8('numCodes').array('symbols', { | ||
length: 'numCodes', | ||
type: singleItf8 | ||
}).itf8('numLengths').array('bitLengths', { | ||
length: 'numLengths', | ||
type: singleItf8 | ||
}), | ||
4: Parser.start() // BYTE_ARRAY_LEN | ||
.nest('lengthsEncoding', { | ||
type: 'cramEncoding' | ||
}).nest('valuesEncoding', { | ||
type: 'cramEncoding' | ||
}), | ||
// BYTE_ARRAY_STOP is a little different for CRAM v1 | ||
5: new Parser().uint8('stopByte')[majorVersion > 1 ? 'itf8' : 'int']('blockContentId'), | ||
6: new Parser().itf8('offset').itf8('length'), | ||
// BETA | ||
7: new Parser().itf8('offset').itf8('K'), | ||
// SUBEXP | ||
8: new Parser().itf8('offset').itf8('log2m'), | ||
// GOLOMB_RICE | ||
9: new Parser().itf8('offset') // GAMMA | ||
} | ||
}); | ||
return { | ||
parser: parser | ||
}; | ||
}, | ||
cramDataSeriesEncodingMap: function cramDataSeriesEncodingMap(majorVersion) { | ||
return new Parser().itf8('mapSize').itf8('mapCount').array('ents', { | ||
length: 'mapCount', | ||
type: new Parser().string('key', { | ||
length: 2, | ||
stripNull: false | ||
}).nest('value', { | ||
type: this.cramEncoding(majorVersion).parser | ||
}) | ||
}); | ||
}, | ||
cramTagEncodingMap: function cramTagEncodingMap(majorVersion) { | ||
return new Parser().itf8('mapSize').itf8('mapCount').array('ents', { | ||
length: 'mapCount', | ||
type: new Parser().itf8('key', { | ||
formatter: | ||
/* istanbul ignore next */ | ||
function formatter(integerRepresentation) { | ||
return ( | ||
/* istanbul ignore next */ | ||
String.fromCharCode(integerRepresentation >> 16 & 0xff) + String.fromCharCode(integerRepresentation >> 8 & 0xff) + String.fromCharCode(integerRepresentation & 0xff) | ||
); | ||
// assemble a section parser for the unmapped slice header, with slight | ||
// variations depending on the major version of the cram file | ||
cramUnmappedSliceHeader: function (majorVersion) { | ||
var maxLength = 0; | ||
var parser = new binary_parser_1.Parser().itf8('numRecords'); | ||
maxLength += 5; | ||
// recordCounter is itf8 in a CRAM v2 file, absent in CRAM v1 | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); | ||
maxLength += 9; | ||
} | ||
}).nest('value', { | ||
type: this.cramEncoding(majorVersion).parser | ||
}) | ||
}); | ||
}, | ||
cramCompressionHeader: function cramCompressionHeader(majorVersion) { | ||
var parser = new Parser(); // TODO: if we want to support CRAM v1, we will need to refactor | ||
// compression header into 2 parts to parse the landmarks, | ||
// like the container header | ||
parser = parser.nest('preservation', { | ||
type: cramPreservationMap, | ||
formatter: formatMap | ||
}).nest('dataSeriesEncoding', { | ||
type: this.cramDataSeriesEncodingMap(majorVersion), | ||
formatter: formatMap | ||
}).nest('tagEncoding', { | ||
type: this.cramTagEncodingMap(majorVersion), | ||
formatter: formatMap | ||
}); | ||
return { | ||
parser: parser | ||
}; | ||
}, | ||
cramContainerHeader1: function cramContainerHeader1(majorVersion) { | ||
var parser = new Parser().int32('length') // byte size of the container data (blocks) | ||
.itf8('refSeqId') // reference sequence identifier, -1 for unmapped reads, -2 for multiple reference sequences | ||
.itf8('refSeqStart') // the alignment start position or 0 for unmapped reads | ||
.itf8('alignmentSpan') // the length of the alignment or 0 for unmapped reads | ||
.itf8('numRecords'); // number of records in the container | ||
var maxLength = 4 + 5 * 4; | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); // 1-based sequential index of records in the file/stream. | ||
maxLength += 9; | ||
} else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
if (majorVersion > 1) { | ||
parser = parser.ltf8('numBases'); // number of read bases | ||
maxLength += 9; | ||
} | ||
parser = parser.itf8('numBlocks') // the number of blocks | ||
.itf8('numLandmarks'); // the number of landmarks | ||
maxLength += 5 + 5; | ||
return { | ||
parser: parser, | ||
maxLength: maxLength | ||
}; | ||
}, | ||
cramContainerHeader2: function cramContainerHeader2(majorVersion) { | ||
var parser = new Parser().itf8('numLandmarks') // the number of landmarks | ||
// Each integer value of this array is a byte offset | ||
// into the blocks byte array. Landmarks are used for | ||
// random access indexing. | ||
.array('landmarks', { | ||
type: new Parser().itf8(), | ||
length: 'numLandmarks' | ||
}); | ||
var crcLength = 0; | ||
if (majorVersion >= 3) { | ||
parser = parser.uint32('crc32'); | ||
crcLength = 4; | ||
} | ||
return { | ||
parser: parser, | ||
maxLength: function maxLength(numLandmarks) { | ||
return 5 + numLandmarks * 5 + crcLength; | ||
} | ||
}; | ||
} | ||
else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
parser = parser | ||
.itf8('numBlocks') | ||
.itf8('numContentIds') | ||
.array('contentIds', { | ||
type: singleItf8, | ||
length: 'numContentIds', | ||
}); | ||
maxLength += 5 * 2; // + numContentIds*5 | ||
// the md5 sum is missing in cram v1 | ||
if (majorVersion >= 2) { | ||
parser = parser.array('md5', { type: 'uint8', length: 16 }); | ||
maxLength += 16; | ||
} | ||
var maxLengthFunc = function (numContentIds) { return maxLength + numContentIds * 5; }; | ||
return { parser: parser, maxLength: maxLengthFunc }; // : p, maxLength: numContentIds => 5 + 9 + 5 * 2 + 5 * numContentIds + 16 } | ||
}, | ||
// assembles a section parser for the mapped slice header, with slight | ||
// variations depending on the major version of the cram file | ||
cramMappedSliceHeader: function (majorVersion) { | ||
var parser = new binary_parser_1.Parser() | ||
.itf8('refSeqId') | ||
.itf8('refSeqStart') | ||
.itf8('refSeqSpan') | ||
.itf8('numRecords'); | ||
var maxLength = 5 * 4; | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); | ||
maxLength += 9; | ||
} | ||
else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
parser = parser | ||
.itf8('numBlocks') | ||
.itf8('numContentIds') | ||
.array('contentIds', { | ||
type: singleItf8, | ||
length: 'numContentIds', | ||
}) | ||
.itf8('refBaseBlockId'); | ||
maxLength += 5 * 3; | ||
// the md5 sum is missing in cram v1 | ||
if (majorVersion >= 2) { | ||
parser = parser.array('md5', { type: 'uint8', length: 16 }); | ||
maxLength += 16; | ||
} | ||
var maxLengthFunc = function (numContentIds) { return maxLength + numContentIds * 5; }; | ||
return { parser: parser, maxLength: maxLengthFunc }; | ||
}, | ||
cramEncoding: function (majorVersion) { | ||
var parser = new binary_parser_1.Parser() | ||
.namely('cramEncoding') | ||
.itf8('codecId') | ||
.itf8('parametersBytes') | ||
.choice('parameters', { | ||
tag: 'codecId', | ||
choices: { | ||
0: new binary_parser_1.Parser(), | ||
1: new binary_parser_1.Parser().itf8('blockContentId'), | ||
2: new binary_parser_1.Parser().itf8('offset').itf8('M'), | ||
// HUFFMAN_INT | ||
3: binary_parser_1.Parser.start() | ||
.itf8('numCodes') | ||
.array('symbols', { length: 'numCodes', type: singleItf8 }) | ||
.itf8('numLengths') | ||
.array('bitLengths', { length: 'numLengths', type: singleItf8 }), | ||
4: binary_parser_1.Parser.start() // BYTE_ARRAY_LEN | ||
.nest('lengthsEncoding', { type: 'cramEncoding' }) | ||
.nest('valuesEncoding', { type: 'cramEncoding' }), | ||
// BYTE_ARRAY_STOP is a little different for CRAM v1 | ||
5: new binary_parser_1.Parser() | ||
.uint8('stopByte')[majorVersion > 1 ? 'itf8' : 'int']('blockContentId'), | ||
6: new binary_parser_1.Parser().itf8('offset').itf8('length'), | ||
7: new binary_parser_1.Parser().itf8('offset').itf8('K'), | ||
8: new binary_parser_1.Parser().itf8('offset').itf8('log2m'), | ||
9: new binary_parser_1.Parser().itf8('offset'), // GAMMA | ||
}, | ||
}); | ||
return { parser: parser }; | ||
}, | ||
cramDataSeriesEncodingMap: function (majorVersion) { | ||
return new binary_parser_1.Parser() | ||
.itf8('mapSize') | ||
.itf8('mapCount') | ||
.array('ents', { | ||
length: 'mapCount', | ||
type: new binary_parser_1.Parser() | ||
.string('key', { length: 2, stripNull: false }) | ||
.nest('value', { type: this.cramEncoding(majorVersion).parser }), | ||
}); | ||
}, | ||
cramTagEncodingMap: function (majorVersion) { | ||
return new binary_parser_1.Parser() | ||
.itf8('mapSize') | ||
.itf8('mapCount') | ||
.array('ents', { | ||
length: 'mapCount', | ||
type: new binary_parser_1.Parser() | ||
.itf8('key', { | ||
formatter: /* istanbul ignore next */ function (/* istanbul ignore next */ integerRepresentation) { | ||
/* istanbul ignore next */ | ||
return String.fromCharCode((integerRepresentation >> 16) & 0xff) + | ||
String.fromCharCode((integerRepresentation >> 8) & 0xff) + | ||
String.fromCharCode(integerRepresentation & 0xff); | ||
}, | ||
}) | ||
.nest('value', { type: this.cramEncoding(majorVersion).parser }), | ||
}); | ||
}, | ||
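// Sketch (comment only, not part of the module): the map key packs a
// two-letter tag name plus its one-letter type into a single integer,
// which the formatter above unpacks again. For example 'MDZ' is stored as
// ('M'.charCodeAt(0) << 16) | ('D'.charCodeAt(0) << 8) | 'Z'.charCodeAt(0)
// === 0x4d445a, and String.fromCharCode on the three bytes gives back 'MDZ'.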
cramCompressionHeader: function (majorVersion) { | ||
var parser = new binary_parser_1.Parser(); | ||
// TODO: if we want to support CRAM v1, we will need to refactor | ||
// compression header into 2 parts to parse the landmarks, | ||
// like the container header | ||
parser = parser | ||
.nest('preservation', { | ||
type: cramPreservationMap, | ||
formatter: formatMap, | ||
}) | ||
.nest('dataSeriesEncoding', { | ||
type: this.cramDataSeriesEncodingMap(majorVersion), | ||
formatter: formatMap, | ||
}) | ||
.nest('tagEncoding', { | ||
type: this.cramTagEncodingMap(majorVersion), | ||
formatter: formatMap, | ||
}); | ||
return { parser: parser }; | ||
}, | ||
cramContainerHeader1: function (majorVersion) { | ||
var parser = new binary_parser_1.Parser() | ||
.int32('length') // byte size of the container data (blocks) | ||
.itf8('refSeqId') // reference sequence identifier, -1 for unmapped reads, -2 for multiple reference sequences | ||
.itf8('refSeqStart') // the alignment start position or 0 for unmapped reads | ||
.itf8('alignmentSpan') // the length of the alignment or 0 for unmapped reads | ||
.itf8('numRecords'); // number of records in the container | ||
var maxLength = 4 + 5 * 4; | ||
if (majorVersion >= 3) { | ||
parser = parser.ltf8('recordCounter'); // 1-based sequential index of records in the file/stream. | ||
maxLength += 9; | ||
} | ||
else if (majorVersion === 2) { | ||
parser = parser.itf8('recordCounter'); | ||
maxLength += 5; | ||
} | ||
if (majorVersion > 1) { | ||
parser = parser.ltf8('numBases'); // number of read bases | ||
maxLength += 9; | ||
} | ||
parser = parser | ||
.itf8('numBlocks') // the number of blocks | ||
.itf8('numLandmarks'); // the number of landmarks | ||
maxLength += 5 + 5; | ||
return { parser: parser, maxLength: maxLength }; | ||
}, | ||
cramContainerHeader2: function (majorVersion) { | ||
var parser = new binary_parser_1.Parser() | ||
.itf8('numLandmarks') // the number of landmarks | ||
// Each integer value of this array is a byte offset | ||
// into the blocks byte array. Landmarks are used for | ||
// random access indexing. | ||
.array('landmarks', { | ||
type: new binary_parser_1.Parser().itf8(), | ||
length: 'numLandmarks', | ||
}); | ||
var crcLength = 0; | ||
if (majorVersion >= 3) { | ||
parser = parser.uint32('crc32'); | ||
crcLength = 4; | ||
} | ||
return { | ||
parser: parser, | ||
maxLength: function (numLandmarks) { return 5 + numLandmarks * 5 + crcLength; }, | ||
}; | ||
}, | ||
}; | ||
function getSectionParsers(majorVersion) { | ||
var _context; | ||
var parsers = (0, _assign.default)({}, unversionedParsers); | ||
(0, _forEach.default)(_context = (0, _keys.default)(versionedParsers)).call(_context, function (parserName) { | ||
parsers[parserName] = versionedParsers[parserName](majorVersion); | ||
}); | ||
return parsers; | ||
var parsers = Object.assign({}, unversionedParsers); | ||
Object.keys(versionedParsers).forEach(function (parserName) { | ||
parsers[parserName] = versionedParsers[parserName](majorVersion); | ||
}); | ||
return parsers; | ||
} | ||
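// Usage sketch (hedged; field names come from the code above, and `buf` is a
// hypothetical Buffer holding a container header):
//   const { getSectionParsers } = require('./sectionParsers')
//   const { cramContainerHeader1 } = getSectionParsers(3)
//   cramContainerHeader1.maxLength          // 4 + 5*4 + 9 + 9 + 5 + 5 = 52 bytes for CRAM v3
//   cramContainerHeader1.parser.parse(buf)  // -> { length, refSeqId, refSeqStart, ... }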
module.exports = { | ||
cramFileDefinition: cramFileDefinition, | ||
getSectionParsers: getSectionParsers | ||
}; | ||
exports.getSectionParsers = getSectionParsers; | ||
//# sourceMappingURL=sectionParsers.js.map |
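// Hedged aside on the itf8/ltf8 fields used throughout sectionParsers.js above.
// This is a rough sketch of the CRAM spec's variable-length integer scheme, not
// code from this package: the number of leading 1-bits in the first byte says
// how many extra bytes follow, so a 32-bit ITF8 value needs at most 5 bytes and
// a 64-bit LTF8 value at most 9 -- which is why the maxLength bookkeeping adds
// 5 per itf8 field and 9 per ltf8 field.
function readItf8Sketch(buf, offset) {
  const b0 = buf[offset]
  if (b0 < 0x80) {
    // 0xxxxxxx: the value fits in the first byte
    return { value: b0, byteLength: 1 }
  }
  if (b0 < 0xc0) {
    // 10xxxxxx: one extra byte
    return { value: ((b0 & 0x3f) << 8) | buf[offset + 1], byteLength: 2 }
  }
  if (b0 < 0xe0) {
    // 110xxxxx: two extra bytes
    return {
      value: ((b0 & 0x1f) << 16) | (buf[offset + 1] << 8) | buf[offset + 2],
      byteLength: 3,
    }
  }
  if (b0 < 0xf0) {
    // 1110xxxx: three extra bytes
    return {
      value:
        ((b0 & 0x0f) << 24) |
        (buf[offset + 1] << 16) |
        (buf[offset + 2] << 8) |
        buf[offset + 3],
      byteLength: 4,
    }
  }
  // 1111xxxx: four extra bytes, only the low 4 bits of the last one are used
  return {
    value:
      (b0 & 0x0f) * 0x10000000 +
      (buf[offset + 1] << 20) +
      (buf[offset + 2] << 12) +
      (buf[offset + 3] << 4) +
      (buf[offset + 4] & 0x0f),
    byteLength: 5,
  }
}
// readItf8Sketch(Buffer.from([0x85, 0x39]), 0) -> { value: 0x0539, byteLength: 2 }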
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _isNan = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/number/is-nan")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _flags = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/flags")); | ||
var _slice = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/slice")); | ||
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toConsumableArray")); | ||
var _parseInt = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/number/parse-int")); | ||
var _minSafeInteger = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/number/min-safe-integer")); | ||
var _maxSafeInteger = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/number/max-safe-integer")); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _indexOf = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/index-of")); | ||
var Long = require('long'); | ||
var _require = require('../../errors'), | ||
CramMalformedError = _require.CramMalformedError, | ||
CramUnimplementedError = _require.CramUnimplementedError; | ||
var CramRecord = require('../record'); | ||
var Constants = require('../constants'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var long_1 = __importDefault(require("long")); | ||
var errors_1 = require("../../errors"); | ||
var record_1 = __importDefault(require("../record")); | ||
var constants_1 = __importDefault(require("../constants")); | ||
/** | ||
@@ -40,8 +14,8 @@ * given a Buffer, read a string up to the first null character | ||
*/ | ||
function readNullTerminatedStringFromBuffer(buffer) { | ||
var zeroOffset = (0, _indexOf.default)(buffer).call(buffer, 0); | ||
if (zeroOffset === -1) return buffer.toString('utf8'); | ||
return buffer.toString('utf8', 0, zeroOffset); | ||
var zeroOffset = buffer.indexOf(0); | ||
if (zeroOffset === -1) { | ||
return buffer.toString('utf8'); | ||
} | ||
return buffer.toString('utf8', 0, zeroOffset); | ||
} | ||
@@ -52,278 +26,284 @@ /** | ||
*/ | ||
function parseTagValueArray(buffer) { | ||
var arrayType = String.fromCharCode(buffer[0]); | ||
var length = buffer.readInt32LE(1); | ||
var schema = { | ||
c: ['readInt8', 1], | ||
C: ['readUInt8', 1], | ||
s: ['readInt16LE', 2], | ||
S: ['readUInt16LE', 2], | ||
i: ['readInt32LE', 4], | ||
I: ['readUInt32LE', 4], | ||
f: ['readFloatLE', 4] | ||
}[arrayType]; | ||
if (!schema) throw new CramMalformedError("invalid tag value array type '".concat(arrayType, "'")); | ||
var _schema = (0, _slicedToArray2.default)(schema, 2), | ||
getMethod = _schema[0], | ||
itemSize = _schema[1]; | ||
var array = new Array(length); | ||
var offset = 5; | ||
for (var i = 0; i < length; i += 1) { | ||
array[i] = buffer[getMethod](offset); | ||
offset += itemSize; | ||
} | ||
return array; | ||
var arrayType = String.fromCharCode(buffer[0]); | ||
var length = buffer.readInt32LE(1); | ||
var schema = { | ||
c: ['readInt8', 1], | ||
C: ['readUInt8', 1], | ||
s: ['readInt16LE', 2], | ||
S: ['readUInt16LE', 2], | ||
i: ['readInt32LE', 4], | ||
I: ['readUInt32LE', 4], | ||
f: ['readFloatLE', 4], | ||
}[arrayType]; | ||
if (!schema) { | ||
throw new errors_1.CramMalformedError("invalid tag value array type '".concat(arrayType, "'")); | ||
} | ||
var getMethod = schema[0], itemSize = schema[1]; | ||
var array = new Array(length); | ||
var offset = 5; | ||
for (var i = 0; i < length; i += 1) { | ||
array[i] = buffer[getMethod](offset); | ||
offset += itemSize; | ||
} | ||
return array; | ||
} | ||
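// Illustrative sketch (not part of the module): the byte layout that
// parseTagValueArray() above expects for a BAM 'B'-type tag value -- one
// element-type character, a little-endian int32 element count, then the
// packed elements. buildExampleArrayTag is a hypothetical helper used only
// for this sketch; here it hand-builds three uint16 ('S') values.
function buildExampleArrayTag() {
  const values = [1, 10, 65535]
  const buf = Buffer.alloc(1 + 4 + values.length * 2)
  buf.write('S', 0, 1, 'ascii') // element type
  buf.writeInt32LE(values.length, 1) // element count
  values.forEach(function (v, i) {
    buf.writeUInt16LE(v, 5 + i * 2) // packed elements start at offset 5
  })
  return buf
}
// parseTagValueArray(buildExampleArrayTag()) -> [1, 10, 65535]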
function parseTagData(tagType, buffer) { | ||
if (!buffer.readInt32LE) buffer = Buffer.from(buffer); | ||
if (tagType === 'Z') return readNullTerminatedStringFromBuffer(buffer); | ||
if (tagType === 'A') return String.fromCharCode(buffer[0]); | ||
if (tagType === 'I') { | ||
var val = Long.fromBytesLE(buffer); | ||
if (val.greaterThan(_maxSafeInteger.default) || val.lessThan(_minSafeInteger.default)) throw new CramUnimplementedError('integer overflow'); | ||
return val.toNumber(); | ||
} | ||
if (tagType === 'i') return buffer.readInt32LE(0); | ||
if (tagType === 's') return buffer.readInt16LE(0); | ||
if (tagType === 'S') return buffer.readUInt16LE(0); | ||
if (tagType === 'c') return buffer.readInt8(0); | ||
if (tagType === 'C') return buffer.readUInt8(0); | ||
if (tagType === 'f') return buffer.readFloatLE(0); | ||
if (tagType === 'H') { | ||
var hex = readNullTerminatedStringFromBuffer(buffer); | ||
return (0, _parseInt.default)(hex.replace(/^0x/, ''), 16); | ||
} | ||
if (tagType === 'B') return parseTagValueArray(buffer); | ||
throw new CramMalformedError("Unrecognized tag type ".concat(tagType)); | ||
if (!buffer.readInt32LE) { | ||
buffer = Buffer.from(buffer); | ||
} | ||
if (tagType === 'Z') { | ||
return readNullTerminatedStringFromBuffer(buffer); | ||
} | ||
if (tagType === 'A') { | ||
return String.fromCharCode(buffer[0]); | ||
} | ||
if (tagType === 'I') { | ||
var val = long_1.default.fromBytesLE(buffer); | ||
if (val.greaterThan(Number.MAX_SAFE_INTEGER) || | ||
val.lessThan(Number.MIN_SAFE_INTEGER)) { | ||
throw new errors_1.CramUnimplementedError('integer overflow'); | ||
} | ||
return val.toNumber(); | ||
} | ||
if (tagType === 'i') { | ||
return buffer.readInt32LE(0); | ||
} | ||
if (tagType === 's') { | ||
return buffer.readInt16LE(0); | ||
} | ||
if (tagType === 'S') { | ||
return buffer.readUInt16LE(0); | ||
} | ||
if (tagType === 'c') { | ||
return buffer.readInt8(0); | ||
} | ||
if (tagType === 'C') { | ||
return buffer.readUInt8(0); | ||
} | ||
if (tagType === 'f') { | ||
return buffer.readFloatLE(0); | ||
} | ||
if (tagType === 'H') { | ||
var hex = readNullTerminatedStringFromBuffer(buffer); | ||
return Number.parseInt(hex.replace(/^0x/, ''), 16); | ||
} | ||
if (tagType === 'B') { | ||
return parseTagValueArray(buffer); | ||
} | ||
throw new errors_1.CramMalformedError("Unrecognized tag type ".concat(tagType)); | ||
} | ||
function decodeReadFeatures(cramRecord, readFeatureCount, decodeDataSeries, compressionScheme, majorVersion) { | ||
var currentReadPos = 0; | ||
var currentRefPos = cramRecord.alignmentStart - 1; | ||
var readFeatures = new Array(readFeatureCount); | ||
function decodeRFData(_ref) { | ||
var _ref2 = (0, _slicedToArray2.default)(_ref, 2), | ||
type = _ref2[0], | ||
dataSeriesName = _ref2[1]; | ||
var data = decodeDataSeries(dataSeriesName); | ||
if (type === 'character') { | ||
return String.fromCharCode(data); | ||
var currentReadPos = 0; | ||
var currentRefPos = cramRecord.alignmentStart - 1; | ||
var readFeatures = new Array(readFeatureCount); | ||
function decodeRFData(_a) { | ||
var type = _a[0], dataSeriesName = _a[1]; | ||
var data = decodeDataSeries(dataSeriesName); | ||
if (type === 'character') { | ||
return String.fromCharCode(data); | ||
} | ||
if (type === 'string') { | ||
return data.toString('utf8'); | ||
} | ||
if (type === 'numArray') { | ||
return data.toArray(); | ||
} | ||
// else if (type === 'number') { | ||
// return data[0] | ||
// } | ||
return data; | ||
} | ||
if (type === 'string') { | ||
return data.toString('utf8'); | ||
for (var i = 0; i < readFeatureCount; i += 1) { | ||
var code = String.fromCharCode(decodeDataSeries('FC')); | ||
var readPosDelta = decodeDataSeries('FP'); | ||
var readFeature = { code: code }; | ||
// map of operator name -> data series name | ||
var data1Schema = { | ||
B: ['character', 'BA'], | ||
S: ['string', majorVersion > 1 ? 'SC' : 'IN'], | ||
X: ['number', 'BS'], | ||
D: ['number', 'DL'], | ||
I: ['string', 'IN'], | ||
i: ['character', 'BA'], | ||
b: ['string', 'BB'], | ||
q: ['numArray', 'QQ'], | ||
Q: ['number', 'QS'], | ||
H: ['number', 'HC'], | ||
P: ['number', 'PD'], | ||
N: ['number', 'RS'], | ||
}[code]; | ||
if (!data1Schema) { | ||
throw new errors_1.CramMalformedError("invalid read feature code \"".concat(code, "\"")); | ||
} | ||
readFeature.data = decodeRFData(data1Schema); | ||
// if this is a tag with two data items, make the data an array and add the second item | ||
var data2Schema = { B: ['number', 'QS'] }[code]; | ||
if (data2Schema) { | ||
readFeature.data = [readFeature.data, decodeRFData(data2Schema)]; | ||
} | ||
currentReadPos += readPosDelta; | ||
readFeature.pos = currentReadPos; | ||
currentRefPos += readPosDelta; | ||
readFeature.refPos = currentRefPos; | ||
// for gapping features, adjust the reference position for read features that follow | ||
if (code === 'D' || code === 'N') { | ||
currentRefPos += readFeature.data; | ||
} | ||
else if (code === 'I' || code === 'S') { | ||
currentRefPos -= readFeature.data.length; | ||
} | ||
else if (code === 'i') { | ||
currentRefPos -= 1; | ||
} | ||
readFeatures[i] = readFeature; | ||
} | ||
if (type === 'numArray') { | ||
return data.toArray(); | ||
} // else if (type === 'number') { | ||
// return data[0] | ||
// } | ||
return data; | ||
} | ||
for (var i = 0; i < readFeatureCount; i += 1) { | ||
var code = String.fromCharCode(decodeDataSeries('FC')); | ||
var readPosDelta = decodeDataSeries('FP'); | ||
var readFeature = { | ||
code: code | ||
}; // map of operator name -> data series name | ||
var data1Schema = { | ||
B: ['character', 'BA'], | ||
S: ['string', majorVersion > 1 ? 'SC' : 'IN'], | ||
// IN if cram v1, SC otherwise | ||
X: ['number', 'BS'], | ||
D: ['number', 'DL'], | ||
I: ['string', 'IN'], | ||
i: ['character', 'BA'], | ||
b: ['string', 'BB'], | ||
q: ['numArray', 'QQ'], | ||
Q: ['number', 'QS'], | ||
H: ['number', 'HC'], | ||
P: ['number', 'PD'], | ||
N: ['number', 'RS'] | ||
}[code]; | ||
if (!data1Schema) throw new CramMalformedError("invalid read feature code \"".concat(code, "\"")); | ||
readFeature.data = decodeRFData(data1Schema); // if this is a tag with two data items, make the data an array and add the second item | ||
var data2Schema = { | ||
B: ['number', 'QS'] | ||
}[code]; | ||
if (data2Schema) readFeature.data = [readFeature.data, decodeRFData(data2Schema)]; | ||
currentReadPos += readPosDelta; | ||
readFeature.pos = currentReadPos; | ||
currentRefPos += readPosDelta; | ||
readFeature.refPos = currentRefPos; // for gapping features, adjust the reference position for read features that follow | ||
if (code === 'D' || code === 'N') currentRefPos += readFeature.data;else if (code === 'I' || code === 'S') currentRefPos -= readFeature.data.length;else if (code === 'i') currentRefPos -= 1; | ||
readFeatures[i] = readFeature; | ||
} | ||
return readFeatures; | ||
return readFeatures; | ||
} | ||
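// Sketch with hypothetical values (not part of the module): how a decoded
// read-feature list adjusts the span on the reference, mirroring the logic
// in decodeRecord() below -- deletions/reference skips (D/N) lengthen it,
// while insertions/soft clips (I/S) and single inserted bases (i) shorten it.
function exampleLengthOnRef() {
  const readLength = 100
  const readFeatures = [
    { code: 'S', pos: 1, data: 'ACGT' }, // 4 bp soft clip
    { code: 'D', pos: 50, data: 10 }, // 10 bp deletion
  ]
  let lengthOnRef = readLength
  readFeatures.forEach(function (rf) {
    if (rf.code === 'D' || rf.code === 'N') lengthOnRef += rf.data
    else if (rf.code === 'I' || rf.code === 'S') lengthOnRef -= rf.data.length
    else if (rf.code === 'i') lengthOnRef -= 1
  })
  return lengthOnRef // 100 - 4 + 10 === 106
}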
function thingToString(thing) { | ||
if (thing instanceof Buffer) { | ||
return readNullTerminatedStringFromBuffer(thing); | ||
} | ||
if (thing.length && (0, _indexOf.default)(thing)) { | ||
// array-like | ||
if (!thing[thing.length - 1]) { | ||
// trim zeroes off the end if necessary | ||
var termIndex = (0, _indexOf.default)(thing).call(thing, 0); | ||
return String.fromCharCode.apply(String, (0, _toConsumableArray2.default)((0, _slice.default)(thing).call(thing, 0, termIndex))); | ||
if (thing instanceof Buffer) { | ||
return readNullTerminatedStringFromBuffer(thing); | ||
} | ||
return String.fromCharCode.apply(String, (0, _toConsumableArray2.default)(thing)); | ||
} | ||
return String(thing); | ||
if (thing.length && thing.indexOf) { | ||
// array-like | ||
if (!thing[thing.length - 1]) { | ||
// trim zeroes off the end if necessary | ||
var termIndex = thing.indexOf(0); | ||
return String.fromCharCode.apply(String, thing.slice(0, termIndex)); | ||
} | ||
return String.fromCharCode.apply(String, thing); | ||
} | ||
return String(thing); | ||
} | ||
function decodeRecord(slice, decodeDataSeries, compressionScheme, sliceHeader, coreDataBlock, blocksByContentId, cursors, majorVersion, recordNumber) { | ||
var cramRecord = new CramRecord(); | ||
cramRecord.flags = decodeDataSeries('BF'); // note: the C data type of compressionFlags is byte in cram v1 | ||
// and int32 in cram v2+, but that does not matter for us here | ||
// in javascript land. | ||
cramRecord.cramFlags = decodeDataSeries('CF'); | ||
if (majorVersion > 1 && sliceHeader.content.refSeqId === -2) cramRecord.sequenceId = decodeDataSeries('RI');else cramRecord.sequenceId = sliceHeader.content.refSeqId; | ||
cramRecord.readLength = decodeDataSeries('RL'); // if APDelta, will calculate the true start in a second pass | ||
cramRecord.alignmentStart = decodeDataSeries('AP'); | ||
if (compressionScheme.APdelta) cramRecord.alignmentStart += cursors.lastAlignmentStart; | ||
cursors.lastAlignmentStart = cramRecord.alignmentStart; | ||
cramRecord.readGroupId = decodeDataSeries('RG'); | ||
if (compressionScheme.readNamesIncluded) cramRecord.readName = thingToString(decodeDataSeries('RN')); // mate record | ||
if (cramRecord.isDetached()) { | ||
// note: the MF is a byte in 1.0, int32 in 2+, but once again this doesn't matter for javascript | ||
var mate = {}; | ||
mate.flags = decodeDataSeries('MF'); | ||
if (!compressionScheme.readNamesIncluded) { | ||
mate.readName = thingToString(decodeDataSeries('RN')); | ||
cramRecord.readName = mate.readName; | ||
var cramRecord = new record_1.default(); | ||
cramRecord.flags = decodeDataSeries('BF'); | ||
// note: the C data type of compressionFlags is byte in cram v1 | ||
// and int32 in cram v2+, but that does not matter for us here | ||
// in javascript land. | ||
cramRecord.cramFlags = decodeDataSeries('CF'); | ||
if (majorVersion > 1 && sliceHeader.content.refSeqId === -2) { | ||
cramRecord.sequenceId = decodeDataSeries('RI'); | ||
} | ||
mate.sequenceId = decodeDataSeries('NS'); | ||
mate.alignmentStart = decodeDataSeries('NP'); | ||
if ((0, _flags.default)(mate) || mate.sequenceId > -1) cramRecord.mate = mate; | ||
cramRecord.templateSize = decodeDataSeries('TS'); // set mate unmapped if needed | ||
if ((0, _flags.default)(mate) & Constants.CRAM_M_UNMAP) { | ||
cramRecord.flags |= Constants.BAM_FMUNMAP; | ||
} // set mate reversed if needed | ||
if ((0, _flags.default)(mate) & Constants.CRAM_M_REVERSE) { | ||
cramRecord.flags |= Constants.BAM_FMREVERSE; | ||
} // detachedCount++ | ||
} else if (cramRecord.hasMateDownStream()) { | ||
cramRecord.mateRecordNumber = decodeDataSeries('NF') + recordNumber + 1; | ||
} // TODO: the aux tag parsing will have to be refactored if we want to support | ||
// cram v1 | ||
var TLindex = decodeDataSeries('TL'); | ||
if (TLindex < 0) | ||
/* TODO: check nTL: TLindex >= compressionHeader.tagEncoding.size */ | ||
throw new CramMalformedError('invalid TL index'); // TN = tag names | ||
var TN = compressionScheme.getTagNames(TLindex); | ||
var ntags = TN.length; | ||
for (var i = 0; i < ntags; i += 1) { | ||
var tagId = TN[i]; | ||
var tagName = tagId.substr(0, 2); | ||
var tagType = tagId.substr(2, 1); | ||
var tagCodec = compressionScheme.getCodecForTag(tagId); | ||
if (!tagCodec) throw new CramMalformedError("no codec defined for auxiliary tag ".concat(tagId)); | ||
var tagData = tagCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
cramRecord.tags[tagName] = parseTagData(tagType, tagData); | ||
} | ||
if (!cramRecord.isSegmentUnmapped()) { | ||
var _context; | ||
// reading read features | ||
var | ||
/* int */ | ||
readFeatureCount = decodeDataSeries('FN'); | ||
if (readFeatureCount) { | ||
cramRecord.readFeatures = decodeReadFeatures(cramRecord, readFeatureCount, decodeDataSeries, compressionScheme, majorVersion); | ||
} // compute the read's true span on the reference sequence, and the end coordinate of the alignment on the reference | ||
var lengthOnRef = cramRecord.readLength; | ||
if (cramRecord.readFeatures) (0, _forEach.default)(_context = cramRecord.readFeatures).call(_context, function (_ref3) { | ||
var code = _ref3.code, | ||
data = _ref3.data; | ||
if (code === 'D' || code === 'N') lengthOnRef += data;else if (code === 'I' || code === 'S') lengthOnRef -= data.length;else if (code === 'i') lengthOnRef -= 1; | ||
}); | ||
if ((0, _isNan.default)(lengthOnRef)) { | ||
var _context2; | ||
console.warn("".concat(cramRecord.readName || (0, _concat.default)(_context2 = "".concat(cramRecord.sequenceId, ":")).call(_context2, cramRecord.alignmentStart), " record has invalid read features")); | ||
lengthOnRef = cramRecord.readLength; | ||
else { | ||
cramRecord.sequenceId = sliceHeader.content.refSeqId; | ||
} | ||
cramRecord.lengthOnRef = lengthOnRef; // mapping quality | ||
cramRecord.mappingQuality = decodeDataSeries('MQ'); | ||
if (cramRecord.isPreservingQualityScores()) { | ||
var bases = new Array(cramRecord.readLength); | ||
for (var _i = 0; _i < bases.length; _i += 1) { | ||
bases[_i] = decodeDataSeries('QS'); | ||
} | ||
cramRecord.qualityScores = bases; | ||
cramRecord.readLength = decodeDataSeries('RL'); | ||
// if APDelta, will calculate the true start in a second pass | ||
cramRecord.alignmentStart = decodeDataSeries('AP'); | ||
if (compressionScheme.APdelta) { | ||
cramRecord.alignmentStart += cursors.lastAlignmentStart; | ||
} | ||
} else if (cramRecord.isUnknownBases()) { | ||
cramRecord.readBases = null; | ||
cramRecord.qualityScores = null; | ||
} else { | ||
var _bases = new Array(cramRecord.readLength); | ||
for (var _i2 = 0; _i2 < _bases.length; _i2 += 1) { | ||
_bases[_i2] = decodeDataSeries('BA'); | ||
cursors.lastAlignmentStart = cramRecord.alignmentStart; | ||
cramRecord.readGroupId = decodeDataSeries('RG'); | ||
if (compressionScheme.readNamesIncluded) { | ||
cramRecord.readName = thingToString(decodeDataSeries('RN')); | ||
} | ||
cramRecord.readBases = String.fromCharCode.apply(String, _bases); | ||
if (cramRecord.isPreservingQualityScores()) { | ||
for (var _i3 = 0; _i3 < _bases.length; _i3 += 1) { | ||
_bases[_i3] = decodeDataSeries('QS'); | ||
} | ||
cramRecord.qualityScores = _bases; | ||
// mate record | ||
if (cramRecord.isDetached()) { | ||
// note: the MF is a byte in 1.0, int32 in 2+, but once again this doesn't matter for javascript | ||
var mate = {}; | ||
mate.flags = decodeDataSeries('MF'); | ||
if (!compressionScheme.readNamesIncluded) { | ||
mate.readName = thingToString(decodeDataSeries('RN')); | ||
cramRecord.readName = mate.readName; | ||
} | ||
mate.sequenceId = decodeDataSeries('NS'); | ||
mate.alignmentStart = decodeDataSeries('NP'); | ||
if (mate.flags || mate.sequenceId > -1) { | ||
cramRecord.mate = mate; | ||
} | ||
cramRecord.templateSize = decodeDataSeries('TS'); | ||
// set mate unmapped if needed | ||
if (mate.flags & constants_1.default.CRAM_M_UNMAP) { | ||
cramRecord.flags |= constants_1.default.BAM_FMUNMAP; | ||
} | ||
// set mate reversed if needed | ||
if (mate.flags & constants_1.default.CRAM_M_REVERSE) { | ||
cramRecord.flags |= constants_1.default.BAM_FMREVERSE; | ||
} | ||
// detachedCount++ | ||
} | ||
} | ||
return cramRecord; | ||
else if (cramRecord.hasMateDownStream()) { | ||
cramRecord.mateRecordNumber = decodeDataSeries('NF') + recordNumber + 1; | ||
} | ||
// TODO: the aux tag parsing will have to be refactored if we want to support | ||
// cram v1 | ||
var TLindex = decodeDataSeries('TL'); | ||
if (TLindex < 0) { | ||
/* TODO: check nTL: TLindex >= compressionHeader.tagEncoding.size */ | ||
throw new errors_1.CramMalformedError('invalid TL index'); | ||
} | ||
// TN = tag names | ||
var TN = compressionScheme.getTagNames(TLindex); | ||
var ntags = TN.length; | ||
for (var i = 0; i < ntags; i += 1) { | ||
var tagId = TN[i]; | ||
var tagName = tagId.substr(0, 2); | ||
var tagType = tagId.substr(2, 1); | ||
var tagCodec = compressionScheme.getCodecForTag(tagId); | ||
if (!tagCodec) { | ||
throw new errors_1.CramMalformedError("no codec defined for auxiliary tag ".concat(tagId)); | ||
} | ||
var tagData = tagCodec.decode(slice, coreDataBlock, blocksByContentId, cursors); | ||
cramRecord.tags[tagName] = parseTagData(tagType, tagData); | ||
} | ||
if (!cramRecord.isSegmentUnmapped()) { | ||
// reading read features | ||
var /* int */ readFeatureCount = decodeDataSeries('FN'); | ||
if (readFeatureCount) { | ||
cramRecord.readFeatures = decodeReadFeatures(cramRecord, readFeatureCount, decodeDataSeries, compressionScheme, majorVersion); | ||
} | ||
// compute the read's true span on the reference sequence, and the end coordinate of the alignment on the reference | ||
var lengthOnRef_1 = cramRecord.readLength; | ||
if (cramRecord.readFeatures) { | ||
cramRecord.readFeatures.forEach(function (_a) { | ||
var code = _a.code, data = _a.data; | ||
if (code === 'D' || code === 'N') { | ||
lengthOnRef_1 += data; | ||
} | ||
else if (code === 'I' || code === 'S') { | ||
lengthOnRef_1 -= data.length; | ||
} | ||
else if (code === 'i') { | ||
lengthOnRef_1 -= 1; | ||
} | ||
}); | ||
} | ||
if (Number.isNaN(lengthOnRef_1)) { | ||
console.warn("".concat(cramRecord.readName || | ||
"".concat(cramRecord.sequenceId, ":").concat(cramRecord.alignmentStart), " record has invalid read features")); | ||
lengthOnRef_1 = cramRecord.readLength; | ||
} | ||
cramRecord.lengthOnRef = lengthOnRef_1; | ||
// mapping quality | ||
cramRecord.mappingQuality = decodeDataSeries('MQ'); | ||
if (cramRecord.isPreservingQualityScores()) { | ||
var bases = new Array(cramRecord.readLength); | ||
for (var i = 0; i < bases.length; i += 1) { | ||
bases[i] = decodeDataSeries('QS'); | ||
} | ||
cramRecord.qualityScores = bases; | ||
} | ||
} | ||
else if (cramRecord.isUnknownBases()) { | ||
cramRecord.readBases = null; | ||
cramRecord.qualityScores = null; | ||
} | ||
else { | ||
var bases = new Array(cramRecord.readLength); | ||
for (var i = 0; i < bases.length; i += 1) { | ||
bases[i] = decodeDataSeries('BA'); | ||
} | ||
cramRecord.readBases = String.fromCharCode.apply(String, bases); | ||
if (cramRecord.isPreservingQualityScores()) { | ||
for (var i = 0; i < bases.length; i += 1) { | ||
bases[i] = decodeDataSeries('QS'); | ||
} | ||
cramRecord.qualityScores = bases; | ||
} | ||
} | ||
return cramRecord; | ||
} | ||
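// Rough summary of the decode order implemented above (derived from the code,
// stated non-authoritatively): BF, CF, optional RI, RL, AP (with APdelta
// handling), RG, optional RN, then either detached-mate fields (MF/RN/NS/NP/TS)
// or NF, then TL plus one codec per auxiliary tag, and finally read features +
// MQ + QS for mapped reads, or BA/QS for the remaining cases.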
module.exports = decodeRecord; | ||
exports.default = decodeRecord; | ||
//# sourceMappingURL=decodeRecord.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _values = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/values")); | ||
var _promise = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/promise")); | ||
var _filter = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/filter")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _flags = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/flags")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _map = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/map")); | ||
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toConsumableArray")); | ||
var _context17; | ||
var _require = require('../../errors'), | ||
CramMalformedError = _require.CramMalformedError, | ||
CramBufferOverrunError = _require.CramBufferOverrunError, | ||
CramArgumentError = _require.CramArgumentError; | ||
var _require2 = require('../util'), | ||
parseItem = _require2.parseItem, | ||
tinyMemoize = _require2.tinyMemoize, | ||
sequenceMD5 = _require2.sequenceMD5; | ||
var Constants = require('../constants'); | ||
var decodeRecord = require('./decodeRecord'); | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../../errors"); | ||
var util_1 = require("../util"); | ||
var constants_1 = __importDefault(require("../constants")); | ||
var decodeRecord_1 = __importDefault(require("./decodeRecord")); | ||
/** | ||
@@ -51,29 +53,26 @@ * @private | ||
*/ | ||
function calculateMultiSegmentMatedTemplateLength(allRecords, currentRecordNumber, thisRecord) { | ||
function getAllMatedRecords(startRecord) { | ||
var records = [startRecord]; | ||
if (startRecord.mateRecordNumber >= 0) { | ||
var mateRecord = allRecords[startRecord.mateRecordNumber]; | ||
if (!mateRecord) throw new CramMalformedError('intra-slice mate record not found, this file seems malformed'); | ||
records.push.apply(records, (0, _toConsumableArray2.default)(getAllMatedRecords(mateRecord))); | ||
function getAllMatedRecords(startRecord) { | ||
var records = [startRecord]; | ||
if (startRecord.mateRecordNumber >= 0) { | ||
var mateRecord = allRecords[startRecord.mateRecordNumber]; | ||
if (!mateRecord) { | ||
throw new errors_1.CramMalformedError('intra-slice mate record not found, this file seems malformed'); | ||
} | ||
records.push.apply(records, getAllMatedRecords(mateRecord)); | ||
} | ||
return records; | ||
} | ||
return records; | ||
} | ||
var matedRecords = getAllMatedRecords(thisRecord); | ||
var starts = (0, _map.default)(matedRecords).call(matedRecords, function (r) { | ||
return r.alignmentStart; | ||
}); | ||
var ends = (0, _map.default)(matedRecords).call(matedRecords, function (r) { | ||
return r.alignmentStart + r.readLength - 1; | ||
}); | ||
var estimatedTemplateLength = Math.max.apply(Math, (0, _toConsumableArray2.default)(ends)) - Math.min.apply(Math, (0, _toConsumableArray2.default)(starts)) + 1; | ||
if (estimatedTemplateLength >= 0) (0, _forEach.default)(matedRecords).call(matedRecords, function (r) { | ||
if (r.templateLength !== undefined) throw new CramMalformedError('mate pair group has some members that have template lengths already, this file seems malformed'); | ||
r.templateLength = estimatedTemplateLength; | ||
}); | ||
var matedRecords = getAllMatedRecords(thisRecord); | ||
var starts = matedRecords.map(function (r) { return r.alignmentStart; }); | ||
var ends = matedRecords.map(function (r) { return r.alignmentStart + r.readLength - 1; }); | ||
var estimatedTemplateLength = Math.max.apply(Math, ends) - Math.min.apply(Math, starts) + 1; | ||
if (estimatedTemplateLength >= 0) { | ||
matedRecords.forEach(function (r) { | ||
if (r.templateLength !== undefined) { | ||
throw new errors_1.CramMalformedError('mate pair group has some members that have template lengths already, this file seems malformed'); | ||
} | ||
r.templateLength = estimatedTemplateLength; | ||
}); | ||
} | ||
} | ||
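// Worked example with hypothetical numbers: for a mate group whose records
// start at 100 and 180 with read lengths 50 and 50, the ends are 149 and 229,
// so the estimate above is max(ends) - min(starts) + 1 = 130, which is then
// written onto every record in the group (if the estimate comes out >= 0).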
@@ -87,12 +86,10 @@ /** | ||
*/ | ||
function calculateIntraSliceMatePairTemplateLength(thisRecord, mateRecord) { | ||
// this just estimates the template length by using the simple (non-gapped) end coordinate of each | ||
// read, because gapping in the alignment doesn't mean the template is longer or shorter | ||
var start = Math.min(thisRecord.alignmentStart, mateRecord.alignmentStart); | ||
var end = Math.max(thisRecord.alignmentStart + thisRecord.readLength - 1, mateRecord.alignmentStart + mateRecord.readLength - 1); | ||
var lengthEstimate = end - start + 1; | ||
thisRecord.templateLength = lengthEstimate; | ||
mateRecord.templateLength = lengthEstimate; | ||
// this just estimates the template length by using the simple (non-gapped) end coordinate of each | ||
// read, because gapping in the alignment doesn't mean the template is longer or shorter | ||
var start = Math.min(thisRecord.alignmentStart, mateRecord.alignmentStart); | ||
var end = Math.max(thisRecord.alignmentStart + thisRecord.readLength - 1, mateRecord.alignmentStart + mateRecord.readLength - 1); | ||
var lengthEstimate = end - start + 1; | ||
thisRecord.templateLength = lengthEstimate; | ||
mateRecord.templateLength = lengthEstimate; | ||
} | ||
@@ -104,728 +101,430 @@ /** | ||
*/ | ||
function associateIntraSliceMate(allRecords, currentRecordNumber, thisRecord, mateRecord) { | ||
if (!mateRecord) throw new CramMalformedError('could not resolve intra-slice mate pairs, file seems truncated or malformed'); | ||
var complicatedMultiSegment = !!(mateRecord.mate || mateRecord.mateRecordNumber !== undefined && mateRecord.mateRecordNumber !== currentRecordNumber); // Deal with lossy read names | ||
if (!thisRecord.readName) { | ||
thisRecord.readName = String(thisRecord.uniqueId); | ||
mateRecord.readName = thisRecord.readName; | ||
} | ||
thisRecord.mate = { | ||
sequenceId: mateRecord.sequenceId, | ||
alignmentStart: mateRecord.alignmentStart, | ||
uniqueId: mateRecord.uniqueId | ||
}; | ||
if (mateRecord.readName) thisRecord.mate.readName = mateRecord.readName; // the mate record might have its own mate pointer, if this is some kind of | ||
// multi-segment (more than paired) scheme, so only relate that one back to this one | ||
// if it does not have any other relationship | ||
if (!mateRecord.mate && mateRecord.mateRecordNumber === undefined) { | ||
mateRecord.mate = { | ||
sequenceId: thisRecord.sequenceId, | ||
alignmentStart: thisRecord.alignmentStart, | ||
uniqueId: thisRecord.uniqueId | ||
if (!mateRecord) { | ||
throw new errors_1.CramMalformedError('could not resolve intra-slice mate pairs, file seems truncated or malformed'); | ||
} | ||
var complicatedMultiSegment = !!(mateRecord.mate || | ||
(mateRecord.mateRecordNumber !== undefined && | ||
mateRecord.mateRecordNumber !== currentRecordNumber)); | ||
// Deal with lossy read names | ||
if (!thisRecord.readName) { | ||
thisRecord.readName = String(thisRecord.uniqueId); | ||
mateRecord.readName = thisRecord.readName; | ||
} | ||
thisRecord.mate = { | ||
sequenceId: mateRecord.sequenceId, | ||
alignmentStart: mateRecord.alignmentStart, | ||
uniqueId: mateRecord.uniqueId, | ||
}; | ||
if (thisRecord.readName) mateRecord.mate.readName = thisRecord.readName; | ||
} // make sure the proper flags and cramFlags are set on both records | ||
// paired | ||
thisRecord.flags |= Constants.BAM_FPAIRED; // set mate unmapped if needed | ||
if ((0, _flags.default)(mateRecord) & Constants.BAM_FUNMAP) { | ||
thisRecord.flags |= Constants.BAM_FMUNMAP; // thisRecord.templateLength = 0 | ||
} | ||
if ((0, _flags.default)(thisRecord) & Constants.BAM_FUNMAP) { | ||
// thisRecord.templateLength = 0 | ||
mateRecord.flags |= Constants.BAM_FMUNMAP; | ||
} // set mate reversed if needed | ||
if ((0, _flags.default)(mateRecord) & Constants.BAM_FREVERSE) { | ||
thisRecord.flags |= Constants.BAM_FMREVERSE; | ||
} | ||
if ((0, _flags.default)(thisRecord) & Constants.BAM_FREVERSE) { | ||
mateRecord.flags |= Constants.BAM_FMREVERSE; | ||
} | ||
if (thisRecord.templateLength === undefined) { | ||
if (complicatedMultiSegment) calculateMultiSegmentMatedTemplateLength(allRecords, currentRecordNumber, thisRecord);else calculateIntraSliceMatePairTemplateLength(thisRecord, mateRecord); | ||
} // delete this last because it's used by the | ||
// complicated template length estimation | ||
delete thisRecord.mateRecordNumber; | ||
if (mateRecord.readName) { | ||
thisRecord.mate.readName = mateRecord.readName; | ||
} | ||
// the mate record might have its own mate pointer, if this is some kind of | ||
// multi-segment (more than paired) scheme, so only relate that one back to this one | ||
// if it does not have any other relationship | ||
if (!mateRecord.mate && mateRecord.mateRecordNumber === undefined) { | ||
mateRecord.mate = { | ||
sequenceId: thisRecord.sequenceId, | ||
alignmentStart: thisRecord.alignmentStart, | ||
uniqueId: thisRecord.uniqueId, | ||
}; | ||
if (thisRecord.readName) { | ||
mateRecord.mate.readName = thisRecord.readName; | ||
} | ||
} | ||
// make sure the proper flags and cramFlags are set on both records | ||
// paired | ||
thisRecord.flags |= constants_1.default.BAM_FPAIRED; | ||
// set mate unmapped if needed | ||
if (mateRecord.flags & constants_1.default.BAM_FUNMAP) { | ||
thisRecord.flags |= constants_1.default.BAM_FMUNMAP; | ||
// thisRecord.templateLength = 0 | ||
} | ||
if (thisRecord.flags & constants_1.default.BAM_FUNMAP) { | ||
// thisRecord.templateLength = 0 | ||
mateRecord.flags |= constants_1.default.BAM_FMUNMAP; | ||
} | ||
// set mate reversed if needed | ||
if (mateRecord.flags & constants_1.default.BAM_FREVERSE) { | ||
thisRecord.flags |= constants_1.default.BAM_FMREVERSE; | ||
} | ||
if (thisRecord.flags & constants_1.default.BAM_FREVERSE) { | ||
mateRecord.flags |= constants_1.default.BAM_FMREVERSE; | ||
} | ||
if (thisRecord.templateLength === undefined) { | ||
if (complicatedMultiSegment) { | ||
calculateMultiSegmentMatedTemplateLength(allRecords, currentRecordNumber, thisRecord); | ||
} | ||
else { | ||
calculateIntraSliceMatePairTemplateLength(thisRecord, mateRecord); | ||
} | ||
} | ||
// delete this last because it's used by the | ||
// complicated template length estimation | ||
delete thisRecord.mateRecordNumber; | ||
} | ||
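// Note (comment only, derived from the code above): once mates are paired up,
// BAM_FPAIRED is set on this record, the mate's BAM_FUNMAP / BAM_FREVERSE
// flags are mirrored onto this record as BAM_FMUNMAP / BAM_FMREVERSE (and
// vice versa), the template length is estimated if not already set, and the
// temporary mateRecordNumber pointer is deleted last.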
var CramSlice = | ||
/*#__PURE__*/ | ||
function () { | ||
function CramSlice(container, position) { | ||
(0, _classCallCheck2.default)(this, CramSlice); | ||
this.container = container; | ||
this.file = container.file; | ||
this.containerPosition = position; | ||
} // memoize | ||
(0, _createClass2.default)(CramSlice, [{ | ||
key: "getHeader", | ||
value: function () { | ||
var _getHeader = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee() { | ||
var sectionParsers, containerHeader, header; | ||
return _regenerator.default.wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
_context.next = 2; | ||
return this.file.getSectionParsers(); | ||
case 2: | ||
sectionParsers = _context.sent; | ||
_context.next = 5; | ||
return this.container.getHeader(); | ||
case 5: | ||
containerHeader = _context.sent; | ||
_context.next = 8; | ||
return this.file.readBlock(containerHeader._endPosition + this.containerPosition); | ||
case 8: | ||
header = _context.sent; | ||
if (!(header.contentType === 'MAPPED_SLICE_HEADER')) { | ||
_context.next = 13; | ||
break; | ||
} | ||
header.content = parseItem(header.content, sectionParsers.cramMappedSliceHeader.parser, 0, containerHeader._endPosition); | ||
_context.next = 18; | ||
break; | ||
case 13: | ||
if (!(header.contentType === 'UNMAPPED_SLICE_HEADER')) { | ||
_context.next = 17; | ||
break; | ||
} | ||
header.content = parseItem(header.content, sectionParsers.cramUnmappedSliceHeader.parser, 0, containerHeader._endPosition); | ||
_context.next = 18; | ||
break; | ||
case 17: | ||
throw new CramMalformedError("error reading slice header block, invalid content type ".concat(header._contentType)); | ||
case 18: | ||
return _context.abrupt("return", header); | ||
case 19: | ||
case "end": | ||
return _context.stop(); | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function getHeader() { | ||
return _getHeader.apply(this, arguments); | ||
} | ||
return getHeader; | ||
}() // memoize | ||
}, { | ||
key: "getBlocks", | ||
value: function () { | ||
var _getBlocks = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2() { | ||
var header, blockPosition, blocks, i; | ||
return _regenerator.default.wrap(function _callee2$(_context2) { | ||
while (1) { | ||
switch (_context2.prev = _context2.next) { | ||
case 0: | ||
_context2.next = 2; | ||
return this.getHeader(); | ||
case 2: | ||
header = _context2.sent; | ||
// read all the blocks into memory and store them | ||
blockPosition = header._endPosition; | ||
blocks = new Array(header.content.numBlocks); | ||
i = 0; | ||
case 6: | ||
if (!(i < blocks.length)) { | ||
_context2.next = 14; | ||
break; | ||
} | ||
_context2.next = 9; | ||
return this.file.readBlock(blockPosition); | ||
case 9: | ||
blocks[i] = _context2.sent; | ||
blockPosition = blocks[i]._endPosition; | ||
case 11: | ||
i += 1; | ||
_context2.next = 6; | ||
break; | ||
case 14: | ||
return _context2.abrupt("return", blocks); | ||
case 15: | ||
case "end": | ||
return _context2.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function getBlocks() { | ||
return _getBlocks.apply(this, arguments); | ||
} | ||
return getBlocks; | ||
}() // no memoize | ||
}, { | ||
key: "getCoreDataBlock", | ||
value: function () { | ||
var _getCoreDataBlock = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee3() { | ||
var blocks; | ||
return _regenerator.default.wrap(function _callee3$(_context3) { | ||
while (1) { | ||
switch (_context3.prev = _context3.next) { | ||
case 0: | ||
_context3.next = 2; | ||
return this.getBlocks(); | ||
case 2: | ||
blocks = _context3.sent; | ||
return _context3.abrupt("return", blocks[0]); | ||
case 4: | ||
case "end": | ||
return _context3.stop(); | ||
} | ||
} | ||
}, _callee3, this); | ||
})); | ||
function getCoreDataBlock() { | ||
return _getCoreDataBlock.apply(this, arguments); | ||
} | ||
return getCoreDataBlock; | ||
}() // memoize | ||
}, { | ||
key: "_getBlocksContentIdIndex", | ||
value: function () { | ||
var _getBlocksContentIdIndex2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee4() { | ||
var blocks, blocksByContentId; | ||
return _regenerator.default.wrap(function _callee4$(_context4) { | ||
while (1) { | ||
switch (_context4.prev = _context4.next) { | ||
case 0: | ||
_context4.next = 2; | ||
return this.getBlocks(); | ||
case 2: | ||
blocks = _context4.sent; | ||
blocksByContentId = {}; | ||
(0, _forEach.default)(blocks).call(blocks, function (block) { | ||
if (block.contentType === 'EXTERNAL_DATA') { | ||
blocksByContentId[block.contentId] = block; | ||
} | ||
}); | ||
return _context4.abrupt("return", blocksByContentId); | ||
case 6: | ||
case "end": | ||
return _context4.stop(); | ||
} | ||
} | ||
}, _callee4, this); | ||
})); | ||
function _getBlocksContentIdIndex() { | ||
return _getBlocksContentIdIndex2.apply(this, arguments); | ||
} | ||
return _getBlocksContentIdIndex; | ||
}() | ||
}, { | ||
key: "getBlockByContentId", | ||
value: function () { | ||
var _getBlockByContentId = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee5(id) { | ||
var blocksByContentId; | ||
return _regenerator.default.wrap(function _callee5$(_context5) { | ||
while (1) { | ||
switch (_context5.prev = _context5.next) { | ||
case 0: | ||
_context5.next = 2; | ||
return this._getBlocksContentIdIndex(); | ||
case 2: | ||
blocksByContentId = _context5.sent; | ||
return _context5.abrupt("return", blocksByContentId[id]); | ||
case 4: | ||
case "end": | ||
return _context5.stop(); | ||
} | ||
} | ||
}, _callee5, this); | ||
})); | ||
function getBlockByContentId(_x) { | ||
return _getBlockByContentId.apply(this, arguments); | ||
} | ||
return getBlockByContentId; | ||
}() | ||
}, { | ||
key: "getReferenceRegion", | ||
value: function () { | ||
var _getReferenceRegion = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee6() { | ||
var sliceHeader, compressionScheme, refBlock, seq; | ||
return _regenerator.default.wrap(function _callee6$(_context6) { | ||
while (1) { | ||
switch (_context6.prev = _context6.next) { | ||
case 0: | ||
_context6.next = 2; | ||
return this.getHeader(); | ||
case 2: | ||
sliceHeader = _context6.sent.content; | ||
if (!(sliceHeader.refSeqId < 0)) { | ||
_context6.next = 5; | ||
break; | ||
} | ||
return _context6.abrupt("return", undefined); | ||
case 5: | ||
_context6.next = 7; | ||
return this.container.getCompressionScheme(); | ||
case 7: | ||
compressionScheme = _context6.sent; | ||
if (!(sliceHeader.refBaseBlockId >= 0)) { | ||
_context6.next = 15; | ||
break; | ||
} | ||
refBlock = this.getBlockByContentId(sliceHeader.refBaseBlockId); | ||
if (refBlock) { | ||
_context6.next = 12; | ||
break; | ||
} | ||
throw new CramMalformedError('embedded reference specified, but reference block does not exist'); | ||
case 12: | ||
if (!(sliceHeader.span > refBlock.uncompressedSize)) { | ||
_context6.next = 14; | ||
break; | ||
} | ||
throw new CramMalformedError('Embedded reference is too small'); | ||
case 14: | ||
return _context6.abrupt("return", { | ||
seq: refBlock.data.toString('utf8'), | ||
start: sliceHeader.refSeqStart, | ||
end: sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1, | ||
span: sliceHeader.refSeqSpan | ||
}); | ||
case 15: | ||
if (!(compressionScheme.referenceRequired || this.file.fetchReferenceSequenceCallback)) { | ||
_context6.next = 24; | ||
break; | ||
} | ||
if (this.file.fetchReferenceSequenceCallback) { | ||
_context6.next = 18; | ||
break; | ||
} | ||
throw new Error('reference sequence not embedded, and seqFetch callback not provided, cannot fetch reference sequence'); | ||
case 18: | ||
_context6.next = 20; | ||
return this.file.fetchReferenceSequenceCallback(sliceHeader.refSeqId, sliceHeader.refSeqStart, sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1); | ||
case 20: | ||
seq = _context6.sent; | ||
if (!(seq.length !== sliceHeader.refSeqSpan)) { | ||
_context6.next = 23; | ||
break; | ||
} | ||
throw new CramArgumentError('seqFetch callback returned a reference sequence of the wrong length'); | ||
case 23: | ||
return _context6.abrupt("return", { | ||
seq: seq, | ||
start: sliceHeader.refSeqStart, | ||
end: sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1, | ||
span: sliceHeader.refSeqSpan | ||
}); | ||
case 24: | ||
return _context6.abrupt("return", undefined); | ||
case 25: | ||
case "end": | ||
return _context6.stop(); | ||
} | ||
} | ||
}, _callee6, this); | ||
})); | ||
function getReferenceRegion() { | ||
return _getReferenceRegion.apply(this, arguments); | ||
} | ||
return getReferenceRegion; | ||
}() | ||
}, { | ||
key: "getAllRecords", | ||
value: function getAllRecords() { | ||
return this.getRecords(function () { | ||
return true; | ||
}); | ||
var CramSlice = /** @class */ (function () { | ||
function CramSlice(container, position) { | ||
this.container = container; | ||
this.file = container.file; | ||
this.containerPosition = position; | ||
} | ||
}, { | ||
key: "_fetchRecords", | ||
value: function () { | ||
var _fetchRecords2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee7() { | ||
var _this = this; | ||
var _ref, majorVersion, compressionScheme, sliceHeader, blocksByContentId, refRegion, _context7, _context8, _context9, _context10, _context11, seq, start, end, seqMd5, storedMd5, coreDataBlock, cursors, decodeDataSeries, records, i, _i, mateRecordNumber; | ||
return _regenerator.default.wrap(function _callee7$(_context12) { | ||
while (1) { | ||
switch (_context12.prev = _context12.next) { | ||
case 0: | ||
_context12.next = 2; | ||
return this.file.getDefinition(); | ||
case 2: | ||
_ref = _context12.sent; | ||
majorVersion = _ref.majorVersion; | ||
_context12.next = 6; | ||
return this.container.getCompressionScheme(); | ||
case 6: | ||
compressionScheme = _context12.sent; | ||
_context12.next = 9; | ||
return this.getHeader(); | ||
case 9: | ||
sliceHeader = _context12.sent; | ||
_context12.next = 12; | ||
return this._getBlocksContentIdIndex(); | ||
case 12: | ||
blocksByContentId = _context12.sent; | ||
if (!(majorVersion > 1 && this.file.options.checkSequenceMD5 && sliceHeader.content.refSeqId >= 0 && sliceHeader.content.md5.join('') !== '0000000000000000')) { | ||
_context12.next = 23; | ||
break; | ||
// memoize | ||
CramSlice.prototype.getHeader = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sectionParsers, containerHeader, header; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.file.getSectionParsers()]; | ||
case 1: | ||
sectionParsers = _a.sent(); | ||
return [4 /*yield*/, this.container.getHeader()]; | ||
case 2: | ||
containerHeader = _a.sent(); | ||
return [4 /*yield*/, this.file.readBlock(containerHeader._endPosition + this.containerPosition)]; | ||
case 3: | ||
header = _a.sent(); | ||
if (header.contentType === 'MAPPED_SLICE_HEADER') { | ||
header.content = (0, util_1.parseItem)(header.content, sectionParsers.cramMappedSliceHeader.parser, 0, containerHeader._endPosition); | ||
} | ||
else if (header.contentType === 'UNMAPPED_SLICE_HEADER') { | ||
header.content = (0, util_1.parseItem)(header.content, sectionParsers.cramUnmappedSliceHeader.parser, 0, containerHeader._endPosition); | ||
} | ||
else { | ||
throw new errors_1.CramMalformedError("error reading slice header block, invalid content type ".concat(header._contentType)); | ||
} | ||
return [2 /*return*/, header]; | ||
} | ||
_context12.next = 16; | ||
return this.getReferenceRegion(); | ||
case 16: | ||
refRegion = _context12.sent; | ||
if (!refRegion) { | ||
_context12.next = 23; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// memoize | ||
CramSlice.prototype.getBlocks = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var header, blockPosition, blocks, i, _a, _b; | ||
return __generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: return [4 /*yield*/, this.getHeader() | ||
// read all the blocks into memory and store them | ||
]; | ||
case 1: | ||
header = _c.sent(); | ||
blockPosition = header._endPosition; | ||
blocks = new Array(header.content.numBlocks); | ||
i = 0; | ||
_c.label = 2; | ||
case 2: | ||
if (!(i < blocks.length)) return [3 /*break*/, 5]; | ||
_a = blocks; | ||
_b = i; | ||
return [4 /*yield*/, this.file.readBlock(blockPosition)]; | ||
case 3: | ||
_a[_b] = _c.sent(); | ||
blockPosition = blocks[i]._endPosition; | ||
_c.label = 4; | ||
case 4: | ||
i += 1; | ||
return [3 /*break*/, 2]; | ||
case 5: return [2 /*return*/, blocks]; | ||
} | ||
seq = refRegion.seq, start = refRegion.start, end = refRegion.end; | ||
seqMd5 = sequenceMD5(seq); | ||
storedMd5 = (0, _map.default)(_context7 = sliceHeader.content.md5).call(_context7, function (byte) { | ||
return (byte < 16 ? '0' : '') + byte.toString(16); | ||
}).join(''); | ||
if (!(seqMd5 !== storedMd5)) { | ||
_context12.next = 23; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// no memoize | ||
CramSlice.prototype.getCoreDataBlock = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var blocks; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getBlocks() | ||
// the core data block is always the first block in the slice | ||
]; | ||
case 1: | ||
blocks = _a.sent(); | ||
// the core data block is always the first block in the slice | ||
return [2 /*return*/, blocks[0]]; | ||
} | ||
throw new CramMalformedError((0, _concat.default)(_context8 = (0, _concat.default)(_context9 = (0, _concat.default)(_context10 = (0, _concat.default)(_context11 = "MD5 checksum reference mismatch for ref ".concat(sliceHeader.content.refSeqId, " pos ")).call(_context11, start, "..")).call(_context10, end, ". recorded MD5: ")).call(_context9, storedMd5, ", calculated MD5: ")).call(_context8, seqMd5)); | ||
case 23: | ||
_context12.next = 25; | ||
return this.getCoreDataBlock(); | ||
case 25: | ||
coreDataBlock = _context12.sent; | ||
cursors = { | ||
lastAlignmentStart: sliceHeader.content.refSeqStart || 0, | ||
coreBlock: { | ||
bitPosition: 7, | ||
bytePosition: 0 | ||
}, | ||
externalBlocks: { | ||
getCursor: function getCursor(contentId) { | ||
if (!this[contentId]) this[contentId] = { | ||
bitPosition: 7, | ||
bytePosition: 0 | ||
}; | ||
return this[contentId]; | ||
} | ||
} | ||
}; | ||
decodeDataSeries = function decodeDataSeries(dataSeriesName) { | ||
var codec = compressionScheme.getCodecForDataSeries(dataSeriesName); | ||
if (!codec) throw new CramMalformedError("no codec defined for ".concat(dataSeriesName, " data series")); // console.log(dataSeriesName, Object.getPrototypeOf(codec)) | ||
return codec.decode(_this, coreDataBlock, blocksByContentId, cursors); | ||
}; | ||
records = new Array(sliceHeader.content.numRecords); | ||
i = 0; | ||
case 30: | ||
if (!(i < records.length)) { | ||
_context12.next = 48; | ||
break; | ||
}); | ||
}); | ||
}; | ||
// memoize | ||
CramSlice.prototype._getBlocksContentIdIndex = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var blocks, blocksByContentId; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getBlocks()]; | ||
case 1: | ||
blocks = _a.sent(); | ||
blocksByContentId = {}; | ||
blocks.forEach(function (block) { | ||
if (block.contentType === 'EXTERNAL_DATA') { | ||
blocksByContentId[block.contentId] = block; | ||
} | ||
}); | ||
return [2 /*return*/, blocksByContentId]; | ||
} | ||
_context12.prev = 31; | ||
records[i] = decodeRecord(this, decodeDataSeries, compressionScheme, sliceHeader, coreDataBlock, blocksByContentId, cursors, majorVersion, i); | ||
records[i].uniqueId = sliceHeader.contentPosition + sliceHeader.content.recordCounter + i + 1; | ||
_context12.next = 45; | ||
break; | ||
case 36: | ||
_context12.prev = 36; | ||
_context12.t0 = _context12["catch"](31); | ||
if (!(_context12.t0 instanceof CramBufferOverrunError)) { | ||
_context12.next = 44; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramSlice.prototype.getBlockByContentId = function (id) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var blocksByContentId; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this._getBlocksContentIdIndex()]; | ||
case 1: | ||
blocksByContentId = _a.sent(); | ||
return [2 /*return*/, blocksByContentId[id]]; | ||
} | ||
console.warn('read attempted beyond end of buffer, file seems truncated.'); | ||
records = (0, _filter.default)(records).call(records, function (r) { | ||
return !!r; | ||
}); | ||
return _context12.abrupt("break", 48); | ||
case 44: | ||
throw _context12.t0; | ||
case 45: | ||
i += 1; | ||
_context12.next = 30; | ||
break; | ||
case 48: | ||
// interpret `recordsToNextFragment` attributes to make standard `mate` objects | ||
// Resolve mate pair cross-references between records in this slice | ||
for (_i = 0; _i < records.length; _i += 1) { | ||
mateRecordNumber = records[_i].mateRecordNumber; | ||
if (mateRecordNumber >= 0) associateIntraSliceMate(records, _i, records[_i], records[mateRecordNumber]); | ||
}); | ||
}); | ||
}; | ||
CramSlice.prototype.getReferenceRegion = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var sliceHeader, compressionScheme, refBlock, seq; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.getHeader()]; | ||
case 1: | ||
sliceHeader = (_a.sent()).content; | ||
if (sliceHeader.refSeqId < 0) { | ||
return [2 /*return*/, undefined]; | ||
} | ||
return [4 /*yield*/, this.container.getCompressionScheme() | ||
// console.log(JSON.stringify(sliceHeader, null, ' ')) | ||
]; | ||
case 2: | ||
compressionScheme = _a.sent(); | ||
// console.log(JSON.stringify(sliceHeader, null, ' ')) | ||
if (sliceHeader.refBaseBlockId >= 0) { | ||
refBlock = this.getBlockByContentId(sliceHeader.refBaseBlockId); | ||
if (!refBlock) { | ||
throw new errors_1.CramMalformedError('embedded reference specified, but reference block does not exist'); | ||
} | ||
if (sliceHeader.span > refBlock.uncompressedSize) { | ||
throw new errors_1.CramMalformedError('Embedded reference is too small'); | ||
} | ||
return [2 /*return*/, { | ||
seq: refBlock.data.toString('utf8'), | ||
start: sliceHeader.refSeqStart, | ||
end: sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1, | ||
span: sliceHeader.refSeqSpan, | ||
}]; | ||
} | ||
if (!(compressionScheme.referenceRequired || | ||
this.file.fetchReferenceSequenceCallback)) return [3 /*break*/, 4]; | ||
if (!this.file.fetchReferenceSequenceCallback) { | ||
throw new Error('reference sequence not embedded, and seqFetch callback not provided, cannot fetch reference sequence'); | ||
} | ||
return [4 /*yield*/, this.file.fetchReferenceSequenceCallback(sliceHeader.refSeqId, sliceHeader.refSeqStart, sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1)]; | ||
case 3: | ||
seq = _a.sent(); | ||
if (seq.length !== sliceHeader.refSeqSpan) { | ||
throw new errors_1.CramArgumentError('seqFetch callback returned a reference sequence of the wrong length'); | ||
} | ||
return [2 /*return*/, { | ||
seq: seq, | ||
start: sliceHeader.refSeqStart, | ||
end: sliceHeader.refSeqStart + sliceHeader.refSeqSpan - 1, | ||
span: sliceHeader.refSeqSpan, | ||
}]; | ||
case 4: return [2 /*return*/, undefined]; | ||
} | ||
return _context12.abrupt("return", records); | ||
case 50: | ||
case "end": | ||
return _context12.stop(); | ||
} | ||
} | ||
}, _callee7, this, [[31, 36]]); | ||
})); | ||
function _fetchRecords() { | ||
return _fetchRecords2.apply(this, arguments); | ||
} | ||
return _fetchRecords; | ||
}() | ||
}, { | ||
key: "getRecords", | ||
value: function () { | ||
var _getRecords = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee9(filterFunction) { | ||
var _context13, | ||
_this2 = this; | ||
var cacheKey, recordsPromise, records, sliceHeader, _context14, singleRefId, _compressionScheme, refRegions, i, seqId, refRegion, end, _i2, _seqId, _refRegion; | ||
return _regenerator.default.wrap(function _callee9$(_context16) { | ||
while (1) { | ||
switch (_context16.prev = _context16.next) { | ||
case 0: | ||
// fetch the features if necessary, using the file-level feature cache | ||
cacheKey = this.container.filePosition + this.containerPosition; | ||
recordsPromise = this.file.featureCache.get(cacheKey); | ||
if (!recordsPromise) { | ||
recordsPromise = this._fetchRecords(); | ||
this.file.featureCache.set(cacheKey, recordsPromise); | ||
}); | ||
}); | ||
}; | ||
CramSlice.prototype.getAllRecords = function () { | ||
return this.getRecords(function () { return true; }); | ||
}; | ||
CramSlice.prototype._fetchRecords = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var majorVersion, compressionScheme, sliceHeader, blocksByContentId, refRegion, seq, start, end, seqMd5, storedMd5, coreDataBlock, cursors, decodeDataSeries, records, i, i, mateRecordNumber; | ||
var _this = this; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: return [4 /*yield*/, this.file.getDefinition()]; | ||
case 1: | ||
majorVersion = (_a.sent()).majorVersion; | ||
return [4 /*yield*/, this.container.getCompressionScheme()]; | ||
case 2: | ||
compressionScheme = _a.sent(); | ||
return [4 /*yield*/, this.getHeader()]; | ||
case 3: | ||
sliceHeader = _a.sent(); | ||
return [4 /*yield*/, this._getBlocksContentIdIndex() | ||
// check MD5 of reference if available | ||
]; | ||
case 4: | ||
blocksByContentId = _a.sent(); | ||
if (!(majorVersion > 1 && | ||
this.file.options.checkSequenceMD5 && | ||
sliceHeader.content.refSeqId >= 0 && | ||
sliceHeader.content.md5.join('') !== '0000000000000000')) return [3 /*break*/, 6]; | ||
return [4 /*yield*/, this.getReferenceRegion()]; | ||
case 5: | ||
refRegion = _a.sent(); | ||
if (refRegion) { | ||
seq = refRegion.seq, start = refRegion.start, end = refRegion.end; | ||
seqMd5 = (0, util_1.sequenceMD5)(seq); | ||
storedMd5 = sliceHeader.content.md5 | ||
.map(function (byte) { return (byte < 16 ? '0' : '') + byte.toString(16); }) | ||
.join(''); | ||
if (seqMd5 !== storedMd5) { | ||
throw new errors_1.CramMalformedError("MD5 checksum reference mismatch for ref ".concat(sliceHeader.content.refSeqId, " pos ").concat(start, "..").concat(end, ". recorded MD5: ").concat(storedMd5, ", calculated MD5: ").concat(seqMd5)); | ||
} | ||
} | ||
_a.label = 6; | ||
case 6: return [4 /*yield*/, this.getCoreDataBlock()]; | ||
case 7: | ||
coreDataBlock = _a.sent(); | ||
cursors = { | ||
lastAlignmentStart: sliceHeader.content.refSeqStart || 0, | ||
coreBlock: { bitPosition: 7, bytePosition: 0 }, | ||
externalBlocks: { | ||
getCursor: function (contentId) { | ||
if (!this[contentId]) { | ||
this[contentId] = { bitPosition: 7, bytePosition: 0 }; | ||
} | ||
return this[contentId]; | ||
}, | ||
}, | ||
}; | ||
decodeDataSeries = function (dataSeriesName) { | ||
var codec = compressionScheme.getCodecForDataSeries(dataSeriesName); | ||
if (!codec) { | ||
throw new errors_1.CramMalformedError("no codec defined for ".concat(dataSeriesName, " data series")); | ||
} | ||
// console.log(dataSeriesName, Object.getPrototypeOf(codec)) | ||
return codec.decode(_this, coreDataBlock, blocksByContentId, cursors); | ||
}; | ||
records = new Array(sliceHeader.content.numRecords); | ||
for (i = 0; i < records.length; i += 1) { | ||
try { | ||
records[i] = (0, decodeRecord_1.default)(this, decodeDataSeries, compressionScheme, sliceHeader, coreDataBlock, blocksByContentId, cursors, majorVersion, i); | ||
records[i].uniqueId = | ||
sliceHeader.contentPosition + | ||
sliceHeader.content.recordCounter + | ||
i + | ||
1; | ||
} | ||
catch (e) { | ||
if (e instanceof errors_1.CramBufferOverrunError) { | ||
console.warn('read attempted beyond end of buffer, file seems truncated.'); | ||
records = records.filter(function (r) { return !!r; }); | ||
break; | ||
} | ||
else { | ||
throw e; | ||
} | ||
} | ||
} | ||
// interpret `recordsToNextFragment` attributes to make standard `mate` objects | ||
// Resolve mate pair cross-references between records in this slice | ||
for (i = 0; i < records.length; i += 1) { | ||
mateRecordNumber = records[i].mateRecordNumber; | ||
if (mateRecordNumber >= 0) { | ||
associateIntraSliceMate(records, i, records[i], records[mateRecordNumber]); | ||
} | ||
} | ||
return [2 /*return*/, records]; | ||
} | ||
_context16.t0 = _filter.default; | ||
_context16.next = 6; | ||
return recordsPromise; | ||
case 6: | ||
_context16.t1 = _context13 = _context16.sent; | ||
_context16.t2 = _context13; | ||
_context16.t3 = filterFunction; | ||
records = (0, _context16.t0)(_context16.t1).call(_context16.t2, _context16.t3); | ||
if (!(records.length && this.file.fetchReferenceSequenceCallback)) { | ||
_context16.next = 24; | ||
break; | ||
} | ||
_context16.next = 13; | ||
return this.getHeader(); | ||
case 13: | ||
sliceHeader = _context16.sent; | ||
if (!(sliceHeader.content.refSeqId >= 0 || // single-ref slice | ||
sliceHeader.content.refSeqId === -2 // multi-ref slice | ||
)) { | ||
_context16.next = 24; | ||
break; | ||
} | ||
singleRefId = sliceHeader.content.refSeqId >= 0 ? sliceHeader.content.refSeqId : undefined; | ||
_context16.next = 18; | ||
return this.container.getCompressionScheme(); | ||
case 18: | ||
_compressionScheme = _context16.sent; | ||
refRegions = {}; // seqId => { start, end, seq } | ||
// iterate over the records to find the spans of the reference sequences we need to fetch | ||
for (i = 0; i < records.length; i += 1) { | ||
seqId = singleRefId !== undefined ? singleRefId : records[i].sequenceId; | ||
refRegion = refRegions[seqId]; | ||
if (!refRegion) { | ||
refRegion = { | ||
id: seqId, | ||
start: records[i].alignmentStart, | ||
end: -Infinity | ||
}; | ||
refRegions[seqId] = refRegion; | ||
} | ||
end = records[i].alignmentStart + (records[i].lengthOnRef || records[i].readLength) - 1; | ||
if (end > refRegion.end) refRegion.end = end; | ||
if (records[i].alignmentStart < refRegion.start) refRegion.start = records[i].alignmentStart; | ||
} // fetch the `seq` for all of the ref regions | ||
_context16.next = 23; | ||
return _promise.default.all((0, _map.default)(_context14 = (0, _values.default)(refRegions)).call(_context14, | ||
/*#__PURE__*/ | ||
function () { | ||
var _ref2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee8(refRegion) { | ||
return _regenerator.default.wrap(function _callee8$(_context15) { | ||
while (1) { | ||
switch (_context15.prev = _context15.next) { | ||
case 0: | ||
if (!(refRegion.id !== -1 && refRegion.start <= refRegion.end)) { | ||
_context15.next = 4; | ||
break; | ||
}); | ||
}); | ||
}; | ||
CramSlice.prototype.getRecords = function (filterFunction) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var cacheKey, recordsPromise, records, sliceHeader, singleRefId, compressionScheme, refRegions, i, seqId, refRegion, end, i, seqId, refRegion; | ||
var _this = this; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
cacheKey = this.container.filePosition + this.containerPosition; | ||
recordsPromise = this.file.featureCache.get(cacheKey); | ||
if (!recordsPromise) { | ||
recordsPromise = this._fetchRecords(); | ||
this.file.featureCache.set(cacheKey, recordsPromise); | ||
} | ||
return [4 /*yield*/, recordsPromise]; | ||
case 1: | ||
records = (_a.sent()).filter(filterFunction); | ||
if (!(records.length && this.file.fetchReferenceSequenceCallback)) return [3 /*break*/, 5]; | ||
return [4 /*yield*/, this.getHeader()]; | ||
case 2: | ||
sliceHeader = _a.sent(); | ||
if (!(sliceHeader.content.refSeqId >= 0 || // single-ref slice | ||
sliceHeader.content.refSeqId === -2) // multi-ref slice | ||
) return [3 /*break*/, 5]; // multi-ref slice | ||
singleRefId = sliceHeader.content.refSeqId >= 0 | ||
? sliceHeader.content.refSeqId | ||
: undefined; | ||
return [4 /*yield*/, this.container.getCompressionScheme()]; | ||
case 3: | ||
compressionScheme = _a.sent(); | ||
refRegions = {} // seqId => { start, end, seq } | ||
; | ||
// iterate over the records to find the spans of the reference sequences we need to fetch | ||
for (i = 0; i < records.length; i += 1) { | ||
seqId = singleRefId !== undefined ? singleRefId : records[i].sequenceId; | ||
refRegion = refRegions[seqId]; | ||
if (!refRegion) { | ||
refRegion = { | ||
id: seqId, | ||
start: records[i].alignmentStart, | ||
end: -Infinity, | ||
}; | ||
refRegions[seqId] = refRegion; | ||
} | ||
_context15.next = 3; | ||
return _this2.file.fetchReferenceSequenceCallback(refRegion.id, refRegion.start, refRegion.end); | ||
case 3: | ||
refRegion.seq = _context15.sent; | ||
case 4: | ||
case "end": | ||
return _context15.stop(); | ||
end = records[i].alignmentStart + | ||
(records[i].lengthOnRef || records[i].readLength) - | ||
1; | ||
if (end > refRegion.end) { | ||
refRegion.end = end; | ||
} | ||
if (records[i].alignmentStart < refRegion.start) { | ||
refRegion.start = records[i].alignmentStart; | ||
} | ||
} | ||
} | ||
}, _callee8); | ||
})); | ||
return function (_x3) { | ||
return _ref2.apply(this, arguments); | ||
}; | ||
}())); | ||
case 23: | ||
// now decorate all the records with them | ||
for (_i2 = 0; _i2 < records.length; _i2 += 1) { | ||
_seqId = singleRefId !== undefined ? singleRefId : records[_i2].sequenceId; | ||
_refRegion = refRegions[_seqId]; | ||
if (_refRegion && _refRegion.seq) { | ||
records[_i2].addReferenceSequence(_refRegion, _compressionScheme); | ||
} | ||
// fetch the `seq` for all of the ref regions | ||
return [4 /*yield*/, Promise.all(Object.values(refRegions).map(function (refRegion) { return __awaiter(_this, void 0, void 0, function () { | ||
var _a; | ||
return __generator(this, function (_b) { | ||
switch (_b.label) { | ||
case 0: | ||
if (!(refRegion.id !== -1 && refRegion.start <= refRegion.end)) return [3 /*break*/, 2]; | ||
_a = refRegion; | ||
return [4 /*yield*/, this.file.fetchReferenceSequenceCallback(refRegion.id, refRegion.start, refRegion.end)]; | ||
case 1: | ||
_a.seq = _b.sent(); | ||
_b.label = 2; | ||
case 2: return [2 /*return*/]; | ||
} | ||
}); | ||
}); })) | ||
// now decorate all the records with them | ||
]; | ||
case 4: | ||
// fetch the `seq` for all of the ref regions | ||
_a.sent(); | ||
// now decorate all the records with them | ||
for (i = 0; i < records.length; i += 1) { | ||
seqId = singleRefId !== undefined ? singleRefId : records[i].sequenceId; | ||
refRegion = refRegions[seqId]; | ||
if (refRegion && refRegion.seq) { | ||
records[i].addReferenceSequence(refRegion, compressionScheme); | ||
} | ||
} | ||
_a.label = 5; | ||
case 5: return [2 /*return*/, records]; | ||
} | ||
case 24: | ||
return _context16.abrupt("return", records); | ||
case 25: | ||
case "end": | ||
return _context16.stop(); | ||
} | ||
} | ||
}, _callee9, this); | ||
})); | ||
function getRecords(_x2) { | ||
return _getRecords.apply(this, arguments); | ||
} | ||
return getRecords; | ||
}() | ||
}]); | ||
return CramSlice; | ||
}(); // memoize several methods in the class for performance | ||
(0, _forEach.default)(_context17 = 'getHeader getBlocks _getBlocksContentIdIndex'.split(' ')).call(_context17, function (method) { | ||
return tinyMemoize(CramSlice, method); | ||
}); | ||
module.exports = CramSlice; | ||
}); | ||
}); | ||
}; | ||
return CramSlice; | ||
}()); | ||
exports.default = CramSlice; | ||
// memoize several methods in the class for performance | ||
'getHeader getBlocks _getBlocksContentIdIndex' | ||
.split(' ') | ||
.forEach(function (method) { return (0, util_1.tinyMemoize)(CramSlice, method); }); | ||
//# sourceMappingURL=index.js.map |
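// getRecords() above caches the in-flight _fetchRecords() promise in the
// file-level featureCache, keyed by the slice's absolute position
// (container.filePosition + this.containerPosition), so concurrent queries
// that land on the same slice share a single decode. A minimal sketch of that
// promise-caching pattern, using a plain Map and hypothetical field/function
// names in place of the real cache and decoder:
var sliceRecordCache = new Map();

function cachedSliceRecords(slice, filterFunction) {
  var cacheKey = slice.containerFilePosition + slice.containerPosition;
  var recordsPromise = sliceRecordCache.get(cacheKey);
  if (!recordsPromise) {
    recordsPromise = decodeAllRecords(slice); // stand-in for slice._fetchRecords()
    sliceRecordCache.set(cacheKey, recordsPromise);
  }
  return recordsPromise.then(function (records) {
    return records.filter(filterFunction);
  });
}

// hypothetical stand-in for the expensive per-slice decoding work
function decodeAllRecords(slice) {
  return Promise.resolve(slice.records || []);
}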
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _promise = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/promise")); | ||
var md5 = require('md5'); | ||
var _require = require('../errors'), | ||
CramBufferOverrunError = _require.CramBufferOverrunError; | ||
module.exports = { | ||
itf8Size: function itf8Size(v) { | ||
if (!(v & ~0x7f)) return 1; | ||
if (!(v & ~0x3fff)) return 2; | ||
if (!(v & ~0x1fffff)) return 3; | ||
if (!(v & ~0xfffffff)) return 4; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.sequenceMD5 = exports.tinyMemoize = exports.parseItem = exports.parseItf8 = exports.itf8Size = void 0; | ||
var md5_1 = __importDefault(require("md5")); | ||
var errors_1 = require("../errors"); | ||
function itf8Size(v) { | ||
if (!(v & ~0x7f)) { | ||
return 1; | ||
} | ||
if (!(v & ~0x3fff)) { | ||
return 2; | ||
} | ||
if (!(v & ~0x1fffff)) { | ||
return 3; | ||
} | ||
if (!(v & ~0xfffffff)) { | ||
return 4; | ||
} | ||
return 5; | ||
}, | ||
parseItf8: function parseItf8(buffer, initialOffset) { | ||
} | ||
exports.itf8Size = itf8Size; | ||
function parseItf8(buffer, initialOffset) { | ||
var offset = initialOffset; | ||
var countFlags = buffer[offset]; | ||
var result; | ||
if (countFlags < 0x80) { | ||
result = countFlags; | ||
offset += 1; | ||
} else if (countFlags < 0xc0) { | ||
result = (countFlags << 8 | buffer[offset + 1]) & 0x3fff; | ||
offset += 2; | ||
} else if (countFlags < 0xe0) { | ||
result = (countFlags << 16 | buffer[offset + 1] << 8 | buffer[offset + 2]) & 0x1fffff; | ||
offset += 3; | ||
} else if (countFlags < 0xf0) { | ||
result = (countFlags << 24 | buffer[offset + 1] << 16 | buffer[offset + 2] << 8 | buffer[offset + 3]) & 0x0fffffff; | ||
offset += 4; | ||
} else { | ||
result = (countFlags & 0x0f) << 28 | buffer[offset + 1] << 20 | buffer[offset + 2] << 12 | buffer[offset + 3] << 4 | buffer[offset + 4] & 0x0f; // x=((0xff & 0x0f)<<28) | (0xff<<20) | (0xff<<12) | (0xff<<4) | (0x0f & 0x0f); | ||
// TODO *val_p = uv < 0x80000000UL ? uv : -((int32_t) (0xffffffffUL - uv)) - 1; | ||
offset += 5; | ||
    } | ||
    if (countFlags < 0x80) { | ||
        result = countFlags; | ||
        offset += 1; | ||
    } | ||
else if (countFlags < 0xc0) { | ||
result = ((countFlags << 8) | buffer[offset + 1]) & 0x3fff; | ||
offset += 2; | ||
} | ||
else if (countFlags < 0xe0) { | ||
result = | ||
((countFlags << 16) | (buffer[offset + 1] << 8) | buffer[offset + 2]) & | ||
0x1fffff; | ||
offset += 3; | ||
} | ||
else if (countFlags < 0xf0) { | ||
result = | ||
((countFlags << 24) | | ||
(buffer[offset + 1] << 16) | | ||
(buffer[offset + 2] << 8) | | ||
buffer[offset + 3]) & | ||
0x0fffffff; | ||
offset += 4; | ||
} | ||
else { | ||
result = | ||
((countFlags & 0x0f) << 28) | | ||
(buffer[offset + 1] << 20) | | ||
(buffer[offset + 2] << 12) | | ||
(buffer[offset + 3] << 4) | | ||
(buffer[offset + 4] & 0x0f); | ||
// x=((0xff & 0x0f)<<28) | (0xff<<20) | (0xff<<12) | (0xff<<4) | (0x0f & 0x0f); | ||
// TODO *val_p = uv < 0x80000000UL ? uv : -((int32_t) (0xffffffffUL - uv)) - 1; | ||
offset += 5; | ||
} | ||
if (offset > buffer.length) { | ||
throw new CramBufferOverrunError('Attempted to read beyond end of buffer; this file seems truncated.'); | ||
throw new errors_1.CramBufferOverrunError('Attempted to read beyond end of buffer; this file seems truncated.'); | ||
} | ||
return [result, offset - initialOffset]; | ||
}, | ||
// parseLtf8(buffer, initialOffset) { | ||
// let offset = initialOffset | ||
// const countFlags = buffer[offset] | ||
// let result | ||
// if (countFlags < 0x80) { | ||
// result = countFlags | ||
// offset += 1 | ||
// } else if (countFlags < 0xc0) { | ||
// result = ((buffer[offset] << 8) | buffer[offset + 1]) & 0x3fff | ||
// offset += 2 | ||
// } else if (countFlags < 0xe0) { | ||
// result = | ||
// ((buffer[offset] << 16) | | ||
// (buffer[offset + 1] << 8) | | ||
// buffer[offset + 2]) & | ||
// 0x1fffff | ||
// offset += 3 | ||
// } else if (countFlags < 0xf0) { | ||
// result = | ||
// ((buffer[offset] << 24) | | ||
// (buffer[offset + 1] << 16) | | ||
// (buffer[offset + 2] << 8) | | ||
// buffer[offset + 3]) & | ||
// 0x0fffffff | ||
// offset += 4 | ||
// } else if (countFlags < 0xf8) { | ||
// result = | ||
// ((buffer[offset] & 15) * Math.pow(2,32) + (buffer[offset + 1] << 24)) | | ||
// ((buffer[offset + 2] << 16) | | ||
// (buffer[offset + 3] << 8) | | ||
// buffer[offset + 4]) | ||
// // TODO *val_p = uv < 0x80000000UL ? uv : -((int32_t) (0xffffffffUL - uv)) - 1; | ||
// offset += 5 | ||
// } else if (countFlags < 0xfc) { | ||
// result = | ||
// ((((buffer[offset] & 7) << 8) | buffer[offset + 1]) * Math.pow(2,32) + | ||
// (buffer[offset + 2] << 24)) | | ||
// ((buffer[offset + 3] << 16) | | ||
// (buffer[offset + 4] << 8) | | ||
// buffer[offset + 5]) | ||
// offset += 6 | ||
// } else if (countFlags < 0xfe) { | ||
// result = | ||
// ((((buffer[offset] & 3) << 16) | | ||
// (buffer[offset + 1] << 8) | | ||
// buffer[offset + 2]) * | ||
// Math.pow(2,32) + | ||
// (buffer[offset + 3] << 24)) | | ||
// ((buffer[offset + 4] << 16) | | ||
// (buffer[offset + 5] << 8) | | ||
// buffer[offset + 6]) | ||
// offset += 7 | ||
// } else if (countFlags < 0xff) { | ||
// result = Long.fromBytesBE(buffer.slice(offset + 1, offset + 8)) | ||
// if ( | ||
// result.greaterThan(Number.MAX_SAFE_INTEGER) || | ||
// result.lessThan(Number.MIN_SAFE_INTEGER) | ||
// ) | ||
// throw new CramUnimplementedError('integer overflow') | ||
// result = result.toNumber() | ||
// offset += 8 | ||
// } else { | ||
// result = Long.fromBytesBE(buffer.slice(offset + 1, offset + 9)) | ||
// if ( | ||
// result.greaterThan(Number.MAX_SAFE_INTEGER) || | ||
// result.lessThan(Number.MIN_SAFE_INTEGER) | ||
// ) | ||
// throw new CramUnimplementedError('integer overflow') | ||
// result = result.toNumber() | ||
// offset += 9 | ||
// } | ||
// return [result, offset - initialOffset] | ||
// }, | ||
parseItem: function parseItem(buffer, parser) { | ||
var startBufferPosition = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; | ||
var startFilePosition = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0; | ||
var _parser$parse = parser.parse(buffer), | ||
offset = _parser$parse.offset, | ||
result = _parser$parse.result; | ||
} | ||
exports.parseItf8 = parseItf8; | ||
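// A worked example of the ITF-8 helpers above: the high bits of the first
// byte say how many extra bytes follow, so values up to 0x7f fit in one byte
// and a first byte of the form 10xxxxxx starts a two-byte encoding.
var one = parseItf8(Buffer.from([0x05]), 0);       // -> [5, 1]
var two = parseItf8(Buffer.from([0x81, 0x2c]), 0); // -> [300, 2]
console.log(one, two, itf8Size(5), itf8Size(300)); // encoded sizes: 1 and 2 bytes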
// parseLtf8(buffer, initialOffset) { | ||
// let offset = initialOffset | ||
// const countFlags = buffer[offset] | ||
// let result | ||
// if (countFlags < 0x80) { | ||
// result = countFlags | ||
// offset += 1 | ||
// } else if (countFlags < 0xc0) { | ||
// result = ((buffer[offset] << 8) | buffer[offset + 1]) & 0x3fff | ||
// offset += 2 | ||
// } else if (countFlags < 0xe0) { | ||
// result = | ||
// ((buffer[offset] << 16) | | ||
// (buffer[offset + 1] << 8) | | ||
// buffer[offset + 2]) & | ||
// 0x1fffff | ||
// offset += 3 | ||
// } else if (countFlags < 0xf0) { | ||
// result = | ||
// ((buffer[offset] << 24) | | ||
// (buffer[offset + 1] << 16) | | ||
// (buffer[offset + 2] << 8) | | ||
// buffer[offset + 3]) & | ||
// 0x0fffffff | ||
// offset += 4 | ||
// } else if (countFlags < 0xf8) { | ||
// result = | ||
// ((buffer[offset] & 15) * Math.pow(2,32) + (buffer[offset + 1] << 24)) | | ||
// ((buffer[offset + 2] << 16) | | ||
// (buffer[offset + 3] << 8) | | ||
// buffer[offset + 4]) | ||
// // TODO *val_p = uv < 0x80000000UL ? uv : -((int32_t) (0xffffffffUL - uv)) - 1; | ||
// offset += 5 | ||
// } else if (countFlags < 0xfc) { | ||
// result = | ||
// ((((buffer[offset] & 7) << 8) | buffer[offset + 1]) * Math.pow(2,32) + | ||
// (buffer[offset + 2] << 24)) | | ||
// ((buffer[offset + 3] << 16) | | ||
// (buffer[offset + 4] << 8) | | ||
// buffer[offset + 5]) | ||
// offset += 6 | ||
// } else if (countFlags < 0xfe) { | ||
// result = | ||
// ((((buffer[offset] & 3) << 16) | | ||
// (buffer[offset + 1] << 8) | | ||
// buffer[offset + 2]) * | ||
// Math.pow(2,32) + | ||
// (buffer[offset + 3] << 24)) | | ||
// ((buffer[offset + 4] << 16) | | ||
// (buffer[offset + 5] << 8) | | ||
// buffer[offset + 6]) | ||
// offset += 7 | ||
// } else if (countFlags < 0xff) { | ||
// result = Long.fromBytesBE(buffer.slice(offset + 1, offset + 8)) | ||
// if ( | ||
// result.greaterThan(Number.MAX_SAFE_INTEGER) || | ||
// result.lessThan(Number.MIN_SAFE_INTEGER) | ||
// ) | ||
// throw new CramUnimplementedError('integer overflow') | ||
// result = result.toNumber() | ||
// offset += 8 | ||
// } else { | ||
// result = Long.fromBytesBE(buffer.slice(offset + 1, offset + 9)) | ||
// if ( | ||
// result.greaterThan(Number.MAX_SAFE_INTEGER) || | ||
// result.lessThan(Number.MIN_SAFE_INTEGER) | ||
// ) | ||
// throw new CramUnimplementedError('integer overflow') | ||
// result = result.toNumber() | ||
// offset += 9 | ||
// } | ||
// return [result, offset - initialOffset] | ||
// }, | ||
function parseItem(buffer, parser, startBufferPosition, startFilePosition) { | ||
if (startBufferPosition === void 0) { startBufferPosition = 0; } | ||
if (startFilePosition === void 0) { startFilePosition = 0; } | ||
var _a = parser.parse(buffer), offset = _a.offset, result = _a.result; | ||
result._endPosition = offset + startFilePosition; | ||
result._size = offset - startBufferPosition; | ||
return result; | ||
}, | ||
  // This would be nice as a decorator, but Babel's decorator support may | ||
  // change or go away, so it is written as a plain helper instead. | ||
  // Memoizes a method in the simplest possible way, with no regard for its | ||
  // arguments; as a result it only works on methods that take no arguments. | ||
tinyMemoize: function tinyMemoize(_class, methodName) { | ||
} | ||
exports.parseItem = parseItem; | ||
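// parseItem() works with any parser object exposing parse(buffer) ->
// { offset, result }: it simply annotates the parsed result with
// _endPosition and _size bookkeeping. A sketch using a hypothetical
// hand-rolled parser (the real callers pass generated section parsers):
var uint32Parser = {
  parse: function (buffer) {
    return { offset: 4, result: { value: buffer.readUInt32LE(0) } };
  },
};
var item = parseItem(Buffer.from([1, 0, 0, 0]), uint32Parser, 0, 100);
console.log(item.value, item._endPosition, item._size); // 1, 104, 4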
// This would be nice as a decorator, but Babel's decorator support may | ||
// change or go away, so it is written as a plain helper instead. | ||
// Memoizes a method in the simplest possible way, with no regard for its | ||
// arguments; as a result it only works on methods that take no arguments. | ||
function tinyMemoize(_class, methodName) { | ||
var method = _class.prototype[methodName]; | ||
var memoAttrName = "_memo_".concat(methodName); | ||
_class.prototype[methodName] = function _tinyMemoized() { | ||
var _this = this; | ||
if (!(memoAttrName in this)) { | ||
var res = method.call(this); | ||
this[memoAttrName] = res; | ||
_promise.default.resolve(res).catch(function () { | ||
delete _this[memoAttrName]; | ||
}); | ||
} | ||
return this[memoAttrName]; | ||
var _this = this; | ||
if (!(memoAttrName in this)) { | ||
var res = method.call(this); | ||
this[memoAttrName] = res; | ||
Promise.resolve(res).catch(function () { | ||
delete _this[memoAttrName]; | ||
}); | ||
} | ||
return this[memoAttrName]; | ||
}; | ||
}, | ||
sequenceMD5: function sequenceMD5(seq) { | ||
return md5(seq.toUpperCase().replace(/[^\x21-\x7e]/g, '')); | ||
} | ||
}; | ||
} | ||
exports.tinyMemoize = tinyMemoize; | ||
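// tinyMemoize() caches the first result (typically a promise) of a
// zero-argument method per instance, and clears the cache entry if that
// promise rejects so a failed read can be retried. Illustrative sketch with
// a made-up class:
function Thing() {}
Thing.prototype.getHeader = function () {
  console.log('expensive parse runs once');
  return Promise.resolve({ majorVersion: 3 });
};
tinyMemoize(Thing, 'getHeader');
var t = new Thing();
t.getHeader(); // logs, stores the promise on the instance
t.getHeader(); // returns the same cached promise; nothing is logged again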
function sequenceMD5(seq) { | ||
return (0, md5_1.default)(seq.toUpperCase().replace(/[^\x21-\x7e]/g, '')); | ||
} | ||
exports.sequenceMD5 = sequenceMD5; | ||
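// sequenceMD5() follows the SAM/CRAM M5 convention: uppercase the sequence
// and strip everything outside the printable ASCII range before hashing, so
// whitespace or line breaks (e.g. from FASTA input) do not change the digest.
var a = sequenceMD5('acgt\nACGT ');
var b = sequenceMD5('ACGTACGT');
console.log(a === b); // true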
//# sourceMappingURL=util.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _wrapNativeSuper2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/wrapNativeSuper")); | ||
var CramError = | ||
/*#__PURE__*/ | ||
function (_Error) { | ||
(0, _inherits2.default)(CramError, _Error); | ||
function CramError() { | ||
(0, _classCallCheck2.default)(this, CramError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramError).apply(this, arguments)); | ||
} | ||
return CramError; | ||
}((0, _wrapNativeSuper2.default)(Error)); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.CramArgumentError = exports.CramSizeLimitError = exports.CramBufferOverrunError = exports.CramMalformedError = exports.CramUnimplementedError = exports.CramError = void 0; | ||
var CramError = /** @class */ (function (_super) { | ||
__extends(CramError, _super); | ||
function CramError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramError; | ||
}(Error)); | ||
exports.CramError = CramError; | ||
/** Error caused by encountering a part of the CRAM spec that has not yet been implemented */ | ||
var CramUnimplementedError = | ||
/*#__PURE__*/ | ||
function (_Error2) { | ||
(0, _inherits2.default)(CramUnimplementedError, _Error2); | ||
function CramUnimplementedError() { | ||
(0, _classCallCheck2.default)(this, CramUnimplementedError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramUnimplementedError).apply(this, arguments)); | ||
} | ||
return CramUnimplementedError; | ||
}((0, _wrapNativeSuper2.default)(Error)); | ||
var CramUnimplementedError = /** @class */ (function (_super) { | ||
__extends(CramUnimplementedError, _super); | ||
function CramUnimplementedError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramUnimplementedError; | ||
}(Error)); | ||
exports.CramUnimplementedError = CramUnimplementedError; | ||
/** An error caused by malformed data. */ | ||
var CramMalformedError = | ||
/*#__PURE__*/ | ||
function (_CramError) { | ||
(0, _inherits2.default)(CramMalformedError, _CramError); | ||
function CramMalformedError() { | ||
(0, _classCallCheck2.default)(this, CramMalformedError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramMalformedError).apply(this, arguments)); | ||
} | ||
return CramMalformedError; | ||
}(CramError); | ||
var CramMalformedError = /** @class */ (function (_super) { | ||
__extends(CramMalformedError, _super); | ||
function CramMalformedError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramMalformedError; | ||
}(CramError)); | ||
exports.CramMalformedError = CramMalformedError; | ||
/** | ||
* An error caused by attempting to read beyond the end of the defined data. | ||
*/ | ||
var CramBufferOverrunError = | ||
/*#__PURE__*/ | ||
function (_CramMalformedError) { | ||
(0, _inherits2.default)(CramBufferOverrunError, _CramMalformedError); | ||
function CramBufferOverrunError() { | ||
(0, _classCallCheck2.default)(this, CramBufferOverrunError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramBufferOverrunError).apply(this, arguments)); | ||
} | ||
return CramBufferOverrunError; | ||
}(CramMalformedError); | ||
var CramBufferOverrunError = /** @class */ (function (_super) { | ||
__extends(CramBufferOverrunError, _super); | ||
function CramBufferOverrunError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramBufferOverrunError; | ||
}(CramMalformedError)); | ||
exports.CramBufferOverrunError = CramBufferOverrunError; | ||
/** | ||
* An error caused by data being too big, exceeding a size limit. | ||
*/ | ||
var CramSizeLimitError = | ||
/*#__PURE__*/ | ||
function (_CramError2) { | ||
(0, _inherits2.default)(CramSizeLimitError, _CramError2); | ||
function CramSizeLimitError() { | ||
(0, _classCallCheck2.default)(this, CramSizeLimitError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramSizeLimitError).apply(this, arguments)); | ||
} | ||
return CramSizeLimitError; | ||
}(CramError); | ||
var CramSizeLimitError = /** @class */ (function (_super) { | ||
__extends(CramSizeLimitError, _super); | ||
function CramSizeLimitError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramSizeLimitError; | ||
}(CramError)); | ||
exports.CramSizeLimitError = CramSizeLimitError; | ||
/** | ||
* An invalid argument was supplied to a cram-js method or object. | ||
*/ | ||
var CramArgumentError = | ||
/*#__PURE__*/ | ||
function (_CramError3) { | ||
(0, _inherits2.default)(CramArgumentError, _CramError3); | ||
function CramArgumentError() { | ||
(0, _classCallCheck2.default)(this, CramArgumentError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramArgumentError).apply(this, arguments)); | ||
} | ||
return CramArgumentError; | ||
}(CramError); | ||
module.exports = { | ||
CramBufferOverrunError: CramBufferOverrunError, | ||
CramMalformedError: CramMalformedError, | ||
CramUnimplementedError: CramUnimplementedError, | ||
CramSizeLimitError: CramSizeLimitError, | ||
CramArgumentError: CramArgumentError | ||
}; | ||
var CramArgumentError = /** @class */ (function (_super) { | ||
__extends(CramArgumentError, _super); | ||
function CramArgumentError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramArgumentError; | ||
}(CramError)); | ||
exports.CramArgumentError = CramArgumentError; | ||
//# sourceMappingURL=errors.js.map |
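// The error classes above form a hierarchy: CramBufferOverrunError extends
// CramMalformedError, which extends CramError, which extends Error, so
// callers can catch narrowly or broadly with instanceof. A sketch of how
// calling code might distinguish a truncated file from other malformed data
// (readNextItem is a hypothetical reader, for illustration only; the error
// classes are the ones defined above):
function readNextItem() {
  throw new CramBufferOverrunError('Attempted to read beyond end of buffer; this file seems truncated.');
}
try {
  readNextItem();
} catch (e) {
  if (e instanceof CramBufferOverrunError) {
    console.warn('file appears truncated, stopping early');
  } else if (e instanceof CramMalformedError) {
    throw e; // genuinely corrupt data, surface it to the caller
  } else {
    throw e;
  }
}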
"use strict"; | ||
var CramFile = require('./cramFile'); | ||
var IndexedCramFile = require('./indexedCramFile'); | ||
var CraiIndex = require('./craiIndex'); | ||
module.exports = { | ||
CramFile: CramFile, | ||
IndexedCramFile: IndexedCramFile, | ||
CraiIndex: CraiIndex | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.CraiIndex = exports.IndexedCramFile = exports.CramFile = void 0; | ||
var cramFile_1 = __importDefault(require("./cramFile")); | ||
exports.CramFile = cramFile_1.default; | ||
var indexedCramFile_1 = __importDefault(require("./indexedCramFile")); | ||
exports.IndexedCramFile = indexedCramFile_1.default; | ||
var craiIndex_1 = __importDefault(require("./craiIndex")); | ||
exports.CraiIndex = craiIndex_1.default; | ||
//# sourceMappingURL=index.js.map |
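// A sketch of how the exports above fit together: open a CRAM file and its
// .crai index, then query records by numeric reference id using 1-based
// closed coordinates. The seqFetch callback supplies reference bases when a
// slice does not embed its reference. File names are placeholders, and
// CraiIndex is assumed here to take a path option analogous to cramPath.
async function exampleQuery() {
  const indexedFile = new IndexedCramFile({
    cramPath: 'alignments.cram',
    index: new CraiIndex({ path: 'alignments.cram.crai' }),
    // placeholder fetcher: return reference bases for seqId over [start, end]
    seqFetch: async (seqId, start, end) => 'N'.repeat(end - start + 1),
    checkSequenceMD5: false, // avoid large reference fetches just for checksum verification
  });
  return indexedFile.getRecordsForRange(0, 10000, 20000, {
    viewAsPairs: false,     // true also fetches mates that fall outside the range
    maxInsertSize: 200000,  // default pairing distance limit
  });
}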
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _sort = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/sort")); | ||
var _filter = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/filter")); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _entries = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/object/entries")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _toConsumableArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toConsumableArray")); | ||
var _promise = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/promise")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _map = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/map")); | ||
var _reduce = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/reduce")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _require = require('./errors'), | ||
CramUnimplementedError = _require.CramUnimplementedError, | ||
CramSizeLimitError = _require.CramSizeLimitError; | ||
var CramFile = require('./cramFile'); | ||
var IndexedCramFile = | ||
/*#__PURE__*/ | ||
function () { | ||
/** | ||
* | ||
* @param {object} args | ||
* @param {CramFile} args.cram | ||
* @param {Index-like} args.index object that supports getEntriesForRange(seqId,start,end) -> Promise[Array[index entries]] | ||
* @param {number} [args.cacheSize] optional maximum number of CRAM records to cache. default 20,000 | ||
   * @param {number} [args.fetchSizeLimit] optional maximum number of bytes to fetch in a single getRecordsForRange call. Default 3,000,000 bytes (3 MB). | ||
   * @param {boolean} [args.checkSequenceMD5] - default true. If false, disables verifying the MD5 | ||
   * checksum of the reference sequence underlying a slice. In some applications this check can cause an inconveniently large amount (many megabases) of sequence to be fetched. | ||
*/ | ||
function IndexedCramFile(args) { | ||
(0, _classCallCheck2.default)(this, IndexedCramFile); | ||
// { cram, index, seqFetch /* fasta, fastaIndex */ }) { | ||
if (args.cram) this.cram = args.cram;else this.cram = new CramFile({ | ||
url: args.cramUrl, | ||
path: args.cramPath, | ||
filehandle: args.cramFilehandle, | ||
seqFetch: args.seqFetch, | ||
checkSequenceMD5: args.checkSequenceMD5, | ||
        cacheSize: args.cacheSize | ||
      }); | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
    }); | ||
}; | ||
if (!(this.cram instanceof CramFile)) throw new Error('invalid arguments: no cramfile'); | ||
this.index = args.index; | ||
if (!this.index.getEntriesForRange) throw new Error('invalid arguments: not an index'); | ||
this.fetchSizeLimit = args.fetchSizeLimit || 3000000; | ||
} | ||
/** | ||
* | ||
* @param {number} seq numeric ID of the reference sequence | ||
* @param {number} start start of the range of interest. 1-based closed coordinates. | ||
* @param {number} end end of the range of interest. 1-based closed coordinates. | ||
* @returns {Promise[Array[CramRecord]]} | ||
*/ | ||
(0, _createClass2.default)(IndexedCramFile, [{ | ||
key: "getRecordsForRange", | ||
value: function () { | ||
var _getRecordsForRange = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee(seq, start, end) { | ||
var _context, | ||
_context2, | ||
_this = this, | ||
_context3, | ||
_concatInstanceProper, | ||
_context4; | ||
var opts, | ||
seqId, | ||
slices, | ||
totalSize, | ||
filter, | ||
sliceResults, | ||
ret, | ||
_context5, | ||
_context6, | ||
_context7, | ||
readNames, | ||
readIds, | ||
i, | ||
name, | ||
id, | ||
unmatedPairs, | ||
matePromises, | ||
_i, | ||
_name, | ||
mateSlices, | ||
mateBlocks, | ||
mateChunks, | ||
_i2, | ||
_mateChunks, | ||
mateRecordPromises, | ||
mateFeatPromises, | ||
mateTotalSize, | ||
_context8, | ||
newMateFeats, | ||
newMates, | ||
_args = arguments; | ||
return _regenerator.default.wrap(function _callee$(_context9) { | ||
while (1) { | ||
switch (_context9.prev = _context9.next) { | ||
case 0: | ||
opts = _args.length > 3 && _args[3] !== undefined ? _args[3] : {}; | ||
opts.viewAsPairs = opts.viewAsPairs || false; | ||
opts.pairAcrossChr = opts.pairAcrossChr || false; | ||
opts.maxInsertSize = opts.maxInsertSize || 200000; | ||
if (!(typeof seq === 'string')) { | ||
_context9.next = 6; | ||
break; | ||
} | ||
throw new CramUnimplementedError('string sequence names not yet supported'); | ||
case 6: | ||
seqId = seq; | ||
_context9.next = 9; | ||
return this.index.getEntriesForRange(seqId, start, end); | ||
case 9: | ||
slices = _context9.sent; | ||
totalSize = (0, _reduce.default)(_context = (0, _map.default)(slices).call(slices, function (s) { | ||
return s.sliceBytes; | ||
})).call(_context, function (a, b) { | ||
return a + b; | ||
}, 0); | ||
if (!(totalSize > this.fetchSizeLimit)) { | ||
_context9.next = 13; | ||
break; | ||
} | ||
throw new CramSizeLimitError((0, _concat.default)(_context2 = "data size of ".concat(totalSize.toLocaleString(), " bytes exceeded fetch size limit of ")).call(_context2, this.fetchSizeLimit.toLocaleString(), " bytes")); | ||
case 13: | ||
// TODO: do we need to merge or de-duplicate the blocks? | ||
// fetch all the slices and parse the feature data | ||
filter = function filter(feature) { | ||
return feature.sequenceId === seq && feature.alignmentStart <= end && feature.alignmentStart + feature.lengthOnRef - 1 >= start; | ||
}; | ||
_context9.next = 16; | ||
return _promise.default.all((0, _map.default)(slices).call(slices, function (slice) { | ||
return _this.getRecordsInSlice(slice, filter); | ||
})); | ||
case 16: | ||
sliceResults = _context9.sent; | ||
ret = (_concatInstanceProper = (0, _concat.default)(_context3 = Array.prototype)).call.apply(_concatInstanceProper, (0, _concat.default)(_context4 = [_context3]).call(_context4, (0, _toConsumableArray2.default)(sliceResults))); | ||
if (!opts.viewAsPairs) { | ||
_context9.next = 42; | ||
break; | ||
} | ||
readNames = {}; | ||
readIds = {}; | ||
for (i = 0; i < ret.length; i += 1) { | ||
name = ret[i].readName; | ||
id = ret[i].uniqueId; | ||
if (!readNames[name]) readNames[name] = 0; | ||
readNames[name] += 1; | ||
readIds[id] = 1; | ||
} | ||
unmatedPairs = {}; | ||
(0, _forEach.default)(_context5 = (0, _entries.default)(readNames)).call(_context5, function (_ref) { | ||
var _ref2 = (0, _slicedToArray2.default)(_ref, 2), | ||
k = _ref2[0], | ||
v = _ref2[1]; | ||
if (v === 1) unmatedPairs[k] = true; | ||
}); | ||
matePromises = []; | ||
for (_i = 0; _i < ret.length; _i += 1) { | ||
_name = ret[_i].readName; | ||
if (unmatedPairs[_name] && ret[_i].mate && (ret[_i].mate.sequenceId === seqId || opts.pairAcrossChr) && Math.abs(ret[_i].alignmentStart - ret[_i].mate.alignmentStart) < opts.maxInsertSize) { | ||
mateSlices = this.index.getEntriesForRange(ret[_i].mate.sequenceId, ret[_i].mate.alignmentStart, ret[_i].mate.alignmentStart + 1); | ||
matePromises.push(mateSlices); | ||
} | ||
} | ||
_context9.next = 28; | ||
return _promise.default.all(matePromises); | ||
case 28: | ||
mateBlocks = _context9.sent; | ||
mateChunks = []; | ||
for (_i2 = 0; _i2 < mateBlocks.length; _i2 += 1) { | ||
(_mateChunks = mateChunks).push.apply(_mateChunks, (0, _toConsumableArray2.default)(mateBlocks[_i2])); | ||
} // filter out duplicates | ||
mateChunks = (0, _filter.default)(_context6 = (0, _sort.default)(mateChunks).call(mateChunks, function (a, b) { | ||
return a.toString().localeCompare(b.toString()); | ||
})).call(_context6, function (item, pos, ary) { | ||
return !pos || item.toString() !== ary[pos - 1].toString(); | ||
}); | ||
mateRecordPromises = []; | ||
mateFeatPromises = []; | ||
mateTotalSize = (0, _reduce.default)(_context7 = (0, _map.default)(mateChunks).call(mateChunks, function (s) { | ||
return s.sliceBytes; | ||
})).call(_context7, function (a, b) { | ||
return a + b; | ||
}, 0); | ||
if (!(mateTotalSize > this.fetchSizeLimit)) { | ||
_context9.next = 37; | ||
break; | ||
} | ||
throw new Error((0, _concat.default)(_context8 = "mate data size of ".concat(mateTotalSize.toLocaleString(), " bytes exceeded fetch size limit of ")).call(_context8, this.fetchSizeLimit.toLocaleString(), " bytes")); | ||
case 37: | ||
(0, _forEach.default)(mateChunks).call(mateChunks, function (c) { | ||
var recordPromise = _this.cram.featureCache.get(c.toString()); | ||
if (!recordPromise) { | ||
recordPromise = _this.getRecordsInSlice(c, function () { | ||
return true; | ||
}); | ||
_this.cram.featureCache.set(c.toString(), recordPromise); | ||
} | ||
mateRecordPromises.push(recordPromise); | ||
var featPromise = recordPromise.then(function (feats) { | ||
var mateRecs = []; | ||
for (var _i3 = 0; _i3 < feats.length; _i3 += 1) { | ||
var feature = feats[_i3]; | ||
if (unmatedPairs[feature.readName] && !readIds[feature.uniqueId]) { | ||
mateRecs.push(feature); | ||
} | ||
} | ||
return mateRecs; | ||
}); | ||
mateFeatPromises.push(featPromise); | ||
}); | ||
_context9.next = 40; | ||
return _promise.default.all(mateFeatPromises); | ||
case 40: | ||
newMateFeats = _context9.sent; | ||
if (newMateFeats.length) { | ||
newMates = (0, _reduce.default)(newMateFeats).call(newMateFeats, function (result, current) { | ||
return (0, _concat.default)(result).call(result, current); | ||
}); | ||
ret = (0, _concat.default)(ret).call(ret, newMates); | ||
} | ||
case 42: | ||
return _context9.abrupt("return", ret); | ||
case 43: | ||
case "end": | ||
return _context9.stop(); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function getRecordsForRange(_x, _x2, _x3) { | ||
return _getRecordsForRange.apply(this, arguments); | ||
} | ||
return getRecordsForRange; | ||
}() | ||
}, { | ||
key: "getRecordsInSlice", | ||
value: function getRecordsInSlice(_ref3, filterFunction) { | ||
var containerStart = _ref3.containerStart, | ||
sliceStart = _ref3.sliceStart, | ||
sliceBytes = _ref3.sliceBytes; | ||
var container = this.cram.getContainerAtPosition(containerStart); | ||
var slice = container.getSlice(sliceStart, sliceBytes); | ||
return slice.getRecords(filterFunction); | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("./errors"); | ||
var cramFile_1 = __importDefault(require("./cramFile")); | ||
var IndexedCramFile = /** @class */ (function () { | ||
/** | ||
* | ||
* @param {object} args | ||
* @param {CramFile} args.cram | ||
* @param {Index-like} args.index object that supports getEntriesForRange(seqId,start,end) -> Promise[Array[index entries]] | ||
* @param {number} [args.cacheSize] optional maximum number of CRAM records to cache. default 20,000 | ||
     * @param {number} [args.fetchSizeLimit] optional maximum number of bytes to fetch in a single getRecordsForRange call. Default 3,000,000 bytes (3 MB). | ||
     * @param {boolean} [args.checkSequenceMD5] - default true. If false, disables verifying the MD5 | ||
     * checksum of the reference sequence underlying a slice. In some applications this check can cause an inconveniently large amount (many megabases) of sequence to be fetched. | ||
*/ | ||
function IndexedCramFile(args) { | ||
// { cram, index, seqFetch /* fasta, fastaIndex */ }) { | ||
if (args.cram) { | ||
this.cram = args.cram; | ||
} | ||
else { | ||
this.cram = new cramFile_1.default({ | ||
url: args.cramUrl, | ||
path: args.cramPath, | ||
filehandle: args.cramFilehandle, | ||
seqFetch: args.seqFetch, | ||
checkSequenceMD5: args.checkSequenceMD5, | ||
cacheSize: args.cacheSize, | ||
}); | ||
} | ||
if (!(this.cram instanceof cramFile_1.default)) { | ||
throw new Error('invalid arguments: no cramfile'); | ||
} | ||
this.index = args.index; | ||
if (!this.index.getEntriesForRange) { | ||
throw new Error('invalid arguments: not an index'); | ||
} | ||
this.fetchSizeLimit = args.fetchSizeLimit || 3000000; | ||
} | ||
/** | ||
* | ||
* @param {number} seq numeric ID of the reference sequence | ||
* @param {number} start start of the range of interest. 1-based closed coordinates. | ||
* @param {number} end end of the range of interest. 1-based closed coordinates. | ||
* @returns {Promise[Array[CramRecord]]} | ||
*/ | ||
IndexedCramFile.prototype.getRecordsForRange = function (seq, start, end, opts) { | ||
if (opts === void 0) { opts = {}; } | ||
return __awaiter(this, void 0, void 0, function () { | ||
var seqId, slices, totalSize, filter, sliceResults, ret, readNames, readIds_1, i, name_1, id, unmatedPairs_1, matePromises, i, name_2, mateSlices, mateBlocks, mateChunks, i, mateRecordPromises_1, mateFeatPromises_1, mateTotalSize, newMateFeats, newMates; | ||
var _a; | ||
var _this = this; | ||
return __generator(this, function (_b) { | ||
switch (_b.label) { | ||
case 0: | ||
opts.viewAsPairs = opts.viewAsPairs || false; | ||
opts.pairAcrossChr = opts.pairAcrossChr || false; | ||
opts.maxInsertSize = opts.maxInsertSize || 200000; | ||
if (typeof seq === 'string') { | ||
// TODO: support string reference sequence names somehow | ||
throw new errors_1.CramUnimplementedError('string sequence names not yet supported'); | ||
} | ||
seqId = seq; | ||
return [4 /*yield*/, this.index.getEntriesForRange(seqId, start, end)]; | ||
case 1: | ||
slices = _b.sent(); | ||
totalSize = slices.map(function (s) { return s.sliceBytes; }).reduce(function (a, b) { return a + b; }, 0); | ||
if (totalSize > this.fetchSizeLimit) { | ||
throw new errors_1.CramSizeLimitError("data size of ".concat(totalSize.toLocaleString(), " bytes exceeded fetch size limit of ").concat(this.fetchSizeLimit.toLocaleString(), " bytes")); | ||
} | ||
filter = function (feature) { | ||
return feature.sequenceId === seq && | ||
feature.alignmentStart <= end && | ||
feature.alignmentStart + feature.lengthOnRef - 1 >= start; | ||
}; | ||
return [4 /*yield*/, Promise.all(slices.map(function (slice) { return _this.getRecordsInSlice(slice, filter); }))]; | ||
case 2: | ||
sliceResults = _b.sent(); | ||
ret = (_a = Array.prototype).concat.apply(_a, sliceResults); | ||
if (!opts.viewAsPairs) return [3 /*break*/, 5]; | ||
readNames = {}; | ||
readIds_1 = {}; | ||
for (i = 0; i < ret.length; i += 1) { | ||
name_1 = ret[i].readName; | ||
id = ret[i].uniqueId; | ||
if (!readNames[name_1]) { | ||
readNames[name_1] = 0; | ||
} | ||
readNames[name_1] += 1; | ||
readIds_1[id] = 1; | ||
} | ||
unmatedPairs_1 = {}; | ||
Object.entries(readNames).forEach(function (_a) { | ||
var k = _a[0], v = _a[1]; | ||
if (v === 1) { | ||
unmatedPairs_1[k] = true; | ||
} | ||
}); | ||
matePromises = []; | ||
for (i = 0; i < ret.length; i += 1) { | ||
name_2 = ret[i].readName; | ||
if (unmatedPairs_1[name_2] && | ||
ret[i].mate && | ||
(ret[i].mate.sequenceId === seqId || opts.pairAcrossChr) && | ||
Math.abs(ret[i].alignmentStart - ret[i].mate.alignmentStart) < | ||
opts.maxInsertSize) { | ||
mateSlices = this.index.getEntriesForRange(ret[i].mate.sequenceId, ret[i].mate.alignmentStart, ret[i].mate.alignmentStart + 1); | ||
matePromises.push(mateSlices); | ||
} | ||
} | ||
return [4 /*yield*/, Promise.all(matePromises)]; | ||
case 3: | ||
mateBlocks = _b.sent(); | ||
mateChunks = []; | ||
for (i = 0; i < mateBlocks.length; i += 1) { | ||
mateChunks.push.apply(mateChunks, mateBlocks[i]); | ||
} | ||
// filter out duplicates | ||
mateChunks = mateChunks | ||
.sort(function (a, b) { return a.toString().localeCompare(b.toString()); }) | ||
.filter(function (item, pos, ary) { | ||
return !pos || item.toString() !== ary[pos - 1].toString(); | ||
}); | ||
mateRecordPromises_1 = []; | ||
mateFeatPromises_1 = []; | ||
mateTotalSize = mateChunks | ||
.map(function (s) { return s.sliceBytes; }) | ||
.reduce(function (a, b) { return a + b; }, 0); | ||
if (mateTotalSize > this.fetchSizeLimit) { | ||
throw new Error("mate data size of ".concat(mateTotalSize.toLocaleString(), " bytes exceeded fetch size limit of ").concat(this.fetchSizeLimit.toLocaleString(), " bytes")); | ||
} | ||
mateChunks.forEach(function (c) { | ||
var recordPromise = _this.cram.featureCache.get(c.toString()); | ||
if (!recordPromise) { | ||
recordPromise = _this.getRecordsInSlice(c, function () { return true; }); | ||
_this.cram.featureCache.set(c.toString(), recordPromise); | ||
} | ||
mateRecordPromises_1.push(recordPromise); | ||
var featPromise = recordPromise.then(function (feats) { | ||
var mateRecs = []; | ||
for (var i = 0; i < feats.length; i += 1) { | ||
var feature = feats[i]; | ||
if (unmatedPairs_1[feature.readName] && !readIds_1[feature.uniqueId]) { | ||
mateRecs.push(feature); | ||
} | ||
} | ||
return mateRecs; | ||
}); | ||
mateFeatPromises_1.push(featPromise); | ||
}); | ||
return [4 /*yield*/, Promise.all(mateFeatPromises_1)]; | ||
case 4: | ||
newMateFeats = _b.sent(); | ||
if (newMateFeats.length) { | ||
newMates = newMateFeats.reduce(function (result, current) { | ||
return result.concat(current); | ||
}); | ||
ret = ret.concat(newMates); | ||
} | ||
_b.label = 5; | ||
case 5: return [2 /*return*/, ret]; | ||
} | ||
}); | ||
}); | ||
}; | ||
IndexedCramFile.prototype.getRecordsInSlice = function (_a, filterFunction) { | ||
var containerStart = _a.containerStart, sliceStart = _a.sliceStart, sliceBytes = _a.sliceBytes; | ||
var container = this.cram.getContainerAtPosition(containerStart); | ||
var slice = container.getSlice(sliceStart, sliceBytes); | ||
return slice.getRecords(filterFunction); | ||
}; | ||
/** | ||
* | ||
* @param {number} seqId | ||
@@ -315,12 +217,8 @@ | ||
     * @returns {Promise} true if the CRAM file contains data for the given | ||
*/ | ||
}, { | ||
key: "hasDataForReferenceSequence", | ||
value: function hasDataForReferenceSequence(seqId) { | ||
return this.index.hasDataForReferenceSequence(seqId); | ||
} | ||
}]); | ||
return IndexedCramFile; | ||
}(); | ||
module.exports = IndexedCramFile; | ||
IndexedCramFile.prototype.hasDataForReferenceSequence = function (seqId) { | ||
return this.index.hasDataForReferenceSequence(seqId); | ||
}; | ||
return IndexedCramFile; | ||
}()); | ||
exports.default = IndexedCramFile; | ||
//# sourceMappingURL=indexedCramFile.js.map |
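// Usage sketch for the IndexedCramFile class above: a minimal example, assuming
// the package entry point exports IndexedCramFile and CraiIndex (names taken from
// the project README, not from this compiled file); the paths are hypothetical.
const { IndexedCramFile, CraiIndex } = require('@gmod/cram')

const indexedFile = new IndexedCramFile({
  cramPath: '/path/to/alignments.cram',
  index: new CraiIndex({ path: '/path/to/alignments.cram.crai' }),
  checkSequenceMD5: false, // skip reference MD5 verification (see constructor docs above)
})

// seqId must be numeric; string reference names throw CramUnimplementedError above.
indexedFile
  .getRecordsForRange(0, 10000, 20000, { viewAsPairs: true })
  .then(records => {
    records.forEach(r => console.log(r.readName, r.alignmentStart))
  })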
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
var _promise = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/promise")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var LRU = require('quick-lru'); | ||
var BufferCache = | ||
/*#__PURE__*/ | ||
function () { | ||
function BufferCache(_ref) { | ||
var fetch = _ref.fetch, | ||
_ref$size = _ref.size, | ||
size = _ref$size === void 0 ? 10000000 : _ref$size, | ||
_ref$chunkSize = _ref.chunkSize, | ||
chunkSize = _ref$chunkSize === void 0 ? 32768 : _ref$chunkSize; | ||
(0, _classCallCheck2.default)(this, BufferCache); | ||
if (!fetch) throw new Error('fetch function required'); | ||
this.fetch = fetch; | ||
this.chunkSize = chunkSize; | ||
this.lruCache = new LRU({ | ||
maxSize: Math.floor(size / chunkSize) | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
} | ||
(0, _createClass2.default)(BufferCache, [{ | ||
key: "get", | ||
value: function () { | ||
var _get = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee(outputBuffer, offset, length, position) { | ||
var _this = this; | ||
var firstChunk, lastChunk, fetches, _loop, chunk, chunks, chunksOffset; | ||
return _regenerator.default.wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
if (!(outputBuffer.length < offset + length)) { | ||
_context.next = 2; | ||
break; | ||
} | ||
throw new Error('output buffer not big enough for request'); | ||
case 2: | ||
// calculate the list of chunks involved in this fetch | ||
firstChunk = Math.floor(position / this.chunkSize); | ||
lastChunk = Math.floor((position + length) / this.chunkSize); // fetch them all as necessary | ||
fetches = new Array(lastChunk - firstChunk + 1); | ||
_loop = function _loop(chunk) { | ||
fetches[chunk - firstChunk] = _this._getChunk(chunk).then(function (data) { | ||
return { | ||
data: data, | ||
chunkNumber: chunk | ||
}; | ||
}); | ||
}; | ||
for (chunk = firstChunk; chunk <= lastChunk; chunk += 1) { | ||
_loop(chunk); | ||
} // stitch together the response buffer using them | ||
_context.next = 9; | ||
return _promise.default.all(fetches); | ||
case 9: | ||
chunks = _context.sent; | ||
chunksOffset = position - chunks[0].chunkNumber * this.chunkSize; | ||
(0, _forEach.default)(chunks).call(chunks, function (_ref2) { | ||
var data = _ref2.data, | ||
chunkNumber = _ref2.chunkNumber; | ||
var chunkPositionStart = chunkNumber * _this.chunkSize; | ||
var copyStart = 0; | ||
var copyEnd = _this.chunkSize; | ||
var copyOffset = offset + (chunkNumber - firstChunk) * _this.chunkSize - chunksOffset; | ||
if (chunkNumber === firstChunk) { | ||
copyOffset = offset; | ||
copyStart = chunksOffset; | ||
} | ||
if (chunkNumber === lastChunk) { | ||
copyEnd = position + length - chunkPositionStart; | ||
} | ||
data.copy(outputBuffer, copyOffset, copyStart, copyEnd); | ||
}); | ||
case 12: | ||
case "end": | ||
return _context.stop(); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function get(_x, _x2, _x3, _x4) { | ||
return _get.apply(this, arguments); | ||
} | ||
return get; | ||
}() | ||
}, { | ||
key: "_getChunk", | ||
value: function _getChunk(chunkNumber) { | ||
var cachedPromise = this.lruCache.get(chunkNumber); | ||
if (cachedPromise) return cachedPromise; | ||
var freshPromise = this.fetch(chunkNumber * this.chunkSize, this.chunkSize); | ||
this.lruCache.set(chunkNumber, freshPromise); | ||
return freshPromise; | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}]); | ||
return BufferCache; | ||
}(); | ||
module.exports = BufferCache; | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var quick_lru_1 = __importDefault(require("quick-lru")); | ||
var BufferCache = /** @class */ (function () { | ||
function BufferCache(_a) { | ||
var fetch = _a.fetch, _b = _a.size, size = _b === void 0 ? 10000000 : _b, _c = _a.chunkSize, chunkSize = _c === void 0 ? 32768 : _c; | ||
if (!fetch) { | ||
throw new Error('fetch function required'); | ||
} | ||
this.fetch = fetch; | ||
this.chunkSize = chunkSize; | ||
this.lruCache = new quick_lru_1.default({ maxSize: Math.floor(size / chunkSize) }); | ||
} | ||
BufferCache.prototype.get = function (outputBuffer, offset, length, position) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var firstChunk, lastChunk, fetches, _loop_1, this_1, chunk, chunks, chunksOffset; | ||
var _this = this; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
if (outputBuffer.length < offset + length) { | ||
throw new Error('output buffer not big enough for request'); | ||
} | ||
firstChunk = Math.floor(position / this.chunkSize); | ||
lastChunk = Math.floor((position + length) / this.chunkSize); | ||
fetches = new Array(lastChunk - firstChunk + 1); | ||
_loop_1 = function (chunk) { | ||
fetches[chunk - firstChunk] = this_1._getChunk(chunk).then(function (data) { return ({ | ||
data: data, | ||
chunkNumber: chunk, | ||
}); }); | ||
}; | ||
this_1 = this; | ||
for (chunk = firstChunk; chunk <= lastChunk; chunk += 1) { | ||
_loop_1(chunk); | ||
} | ||
return [4 /*yield*/, Promise.all(fetches)]; | ||
case 1: | ||
chunks = _a.sent(); | ||
chunksOffset = position - chunks[0].chunkNumber * this.chunkSize; | ||
chunks.forEach(function (_a) { | ||
var data = _a.data, chunkNumber = _a.chunkNumber; | ||
var chunkPositionStart = chunkNumber * _this.chunkSize; | ||
var copyStart = 0; | ||
var copyEnd = _this.chunkSize; | ||
var copyOffset = offset + (chunkNumber - firstChunk) * _this.chunkSize - chunksOffset; | ||
if (chunkNumber === firstChunk) { | ||
copyOffset = offset; | ||
copyStart = chunksOffset; | ||
} | ||
if (chunkNumber === lastChunk) { | ||
copyEnd = position + length - chunkPositionStart; | ||
} | ||
data.copy(outputBuffer, copyOffset, copyStart, copyEnd); | ||
}); | ||
return [2 /*return*/]; | ||
} | ||
}); | ||
}); | ||
}; | ||
BufferCache.prototype._getChunk = function (chunkNumber) { | ||
var cachedPromise = this.lruCache.get(chunkNumber); | ||
if (cachedPromise) { | ||
return cachedPromise; | ||
} | ||
var freshPromise = this.fetch(chunkNumber * this.chunkSize, this.chunkSize); | ||
this.lruCache.set(chunkNumber, freshPromise); | ||
return freshPromise; | ||
}; | ||
return BufferCache; | ||
}()); | ||
exports.default = BufferCache; | ||
//# sourceMappingURL=bufferCache.js.map |
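// Usage sketch for the BufferCache above: a toy backing store served from an
// in-memory Buffer, showing how get() stitches 32 KiB chunks (fetched once and
// kept in the LRU) into the caller's output buffer. The fetch callback, sizes,
// and the relative require path are illustrative assumptions.
const BufferCacheClass = require('./bufferCache').default
const backing = Buffer.alloc(100000, 1) // stand-in for a remote file, filled with 0x01

const cache = new BufferCacheClass({
  // fetch(start, length) must resolve to a Buffer holding that byte range
  fetch: async (start, length) => backing.slice(start, start + length),
  chunkSize: 32768,
})

const out = Buffer.alloc(4096)
cache.get(out, 0, 4096, 50000).then(() => {
  // out now holds bytes 50000..54095 of the backing buffer
  console.log(out[0], out[4095]) // 1 1
})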
"use strict"; | ||
var url = require('url'); | ||
var RemoteFile = require('./remoteFile'); | ||
var LocalFile = require('./localFile'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.open = exports.fromUrl = exports.RemoteFile = exports.LocalFile = void 0; | ||
var url_1 = __importDefault(require("url")); | ||
var remoteFile_1 = __importDefault(require("./remoteFile")); | ||
exports.RemoteFile = remoteFile_1.default; | ||
var localFile_1 = __importDefault(require("./localFile")); | ||
exports.LocalFile = localFile_1.default; | ||
function fromUrl(source) { | ||
var _url$parse = url.parse(source), | ||
protocol = _url$parse.protocol, | ||
pathname = _url$parse.pathname; | ||
if (protocol === 'file:') { | ||
return new LocalFile(unescape(pathname)); | ||
} | ||
return new RemoteFile(source); | ||
var _a = url_1.default.parse(source), protocol = _a.protocol, pathname = _a.pathname; | ||
if (protocol === 'file:') { | ||
return new localFile_1.default(unescape(pathname)); | ||
} | ||
return new remoteFile_1.default(source); | ||
} | ||
module.exports = { | ||
LocalFile: LocalFile, | ||
RemoteFile: RemoteFile, | ||
fromUrl: fromUrl, | ||
open: function open(maybeUrl, maybePath, maybeFilehandle) { | ||
if (maybeFilehandle) return maybeFilehandle; | ||
if (maybeUrl) return fromUrl(maybeUrl); | ||
if (maybePath) return new LocalFile(maybePath); | ||
exports.fromUrl = fromUrl; | ||
function open(maybeUrl, maybePath, maybeFilehandle) { | ||
if (maybeFilehandle) { | ||
return maybeFilehandle; | ||
} | ||
if (maybeUrl) { | ||
return fromUrl(maybeUrl); | ||
} | ||
if (maybePath) { | ||
return new localFile_1.default(maybePath); | ||
} | ||
throw new Error('no url, path, or filehandle provided, cannot open'); | ||
} | ||
}; | ||
} | ||
exports.open = open; | ||
//# sourceMappingURL=index.js.map |
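// Usage sketch for the io helpers above: open() prefers an explicit filehandle,
// then a URL (file: URLs become LocalFile, anything else RemoteFile), then a
// local path, and throws if none is given. The fake filehandle, URL, and the
// relative require path are illustrative assumptions.
const { open, RemoteFile } = require('./io')

// 1) an explicit filehandle always wins
const fakeHandle = { read: async () => {}, readFile: async () => {}, stat: async () => {} }
console.log(open(undefined, undefined, fakeHandle) === fakeHandle) // true

// 2) otherwise a URL is wrapped (non-file: protocols become RemoteFile)
console.log(open('https://example.com/reads.cram') instanceof RemoteFile) // true

// 3) with no arguments at all it throws
try {
  open()
} catch (e) {
  console.log(e.message) // 'no url, path, or filehandle provided, cannot open'
}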
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _require = require('es6-promisify'), | ||
promisify = _require.promisify; // don't load fs native module if running in webpacked code | ||
var fs = typeof __webpack_require__ !== 'function' ? require('fs') : null; // eslint-disable-line camelcase | ||
var fsOpen = fs && promisify(fs.open); | ||
var fsRead = fs && promisify(fs.read); | ||
var fsFStat = fs && promisify(fs.fstat); | ||
var fsReadFile = fs && promisify(fs.readFile); | ||
var LocalFile = | ||
/*#__PURE__*/ | ||
function () { | ||
function LocalFile(source) { | ||
(0, _classCallCheck2.default)(this, LocalFile); | ||
this.position = 0; | ||
this.filename = source; | ||
this.fd = fsOpen(this.filename, 'r'); | ||
} | ||
(0, _createClass2.default)(LocalFile, [{ | ||
key: "read", | ||
value: function () { | ||
var _read = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee(buffer) { | ||
var offset, | ||
length, | ||
position, | ||
readPosition, | ||
_args = arguments; | ||
return _regenerator.default.wrap(function _callee$(_context) { | ||
while (1) { | ||
switch (_context.prev = _context.next) { | ||
case 0: | ||
offset = _args.length > 1 && _args[1] !== undefined ? _args[1] : 0; | ||
length = _args.length > 2 ? _args[2] : undefined; | ||
position = _args.length > 3 ? _args[3] : undefined; | ||
readPosition = position; | ||
if (readPosition === null) { | ||
readPosition = this.position; | ||
this.position += length; | ||
} | ||
_context.t0 = fsRead; | ||
_context.next = 8; | ||
return this.fd; | ||
case 8: | ||
_context.t1 = _context.sent; | ||
_context.t2 = buffer; | ||
_context.t3 = offset; | ||
_context.t4 = length; | ||
_context.t5 = position; | ||
return _context.abrupt("return", (0, _context.t0)(_context.t1, _context.t2, _context.t3, _context.t4, _context.t5)); | ||
case 14: | ||
case "end": | ||
return _context.stop(); | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function read(_x) { | ||
return _read.apply(this, arguments); | ||
} | ||
return read; | ||
}() | ||
}, { | ||
key: "readFile", | ||
value: function () { | ||
var _readFile = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2() { | ||
return _regenerator.default.wrap(function _callee2$(_context2) { | ||
while (1) { | ||
switch (_context2.prev = _context2.next) { | ||
case 0: | ||
_context2.t0 = fsReadFile; | ||
_context2.next = 3; | ||
return this.fd; | ||
case 3: | ||
_context2.t1 = _context2.sent; | ||
return _context2.abrupt("return", (0, _context2.t0)(_context2.t1)); | ||
case 5: | ||
case "end": | ||
return _context2.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function readFile() { | ||
return _readFile.apply(this, arguments); | ||
} | ||
return readFile; | ||
}() | ||
}, { | ||
key: "stat", | ||
value: function () { | ||
var _stat = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee3() { | ||
return _regenerator.default.wrap(function _callee3$(_context3) { | ||
while (1) { | ||
switch (_context3.prev = _context3.next) { | ||
case 0: | ||
if (this._stat) { | ||
_context3.next = 8; | ||
break; | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var es6_promisify_1 = require("es6-promisify"); | ||
var fs_1 = __importDefault(require("fs")); | ||
var fsOpen = fs_1.default && (0, es6_promisify_1.promisify)(fs_1.default.open); | ||
var fsRead = fs_1.default && (0, es6_promisify_1.promisify)(fs_1.default.read); | ||
var fsFStat = fs_1.default && (0, es6_promisify_1.promisify)(fs_1.default.fstat); | ||
var fsReadFile = fs_1.default && (0, es6_promisify_1.promisify)(fs_1.default.readFile); | ||
var LocalFile = /** @class */ (function () { | ||
function LocalFile(source) { | ||
this.position = 0; | ||
this.filename = source; | ||
this.fd = fsOpen(this.filename, 'r'); | ||
} | ||
LocalFile.prototype.read = function (buffer, offset, length, position) { | ||
if (offset === void 0) { offset = 0; } | ||
return __awaiter(this, void 0, void 0, function () { | ||
var readPosition, _a; | ||
return __generator(this, function (_b) { | ||
switch (_b.label) { | ||
case 0: | ||
readPosition = position; | ||
if (readPosition === null) { | ||
readPosition = this.position; | ||
this.position += length; | ||
} | ||
_a = fsRead; | ||
return [4 /*yield*/, this.fd]; | ||
case 1: return [2 /*return*/, _a.apply(void 0, [_b.sent(), buffer, offset, length, position])]; | ||
} | ||
_context3.t0 = fsFStat; | ||
_context3.next = 4; | ||
return this.fd; | ||
case 4: | ||
_context3.t1 = _context3.sent; | ||
_context3.next = 7; | ||
return (0, _context3.t0)(_context3.t1); | ||
case 7: | ||
this._stat = _context3.sent; | ||
case 8: | ||
return _context3.abrupt("return", this._stat); | ||
case 9: | ||
case "end": | ||
return _context3.stop(); | ||
} | ||
} | ||
}, _callee3, this); | ||
})); | ||
function stat() { | ||
return _stat.apply(this, arguments); | ||
} | ||
return stat; | ||
}() | ||
}]); | ||
return LocalFile; | ||
}(); | ||
module.exports = LocalFile; | ||
}); | ||
}); | ||
}; | ||
LocalFile.prototype.readFile = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var _a; | ||
return __generator(this, function (_b) { | ||
switch (_b.label) { | ||
case 0: | ||
_a = fsReadFile; | ||
return [4 /*yield*/, this.fd]; | ||
case 1: return [2 /*return*/, _a.apply(void 0, [_b.sent()])]; | ||
} | ||
}); | ||
}); | ||
}; | ||
LocalFile.prototype.stat = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var _a, _b; | ||
return __generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: | ||
if (!!this._stat) return [3 /*break*/, 3]; | ||
_a = this; | ||
_b = fsFStat; | ||
return [4 /*yield*/, this.fd]; | ||
case 1: return [4 /*yield*/, _b.apply(void 0, [_c.sent()])]; | ||
case 2: | ||
_a._stat = _c.sent(); | ||
_c.label = 3; | ||
case 3: return [2 /*return*/, this._stat]; | ||
} | ||
}); | ||
}); | ||
}; | ||
return LocalFile; | ||
}()); | ||
exports.default = LocalFile; | ||
//# sourceMappingURL=localFile.js.map |
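// Usage sketch for the LocalFile class above: read a byte range and stat a file
// on disk. The temp file is created only for the illustration, and the require
// path is an assumption.
const nodeFs = require('fs')
const os = require('os')
const nodePath = require('path')
const LocalFileClass = require('./localFile').default

const tmp = nodePath.join(os.tmpdir(), 'localfile-demo.bin')
nodeFs.writeFileSync(tmp, Buffer.from('CRAM demo data'))

const file = new LocalFileClass(tmp)
const buf = Buffer.alloc(4)
file
  .read(buf, 0, 4, 0) // (buffer, offset, length, position)
  .then(() => file.stat())
  .then(stat => console.log(buf.toString(), stat.size)) // 'CRAM' 14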
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _regenerator = _interopRequireDefault(require("@babel/runtime-corejs3/regenerator")); | ||
var _parseInt2 = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/parse-int")); | ||
var _concat = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/concat")); | ||
var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/asyncToGenerator")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var fetch = require('cross-fetch'); | ||
var BufferCache = require('./bufferCache'); | ||
var RemoteFile = | ||
/*#__PURE__*/ | ||
function () { | ||
function RemoteFile(source) { | ||
var _this = this; | ||
(0, _classCallCheck2.default)(this, RemoteFile); | ||
this.position = 0; | ||
this.url = source; | ||
this.cache = new BufferCache({ | ||
fetch: function fetch(start, length) { | ||
return _this._fetch(start, length); | ||
} | ||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||
return new (P || (P = Promise))(function (resolve, reject) { | ||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } | ||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } | ||
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } | ||
step((generator = generator.apply(thisArg, _arguments || [])).next()); | ||
}); | ||
} | ||
(0, _createClass2.default)(RemoteFile, [{ | ||
key: "_fetch", | ||
value: function () { | ||
var _fetch2 = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee(position, length) { | ||
var _context2; | ||
var headers, _context, response, nodeBuffer, sizeMatch; | ||
return _regenerator.default.wrap(function _callee$(_context3) { | ||
while (1) { | ||
switch (_context3.prev = _context3.next) { | ||
case 0: | ||
headers = {}; | ||
if (length < Infinity) { | ||
headers.range = (0, _concat.default)(_context = "bytes=".concat(position, "-")).call(_context, position + length); | ||
} else if (length === Infinity && position !== 0) { | ||
headers.range = "bytes=".concat(position, "-"); | ||
} | ||
_context3.next = 4; | ||
return fetch(this.url, { | ||
method: 'GET', | ||
headers: headers, | ||
redirect: 'follow', | ||
mode: 'cors' | ||
}); | ||
case 4: | ||
response = _context3.sent; | ||
if (!(response.status === 200 && position === 0 || response.status === 206)) { | ||
_context3.next = 14; | ||
break; | ||
} | ||
_context3.t0 = Buffer; | ||
_context3.next = 9; | ||
return response.arrayBuffer(); | ||
case 9: | ||
_context3.t1 = _context3.sent; | ||
nodeBuffer = _context3.t0.from.call(_context3.t0, _context3.t1); | ||
// try to parse out the size of the remote file | ||
sizeMatch = /\/(\d+)$/.exec(response.headers.get('content-range')); | ||
if (sizeMatch[1]) this._stat = { | ||
size: (0, _parseInt2.default)(sizeMatch[1], 10) | ||
}; | ||
return _context3.abrupt("return", nodeBuffer); | ||
case 14: | ||
throw new Error((0, _concat.default)(_context2 = "HTTP ".concat(response.status, " fetching ")).call(_context2, this.url)); | ||
case 15: | ||
case "end": | ||
return _context3.stop(); | ||
}; | ||
var __generator = (this && this.__generator) || function (thisArg, body) { | ||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; | ||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; | ||
function verb(n) { return function (v) { return step([n, v]); }; } | ||
function step(op) { | ||
if (f) throw new TypeError("Generator is already executing."); | ||
while (_) try { | ||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; | ||
if (y = 0, t) op = [op[0] & 2, t.value]; | ||
switch (op[0]) { | ||
case 0: case 1: t = op; break; | ||
case 4: _.label++; return { value: op[1], done: false }; | ||
case 5: _.label++; y = op[1]; op = [0]; continue; | ||
case 7: op = _.ops.pop(); _.trys.pop(); continue; | ||
default: | ||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } | ||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } | ||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } | ||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } | ||
if (t[2]) _.ops.pop(); | ||
_.trys.pop(); continue; | ||
} | ||
} | ||
}, _callee, this); | ||
})); | ||
function _fetch(_x, _x2) { | ||
return _fetch2.apply(this, arguments); | ||
} | ||
return _fetch; | ||
}() | ||
}, { | ||
key: "read", | ||
value: function read(buffer) { | ||
var offset = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; | ||
var length = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : Infinity; | ||
var position = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0; | ||
var readPosition = position; | ||
if (readPosition === null) { | ||
readPosition = this.position; | ||
this.position += length; | ||
} | ||
return this.cache.get(buffer, offset, length, position); | ||
op = body.call(thisArg, _); | ||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } | ||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; | ||
} | ||
}, { | ||
key: "readFile", | ||
value: function () { | ||
var _readFile = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee2() { | ||
var response; | ||
return _regenerator.default.wrap(function _callee2$(_context4) { | ||
while (1) { | ||
switch (_context4.prev = _context4.next) { | ||
case 0: | ||
_context4.next = 2; | ||
return fetch(this.url, { | ||
method: 'GET', | ||
redirect: 'follow', | ||
mode: 'cors' | ||
}); | ||
case 2: | ||
response = _context4.sent; | ||
_context4.t0 = Buffer; | ||
_context4.next = 6; | ||
return response.arrayBuffer(); | ||
case 6: | ||
_context4.t1 = _context4.sent; | ||
return _context4.abrupt("return", _context4.t0.from.call(_context4.t0, _context4.t1)); | ||
case 8: | ||
case "end": | ||
return _context4.stop(); | ||
} | ||
} | ||
}, _callee2, this); | ||
})); | ||
function readFile() { | ||
return _readFile.apply(this, arguments); | ||
} | ||
return readFile; | ||
}() | ||
}, { | ||
key: "stat", | ||
value: function () { | ||
var _stat = (0, _asyncToGenerator2.default)( | ||
/*#__PURE__*/ | ||
_regenerator.default.mark(function _callee3() { | ||
var buf; | ||
return _regenerator.default.wrap(function _callee3$(_context5) { | ||
while (1) { | ||
switch (_context5.prev = _context5.next) { | ||
case 0: | ||
if (this._stat) { | ||
_context5.next = 6; | ||
break; | ||
}; | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var cross_fetch_1 = __importDefault(require("cross-fetch")); | ||
var bufferCache_1 = __importDefault(require("./bufferCache")); | ||
var RemoteFile = /** @class */ (function () { | ||
function RemoteFile(source) { | ||
var _this = this; | ||
this.position = 0; | ||
this.url = source; | ||
this.cache = new bufferCache_1.default({ | ||
fetch: function (start, length) { return _this._fetch(start, length); }, | ||
}); | ||
} | ||
RemoteFile.prototype._fetch = function (position, length) { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var headers, response, nodeBuffer, _a, _b, sizeMatch; | ||
return __generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: | ||
headers = {}; | ||
if (length < Infinity) { | ||
headers.range = "bytes=".concat(position, "-").concat(position + length); | ||
} | ||
else if (length === Infinity && position !== 0) { | ||
headers.range = "bytes=".concat(position, "-"); | ||
} | ||
return [4 /*yield*/, (0, cross_fetch_1.default)(this.url, { | ||
method: 'GET', | ||
headers: headers, | ||
redirect: 'follow', | ||
mode: 'cors', | ||
})]; | ||
case 1: | ||
response = _c.sent(); | ||
if (!((response.status === 200 && position === 0) || | ||
response.status === 206)) return [3 /*break*/, 3]; | ||
_b = (_a = Buffer).from; | ||
return [4 /*yield*/, response.arrayBuffer()]; | ||
case 2: | ||
nodeBuffer = _b.apply(_a, [_c.sent()]); | ||
sizeMatch = /\/(\d+)$/.exec(response.headers.get('content-range')); | ||
if (sizeMatch[1]) { | ||
this._stat = { size: parseInt(sizeMatch[1], 10) }; | ||
} | ||
return [2 /*return*/, nodeBuffer]; | ||
case 3: throw new Error("HTTP ".concat(response.status, " fetching ").concat(this.url)); | ||
} | ||
buf = Buffer.allocUnsafe(10); | ||
_context5.next = 4; | ||
return this.read(buf, 0, 10, 0); | ||
case 4: | ||
if (this._stat) { | ||
_context5.next = 6; | ||
break; | ||
}); | ||
}); | ||
}; | ||
RemoteFile.prototype.read = function (buffer, offset, length, position) { | ||
if (offset === void 0) { offset = 0; } | ||
if (length === void 0) { length = Infinity; } | ||
if (position === void 0) { position = 0; } | ||
var readPosition = position; | ||
if (readPosition === null) { | ||
readPosition = this.position; | ||
this.position += length; | ||
} | ||
return this.cache.get(buffer, offset, length, position); | ||
}; | ||
RemoteFile.prototype.readFile = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var response, _a, _b; | ||
return __generator(this, function (_c) { | ||
switch (_c.label) { | ||
case 0: return [4 /*yield*/, (0, cross_fetch_1.default)(this.url, { | ||
method: 'GET', | ||
redirect: 'follow', | ||
mode: 'cors', | ||
})]; | ||
case 1: | ||
response = _c.sent(); | ||
_b = (_a = Buffer).from; | ||
return [4 /*yield*/, response.arrayBuffer()]; | ||
case 2: return [2 /*return*/, _b.apply(_a, [_c.sent()])]; | ||
} | ||
throw new Error("unable to determine size of file at ".concat(this.url)); | ||
case 6: | ||
return _context5.abrupt("return", this._stat); | ||
case 7: | ||
case "end": | ||
return _context5.stop(); | ||
} | ||
} | ||
}, _callee3, this); | ||
})); | ||
function stat() { | ||
return _stat.apply(this, arguments); | ||
} | ||
return stat; | ||
}() | ||
}]); | ||
return RemoteFile; | ||
}(); | ||
module.exports = RemoteFile; | ||
}); | ||
}); | ||
}; | ||
RemoteFile.prototype.stat = function () { | ||
return __awaiter(this, void 0, void 0, function () { | ||
var buf; | ||
return __generator(this, function (_a) { | ||
switch (_a.label) { | ||
case 0: | ||
if (!!this._stat) return [3 /*break*/, 2]; | ||
buf = Buffer.allocUnsafe(10); | ||
return [4 /*yield*/, this.read(buf, 0, 10, 0)]; | ||
case 1: | ||
_a.sent(); | ||
if (!this._stat) { | ||
throw new Error("unable to determine size of file at ".concat(this.url)); | ||
} | ||
_a.label = 2; | ||
case 2: return [2 /*return*/, this._stat]; | ||
} | ||
}); | ||
}); | ||
}; | ||
return RemoteFile; | ||
}()); | ||
exports.default = RemoteFile; | ||
//# sourceMappingURL=remoteFile.js.map |
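// Usage sketch for the RemoteFile class above (the module's default export):
// byte-range reads go through the BufferCache and HTTP Range requests, and
// stat() issues a small read so the total size can be parsed from the
// Content-Range header. The URL is a hypothetical server that supports Range.
const remote = new RemoteFile('https://example.com/alignments.cram')

const header = Buffer.alloc(26)
remote
  .read(header, 0, 26, 0) // (buffer, offset, length, position)
  .then(() => console.log(header.toString('ascii', 0, 4))) // 'CRAM' for a CRAM file
  .then(() => remote.stat())
  .then(stat => console.log('remote size:', stat.size))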
"use strict"; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.RANS_BYTE_L = exports.TOTFREQ = exports.TF_SHIFT = void 0; | ||
var TF_SHIFT = 12; | ||
exports.TF_SHIFT = TF_SHIFT; | ||
var TOTFREQ = 1 << TF_SHIFT; | ||
exports.TOTFREQ = TOTFREQ; | ||
var RANS_BYTE_L = 1 << 23; | ||
module.exports = { | ||
TF_SHIFT: TF_SHIFT, | ||
TOTFREQ: TOTFREQ, | ||
RANS_BYTE_L: RANS_BYTE_L | ||
}; | ||
exports.RANS_BYTE_L = RANS_BYTE_L; | ||
//# sourceMappingURL=constants.js.map |
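// Quick sketch of how the rANS constants above relate: symbol frequencies are
// normalized to the TOTFREQ = 2^TF_SHIFT scale, and RANS_BYTE_L is the lower
// bound below which the decoder state gets renormalized byte by byte.
console.log(TOTFREQ === 1 << TF_SHIFT) // true (4096)
console.log(RANS_BYTE_L === 1 << 23)   // true (8388608)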
"use strict"; | ||
var _require = require('../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var Constants = require('./constants'); | ||
var Decoding = require('./decoding'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../errors"); | ||
var constants_1 = require("./constants"); | ||
var decoding_1 = __importDefault(require("./decoding")); | ||
function uncompress( | ||
/* ByteBuffer */ | ||
input, | ||
/* Decoding.AriDecoder */ | ||
D, | ||
/* Decoding.Symbol[] */ | ||
syms, | ||
/* ByteBuffer */ | ||
out) { | ||
var rans0 = input.getInt(); | ||
var rans1 = input.getInt(); | ||
var rans2 = input.getInt(); | ||
var rans3 = input.getInt(); | ||
var | ||
/* int */ | ||
outputSize = out.remaining(); | ||
var | ||
/* int */ | ||
outputEnd = outputSize & ~3; | ||
for (var i = 0; i < outputEnd; i += 4) { | ||
var | ||
/* byte */ | ||
c0 = D.R[Decoding.get(rans0, Constants.TF_SHIFT)]; | ||
var | ||
/* byte */ | ||
c1 = D.R[Decoding.get(rans1, Constants.TF_SHIFT)]; | ||
var | ||
/* byte */ | ||
c2 = D.R[Decoding.get(rans2, Constants.TF_SHIFT)]; | ||
var | ||
/* byte */ | ||
c3 = D.R[Decoding.get(rans3, Constants.TF_SHIFT)]; | ||
out.putAt(i, c0); | ||
out.putAt(i + 1, c1); | ||
out.putAt(i + 2, c2); | ||
out.putAt(i + 3, c3); | ||
rans0 = Decoding.advanceSymbolStep(rans0, syms[0xff & c0], Constants.TF_SHIFT); | ||
rans1 = Decoding.advanceSymbolStep(rans1, syms[0xff & c1], Constants.TF_SHIFT); | ||
rans2 = Decoding.advanceSymbolStep(rans2, syms[0xff & c2], Constants.TF_SHIFT); | ||
rans3 = Decoding.advanceSymbolStep(rans3, syms[0xff & c3], Constants.TF_SHIFT); | ||
rans0 = Decoding.renormalize(rans0, input); | ||
rans1 = Decoding.renormalize(rans1, input); | ||
rans2 = Decoding.renormalize(rans2, input); | ||
rans3 = Decoding.renormalize(rans3, input); | ||
} | ||
out.setPosition(outputEnd); | ||
var | ||
/* byte */ | ||
c; | ||
switch (outputSize & 3) { | ||
case 0: | ||
break; | ||
case 1: | ||
c = D.R[Decoding.get(rans0, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans0, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
case 2: | ||
c = D.R[Decoding.get(rans0, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans0, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[Decoding.get(rans1, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans1, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
case 3: | ||
c = D.R[Decoding.get(rans0, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans0, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[Decoding.get(rans1, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans1, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[Decoding.get(rans2, Constants.TF_SHIFT)]; | ||
Decoding.advanceSymbol(rans2, input, syms[0xff & c], Constants.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
default: | ||
throw new CramMalformedError('invalid output size encountered during rANS decoding'); | ||
} | ||
out.setPosition(0); | ||
/* ByteBuffer */ input, | ||
/* Decoding.AriDecoder */ D, | ||
/* Decoding.Symbol[] */ syms, | ||
/* ByteBuffer */ out) { | ||
var rans0 = input.getInt(); | ||
var rans1 = input.getInt(); | ||
var rans2 = input.getInt(); | ||
var rans3 = input.getInt(); | ||
var /* int */ outputSize = out.remaining(); | ||
var /* int */ outputEnd = outputSize & ~3; | ||
for (var i = 0; i < outputEnd; i += 4) { | ||
var /* byte */ c0 = D.R[decoding_1.default.get(rans0, constants_1.TF_SHIFT)]; | ||
var /* byte */ c1 = D.R[decoding_1.default.get(rans1, constants_1.TF_SHIFT)]; | ||
var /* byte */ c2 = D.R[decoding_1.default.get(rans2, constants_1.TF_SHIFT)]; | ||
var /* byte */ c3 = D.R[decoding_1.default.get(rans3, constants_1.TF_SHIFT)]; | ||
out.putAt(i, c0); | ||
out.putAt(i + 1, c1); | ||
out.putAt(i + 2, c2); | ||
out.putAt(i + 3, c3); | ||
rans0 = decoding_1.default.advanceSymbolStep(rans0, syms[0xff & c0], constants_1.TF_SHIFT); | ||
rans1 = decoding_1.default.advanceSymbolStep(rans1, syms[0xff & c1], constants_1.TF_SHIFT); | ||
rans2 = decoding_1.default.advanceSymbolStep(rans2, syms[0xff & c2], constants_1.TF_SHIFT); | ||
rans3 = decoding_1.default.advanceSymbolStep(rans3, syms[0xff & c3], constants_1.TF_SHIFT); | ||
rans0 = decoding_1.default.renormalize(rans0, input); | ||
rans1 = decoding_1.default.renormalize(rans1, input); | ||
rans2 = decoding_1.default.renormalize(rans2, input); | ||
rans3 = decoding_1.default.renormalize(rans3, input); | ||
} | ||
out.setPosition(outputEnd); | ||
var /* byte */ c; | ||
switch (outputSize & 3) { | ||
case 0: | ||
break; | ||
case 1: | ||
c = D.R[decoding_1.default.get(rans0, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans0, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
case 2: | ||
c = D.R[decoding_1.default.get(rans0, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans0, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[decoding_1.default.get(rans1, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans1, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
case 3: | ||
c = D.R[decoding_1.default.get(rans0, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans0, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[decoding_1.default.get(rans1, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans1, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
c = D.R[decoding_1.default.get(rans2, constants_1.TF_SHIFT)]; | ||
decoding_1.default.advanceSymbol(rans2, input, syms[0xff & c], constants_1.TF_SHIFT); | ||
out.put(c); | ||
break; | ||
default: | ||
throw new errors_1.CramMalformedError('invalid output size encountered during rANS decoding'); | ||
} | ||
out.setPosition(0); | ||
} | ||
module.exports = { | ||
uncompress: uncompress | ||
}; | ||
exports.default = uncompress; | ||
//# sourceMappingURL=d04.js.map |
"use strict"; | ||
var Constants = require('./constants'); | ||
var Decoding = require('./decoding'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var constants_1 = require("./constants"); | ||
var decoding_1 = __importDefault(require("./decoding")); | ||
function uncompress( | ||
/* ByteBuffer */ | ||
input, | ||
/* ByteBuffer */ | ||
output, | ||
/* Decoding.AriDecoder[] */ | ||
D, | ||
/* Decoding.Symbol[][] */ | ||
syms) { | ||
var | ||
/* int */ | ||
outputSize = output.remaining(); | ||
var rans0 = input.getInt(); | ||
var rans1 = input.getInt(); | ||
var rans2 = input.getInt(); | ||
var rans7 = input.getInt(); | ||
var | ||
/* int */ | ||
isz4 = outputSize >> 2; | ||
var | ||
/* int */ | ||
i0 = 0; | ||
var | ||
/* int */ | ||
i1 = isz4; | ||
var | ||
/* int */ | ||
i2 = 2 * isz4; | ||
var | ||
/* int */ | ||
i7 = 3 * isz4; | ||
var | ||
/* int */ | ||
l0 = 0; | ||
var | ||
/* int */ | ||
l1 = 0; | ||
var | ||
/* int */ | ||
l2 = 0; | ||
var | ||
/* int */ | ||
l7 = 0; | ||
for (; i0 < isz4; i0 += 1, i1 += 1, i2 += 1, i7 += 1) { | ||
var | ||
/* int */ | ||
c0 = 0xff & D[l0].R[Decoding.get(rans0, Constants.TF_SHIFT)]; | ||
var | ||
/* int */ | ||
c1 = 0xff & D[l1].R[Decoding.get(rans1, Constants.TF_SHIFT)]; | ||
var | ||
/* int */ | ||
c2 = 0xff & D[l2].R[Decoding.get(rans2, Constants.TF_SHIFT)]; | ||
var | ||
/* int */ | ||
c7 = 0xff & D[l7].R[Decoding.get(rans7, Constants.TF_SHIFT)]; | ||
output.putAt(i0, c0); | ||
output.putAt(i1, c1); | ||
output.putAt(i2, c2); | ||
output.putAt(i7, c7); | ||
rans0 = Decoding.advanceSymbolStep(rans0, syms[l0][c0], Constants.TF_SHIFT); | ||
rans1 = Decoding.advanceSymbolStep(rans1, syms[l1][c1], Constants.TF_SHIFT); | ||
rans2 = Decoding.advanceSymbolStep(rans2, syms[l2][c2], Constants.TF_SHIFT); | ||
rans7 = Decoding.advanceSymbolStep(rans7, syms[l7][c7], Constants.TF_SHIFT); | ||
rans0 = Decoding.renormalize(rans0, input); | ||
rans1 = Decoding.renormalize(rans1, input); | ||
rans2 = Decoding.renormalize(rans2, input); | ||
rans7 = Decoding.renormalize(rans7, input); | ||
l0 = c0; | ||
l1 = c1; | ||
l2 = c2; | ||
l7 = c7; | ||
} // Remainder | ||
for (; i7 < outputSize; i7 += 1) { | ||
var | ||
/* int */ | ||
_c = 0xff & D[l7].R[Decoding.get(rans7, Constants.TF_SHIFT)]; | ||
output.putAt(i7, _c); | ||
rans7 = Decoding.advanceSymbol(rans7, input, syms[l7][_c], Constants.TF_SHIFT); | ||
l7 = _c; | ||
} | ||
/* ByteBuffer */ input, | ||
/* ByteBuffer */ output, | ||
/* Decoding.AriDecoder[] */ D, | ||
/* Decoding.Symbol[][] */ syms) { | ||
var /* int */ outputSize = output.remaining(); | ||
var rans0 = input.getInt(); | ||
var rans1 = input.getInt(); | ||
var rans2 = input.getInt(); | ||
var rans7 = input.getInt(); | ||
var /* int */ isz4 = outputSize >> 2; | ||
var /* int */ i0 = 0; | ||
var /* int */ i1 = isz4; | ||
var /* int */ i2 = 2 * isz4; | ||
var /* int */ i7 = 3 * isz4; | ||
var /* int */ l0 = 0; | ||
var /* int */ l1 = 0; | ||
var /* int */ l2 = 0; | ||
var /* int */ l7 = 0; | ||
for (; i0 < isz4; i0 += 1, i1 += 1, i2 += 1, i7 += 1) { | ||
var /* int */ c0 = 0xff & D[l0].R[decoding_1.default.get(rans0, constants_1.TF_SHIFT)]; | ||
var /* int */ c1 = 0xff & D[l1].R[decoding_1.default.get(rans1, constants_1.TF_SHIFT)]; | ||
var /* int */ c2 = 0xff & D[l2].R[decoding_1.default.get(rans2, constants_1.TF_SHIFT)]; | ||
var /* int */ c7 = 0xff & D[l7].R[decoding_1.default.get(rans7, constants_1.TF_SHIFT)]; | ||
output.putAt(i0, c0); | ||
output.putAt(i1, c1); | ||
output.putAt(i2, c2); | ||
output.putAt(i7, c7); | ||
rans0 = decoding_1.default.advanceSymbolStep(rans0, syms[l0][c0], constants_1.TF_SHIFT); | ||
rans1 = decoding_1.default.advanceSymbolStep(rans1, syms[l1][c1], constants_1.TF_SHIFT); | ||
rans2 = decoding_1.default.advanceSymbolStep(rans2, syms[l2][c2], constants_1.TF_SHIFT); | ||
rans7 = decoding_1.default.advanceSymbolStep(rans7, syms[l7][c7], constants_1.TF_SHIFT); | ||
rans0 = decoding_1.default.renormalize(rans0, input); | ||
rans1 = decoding_1.default.renormalize(rans1, input); | ||
rans2 = decoding_1.default.renormalize(rans2, input); | ||
rans7 = decoding_1.default.renormalize(rans7, input); | ||
l0 = c0; | ||
l1 = c1; | ||
l2 = c2; | ||
l7 = c7; | ||
} | ||
// Remainder | ||
for (; i7 < outputSize; i7 += 1) { | ||
var /* int */ c7 = 0xff & D[l7].R[decoding_1.default.get(rans7, constants_1.TF_SHIFT)]; | ||
output.putAt(i7, c7); | ||
rans7 = decoding_1.default.advanceSymbol(rans7, input, syms[l7][c7], constants_1.TF_SHIFT); | ||
l7 = c7; | ||
} | ||
} | ||
module.exports = { | ||
uncompress: uncompress | ||
}; | ||
exports.default = uncompress; | ||
//# sourceMappingURL=d14.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _require = require('../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var Constants = require('./constants'); | ||
var FC = // int F, C; | ||
function FC() { | ||
(0, _classCallCheck2.default)(this, FC); | ||
this.F = undefined; | ||
this.C = undefined; | ||
}; | ||
var AriDecoder = // final FC[] fc = new FC[256]; | ||
// byte[] R; | ||
function AriDecoder() { | ||
(0, _classCallCheck2.default)(this, AriDecoder); | ||
this.fc = new Array(256); | ||
for (var i = 0; i < this.fc.length; i += 1) { | ||
this.fc[i] = new FC(); | ||
} | ||
this.R = null; | ||
}; | ||
var _Symbol = // int start; // Start of range. | ||
// int freq; // Symbol frequency. | ||
function _Symbol() { | ||
(0, _classCallCheck2.default)(this, _Symbol); | ||
this.start = undefined; | ||
this.freq = undefined; | ||
}; // Initialize a decoder symbol to start "start" and frequency "freq" | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../errors"); | ||
var constants_1 = require("./constants"); | ||
var FC = /** @class */ (function () { | ||
// int F, C; | ||
function FC() { | ||
this.F = undefined; | ||
this.C = undefined; | ||
} | ||
return FC; | ||
}()); | ||
var AriDecoder = /** @class */ (function () { | ||
// final FC[] fc = new FC[256]; | ||
// byte[] R; | ||
function AriDecoder() { | ||
this.fc = new Array(256); | ||
for (var i = 0; i < this.fc.length; i += 1) { | ||
this.fc[i] = new FC(); | ||
} | ||
this.R = null; | ||
} | ||
return AriDecoder; | ||
}()); | ||
var Symbol = /** @class */ (function () { | ||
// int start; // Start of range. | ||
// int freq; // Symbol frequency. | ||
function Symbol() { | ||
this.start = undefined; | ||
this.freq = undefined; | ||
} | ||
return Symbol; | ||
}()); | ||
// Initialize a decoder symbol to start "start" and frequency "freq" | ||
function symbolInit(sym, start, freq) { | ||
if (!(start <= 1 << 16)) throw new CramMalformedError("assertion failed: start <= 1<<16"); | ||
if (!(freq <= (1 << 16) - start)) throw new CramMalformedError("assertion failed: freq <= 1<<16"); | ||
sym.start = start; | ||
sym.freq = freq; | ||
} // Advances in the bit stream by "popping" a single symbol with range start | ||
if (!(start <= 1 << 16)) { | ||
throw new errors_1.CramMalformedError("assertion failed: start <= 1<<16"); | ||
} | ||
if (!(freq <= (1 << 16) - start)) { | ||
throw new errors_1.CramMalformedError("assertion failed: freq <= 1<<16"); | ||
} | ||
sym.start = start; | ||
sym.freq = freq; | ||
} | ||
// Advances in the bit stream by "popping" a single symbol with range start | ||
// "start" and frequency "freq". All frequencies are assumed to sum to | ||
// "1 << scaleBits". | ||
// No renormalization or output happens. | ||
/* private static int */ | ||
function advanceStep( | ||
/* final int */ | ||
r, | ||
/* final int */ | ||
start, | ||
/* final int */ | ||
freq, | ||
/* final int */ | ||
scaleBits) { | ||
/* final int */ | ||
var mask = (1 << scaleBits) - 1; // s, x = D(x) | ||
return freq * (r >> scaleBits) + (r & mask) - start; | ||
} // Equivalent to RansDecAdvanceStep that takes a symbol. | ||
/* static int */ | ||
function advanceSymbolStep( | ||
/* final int */ | ||
r, | ||
/* final RansDecSymbol */ | ||
sym, | ||
/* final int */ | ||
scaleBits) { | ||
return advanceStep(r, sym.start, sym.freq, scaleBits); | ||
} // Returns the current cumulative frequency (map it to a symbol yourself!) | ||
/* static int */ | ||
function get( | ||
/* final int */ | ||
r, | ||
/* final int */ | ||
scaleBits) { | ||
return r & (1 << scaleBits) - 1; | ||
} // Advances in the bit stream by "popping" a single symbol with range start | ||
/* private static int */ function advanceStep( | ||
/* final int */ r, | ||
/* final int */ start, | ||
/* final int */ freq, | ||
/* final int */ scaleBits) { | ||
/* final int */ var mask = (1 << scaleBits) - 1; | ||
// s, x = D(x) | ||
return freq * (r >> scaleBits) + (r & mask) - start; | ||
} | ||
// Equivalent to RansDecAdvanceStep that takes a symbol. | ||
/* static int */ function advanceSymbolStep( | ||
/* final int */ r, | ||
/* final RansDecSymbol */ sym, | ||
/* final int */ scaleBits) { | ||
return advanceStep(r, sym.start, sym.freq, scaleBits); | ||
} | ||
// Returns the current cumulative frequency (map it to a symbol yourself!) | ||
/* static int */ function get(/* final int */ r, /* final int */ scaleBits) { | ||
return r & ((1 << scaleBits) - 1); | ||
} | ||
// Advances in the bit stream by "popping" a single symbol with range start | ||
// "start" and frequency "freq". All frequencies are assumed to sum to | ||
// "1 << scaleBits", | ||
// and the resulting bytes get written to ptr (which is updated). | ||
/* private static int */ | ||
function advance( | ||
/* int */ | ||
r, | ||
/* final ByteBuffer */ | ||
pptr, | ||
/* final int */ | ||
start, | ||
/* final int */ | ||
freq, | ||
/* final int */ | ||
scaleBits) { | ||
/* final int */ | ||
var mask = (1 << scaleBits) - 1; // s, x = D(x) | ||
r = freq * (r >> scaleBits) + (r & mask) - start; // re-normalize | ||
if (r < Constants.RANS_BYTE_L) { | ||
do { | ||
/* final int */ | ||
var b = 0xff & pptr.get(); | ||
r = r << 8 | b; | ||
} while (r < Constants.RANS_BYTE_L); | ||
} | ||
return r; | ||
} // Equivalent to RansDecAdvance that takes a symbol. | ||
/* static int */ | ||
function advanceSymbol( | ||
/* final int */ | ||
r, | ||
/* final ByteBuffer */ | ||
pptr, | ||
/* final RansDecSymbol */ | ||
sym, | ||
/* final int */ | ||
scaleBits) { | ||
return advance(r, pptr, sym.start, sym.freq, scaleBits); | ||
} // Re-normalize. | ||
/* static int */ | ||
function renormalize( | ||
/* int */ | ||
r, | ||
/* final ByteBuffer */ | ||
pptr) { | ||
// re-normalize | ||
if (r < Constants.RANS_BYTE_L) { | ||
do { | ||
r = r << 8 | 0xff & pptr.get(); | ||
} while (r < Constants.RANS_BYTE_L); | ||
} | ||
return r; | ||
/* private static int */ function advance( | ||
/* int */ r, | ||
/* final ByteBuffer */ pptr, | ||
/* final int */ start, | ||
/* final int */ freq, | ||
/* final int */ scaleBits) { | ||
/* final int */ var mask = (1 << scaleBits) - 1; | ||
// s, x = D(x) | ||
r = freq * (r >> scaleBits) + (r & mask) - start; | ||
// re-normalize | ||
if (r < constants_1.RANS_BYTE_L) { | ||
do { | ||
/* final int */ var b = 0xff & pptr.get(); | ||
r = (r << 8) | b; | ||
} while (r < constants_1.RANS_BYTE_L); | ||
} | ||
return r; | ||
} | ||
// Equivalent to RansDecAdvance that takes a symbol. | ||
/* static int */ function advanceSymbol( | ||
/* final int */ r, | ||
/* final ByteBuffer */ pptr, | ||
/* final RansDecSymbol */ sym, | ||
/* final int */ scaleBits) { | ||
return advance(r, pptr, sym.start, sym.freq, scaleBits); | ||
} | ||
// Re-normalize. | ||
/* static int */ function renormalize( | ||
/* int */ r, | ||
/* final ByteBuffer */ pptr) { | ||
// re-normalize | ||
if (r < constants_1.RANS_BYTE_L) { | ||
do { | ||
r = (r << 8) | (0xff & pptr.get()); | ||
} while (r < constants_1.RANS_BYTE_L); | ||
} | ||
return r; | ||
} | ||
var Decode = { | ||
FC: FC, | ||
AriDecoder: AriDecoder, | ||
Symbol: _Symbol, | ||
symbolInit: symbolInit, | ||
advanceStep: advanceStep, | ||
advanceSymbolStep: advanceSymbolStep, | ||
get: get, | ||
advanceSymbol: advanceSymbol, | ||
renormalize: renormalize | ||
FC: FC, | ||
AriDecoder: AriDecoder, | ||
Symbol: Symbol, | ||
symbolInit: symbolInit, | ||
advanceStep: advanceStep, | ||
advanceSymbolStep: advanceSymbolStep, | ||
get: get, | ||
advanceSymbol: advanceSymbol, | ||
renormalize: renormalize, | ||
}; | ||
module.exports = Decode; | ||
exports.default = Decode; | ||
//# sourceMappingURL=decoding.js.map |
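// Sketch exercising the pure rANS decoder primitives above with a toy one-symbol
// alphabet: when a symbol's frequency covers the whole 2^TF_SHIFT range it carries
// no information, so advanceSymbolStep leaves the state unchanged. The pptr object
// mocks only the minimal piece renormalize needs (a get() returning the next byte);
// it is an illustrative assumption, not the package's ByteBuffer.
const sym = new Decode.Symbol()
Decode.symbolInit(sym, 0, 1 << 12) // start = 0, freq = TOTFREQ

const r = 1 << 23 // a state sitting exactly at RANS_BYTE_L
console.log(Decode.get(r, 12)) // cumulative frequency slot: 0
console.log(Decode.advanceSymbolStep(r, sym, 12) === r) // true: state unchanged

// renormalize shifts in stream bytes until the state is >= RANS_BYTE_L again
let next = 0
const pptr = { get: () => [0xab, 0xcd][next++] }
console.log(Decode.renormalize((1 << 23) - 1, pptr).toString(16)) // '7fffffab'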
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _fill = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/fill")); | ||
var _require = require('../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var Constants = require('./constants'); | ||
var Decoding = require('./decoding'); | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.readStatsO1 = exports.readStatsO0 = void 0; | ||
var errors_1 = require("../errors"); | ||
var constants_1 = require("./constants"); | ||
var decoding_1 = __importDefault(require("./decoding")); | ||
function assert(result) { | ||
if (!result) throw new CramMalformedError('assertion failed'); | ||
if (!result) { | ||
throw new errors_1.CramMalformedError('assertion failed'); | ||
} | ||
} | ||
function readStatsO0( | ||
/* ByteBuffer */ | ||
cp, | ||
/* Decoding.AriDecoder */ | ||
decoder, | ||
/* Decoding.RansDecSymbol[] */ | ||
syms) { | ||
// Pre-compute reverse lookup of frequency. | ||
var rle = 0; | ||
var x = 0; | ||
var j = cp.get() & 0xff; | ||
do { | ||
var _context; | ||
if (decoder.fc[j] == null) decoder.fc[j] = new Decoding.FC(); | ||
decoder.fc[j].F = cp.get() & 0xff; | ||
if (decoder.fc[j].F >= 128) { | ||
decoder.fc[j].F &= ~128; | ||
decoder.fc[j].F = (decoder.fc[j].F & 127) << 8 | cp.get() & 0xff; | ||
} | ||
decoder.fc[j].C = x; | ||
Decoding.symbolInit(syms[j], decoder.fc[j].C, decoder.fc[j].F); | ||
/* Build reverse lookup table */ | ||
if (!decoder.R) decoder.R = new Array(Constants.TOTFREQ); | ||
(0, _fill.default)(_context = decoder.R).call(_context, j, x, x + decoder.fc[j].F); | ||
x += decoder.fc[j].F; | ||
if (rle === 0 && j + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
j = cp.get() & 0xff; | ||
rle = cp.get() & 0xff; | ||
} else if (rle !== 0) { | ||
rle -= 1; | ||
j += 1; | ||
} else { | ||
j = cp.get() & 0xff; | ||
} | ||
} while (j !== 0); | ||
assert(x < Constants.TOTFREQ); | ||
} | ||
function readStatsO1( | ||
/* ByteBuffer */ | ||
cp, | ||
/* Decoding.AriDecoder[] */ | ||
D, | ||
/* Decoding.RansDecSymbol[][] */ | ||
syms) { | ||
var rlei = 0; | ||
var i = 0xff & cp.get(); | ||
do { | ||
var rlej = 0; | ||
/* ByteBuffer */ cp, | ||
/* Decoding.AriDecoder */ decoder, | ||
/* Decoding.RansDecSymbol[] */ syms) { | ||
// Pre-compute reverse lookup of frequency. | ||
var rle = 0; | ||
var x = 0; | ||
var j = 0xff & cp.get(); | ||
if (D[i] == null) D[i] = new Decoding.AriDecoder(); | ||
var j = cp.get() & 0xff; | ||
do { | ||
var _context2; | ||
if (D[i].fc[j] == null) D[i].fc[j] = new Decoding.FC(); | ||
D[i].fc[j].F = 0xff & cp.get(); | ||
if (D[i].fc[j].F >= 128) { | ||
D[i].fc[j].F &= ~128; | ||
D[i].fc[j].F = (D[i].fc[j].F & 127) << 8 | 0xff & cp.get(); | ||
} | ||
D[i].fc[j].C = x; | ||
if (D[i].fc[j].F === 0) D[i].fc[j].F = Constants.TOTFREQ; | ||
if (syms[i][j] == null) syms[i][j] = new Decoding.RansDecSymbol(); | ||
Decoding.symbolInit(syms[i][j], D[i].fc[j].C, D[i].fc[j].F); | ||
/* Build reverse lookup table */ | ||
if (D[i].R == null) D[i].R = new Array(Constants.TOTFREQ); | ||
(0, _fill.default)(_context2 = D[i].R).call(_context2, j, x, x + D[i].fc[j].F); | ||
x += D[i].fc[j].F; | ||
assert(x <= Constants.TOTFREQ); | ||
if (rlej === 0 && j + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
j = 0xff & cp.get(); | ||
rlej = 0xff & cp.get(); | ||
} else if (rlej !== 0) { | ||
rlej -= 1; | ||
j += 1; | ||
} else { | ||
j = 0xff & cp.get(); | ||
} | ||
if (decoder.fc[j] == null) { | ||
decoder.fc[j] = new decoding_1.default.FC(); | ||
} | ||
decoder.fc[j].F = cp.get() & 0xff; | ||
if (decoder.fc[j].F >= 128) { | ||
decoder.fc[j].F &= ~128; | ||
decoder.fc[j].F = ((decoder.fc[j].F & 127) << 8) | (cp.get() & 0xff); | ||
} | ||
decoder.fc[j].C = x; | ||
decoding_1.default.symbolInit(syms[j], decoder.fc[j].C, decoder.fc[j].F); | ||
/* Build reverse lookup table */ | ||
if (!decoder.R) { | ||
decoder.R = new Array(constants_1.TOTFREQ); | ||
} | ||
decoder.R.fill(j, x, x + decoder.fc[j].F); | ||
x += decoder.fc[j].F; | ||
if (rle === 0 && j + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
j = cp.get() & 0xff; | ||
rle = cp.get() & 0xff; | ||
} | ||
else if (rle !== 0) { | ||
rle -= 1; | ||
j += 1; | ||
} | ||
else { | ||
j = cp.get() & 0xff; | ||
} | ||
} while (j !== 0); | ||
if (rlei === 0 && i + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
i = 0xff & cp.get(); | ||
rlei = 0xff & cp.get(); | ||
} else if (rlei !== 0) { | ||
rlei -= 1; | ||
i += 1; | ||
} else { | ||
i = 0xff & cp.get(); | ||
} | ||
} while (i !== 0); | ||
assert(x < constants_1.TOTFREQ); | ||
} | ||
module.exports = { | ||
readStatsO0: readStatsO0, | ||
readStatsO1: readStatsO1 | ||
}; | ||
exports.readStatsO0 = readStatsO0; | ||
function readStatsO1( | ||
/* ByteBuffer */ cp, | ||
/* Decoding.AriDecoder[] */ D, | ||
/* Decoding.RansDecSymbol[][] */ syms) { | ||
var rlei = 0; | ||
var i = 0xff & cp.get(); | ||
do { | ||
var rlej = 0; | ||
var x = 0; | ||
var j = 0xff & cp.get(); | ||
if (D[i] == null) { | ||
D[i] = new decoding_1.default.AriDecoder(); | ||
} | ||
do { | ||
if (D[i].fc[j] == null) { | ||
D[i].fc[j] = new decoding_1.default.FC(); | ||
} | ||
D[i].fc[j].F = 0xff & cp.get(); | ||
if (D[i].fc[j].F >= 128) { | ||
D[i].fc[j].F &= ~128; | ||
D[i].fc[j].F = ((D[i].fc[j].F & 127) << 8) | (0xff & cp.get()); | ||
} | ||
D[i].fc[j].C = x; | ||
if (D[i].fc[j].F === 0) { | ||
D[i].fc[j].F = constants_1.TOTFREQ; | ||
} | ||
if (syms[i][j] == null) { | ||
syms[i][j] = new decoding_1.default.RansDecSymbol(); | ||
} | ||
decoding_1.default.symbolInit(syms[i][j], D[i].fc[j].C, D[i].fc[j].F); | ||
/* Build reverse lookup table */ | ||
if (D[i].R == null) { | ||
D[i].R = new Array(constants_1.TOTFREQ); | ||
} | ||
D[i].R.fill(j, x, x + D[i].fc[j].F); | ||
x += D[i].fc[j].F; | ||
assert(x <= constants_1.TOTFREQ); | ||
if (rlej === 0 && j + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
j = 0xff & cp.get(); | ||
rlej = 0xff & cp.get(); | ||
} | ||
else if (rlej !== 0) { | ||
rlej -= 1; | ||
j += 1; | ||
} | ||
else { | ||
j = 0xff & cp.get(); | ||
} | ||
} while (j !== 0); | ||
if (rlei === 0 && i + 1 === (0xff & cp.getByteAt(cp.position()))) { | ||
i = 0xff & cp.get(); | ||
rlei = 0xff & cp.get(); | ||
} | ||
else if (rlei !== 0) { | ||
rlei -= 1; | ||
i += 1; | ||
} | ||
else { | ||
i = 0xff & cp.get(); | ||
} | ||
} while (i !== 0); | ||
} | ||
exports.readStatsO1 = readStatsO1; | ||
//# sourceMappingURL=frequencies.js.map |
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _fill = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/fill")); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _createClass2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/createClass")); | ||
var _require = require('../errors'), | ||
CramMalformedError = _require.CramMalformedError; | ||
var Decoding = require('./decoding'); | ||
var Frequencies = require('./frequencies'); | ||
var D04 = require('./d04'); | ||
var D14 = require('./d14'); // const /* int */ ORDER_BYTE_LENGTH = 1 | ||
var __importDefault = (this && this.__importDefault) || function (mod) { | ||
return (mod && mod.__esModule) ? mod : { "default": mod }; | ||
}; | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
var errors_1 = require("../errors"); | ||
var decoding_1 = __importDefault(require("./decoding")); | ||
var frequencies_1 = require("./frequencies"); | ||
var d04_1 = __importDefault(require("./d04")); | ||
var d14_1 = __importDefault(require("./d14")); | ||
// const /* int */ ORDER_BYTE_LENGTH = 1 | ||
// const /* int */ COMPRESSED_BYTE_LENGTH = 4 | ||
var | ||
/* int */ | ||
RAW_BYTE_LENGTH = 4; // const /* int */ PREFIX_BYTE_LENGTH = | ||
var /* int */ RAW_BYTE_LENGTH = 4; | ||
// const /* int */ PREFIX_BYTE_LENGTH = | ||
// ORDER_BYTE_LENGTH + COMPRESSED_BYTE_LENGTH + RAW_BYTE_LENGTH | ||
@@ -107,162 +95,111 @@ // enum ORDER { | ||
// } | ||
function | ||
/* static ByteBuffer */ | ||
uncompressOrder0Way4( | ||
/* const ByteBuffer */ | ||
input, | ||
/* const ByteBuffer */ | ||
out) { | ||
// input.order(ByteOrder.LITTLE_ENDIAN); | ||
var D = new Decoding.AriDecoder(); | ||
var syms = new Array(256); | ||
for (var i = 0; i < syms.length; i += 1) { | ||
syms[i] = new Decoding.Symbol(); | ||
} | ||
Frequencies.readStatsO0(input, D, syms); | ||
D04.uncompress(input, D, syms, out); | ||
return out; | ||
function uncompressOrder0Way4( | ||
/* const ByteBuffer */ input, | ||
/* const ByteBuffer */ out) { | ||
// input.order(ByteOrder.LITTLE_ENDIAN); | ||
var D = new decoding_1.default.AriDecoder(); | ||
var syms = new Array(256); | ||
for (var i = 0; i < syms.length; i += 1) { | ||
syms[i] = new decoding_1.default.Symbol(); | ||
} | ||
(0, frequencies_1.readStatsO0)(input, D, syms); | ||
(0, d04_1.default)(input, D, syms, out); | ||
return out; | ||
} | ||
function | ||
/* static ByteBuffer */ | ||
uncompressOrder1Way4( | ||
/* const ByteBuffer */ | ||
input, | ||
/* const ByteBuffer */ | ||
output) { | ||
var D = new Array(256); | ||
for (var i = 0; i < D.length; i += 1) { | ||
D[i] = new Decoding.AriDecoder(); | ||
} | ||
var | ||
/* Decoding.RansDecSymbol[][] */ | ||
syms = new Array(256); | ||
for (var _i = 0; _i < syms.length; _i += 1) { | ||
syms[_i] = new Array(256); | ||
for (var j = 0; j < syms[_i].length; j += 1) { | ||
syms[_i][j] = new Decoding.Symbol(); | ||
function uncompressOrder1Way4( | ||
/* const ByteBuffer */ input, | ||
/* const ByteBuffer */ output) { | ||
var D = new Array(256); | ||
for (var i = 0; i < D.length; i += 1) { | ||
D[i] = new decoding_1.default.AriDecoder(); | ||
} | ||
} | ||
Frequencies.readStatsO1(input, D, syms); | ||
D14.uncompress(input, output, D, syms); | ||
return output; | ||
var /* Decoding.RansDecSymbol[][] */ syms = new Array(256); | ||
for (var i = 0; i < syms.length; i += 1) { | ||
syms[i] = new Array(256); | ||
for (var j = 0; j < syms[i].length; j += 1) { | ||
syms[i][j] = new decoding_1.default.Symbol(); | ||
} | ||
} | ||
(0, frequencies_1.readStatsO1)(input, D, syms); | ||
(0, d14_1.default)(input, output, D, syms); | ||
return output; | ||
} | ||
/* compat layer to make a node buffer act like a java ByteBuffer */ | ||
var ByteBuffer = | ||
/*#__PURE__*/ | ||
function () { | ||
function ByteBuffer(nodeBuffer) { | ||
var initialInputPosition = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0; | ||
(0, _classCallCheck2.default)(this, ByteBuffer); | ||
this._buffer = nodeBuffer; | ||
this._position = initialInputPosition; | ||
this.length = nodeBuffer.length; | ||
} | ||
(0, _createClass2.default)(ByteBuffer, [{ | ||
key: "get", | ||
value: function get() { | ||
var b = this._buffer[this._position]; | ||
this._position += 1; | ||
return b; | ||
var ByteBuffer = /** @class */ (function () { | ||
function ByteBuffer(nodeBuffer, initialInputPosition) { | ||
if (initialInputPosition === void 0) { initialInputPosition = 0; } | ||
this._buffer = nodeBuffer; | ||
this._position = initialInputPosition; | ||
this.length = nodeBuffer.length; | ||
} | ||
}, { | ||
key: "getByte", | ||
value: function getByte() { | ||
return this.get(); | ||
ByteBuffer.prototype.get = function () { | ||
var b = this._buffer[this._position]; | ||
this._position += 1; | ||
return b; | ||
}; | ||
ByteBuffer.prototype.getByte = function () { | ||
return this.get(); | ||
}; | ||
ByteBuffer.prototype.getByteAt = function (position) { | ||
return this._buffer[position]; | ||
}; | ||
ByteBuffer.prototype.position = function () { | ||
return this._position; | ||
}; | ||
ByteBuffer.prototype.put = function (val) { | ||
this._buffer[this._position] = val; | ||
this._position += 1; | ||
return val; | ||
}; | ||
ByteBuffer.prototype.putAt = function (position, val) { | ||
this._buffer[position] = val; | ||
return val; | ||
}; | ||
ByteBuffer.prototype.setPosition = function (pos) { | ||
this._position = pos; | ||
return pos; | ||
}; | ||
ByteBuffer.prototype.getInt = function () { | ||
var i = this._buffer.readInt32LE(this._position); | ||
this._position += 4; | ||
return i; | ||
}; | ||
ByteBuffer.prototype.remaining = function () { | ||
return this._buffer.length - this._position; | ||
}; | ||
return ByteBuffer; | ||
}()); | ||
// static /* const */ ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0); | ||
function uncompress(inputBuffer, outputBuffer, initialInputPosition) { | ||
if (initialInputPosition === void 0) { initialInputPosition = 0; } | ||
if (inputBuffer.length === 0) { | ||
outputBuffer.fill(0); | ||
return outputBuffer; | ||
} | ||
}, { | ||
key: "getByteAt", | ||
value: function getByteAt(position) { | ||
return this._buffer[position]; | ||
var input = new ByteBuffer(inputBuffer, initialInputPosition); | ||
// input.order(ByteOrder.LITTLE_ENDIAN); | ||
var order = input.get(); | ||
if (order !== 0 && order !== 1) { | ||
throw new errors_1.CramMalformedError("Invalid rANS order ".concat(order)); | ||
} | ||
}, { | ||
key: "position", | ||
value: function position() { | ||
return this._position; | ||
var /* int */ inputSize = input.getInt(); | ||
if (inputSize !== input.remaining() - RAW_BYTE_LENGTH) { | ||
throw new errors_1.CramMalformedError('Incorrect input length.'); | ||
} | ||
}, { | ||
key: "put", | ||
value: function put(val) { | ||
this._buffer[this._position] = val; | ||
this._position += 1; | ||
return val; | ||
var /* int */ outputSize = input.getInt(); | ||
var output = new ByteBuffer(outputBuffer || Buffer.allocUnsafe(outputSize)); | ||
// TODO output.limit(outputSize) | ||
if (output.length < outputSize) { | ||
throw new errors_1.CramMalformedError("Output buffer too small to fit ".concat(outputSize, " bytes.")); | ||
} | ||
}, { | ||
key: "putAt", | ||
value: function putAt(position, val) { | ||
this._buffer[position] = val; | ||
return val; | ||
switch (order) { | ||
case 0: | ||
return uncompressOrder0Way4(input, output); | ||
case 1: | ||
return uncompressOrder1Way4(input, output); | ||
default: | ||
throw new errors_1.CramMalformedError("Invalid rANS order: ".concat(order)); | ||
} | ||
}, { | ||
key: "setPosition", | ||
value: function setPosition(pos) { | ||
this._position = pos; | ||
return pos; | ||
} | ||
}, { | ||
key: "getInt", | ||
value: function getInt() { | ||
var i = this._buffer.readInt32LE(this._position); | ||
this._position += 4; | ||
return i; | ||
} | ||
}, { | ||
key: "remaining", | ||
value: function remaining() { | ||
return this._buffer.length - this._position; | ||
} | ||
}]); | ||
return ByteBuffer; | ||
}(); // static /* const */ ByteBuffer EMPTY_BUFFER = ByteBuffer.allocate(0); | ||
function uncompress(inputBuffer, outputBuffer) { | ||
var initialInputPosition = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0; | ||
if (inputBuffer.length === 0) { | ||
(0, _fill.default)(outputBuffer).call(outputBuffer, 0); | ||
return outputBuffer; | ||
} | ||
var input = new ByteBuffer(inputBuffer, initialInputPosition); // input.order(ByteOrder.LITTLE_ENDIAN); | ||
var order = input.get(); | ||
if (order !== 0 && order !== 1) throw new CramMalformedError("Invalid rANS order ".concat(order)); | ||
var | ||
/* int */ | ||
inputSize = input.getInt(); | ||
if (inputSize !== input.remaining() - RAW_BYTE_LENGTH) throw new CramMalformedError('Incorrect input length.'); | ||
var | ||
/* int */ | ||
outputSize = input.getInt(); | ||
var output = new ByteBuffer(outputBuffer || Buffer.allocUnsafe(outputSize)); // TODO output.limit(outputSize) | ||
if (output.length < outputSize) throw new CramMalformedError("Output buffer too small to fit ".concat(outputSize, " bytes.")); | ||
switch (order) { | ||
case 0: | ||
return uncompressOrder0Way4(input, output); | ||
case 1: | ||
return uncompressOrder1Way4(input, output); | ||
default: | ||
throw new CramMalformedError("Invalid rANS order: ".concat(order)); | ||
} | ||
} | ||
module.exports = { | ||
uncompress: uncompress | ||
}; | ||
exports.default = uncompress; | ||
//# sourceMappingURL=index.js.map |
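Before dispatching to the order-0 or order-1 path, `uncompress` reads a small header: one order byte followed by two little-endian 32-bit integers giving the compressed and uncompressed payload sizes. The sketch below mirrors those checks on a plain Node `Buffer`; it is illustrative only (the function name is not part of the module's exports, and `compressedBlock` is a hypothetical input).

```js
// Illustrative sketch of the header validation uncompress() performs above.
// `compressedBlock` is a hypothetical Buffer holding a rANS-compressed CRAM block.
function inspectRansHeader(compressedBlock) {
  const order = compressedBlock[0] // 0 = order-0, 1 = order-1
  const compressedSize = compressedBlock.readInt32LE(1)
  const uncompressedSize = compressedBlock.readInt32LE(5)
  if (order !== 0 && order !== 1) {
    throw new Error(`Invalid rANS order ${order}`)
  }
  // uncompress() requires the declared compressed size to match the bytes
  // remaining after the 9-byte header (1 order byte + two 4-byte ints)
  if (compressedSize !== compressedBlock.length - 9) {
    throw new Error('Incorrect input length.')
  }
  return { order, compressedSize, uncompressedSize }
}
```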
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _slicedToArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/slicedToArray")); | ||
var _map = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/map")); | ||
var _slice = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/slice")); | ||
var _toArray2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/toArray")); | ||
var _forEach = _interopRequireDefault(require("@babel/runtime-corejs3/core-js-stable/instance/for-each")); | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.parseHeaderText = void 0; | ||
function parseHeaderText(text) { | ||
var lines = text.split(/\r?\n/); | ||
var data = []; | ||
(0, _forEach.default)(lines).call(lines, function (line) { | ||
var _line$split = line.split(/\t/), | ||
_line$split2 = (0, _toArray2.default)(_line$split), | ||
tag = _line$split2[0], | ||
fields = (0, _slice.default)(_line$split2).call(_line$split2, 1); | ||
var parsedFields = (0, _map.default)(fields).call(fields, function (f) { | ||
var _f$split = f.split(':', 2), | ||
_f$split2 = (0, _slicedToArray2.default)(_f$split, 2), | ||
fieldTag = _f$split2[0], | ||
value = _f$split2[1]; | ||
return { | ||
tag: fieldTag, | ||
value: value | ||
}; | ||
var lines = text.split(/\r?\n/); | ||
var data = []; | ||
lines.forEach(function (line) { | ||
var _a = line.split(/\t/), tag = _a[0], fields = _a.slice(1); | ||
var parsedFields = fields.map(function (f) { | ||
var _a = f.split(':', 2), fieldTag = _a[0], value = _a[1]; | ||
return { tag: fieldTag, value: value }; | ||
}); | ||
if (tag) { | ||
data.push({ tag: tag.substr(1), data: parsedFields }); | ||
} | ||
}); | ||
if (tag) data.push({ | ||
tag: tag.substr(1), | ||
data: parsedFields | ||
}); | ||
}); | ||
return data; | ||
return data; | ||
} | ||
module.exports = { | ||
parseHeaderText: parseHeaderText | ||
}; | ||
exports.parseHeaderText = parseHeaderText; | ||
//# sourceMappingURL=sam.js.map |
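`parseHeaderText` splits SAM header text into lines, splits each line on tabs, and then splits each field on the first `:` into a tag/value pair. A usage sketch (the header string and require path are made up for illustration):

```js
// Illustrative usage of parseHeaderText; the require path is an assumption.
const { parseHeaderText } = require('./sam')

const parsed = parseHeaderText('@HD\tVN:1.6\tSO:coordinate\n@SQ\tSN:chr1\tLN:248956422')
// parsed is roughly:
// [
//   { tag: 'HD', data: [{ tag: 'VN', value: '1.6' }, { tag: 'SO', value: 'coordinate' }] },
//   { tag: 'SQ', data: [{ tag: 'SN', value: 'chr1' }, { tag: 'LN', value: '248956422' }] },
// ]
```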
errors.js
"use strict"; | ||
var _interopRequireDefault = require("@babel/runtime-corejs3/helpers/interopRequireDefault"); | ||
var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/classCallCheck")); | ||
var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/possibleConstructorReturn")); | ||
var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/getPrototypeOf")); | ||
var _inherits2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/inherits")); | ||
var _wrapNativeSuper2 = _interopRequireDefault(require("@babel/runtime-corejs3/helpers/wrapNativeSuper")); | ||
var CramError = | ||
/*#__PURE__*/ | ||
function (_Error) { | ||
(0, _inherits2.default)(CramError, _Error); | ||
function CramError() { | ||
(0, _classCallCheck2.default)(this, CramError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramError).apply(this, arguments)); | ||
} | ||
return CramError; | ||
}((0, _wrapNativeSuper2.default)(Error)); | ||
var __extends = (this && this.__extends) || (function () { | ||
var extendStatics = function (d, b) { | ||
extendStatics = Object.setPrototypeOf || | ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || | ||
function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; | ||
return extendStatics(d, b); | ||
}; | ||
return function (d, b) { | ||
if (typeof b !== "function" && b !== null) | ||
throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); | ||
extendStatics(d, b); | ||
function __() { this.constructor = d; } | ||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); | ||
}; | ||
})(); | ||
Object.defineProperty(exports, "__esModule", { value: true }); | ||
exports.CramArgumentError = exports.CramSizeLimitError = exports.CramBufferOverrunError = exports.CramMalformedError = exports.CramUnimplementedError = exports.CramError = void 0; | ||
var CramError = /** @class */ (function (_super) { | ||
__extends(CramError, _super); | ||
function CramError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramError; | ||
}(Error)); | ||
exports.CramError = CramError; | ||
/** Error caused by encountering a part of the CRAM spec that has not yet been implemented */ | ||
var CramUnimplementedError = | ||
/*#__PURE__*/ | ||
function (_Error2) { | ||
(0, _inherits2.default)(CramUnimplementedError, _Error2); | ||
function CramUnimplementedError() { | ||
(0, _classCallCheck2.default)(this, CramUnimplementedError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramUnimplementedError).apply(this, arguments)); | ||
} | ||
return CramUnimplementedError; | ||
}((0, _wrapNativeSuper2.default)(Error)); | ||
var CramUnimplementedError = /** @class */ (function (_super) { | ||
__extends(CramUnimplementedError, _super); | ||
function CramUnimplementedError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramUnimplementedError; | ||
}(Error)); | ||
exports.CramUnimplementedError = CramUnimplementedError; | ||
/** An error caused by malformed data. */ | ||
var CramMalformedError = | ||
/*#__PURE__*/ | ||
function (_CramError) { | ||
(0, _inherits2.default)(CramMalformedError, _CramError); | ||
function CramMalformedError() { | ||
(0, _classCallCheck2.default)(this, CramMalformedError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramMalformedError).apply(this, arguments)); | ||
} | ||
return CramMalformedError; | ||
}(CramError); | ||
var CramMalformedError = /** @class */ (function (_super) { | ||
__extends(CramMalformedError, _super); | ||
function CramMalformedError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramMalformedError; | ||
}(CramError)); | ||
exports.CramMalformedError = CramMalformedError; | ||
/** | ||
* An error caused by attempting to read beyond the end of the defined data. | ||
*/ | ||
var CramBufferOverrunError = | ||
/*#__PURE__*/ | ||
function (_CramMalformedError) { | ||
(0, _inherits2.default)(CramBufferOverrunError, _CramMalformedError); | ||
function CramBufferOverrunError() { | ||
(0, _classCallCheck2.default)(this, CramBufferOverrunError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramBufferOverrunError).apply(this, arguments)); | ||
} | ||
return CramBufferOverrunError; | ||
}(CramMalformedError); | ||
var CramBufferOverrunError = /** @class */ (function (_super) { | ||
__extends(CramBufferOverrunError, _super); | ||
function CramBufferOverrunError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramBufferOverrunError; | ||
}(CramMalformedError)); | ||
exports.CramBufferOverrunError = CramBufferOverrunError; | ||
/** | ||
* An error caused by data being too big, exceeding a size limit. | ||
*/ | ||
var CramSizeLimitError = | ||
/*#__PURE__*/ | ||
function (_CramError2) { | ||
(0, _inherits2.default)(CramSizeLimitError, _CramError2); | ||
function CramSizeLimitError() { | ||
(0, _classCallCheck2.default)(this, CramSizeLimitError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramSizeLimitError).apply(this, arguments)); | ||
} | ||
return CramSizeLimitError; | ||
}(CramError); | ||
var CramSizeLimitError = /** @class */ (function (_super) { | ||
__extends(CramSizeLimitError, _super); | ||
function CramSizeLimitError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramSizeLimitError; | ||
}(CramError)); | ||
exports.CramSizeLimitError = CramSizeLimitError; | ||
/** | ||
* An invalid argument was supplied to a cram-js method or object. | ||
*/ | ||
var CramArgumentError = | ||
/*#__PURE__*/ | ||
function (_CramError3) { | ||
(0, _inherits2.default)(CramArgumentError, _CramError3); | ||
function CramArgumentError() { | ||
(0, _classCallCheck2.default)(this, CramArgumentError); | ||
return (0, _possibleConstructorReturn2.default)(this, (0, _getPrototypeOf2.default)(CramArgumentError).apply(this, arguments)); | ||
} | ||
return CramArgumentError; | ||
}(CramError); | ||
module.exports = { | ||
CramBufferOverrunError: CramBufferOverrunError, | ||
CramMalformedError: CramMalformedError, | ||
CramUnimplementedError: CramUnimplementedError, | ||
CramSizeLimitError: CramSizeLimitError, | ||
CramArgumentError: CramArgumentError | ||
}; | ||
var CramArgumentError = /** @class */ (function (_super) { | ||
__extends(CramArgumentError, _super); | ||
function CramArgumentError() { | ||
return _super !== null && _super.apply(this, arguments) || this; | ||
} | ||
return CramArgumentError; | ||
}(CramError)); | ||
exports.CramArgumentError = CramArgumentError; | ||
//# sourceMappingURL=errors.js.map |
{ | ||
"name": "@gmod/cram", | ||
"version": "1.5.9", | ||
"version": "1.6.0", | ||
"description": "read CRAM files with pure Javascript", | ||
@@ -8,2 +8,3 @@ "license": "MIT", | ||
"main": "dist/index.js", | ||
"module": "esm/index.js", | ||
"author": { | ||
@@ -19,23 +20,19 @@ "name": "Robert Buels", | ||
"errors.js", | ||
"dist" | ||
"dist", | ||
"esm" | ||
], | ||
"scripts": { | ||
"test": "node ./node_modules/.bin/mocha", | ||
"coverage": "rm -rf coverage; nyc ./node_modules/.bin/mocha; nyc report --reporter=lcov", | ||
"test": "jest", | ||
"lint": "eslint src test", | ||
"docs": "documentation readme --shallow src/indexedCramFile.js --section=IndexedCramFile; documentation readme --shallow src/cramFile/file.js --section=CramFile; documentation readme --shallow src/craiIndex.js --section=CraiIndex; documentation readme --shallow errors.js '--section=Exception Classes'; documentation readme --shallow src/cramFile/file.js --section=CramFile; documentation readme --shallow src/cramFile/record.js --section=CramRecord", | ||
"clean": "rimraf dist", | ||
"prebuild": "npm run clean", | ||
"build": "babel src -d dist && webpack", | ||
"built-tests": "rm -rf built-tests; mkdir built-tests; ln -s ../test/data built-tests/data; babel test/*.js -d built-tests; babel test/lib/*.js -d built-tests/lib;babel test/lib/fasta/*.js -d built-tests/lib/fasta;node ./node_modules/.bin/mocha built-tests", | ||
"watch": "npm-watch", | ||
"clean": "rimraf dist esm", | ||
"build:esm": "tsc --target es2018 --outDir esm", | ||
"build:es5": "tsc --target es5 --outDir dist", | ||
"build": "npm run build:esm && npm run build:es5", | ||
"postbuild": "webpack", | ||
"prepublishOnly": "npm test && npm run build && cp dist/errors.js errors.js", | ||
"postpublish": "rm errors.js", | ||
"postversion": "git push origin master --follow-tags" | ||
"postversion": "git push --follow-tags" | ||
}, | ||
"watch": { | ||
"test": "{src,test}/*.js", | ||
"lint": "{src,test}/*.js", | ||
"build": "src" | ||
}, | ||
"keywords": [ | ||
@@ -48,6 +45,7 @@ "cram", | ||
"dependencies": { | ||
"@babel/runtime-corejs3": "^7.4.5", | ||
"@gmod/binary-parser": "^1.3.5", | ||
"@jkbonfield/htscodecs": "^0.5.1", | ||
"abortable-promise-cache": "^1.2.0", | ||
"buffer-crc32": "^0.2.13", | ||
"bzip2": "^0.1.1", | ||
"cross-fetch": "^3.0.0", | ||
@@ -60,22 +58,18 @@ "es6-promisify": "^6.0.1", | ||
"devDependencies": { | ||
"@babel/cli": "^7.4.4", | ||
"@babel/core": "^7.4.5", | ||
"@babel/plugin-transform-runtime": "^7.4.4", | ||
"@babel/preset-env": "^7.4.5", | ||
"chai": "^4.1.2", | ||
"documentation": "^9.1.1", | ||
"eslint": "^5.12.0", | ||
"eslint-config-airbnb-base": "^13.1.0", | ||
"eslint-config-prettier": "^4.1.0", | ||
"eslint-plugin-import": "^2.2.0", | ||
"eslint-plugin-prettier": "^3.0.1", | ||
"get-stream": "^4.1.0", | ||
"glob": "^7.1.2", | ||
"imports-loader": "^0.8.0", | ||
"mocha": "^6.0.0", | ||
"mock-fs": "^4.12.0", | ||
"npm-watch": "^0.5.0", | ||
"nyc": "^13.0.0", | ||
"prettier": "^1.10.2", | ||
"rimraf": "^2.6.1", | ||
"@gmod/indexedfasta": "^2.0.2", | ||
"@types/jest": "^27.4.0", | ||
"@typescript-eslint/eslint-plugin": "^5.9.1", | ||
"@typescript-eslint/parser": "^5.9.1", | ||
"documentation": "^13.2.5", | ||
"eslint": "^7.0.0", | ||
"eslint-config-prettier": "^8.3.0", | ||
"eslint-plugin-import": "^2.25.4", | ||
"eslint-plugin-prettier": "^4.0.0", | ||
"glob": "^7.2.0", | ||
"jest": "^27.4.7", | ||
"mock-fs": "^5.1.2", | ||
"prettier": "^2.5.1", | ||
"rimraf": "^3.0.2", | ||
"ts-jest": "^27.1.2", | ||
"typescript": "^4.5.4", | ||
"webpack": "4", | ||
@@ -86,3 +80,7 @@ "webpack-cli": "^3.2.1" | ||
"access": "public" | ||
}, | ||
"browser": { | ||
"./dist/io/localFile.js": false, | ||
"./esm/io/localFile.js": false | ||
} | ||
} |
README.md
# @gmod/cram | ||
[![Generated with nod](https://img.shields.io/badge/generator-nod-2196F3.svg?style=flat-square)](https://github.com/diegohaz/nod) | ||
[![NPM version](https://img.shields.io/npm/v/@gmod/cram.svg?style=flat-square)](https://npmjs.org/package/@gmod/cram) | ||
[![Build Status](https://img.shields.io/travis/GMOD/cram-js/master.svg?style=flat-square)](https://travis-ci.org/GMOD/cram-js) [![Coverage Status](https://img.shields.io/codecov/c/github/GMOD/cram-js/master.svg?style=flat-square)](https://codecov.io/gh/GMOD/cram-js/branch/master) [![Greenkeeper badge](https://badges.greenkeeper.io/GMOD/cram-js.svg)](https://greenkeeper.io/) | ||
[![Coverage Status](https://img.shields.io/codecov/c/github/GMOD/cram-js/master.svg?style=flat-square)](https://codecov.io/gh/GMOD/cram-js/branch/master) | ||
[![Build Status](https://img.shields.io/github/workflow/status/GMOD/cram-js/Push/master?logo=github\&style=flat-query)](https://github.com/GMOD/cram-js/actions?query=branch%3Amaster+workflow%3APush+) | ||
Read CRAM files (indexed or unindexed) with pure JS, works in node or in the browser. | ||
- Reads CRAM 3.x and 2.x | ||
- Does not read CRAM 1.x | ||
- Can use .crai indexes out of the box, for efficient sequence fetching, but also has an [index API](#craiindex) that would allow use with other index types | ||
- Does not implement bzip2 or lzma codecs (yet), as these are rarely used in-the-wild; if this is important to your use case, please file an issue | ||
* Reads CRAM 3.x and 2.x (3.1 included) | ||
* Does not read CRAM 1.x | ||
* Can use .crai indexes out of the box, for efficient sequence fetching, but also has an [index API](#craiindex) that would allow use with other index types | ||
* Does not implement bzip2 or lzma codecs (yet), as these are rarely used in-the-wild; if this is important to your use case, please file an issue | ||
@@ -41,3 +41,3 @@ ## Install | ||
index: new CraiIndex({ | ||
path: '/filesystem/yourfile.cram.crai'), | ||
path: '/filesystem/yourfile.cram.crai', | ||
}), | ||
@@ -49,4 +49,4 @@ seqFetch: async (seqId, start, end) => { | ||
// * the seqId is a numeric identifier | ||
// * you can return an empty string for testing if you want, but you may not get proper interpretation of record.readFeatures | ||
return t.getSequence(seqId, start-1, end) | ||
} | ||
}, | ||
@@ -65,12 +65,15 @@ checkSequenceMD5: false, | ||
console.log(`got a record named ${record.readName}`) | ||
record.readFeatures.forEach(({ code, pos, refPos, ref, sub }) => { | ||
// process the "read features". this can be used similar to | ||
// CIGAR/MD strings in SAM. see CRAM specs for more details. | ||
if (code === 'X') | ||
console.log( | ||
`${ | ||
record.readName | ||
} shows a base substitution of ${ref}->${sub} at ${refPos}`, | ||
) | ||
}) | ||
if(record.readFeatures != undefined) { | ||
record.readFeatures.forEach(({ code, pos, refPos, ref, sub }) => { | ||
// process the read features. this can be used similar to | ||
// CIGAR/MD strings in SAM. see CRAM specs for more details. | ||
if (code === 'X') { | ||
console.log( | ||
`${ | ||
record.readName | ||
} shows a base substitution of ${ref}->${sub} at ${refPos}`, | ||
) | ||
} | ||
}) | ||
} | ||
}) | ||
@@ -88,39 +91,40 @@ } | ||
- [CramRecord](#cramrecord) - format of CRAM records returned by this API | ||
- [ReadFeatures](#readfeatures) - format of read features on records | ||
- [IndexedCramFile](#indexedcramfile) - indexed access into a CRAM file | ||
- [CramFile](#cramfile) - .cram API | ||
- [CraiIndex](#craiindex) - .crai index API | ||
- [Error Classes](#error-classes) - special error classes thrown by this API | ||
* [CramRecord](#cramrecord) - format of CRAM records returned by this API | ||
* [ReadFeatures](#readfeatures) - format of read features on records | ||
* [IndexedCramFile](#indexedcramfile) - indexed access into a CRAM file | ||
* [CramFile](#cramfile) - .cram API | ||
* [CraiIndex](#craiindex) - .crai index API | ||
* [Error Classes](#error-classes) - special error classes thrown by this API | ||
### CramRecord | ||
These are the record objects returned by this API. Much of the data | ||
is stored in them as simple object entries, but there are some accessor | ||
methods used for conveniently getting the values of each of the flags in | ||
the `flags` and `cramFlags` fields. | ||
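For example, a record fetched with `getRecordsForRange` can be inspected like this (a sketch; `records` is assumed to come from a call like the one in the usage example above):

```js
for (const record of records) {
  // plain data fields
  console.log(record.readName, record.sequenceId, record.alignmentStart, record.readLength)
  // flag accessor methods interpret the SAM/CRAM bit-flag fields
  if (record.isPaired() && !record.isSegmentUnmapped()) {
    console.log(`${record.readName} is a mapped read in a pair`)
  }
}
```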
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> | ||
#### Static fields | ||
##### Table of Contents | ||
- **flags** (`number`): the SAM bit-flags field, see the SAM spec for interpretation. Some of the `is*` methods below interpret this field. | ||
- **cramFlags** (`number`): the CRAM-specific bit-flags field, see the CRAM spec for interpretation. Some of the `is*` methods below interpret this field. | ||
- **sequenceId** (`number`): the ID number of the record's reference sequence | ||
- **readLength** (`number`): length of the read in bases | ||
- **alignmentStart** (`number`): start coordinate of the alignment on the reference in 1-based closed coordinates | ||
- **readGroupId** (`number`): ID number of the read group, or -1 if none | ||
- **readName** (`number`): name of the read (string) | ||
- **templateSize** (`number`): for paired sequencing, the total size of the template | ||
- **readFeatures** (`array[ReadFeature]`): array of read features showing insertions, deletions, mismatches, etc. See [ReadFeatures](#readfeatures) for their format. | ||
- **lengthOnRef** (`number`): span of the alignment along the reference sequence | ||
- **mappingQuality** (`number`): SAM mapping quality | ||
- **qualityScores** (`array[number]`): array of numeric quality scores | ||
- **uniqueId** (`number`): unique ID number of the record within the file | ||
- **mate** (`object`) | ||
- **flags** (`number`): CRAM mapping flags for the mate. See CRAM spec for interpretation. Some of the `is*` methods below interpret this field. | ||
- **sequenceId** (`number`): reference sequence ID for the mate mapping | ||
- **alignmentStart** (`number`): start coordinate of the mate mapping. 1-based coordinates. | ||
* [CramRecord](#cramrecord) | ||
* [isPaired](#ispaired) | ||
* [isProperlyPaired](#isproperlypaired) | ||
* [isSegmentUnmapped](#issegmentunmapped) | ||
* [isMateUnmapped](#ismateunmapped) | ||
* [isReverseComplemented](#isreversecomplemented) | ||
* [isMateReverseComplemented](#ismatereversecomplemented) | ||
* [isRead1](#isread1) | ||
* [isRead2](#isread2) | ||
* [isSecondary](#issecondary) | ||
* [isFailedQc](#isfailedqc) | ||
* [isDuplicate](#isduplicate) | ||
* [isSupplementary](#issupplementary) | ||
* [isDetached](#isdetached) | ||
* [hasMateDownStream](#hasmatedownstream) | ||
* [isPreservingQualityScores](#ispreservingqualityscores) | ||
* [isUnknownBases](#isunknownbases) | ||
* [getReadBases](#getreadbases) | ||
* [getPairOrientation](#getpairorientation) | ||
* [addReferenceSequence](#addreferencesequence) | ||
* [Parameters](#parameters) | ||
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> | ||
#### CramRecord | ||
#### Methods | ||
Class of each CRAM record returned by this API. | ||
@@ -197,2 +201,8 @@ ##### isPaired | ||
##### getPairOrientation | ||
Get the pair orientation of a paired read. Adapted from igv.js | ||
Returns **[String](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** of paired orientation | ||
##### addReferenceSequence | ||
@@ -205,10 +215,11 @@ | ||
**Parameters** | ||
###### Parameters | ||
- `refRegion` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
- `refRegion.start` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
- `refRegion.end` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
- `refRegion.seq` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** | ||
- `compressionScheme` **CramContainerCompressionScheme** | ||
* `refRegion` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
* `refRegion.start` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `refRegion.end` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `refRegion.seq` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)** | ||
* `compressionScheme` **CramContainerCompressionScheme** | ||
Returns **[undefined](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/undefined)** nothing | ||
@@ -222,11 +233,9 @@ | ||
- **code** (`character`): One of "bqBXIDiQNSPH". See page 15 of the CRAM v3 spec for their meanings. | ||
- **data** (`any`): the data associated with the feature. The format of this varies depending on the feature code. | ||
- **pos** (`number`): location relative to the read (1-based) | ||
- **refPos** (`number`): location relative to the reference (1-based) | ||
* **code** (`character`): One of "bqBXIDiQNSPH". See page 15 of the CRAM v3 spec for their meanings. | ||
* **data** (`any`): the data associated with the feature. The format of this varies depending on the feature code. | ||
* **pos** (`number`): location relative to the read (1-based) | ||
* **refPos** (`number`): location relative to the reference (1-based) | ||
### IndexedCramFile | ||
The pairing of an index and a CramFile. Supports efficient fetching of records for sections of reference sequences. | ||
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> | ||
@@ -236,16 +245,20 @@ | ||
- [constructor](#constructor) | ||
- [getRecordsForRange](#getrecordsforrange) | ||
- [hasDataForReferenceSequence](#hasdataforreferencesequence) | ||
* [constructor](#constructor) | ||
* [Parameters](#parameters) | ||
* [getRecordsForRange](#getrecordsforrange) | ||
* [Parameters](#parameters-1) | ||
* [hasDataForReferenceSequence](#hasdataforreferencesequence) | ||
* [Parameters](#parameters-2) | ||
#### constructor | ||
**Parameters** | ||
##### Parameters | ||
- `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
- `args.cram` **CramFile** | ||
- `args.index` **Index-like** object that supports getEntriesForRange(seqId,start,end) -> Promise\[Array[index entries]] | ||
- `args.cacheSize` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of CRAM records to cache. default 20,000 | ||
- `args.fetchSizeLimit` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of bytes to fetch in a single getRecordsForRange call. Default 3 MiB. | ||
- `args.checkSequenceMD5` **[boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** default true. if false, disables verifying the MD5 | ||
* `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
* `args.cram` **CramFile** | ||
* `args.index` **Index-like** object that supports getEntriesForRange(seqId,start,end) -> Promise\[Array\[index entries]] | ||
* `args.cacheSize` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of CRAM records to cache. default 20,000 | ||
* `args.fetchSizeLimit` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of bytes to fetch in a single getRecordsForRange call. Default 3 MiB. | ||
* `args.checkSequenceMD5` **[boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** default true. if false, disables verifying the MD5 | ||
checksum of the reference sequence underlying a slice. In some applications, this check can cause an inconvenient amount (many megabases) of sequences to be fetched. | ||
@@ -255,13 +268,14 @@ | ||
**Parameters** | ||
##### Parameters | ||
- `seq` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** numeric ID of the reference sequence | ||
- `start` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** start of the range of interest. 1-based closed coordinates. | ||
- `end` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** end of the range of interest. 1-based closed coordinates. | ||
* `seq` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** numeric ID of the reference sequence | ||
* `start` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** start of the range of interest. 1-based closed coordinates. | ||
* `end` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** end of the range of interest. 1-based closed coordinates. | ||
* `opts` (optional, default `{}`) | ||
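A sketch tying the documented constructor arguments to `getRecordsForRange` (the top-level exports, file paths, and the empty-string `seqFetch` are assumptions for illustration):

```js
// Sketch only: paths are placeholders and the top-level exports are assumed.
const { IndexedCramFile, CramFile, CraiIndex } = require('@gmod/cram')

const indexedFile = new IndexedCramFile({
  cram: new CramFile({
    path: '/filesystem/yourfile.cram',
    // return a promise for reference bases; an empty string is OK for testing
    seqFetch: async (seqId, start, end) => '',
    checkSequenceMD5: false,
  }),
  index: new CraiIndex({ path: '/filesystem/yourfile.cram.crai' }),
  fetchSizeLimit: 3 * 1024 * 1024, // optional; default is 3 MiB
})

// seq is the numeric reference sequence ID; start/end are 1-based closed coordinates
const records = await indexedFile.getRecordsForRange(0, 10000, 20000)
console.log(`got ${records.length} records`)
```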
#### hasDataForReferenceSequence | ||
**Parameters** | ||
##### Parameters | ||
- `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
@@ -277,18 +291,20 @@ Returns **[Promise](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise)** true if the CRAM file contains data for the given | ||
- [constructor](#constructor) | ||
- [containerCount](#containercount) | ||
* [constructor](#constructor) | ||
* [Parameters](#parameters) | ||
* [containerCount](#containercount) | ||
#### constructor | ||
**Parameters** | ||
##### Parameters | ||
- `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
- `args.filehandle` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** a filehandle that implements the stat() and | ||
* `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
* `args.filehandle` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** a filehandle that implements the stat() and | ||
read() methods of the Node filehandle API <https://nodejs.org/api/fs.html#fs_class_filehandle> | ||
- `args.path` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** path to the cram file | ||
- `args.url` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** url for the cram file. also supports file:// urls for local files | ||
- `args.seqFetch` **[function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)?** a function with signature | ||
* `args.path` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** path to the cram file | ||
* `args.url` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)?** url for the cram file. also supports file:// urls for local files | ||
* `args.seqFetch` **[function](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Statements/function)?** a function with signature | ||
`(seqId, startCoordinate, endCoordinate)` that returns a promise for a string of sequence bases | ||
- `args.cacheSize` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of CRAM records to cache. default 20,000 | ||
- `args.checkSequenceMD5` **[boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** default true. if false, disables verifying the MD5 | ||
* `args.cacheSize` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)?** optional maximum number of CRAM records to cache. default 20,000 | ||
* `args.checkSequenceMD5` **[boolean](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Boolean)?** default true. if false, disables verifying the MD5 | ||
checksum of the reference sequence underlying a slice. In some applications, this check can cause an inconvenient amount (many megabases) of sequences to be fetched. | ||
@@ -298,8 +314,4 @@ | ||
Returns **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** the number of containers in the file | ||
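A sketch of using `CramFile` on its own (the URL is a placeholder, the top-level export is assumed, and `containerCount` is assumed to resolve asynchronously like the rest of this API):

```js
// Sketch only: the URL and top-level export are assumptions.
const { CramFile } = require('@gmod/cram')

const file = new CramFile({ url: 'https://example.com/yourfile.cram' })
const n = await file.containerCount()
console.log(`the file has ${n} containers`)
```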
### CraiIndex | ||
Represents a .crai index. | ||
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> | ||
@@ -309,20 +321,24 @@ | ||
- [constructor](#constructor) | ||
- [hasDataForReferenceSequence](#hasdataforreferencesequence) | ||
- [getEntriesForRange](#getentriesforrange) | ||
* [constructor](#constructor) | ||
* [Parameters](#parameters) | ||
* [hasDataForReferenceSequence](#hasdataforreferencesequence) | ||
* [Parameters](#parameters-1) | ||
* [getEntriesForRange](#getentriesforrange) | ||
* [Parameters](#parameters-2) | ||
#### constructor | ||
**Parameters** | ||
##### Parameters | ||
- `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
- `args.path` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** | ||
- `args.url` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** | ||
- `args.filehandle` **FileHandle?** | ||
* `args` **[object](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object)** | ||
* `args.path` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** | ||
* `args.url` **[string](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/String)?** | ||
* `args.filehandle` **FileHandle?** | ||
#### hasDataForReferenceSequence | ||
**Parameters** | ||
##### Parameters | ||
- `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
@@ -336,7 +352,7 @@ Returns **[Promise](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise)** true if the index contains entries for | ||
**Parameters** | ||
##### Parameters | ||
- `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
- `queryStart` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
- `queryEnd` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `seqId` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `queryStart` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
* `queryEnd` **[number](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Number)** | ||
@@ -347,46 +363,2 @@ Returns **[Promise](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Promise)** promise for | ||
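A sketch of querying a .crai index directly (the path and top-level export are assumptions):

```js
// Sketch only: the path and top-level export are assumptions.
const { CraiIndex } = require('@gmod/cram')

const index = new CraiIndex({ path: '/filesystem/yourfile.cram.crai' })
if (await index.hasDataForReferenceSequence(0)) {
  // each entry describes a stretch of the CRAM file overlapping the query range
  const entries = await index.getEntriesForRange(0, 10000, 20000)
  console.log(`found ${entries.length} index entries`)
}
```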
#### Error Classes | ||
`@gmod/cram/errors` contains some special error classes thrown by cram-js. A list of the error classes is below. | ||
<!-- Generated by documentation.js. Update this documentation by updating the source code. --> | ||
##### Table of Contents | ||
- [CramUnimplementedError](#cramunimplementederror) | ||
- [CramMalformedError](#crammalformederror) | ||
- [CramBufferOverrunError](#crambufferoverrunerror) | ||
- [CramSizeLimitError](#cramsizelimiterror) | ||
- [CramArgumentError](#cramargumenterror) | ||
#### CramUnimplementedError | ||
**Extends Error** | ||
Error caused by encountering a part of the CRAM spec that has not yet been implemented | ||
#### CramMalformedError | ||
**Extends CramError** | ||
An error caused by malformed data. | ||
#### CramBufferOverrunError | ||
**Extends CramMalformedError** | ||
An error caused by attempting to read beyond the end of the defined data. | ||
#### CramSizeLimitError | ||
**Extends CramError** | ||
An error caused by data being too big, exceeding a size limit. | ||
#### CramArgumentError | ||
**Extends CramError** | ||
An invalid argument was supplied to a cram-js method or object. | ||
### CramUnimplementedError | ||
@@ -393,0 +365,0 @@ |
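A sketch of distinguishing these error classes when calling the API (it reuses the `indexedFile` variable from the sketch above; the require path follows the note in the Error Classes section):

```js
const { CramMalformedError, CramSizeLimitError } = require('@gmod/cram/errors')

try {
  await indexedFile.getRecordsForRange(0, 10000, 20000)
} catch (e) {
  if (e instanceof CramMalformedError) {
    console.error('the CRAM data appears to be malformed', e)
  } else if (e instanceof CramSizeLimitError) {
    console.error('the requested data exceeded a configured size limit', e)
  } else {
    throw e
  }
}
```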
Sorry, the diff of this file is too big to display
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
Major refactor
Supply chain risk: Package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package
Filesystem access
Supply chain risk: Accesses the file system, and could potentially read sensitive data.
Found 1 instance in 1 package
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package
+ Added @jkbonfield/htscodecs@^0.5.1
+ Added bzip2@^0.1.1
+ Added @jkbonfield/htscodecs@0.5.1 (transitive)
+ Added bzip2@0.1.1 (transitive)
- Removed @babel/runtime-corejs3@^7.4.5
- Removed @babel/runtime-corejs3@7.26.0 (transitive)
- Removed core-js-pure@3.39.0 (transitive)
- Removed regenerator-runtime@0.14.1 (transitive)