Comparing version 2.0.5 to 3.0.0
@@ -1,5 +0,9 @@
## [2.0.5](https://github.com/GMOD/bbi-js/compare/v2.0.4...v2.0.5) (2022-12-17)
# [3.0.0](https://github.com/GMOD/bbi-js/compare/v2.0.5...v3.0.0) (2023-01-11)
- Update to rxjs 7
## [2.0.5](https://github.com/GMOD/bbi-js/compare/v2.0.4...v2.0.5) (2022-12-17)
- Cleanup package.json and README
@@ -9,4 +13,2 @@
- Use plain TextDecoder for decoding autoSql
@@ -13,0 +15,0 @@
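The 2.0.5 entry about autoSql refers to decoding the autoSql schema text embedded in a BigBed header with the platform's built-in `TextDecoder`. A minimal illustration of that approach, using a made-up byte buffer in place of the real header region (nothing here is the library's actual code):

```ts
// Illustrative stand-in for the NUL-terminated autoSql region sliced out of a BigBed header
const autoSqlBytes = new TextEncoder().encode(
  'table bed\n"Browser Extensible Data"\n(\n  string chrom; "Chromosome"\n)\0',
)

// TextDecoder is built into modern browsers and Node.js, so no Buffer dependency or polyfill is needed
const autoSql = new TextDecoder('utf8').decode(autoSqlBytes).replace(/\0/g, '')
```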
@@ -81,2 +81,16 @@ "use strict";
class BBI {
/* fetch and parse header information from a bigwig or bigbed file
* @param abortSignal - abort the operation, can be null
* @return a Header object
*/
getHeader(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
if (!this.headerP) {
this.headerP = this._getHeader(options).catch(e => {
this.headerP = undefined;
throw e;
});
}
return this.headerP;
}
/*
@@ -104,16 +118,2 @@ * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API
}
/* fetch and parse header information from a bigwig or bigbed file
* @param abortSignal - abort the operation, can be null
* @return a Header object
*/
getHeader(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
if (!this.headerP) {
this.headerP = this._getHeader(options).catch(e => {
this.headerP = undefined;
throw e;
});
}
return this.headerP;
}
_getHeader(opts) {
@@ -275,6 +275,4 @@ return __awaiter(this, void 0, void 0, function* () {
const ob = yield this.getFeatureStream(refName, start, end, opts);
const ret = yield ob
.pipe((0, operators_1.reduce)((acc, curr) => acc.concat(curr)))
.toPromise();
return ret || [];
const ret = yield (0, rxjs_1.firstValueFrom)(ob.pipe((0, operators_1.toArray)()));
return ret.flat();
});
@@ -281,0 +279,0 @@ }
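The compiled builds and the TypeScript source elsewhere in this diff make the same rxjs 6 → 7 change in `getFeatures`: the `reduce`/`concat` accumulator resolved with the deprecated `.toPromise()` is replaced by `toArray()` awaited through `firstValueFrom`, then flattened. A minimal sketch of the new pattern in isolation, with a hand-rolled observable standing in for what `getFeatureStream` emits:

```ts
import { firstValueFrom, of } from 'rxjs'
import { toArray } from 'rxjs/operators'

async function collectFeatures() {
  // Stand-in for getFeatureStream output: one array of features per data block
  const chunks$ = of([{ start: 0, end: 10 }], [{ start: 10, end: 20 }])

  // Gather every emission into a single array-of-arrays, resolve it with
  // firstValueFrom (toPromise() is deprecated in rxjs 7), then flatten
  const features = (await firstValueFrom(chunks$.pipe(toArray()))).flat()
  return features // [{ start: 0, end: 10 }, { start: 10, end: 20 }]
}
```

Because `toArray()` always emits exactly one value when the source completes, even an empty array, the `ret || []` fallback from the rxjs 6 version is no longer needed.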
@@ -197,3 +197,3 @@ "use strict";
});
const ret = yield (0, rxjs_1.merge)(...res).toPromise();
const ret = yield (0, rxjs_1.firstValueFrom)((0, rxjs_1.merge)(...res));
return ret.filter(f => { var _a; return ((_a = f.rest) === null || _a === void 0 ? void 0 : _a.split('\t')[(f.field || 0) - 3]) === name; });
@@ -200,0 +200,0 @@ });
import { Parser } from 'binary-parser';
import { LocalFile, RemoteFile } from 'generic-filehandle';
import { Observable } from 'rxjs';
import { reduce } from 'rxjs/operators';
import { firstValueFrom, Observable } from 'rxjs';
import { toArray } from 'rxjs/operators';
import { BlockView } from './blockView';
@@ -69,2 +69,16 @@ const BIG_WIG_MAGIC = -2003829722;
export class BBI {
/* fetch and parse header information from a bigwig or bigbed file
* @param abortSignal - abort the operation, can be null
* @return a Header object
*/
getHeader(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
if (!this.headerP) {
this.headerP = this._getHeader(options).catch(e => {
this.headerP = undefined;
throw e;
});
}
return this.headerP;
}
/*
@@ -92,16 +106,2 @@ * @param filehandle - a filehandle from generic-filehandle or implementing something similar to the node10 fs.promises API
}
/* fetch and parse header information from a bigwig or bigbed file
* @param abortSignal - abort the operation, can be null
* @return a Header object
*/
getHeader(opts = {}) {
const options = 'aborted' in opts ? { signal: opts } : opts;
if (!this.headerP) {
this.headerP = this._getHeader(options).catch(e => {
this.headerP = undefined;
throw e;
});
}
return this.headerP;
}
async _getHeader(opts) {
@@ -252,8 +252,6 @@ const header = await this._getMainHeader(opts);
const ob = await this.getFeatureStream(refName, start, end, opts);
const ret = await ob
.pipe(reduce((acc, curr) => acc.concat(curr)))
.toPromise();
return ret || [];
const ret = await firstValueFrom(ob.pipe(toArray()));
return ret.flat();
}
}
//# sourceMappingURL=bbi.js.map
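The `getHeader` wrapper added in both builds combines two small patterns: the parsed-header promise is memoized on `this.headerP` (and cleared on failure so a later call can retry), and an `AbortSignal` passed directly in place of an options object is detected via its `aborted` property and wrapped as `{ signal }`, presumably for backward compatibility. A stripped-down sketch of the same idea, using hypothetical names (`HeaderCache`, `loadHeader`) rather than the library's internals:

```ts
// Hypothetical header shape, only to make the sketch self-contained
interface CachedHeader {
  magic: number
}

class HeaderCache {
  private headerP?: Promise<CachedHeader>

  constructor(
    private loadHeader: (opts: { signal?: AbortSignal }) => Promise<CachedHeader>,
  ) {}

  getHeader(opts: AbortSignal | { signal?: AbortSignal } = {}) {
    // A caller may pass an AbortSignal directly; detect that and wrap it as { signal }
    const options = 'aborted' in opts ? { signal: opts } : opts

    if (!this.headerP) {
      // Memoize the in-flight promise so concurrent callers share one fetch, but drop it
      // on failure so the next call retries instead of re-throwing the cached rejection
      this.headerP = this.loadHeader(options).catch(e => {
        this.headerP = undefined
        throw e
      })
    }
    return this.headerP
  }
}
```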
import { Parser } from 'binary-parser';
import { Observable, merge } from 'rxjs';
import { Observable, merge, firstValueFrom } from 'rxjs';
import { map, reduce } from 'rxjs/operators';
@@ -174,3 +174,3 @@ import AbortablePromiseCache from 'abortable-promise-cache';
});
const ret = await merge(...res).toPromise();
const ret = await firstValueFrom(merge(...res));
return ret.filter(f => { var _a; return ((_a = f.rest) === null || _a === void 0 ? void 0 : _a.split('\t')[(f.field || 0) - 3]) === name; });
@@ -177,0 +177,0 @@ }
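One behavioral difference in these `searchExtraIndex` hunks is worth noting: `.toPromise()` resolves to `undefined` when an observable completes without emitting, whereas `firstValueFrom` rejects with rxjs's `EmptyError` unless a default is supplied (the TypeScript hunk further down shows an `if (!blocks.length)` guard before this point). The generic rxjs 7 escape hatch looks like this, as a standalone sketch rather than code from this package:

```ts
import { EMPTY, firstValueFrom } from 'rxjs'

async function firstOrFallback() {
  // An observable that completes without ever emitting
  const empty$ = EMPTY

  // Without the defaultValue option this would reject with EmptyError;
  // with it, the old toPromise()-style "resolve to a fallback" behavior is restored
  const value = await firstValueFrom(empty$, { defaultValue: [] as string[] })
  return value // []
}
```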
{
"name": "@gmod/bbi",
"version": "2.0.5",
"version": "3.0.0",
"description": "Parser for BigWig/BigBed files",
@@ -49,24 +49,24 @@ "license": "MIT",
"quick-lru": "^4.0.0",
"rxjs": "^6.5.2"
"rxjs": "^7.8.0"
},
"devDependencies": {
"@gmod/bed": "^2.1.2",
"@types/jest": "^29.1.2",
"@types/jest": "^29.2.4",
"@types/long": "^5.0.0",
"@types/node": "^18.11.0",
"@types/node": "^18.11.15",
"@types/pako": "^2.0.0",
"@typescript-eslint/eslint-plugin": "^5.40.0",
"@typescript-eslint/parser": "^5.40.0",
"@typescript-eslint/eslint-plugin": "^5.46.1",
"@typescript-eslint/parser": "^5.46.1",
"cross-fetch": "^3.0.2",
"eslint": "^8.25.0",
"eslint": "^8.29.0",
"eslint-config-prettier": "^8.3.0",
"eslint-plugin-import": "^2.25.3",
"eslint-plugin-prettier": "^4.0.0",
"jest": "^29.2.0",
"jest-environment-jsdom": "^29.2.0",
"prettier": "^2.5.1",
"jest": "^29.3.1",
"jest-environment-jsdom": "^29.3.1",
"prettier": "^2.8.1",
"rimraf": "^3.0.2",
"standard-changelog": "^2.0.11",
"ts-jest": "^29.0.3",
"typescript": "^4.8.4"
"typescript": "^4.9.4"
},
@@ -73,0 +73,0 @@ "publishConfig": {
README.md
@@ -13,10 +13,12 @@ # bbi-js
const {BigWig} = require('@gmod/bbi');
const file = new BigWig({
path: 'volvox.bw'
});
(async () => {
await file.getHeader();
const feats = await file.getFeatures('chr1', 0, 100, { scale: 1 });
})();
```ts
const { BigWig } = require('@gmod/bbi')
const file = new BigWig({
path: 'volvox.bw',
})
;(async () => {
await file.getHeader()
const feats = await file.getFeatures('chr1', 0, 100, { scale: 1 })
})()
```
@@ -26,22 +28,23 @@ If using remotely, you can use it in combination with generic-filehandle or your own implementation of something like generic-filehandle
const {BigWig} = require('@gmod/bbi');
const {RemoteFile} = require('generic-filehandle')
```ts
const { BigWig } = require('@gmod/bbi')
const { RemoteFile } = require('generic-filehandle')
// if running in the browser, RemoteFile will use the global fetch
const file = new BigWig({
filehandle: new RemoteFile('volvox.bw')
});
// if running in the browser, RemoteFile will use the global fetch
const file = new BigWig({
filehandle: new RemoteFile('volvox.bw'),
})
// if running under node.js you must supply the fetch function to RemoteFile
const fetch = require('node-fetch')
const file = new BigWig({
filehandle: new RemoteFile('volvox.bw', { fetch }),
})
// if running under node.js you must supply the fetch function to RemoteFile
const fetch = require('node-fetch')
const file = new BigWig({
filehandle: new RemoteFile('volvox.bw', {fetch})
});
;(async () => {
await file.getHeader()
const feats = await file.getFeatures('chr1', 0, 100, { scale: 1 })
})()
```
(async () => {
await file.getHeader();
const feats = await file.getFeatures('chr1', 0, 100, { scale: 1 });
})();
## Documentation
@@ -72,7 +75,9 @@
const feats = await bigwig.getFeatures('chr1', 0, 100)
// returns array of features with start, end, score
// coordinates on returned data are 0-based half open
// (no conversion to 1-based as in wig is done)
// note: refseq is not returned on the object, but it is clearly chr1 from the query
```ts
const feats = await bigwig.getFeatures('chr1', 0, 100)
// returns array of features with start, end, score
// coordinates on returned data are 0-based half open
// (no conversion to 1-based as in wig is done)
// note: refseq is not returned on the object, but it is clearly chr1 from the query
```
@@ -95,10 +100,16 @@ ### Understanding scale and reductionLevel
const observer = await bigwig.getFeatureStream('chr1', 0, 100)
observer.subscribe(chunk => {
/* chunk contains array of features with start, end, score */
}, error => {
/* process error */
}, () => {
/* completed */
})
```ts
const observer = await bigwig.getFeatureStream('chr1', 0, 100)
observer.subscribe(
chunk => {
/* chunk contains array of features with start, end, score */
},
error => {
/* process error */
},
() => {
/* completed */
},
)
```
@@ -135,19 +146,19 @@ ### BigBed
```js
import {BigBed} from '@gmod/bbi'
import BED from '@gmod/bed'
```ts
import {BigBed} from '@gmod/bbi'
import BED from '@gmod/bed'
const ti = new BigBed({
filehandle: new LocalFile(require.resolve('./data/hg18.bb')),
})
const {autoSql} = await ti.getHeader()
const feats = await ti.getFeatures('chr7', 0, 100000)
const parser = new BED({autoSql})
const lines = feats.map(f => {
const { start, end, rest, uniqueId } = f
return parser.parseLine(`chr7\t${start}\t${end}\t${rest}`, { uniqueId })
})
// @gmod/bbi returns features with {uniqueId, start, end, rest}
// we reconstitute this as a line for @gmod/bed with a template string
// note: the uniqueId is based on file offsets and helps to deduplicate exact feature copies if they exist
const ti = new BigBed({
filehandle: new LocalFile(require.resolve('./data/hg18.bb')),
})
const {autoSql} = await ti.getHeader()
const feats = await ti.getFeatures('chr7', 0, 100000)
const parser = new BED({autoSql})
const lines = feats.map(f => {
const { start, end, rest, uniqueId } = f
return parser.parseLine(`chr7\t${start}\t${end}\t${rest}`, { uniqueId })
})
// @gmod/bbi returns features with {uniqueId, start, end, rest}
// we reconstitute this as a line for @gmod/bed with a template string
// note: the uniqueId is based on file offsets and helps to deduplicate exact feature copies if they exist
```
@@ -157,25 +168,29 @@
```json
{
"chromId": 0,
"start": 64068,
"end": 64107,
"rest": "uc003sil.1\t0\t-\t64068\t64068\t255,0,0\t.\tDQ584609",
"uniqueId": "bb-171"
}
```
{ chromId: 0,
start: 64068,
end: 64107,
rest: 'uc003sil.1\t0\t-\t64068\t64068\t255,0,0\t.\tDQ584609',
uniqueId: 'bb-171' }
```
Features after parsing with @gmod/bed:
```json
{
"uniqueId": "bb-0",
"chrom": "chr7",
"chromStart": 54028,
"chromEnd": 73584,
"name": "uc003sii.2",
"score": 0,
"strand": -1,
"thickStart": 54028,
"thickEnd": 54028,
"reserved": "255,0,0",
"spID": "AL137655"
}
```
{ uniqueId: 'bb-0',
chrom: 'chr7',
chromStart: 54028,
chromEnd: 73584,
name: 'uc003sii.2',
score: 0,
strand: -1,
thickStart: 54028,
thickEnd: 54028,
reserved: '255,0,0',
spID: 'AL137655' }
```
@@ -182,0 +197,0 @@ ## Academic Use
import { Parser } from 'binary-parser'
import { LocalFile, RemoteFile, GenericFilehandle } from 'generic-filehandle'
import { Observable, Observer } from 'rxjs'
import { reduce } from 'rxjs/operators'
import { firstValueFrom, Observable, Observer } from 'rxjs'
import { toArray } from 'rxjs/operators'
import { BlockView } from './blockView'
@@ -370,10 +370,8 @@
},
): Promise<Feature[]> {
) {
const ob = await this.getFeatureStream(refName, start, end, opts)
const ret = await ob
.pipe(reduce((acc, curr) => acc.concat(curr)))
.toPromise()
return ret || []
const ret = await firstValueFrom(ob.pipe(toArray()))
return ret.flat()
}
}
import { Parser } from 'binary-parser'
import { Observable, merge } from 'rxjs'
import { Observable, merge, firstValueFrom } from 'rxjs'
import { map, reduce } from 'rxjs/operators'
@@ -219,6 +219,3 @@ import AbortablePromiseCache from 'abortable-promise-cache'
*/
public async searchExtraIndex(
name: string,
opts: RequestOptions = {},
): Promise<Feature[]> {
public async searchExtraIndex(name: string, opts: RequestOptions = {}) {
const blocks = await this.searchExtraIndexBlocks(name, opts)
@@ -242,5 +239,5 @@ if (!blocks.length) {
})
const ret = await merge(...res).toPromise()
const ret = await firstValueFrom(merge(...res))
return ret.filter(f => f.rest?.split('\t')[(f.field || 0) - 3] === name)
}
}
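The final filter in `searchExtraIndex` relies on the feature shape shown in the README section above: `rest` is the tab-joined remainder of the BED line starting at the name column, so if `f.field` is the column index of the extra-indexed field in the whole BED line (with chrom, start, and end occupying indices 0-2), subtracting 3 maps it onto `rest`. A small check of that arithmetic under that assumption, reusing the example feature from the README (the `field` value here is illustrative):

```ts
// Feature in the shape the README documents: rest holds BED columns 4 and up, tab-joined
const f = {
  uniqueId: 'bb-171',
  start: 64068,
  end: 64107,
  rest: 'uc003sil.1\t0\t-\t64068\t64068\t255,0,0\t.\tDQ584609',
  field: 3, // whole-line column index of the indexed field; 3 is the name column
}

// The same expression the filter uses: whole-line column 3 is element 0 of rest
const indexedValue = f.rest.split('\t')[(f.field || 0) - 3]
console.log(indexedValue) // "uc003sil.1"
```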
+ Added rxjs@7.8.2 (transitive)
+ Added tslib@2.8.1 (transitive)
- Removed rxjs@6.6.7 (transitive)
- Removed tslib@1.14.1 (transitive)
Updated rxjs@^7.8.0