datastore-fs
Comparing version 0.8.0 to 0.9.0
CHANGELOG.md

@@ -0,1 +1,6 @@
+<a name="0.9.0"></a>
+# [0.9.0](https://github.com/ipfs/js-datastore-fs/compare/v0.8.0...v0.9.0) (2019-05-29)
 <a name="0.8.0"></a>
@@ -2,0 +7,0 @@ # [0.8.0](https://github.com/ipfs/js-datastore-fs/compare/v0.7.0...v0.8.0) (2019-01-24)
package.json

 {
   "name": "datastore-fs",
-  "version": "0.8.0",
+  "version": "0.9.0",
   "description": "Datastore implementation with file system backend",
-  "leadMaintainer": "Pedro Teixeira <pedro@protocol.ai>",
+  "leadMaintainer": "Alex Potsides <alex.potsides@protocol.ai>",
   "main": "src/index.js",
   "scripts": {
-    "test": "aegir test",
-    "test:node": "aegir test -t node",
-    "test:browser": "aegir test -t browser",
-    "test:webworker": "aegir test -t webworker",
+    "test": "aegir test --target node",
+    "test:node": "aegir test --target node",
     "build": "aegir build",
     "lint": "aegir lint",
-    "flow": "flow",
-    "release": "aegir release --target node --docs",
-    "release-minor": "aegir release --type minor --target node --docs",
-    "release-major": "aegir release --type major --target node --docs",
-    "coverage": "aegir coverage --timeout 10000",
-    "coverage-publish": "aegir coverage --provider codecov --timeout 10000",
-    "docs": "aegir docs"
+    "release": "aegir release",
+    "release-minor": "aegir release --type minor",
+    "release-major": "aegir release --type major",
+    "coverage": "nyc -s npm run test:node && nyc report --reporter=html",
+    "dep-check": "aegir dep-check"
   },
@@ -37,19 +38,16 @@ "repository": {
   "dependencies": {
-    "async": "^2.6.1",
-    "datastore-core": "~0.6.0",
+    "datastore-core": "~0.7.0",
     "fast-write-atomic": "~0.2.0",
     "glob": "^7.1.3",
-    "graceful-fs": "^4.1.11",
-    "interface-datastore": "~0.6.0",
-    "mkdirp": "~0.5.1",
-    "pull-stream": "^3.6.9"
+    "interface-datastore": "~0.7.0",
+    "mkdirp": "~0.5.1"
   },
   "devDependencies": {
-    "aegir": "^15.3.1",
+    "aegir": "^19.0.3",
+    "async-iterator-all": "^1.0.0",
     "chai": "^4.2.0",
-    "cids": "~0.5.5",
+    "cids": "~0.7.1",
     "detect-node": "^2.0.4",
     "dirty-chai": "^2.0.1",
-    "flow-bin": "~0.84.0",
-    "flow-typed": "^2.5.1",
-    "memdown": "^1.4.1",
+    "memdown": "^4.0.0",
     "rimraf": "^2.6.2"
@@ -67,4 +65,6 @@ },
     "Vasco Santos <vasco.santos@moxy.studio>",
-    "Victor Bjelkholm <victorbjelkholm@gmail.com>"
+    "Victor Bjelkholm <victorbjelkholm@gmail.com>",
+    "Zane Starr <zcstarr@zaner.attlocal.net>",
+    "achingbrain <alex@achingbrain.net>"
   ]
 }
README.md

@@ -7,7 +7,8 @@ # js-datastore-fs
 [![standard-readme compliant](https://img.shields.io/badge/standard--readme-OK-green.svg?style=flat-square)](https://github.com/RichardLitt/standard-readme)
-[![Build Status](https://travis-ci.org/ipfs/js-datastore-fs.svg)](https://travis-ci.org/ipfs/js-datastore-fs) [![Circle CI](https://circleci.com/gh/ipfs/js-datastore-fs.svg?style=svg)](https://circleci.com/gh/ipfs/js-datastore-fs)
-[![Coverage Status](https://coveralls.io/repos/github/ipfs/js-datastore-fs/badge.svg?branch=master)](https://coveralls.io/github/ipfs/js-datastore-fs?branch=master) [![Dependency Status](https://david-dm.org/diasdavid/js-peer-id.svg?style=flat-square)](https://david-dm.org/ipfs/js-datastore-fs)
+[![Build Status](https://flat.badgen.net/travis/ipfs/js-datastore-fs)](https://travis-ci.com/ipfs/js-datastore-fs)
+[![Codecov](https://codecov.io/gh/ipfs/js-datastore-fs/branch/master/graph/badge.svg)](https://codecov.io/gh/ipfs/js-datastore-fs)
+[![Dependency Status](https://david-dm.org/ipfs/js-datastore-fs.svg?style=flat-square)](https://david-dm.org/ipfs/js-datastore-fs)
 [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat-square)](https://github.com/feross/standard)
 ![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square)
-![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square)
+![](https://img.shields.io/badge/Node.js-%3E%3D8.0.0-orange.svg?style=flat-square)
@@ -18,10 +19,13 @@ > Datastore implementation with file system backend.
-[Pedro Teixeira](https://github.com/pgte)
+[Alex Potsides](https://github.com/achingbrain)
 ## Table of Contents
-- [Install](#install)
-- [Usage](#usage)
-- [Contribute](#contribute)
-- [License](#license)
+- [js-datastore-fs](#js-datastore-fs)
+  - [Lead Maintainer](#lead-maintainer)
+  - [Table of Contents](#table-of-contents)
+  - [Install](#install)
+  - [Usage](#usage)
+  - [Contribute](#contribute)
+  - [License](#license)
@@ -43,8 +47,10 @@ ## Install
-PRs accepted.
+Feel free to join in. All welcome. Open an [issue](https://github.com/ipfs/js-datastore-fs/issues)!
-Small note: If editing the Readme, please conform to the [standard-readme](https://github.com/RichardLitt/standard-readme) specification.
+This repository falls under the IPFS [Code of Conduct](https://github.com/ipfs/community/blob/master/code-of-conduct.md).
+[![](https://cdn.rawgit.com/jbenet/contribute-ipfs-gif/master/img/contribute.gif)](https://github.com/ipfs/community/blob/master/contributing.md)
 ## License
-MIT 2017 © IPFS
+[MIT](LICENSE)
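The headline change in 0.9.0 is the move from callbacks and pull-streams to promises and async iterators, which the `src/index.js` diff below shows method by method. A minimal usage sketch of the new surface (not taken from the README; the store path, keys and values here are placeholders):

```js
const FsDatastore = require('datastore-fs')
const { Key } = require('interface-datastore')

async function main () {
  // Constructing the store resolves the path and, with the default
  // createIfMissing: true, creates the directory if it is absent.
  const store = new FsDatastore('./example-datastore')

  const key = new Key('/hello')
  await store.put(key, Buffer.from('world')) // rejects on write failure
  const value = await store.get(key)         // rejects with ERR_NOT_FOUND if absent
  console.log(value.toString())              // 'world'

  await store.delete(key)
  store.close()
}

main().catch(console.error)
```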
src/index.js
@@ -1,35 +0,25 @@
-/* @flow */
 'use strict'
-/* :: import type {Batch, Query, QueryResult, Callback} from 'interface-datastore' */
-const fs = require('graceful-fs')
-const pull = require('pull-stream')
+const fs = require('fs')
 const glob = require('glob')
-const setImmediate = require('async/setImmediate')
-const waterfall = require('async/series')
-const each = require('async/each')
-const mkdirp = require('mkdirp')
-const writeFile = require('fast-write-atomic')
+const promisify = require('util').promisify
+const writeFile = promisify(require('fast-write-atomic'))
 const path = require('path')
-const asyncFilter = require('interface-datastore').utils.asyncFilter
-const asyncSort = require('interface-datastore').utils.asyncSort
+const filter = require('interface-datastore').utils.filter
+const take = require('interface-datastore').utils.take
+const map = require('interface-datastore').utils.map
+const sortAll = require('interface-datastore').utils.sortAll
 const IDatastore = require('interface-datastore')
+const noop = () => {}
+const asyncMkdirp = promisify(require('mkdirp'))
+const fsAccess = promisify(fs.access || noop)
+const fsReadFile = promisify(fs.readFile || noop)
+const fsUnlink = promisify(fs.unlink || noop)
 const Key = IDatastore.Key
 const Errors = IDatastore.Errors
-/* :: export type FsInputOptions = {
-  createIfMissing?: bool,
-  errorIfExists?: bool,
-  extension?: string
-}
-type FsOptions = {
-  createIfMissing: bool,
-  errorIfExists: bool,
-  extension: string
-}
-*/
 /**
@@ -42,6 +32,3 @@ * A datastore backed by the file system.
 class FsDatastore {
-  /* :: path: string */
-  /* :: opts: FsOptions */
-  constructor (location /* : string */, opts /* : ?FsInputOptions */) {
+  constructor (location, opts) {
     this.path = path.resolve(location)
@@ -61,5 +48,4 @@ this.opts = Object.assign({}, {
-  open (callback /* : Callback<void> */) /* : void */ {
+  open () {
     this._openOrCreate()
-    setImmediate(callback)
   }
@@ -74,7 +60,7 @@
     if (!fs.existsSync(this.path)) {
-      throw new Error(`Datastore directory: ${this.path} does not exist`)
+      throw Errors.notFoundError(new Error(`Datastore directory: ${this.path} does not exist`))
     }
     if (this.opts.errorIfExists) {
-      throw new Error(`Datastore directory: ${this.path} already exists`)
+      throw Errors.dbOpenFailedError(new Error(`Datastore directory: ${this.path} already exists`))
     }
@@ -103,3 +89,3 @@ }
     } catch (err) {
-      if (err.message.match('does not exist')) {
+      if (err.code === 'ERR_NOT_FOUND') {
         this._create()
@@ -120,3 +106,3 @@ return
   */
-  _encode (key /* : Key */) /* : {dir: string, file: string} */ {
+  _encode (key) {
     const parent = key.parent().toString()
@@ -140,3 +126,3 @@ const dir = path.join(this.path, parent)
   */
-  _decode (file /* : string */) /* : Key */ {
+  _decode (file) {
     const ext = this.opts.extension
@@ -159,12 +145,9 @@ if (path.extname(file) !== ext) {
   * @param {Buffer} val
-  * @param {function(Error)} callback
-  * @returns {void}
+  * @returns {Promise<void>}
   */
-  putRaw (key /* : Key */, val /* : Buffer */, callback /* : Callback<void> */) /* : void */ {
+  async putRaw (key, val) {
     const parts = this._encode(key)
     const file = parts.file.slice(0, -this.opts.extension.length)
-    waterfall([
-      (cb) => mkdirp(parts.dir, { fs: fs }, cb),
-      (cb) => writeFile(file, val, cb)
-    ], (err) => callback(err))
+    await asyncMkdirp(parts.dir, { fs: fs })
+    await writeFile(file, val)
   }
@@ -177,16 +160,12 @@
   * @param {Buffer} val
-  * @param {function(Error)} callback
-  * @returns {void}
+  * @returns {Promise<void>}
   */
-  put (key /* : Key */, val /* : Buffer */, callback /* : Callback<void> */) /* : void */ {
+  async put (key, val) {
     const parts = this._encode(key)
-    waterfall([
-      (cb) => mkdirp(parts.dir, { fs: fs }, cb),
-      (cb) => writeFile(parts.file, val, cb)
-    ], (err) => {
-      if (err) {
-        return callback(Errors.dbWriteFailedError(err))
-      }
-      callback()
-    })
+    try {
+      await asyncMkdirp(parts.dir, { fs: fs })
+      await writeFile(parts.file, val)
+    } catch (err) {
+      throw Errors.dbWriteFailedError(err)
+    }
   }
@@ -198,15 +177,15 @@
   * @param {Key} key
-  * @param {function(Error, Buffer)} callback
-  * @returns {void}
+  * @returns {Promise<Buffer>}
   */
-  getRaw (key /* : Key */, callback /* : Callback<Buffer> */) /* : void */ {
+  async getRaw (key) {
     const parts = this._encode(key)
     let file = parts.file
     file = file.slice(0, -this.opts.extension.length)
-    fs.readFile(file, (err, data) => {
-      if (err) {
-        return callback(Errors.notFoundError(err))
-      }
-      callback(null, data)
-    })
+    let data
+    try {
+      data = await fsReadFile(file)
+    } catch (err) {
+      throw Errors.notFoundError(err)
+    }
+    return data
   }
@@ -218,13 +197,13 @@
   * @param {Key} key
-  * @param {function(Error, Buffer)} callback
-  * @returns {void}
+  * @returns {Promise<Buffer>}
   */
-  get (key /* : Key */, callback /* : Callback<Buffer> */) /* : void */ {
+  async get (key) {
     const parts = this._encode(key)
-    fs.readFile(parts.file, (err, data) => {
-      if (err) {
-        return callback(Errors.notFoundError(err))
-      }
-      callback(null, data)
-    })
+    let data
+    try {
+      data = await fsReadFile(parts.file)
+    } catch (err) {
+      throw Errors.notFoundError(err)
+    }
+    return data
   }
@@ -236,10 +215,12 @@
   * @param {Key} key
-  * @param {function(Error, bool)} callback
-  * @returns {void}
+  * @returns {Promise<bool>}
   */
-  has (key /* : Key */, callback /* : Callback<bool> */) /* : void */ {
+  async has (key) {
     const parts = this._encode(key)
-    fs.access(parts.file, err => {
-      callback(null, !err)
-    })
+    try {
+      await fsAccess(parts.file)
+    } catch (err) {
+      return false
+    }
+    return true
   }
@@ -251,13 +232,15 @@
   * @param {Key} key
-  * @param {function(Error)} callback
-  * @returns {void}
+  * @returns {Promise<void>}
   */
-  delete (key /* : Key */, callback /* : Callback<void> */) /* : void */ {
+  async delete (key) {
     const parts = this._encode(key)
-    fs.unlink(parts.file, (err) => {
-      if (err) {
-        return callback(Errors.dbDeleteFailedError(err))
-      }
-      callback()
-    })
+    try {
+      await fsUnlink(parts.file)
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        return
+      }
+      throw Errors.dbDeleteFailedError(err)
+    }
   }
@@ -270,21 +253,20 @@
   */
-  batch () /* : Batch<Buffer> */ {
+  batch () {
     const puts = []
     const deletes = []
     return {
-      put (key /* : Key */, value /* : Buffer */) /* : void */ {
+      put (key, value) {
         puts.push({ key: key, value: value })
       },
-      delete (key /* : Key */) /* : void */ {
+      delete (key) {
         deletes.push(key)
       },
-      commit: (callback /* : (err: ?Error) => void */) => {
-        waterfall([
-          (cb) => each(puts, (p, cb) => {
-            this.put(p.key, p.value, cb)
-          }, cb),
-          (cb) => each(deletes, (k, cb) => {
-            this.delete(k, cb)
-          }, cb)
-        ], (err) => callback(err))
+      commit: () /* : Promise<void> */ => {
+        return Promise.all(
+          puts
+            .map((put) => this.put(put.key, put.value))
+            .concat(
+              deletes.map((del) => this.delete(del))
+            )
+        )
       }
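The rewritten `commit` also changes ordering: the old callback version ran every put and then every delete in two sequential phases, while the new one hands the whole set to a single `Promise.all`, so puts and deletes run concurrently with no guaranteed ordering between them. A hedged usage sketch (placeholder keys and path):

```js
const FsDatastore = require('datastore-fs')
const { Key } = require('interface-datastore')

async function demoBatch () {
  const store = new FsDatastore('./example-datastore')

  const batch = store.batch()
  batch.put(new Key('/a'), Buffer.from('a'))
  batch.put(new Key('/b'), Buffer.from('b'))
  batch.delete(new Key('/stale'))

  // All queued operations settle together; commit rejects if any one fails
  await batch.commit()
}
```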
@@ -298,5 +280,5 @@ }
   * @param {Object} q
-  * @returns {PullStream}
+  * @returns {Iterable}
   */
-  query (q /* : Query<Buffer> */) /* : QueryResult<Buffer> */ {
+  query (q) {
     // glob expects a POSIX path
@@ -308,26 +290,22 @@ let prefix = q.prefix || '**'
       .join('/')
-    let tasks = [pull.values(glob.sync(pattern))]
+    let files = glob.sync(pattern)
+    let it
     if (!q.keysOnly) {
-      tasks.push(pull.asyncMap((f, cb) => {
-        fs.readFile(f, (err, buf) => {
-          if (err) {
-            return cb(err)
-          }
-          cb(null, {
-            key: this._decode(f),
-            value: buf
-          })
-        })
-      }))
+      it = map(files, async (f) => {
+        const buf = await fsReadFile(f)
+        return {
+          key: this._decode(f),
+          value: buf
+        }
+      })
     } else {
-      tasks.push(pull.map(f => ({ key: this._decode(f) })))
+      it = map(files, f => ({ key: this._decode(f) }))
     }
-    if (q.filters != null) {
-      tasks = tasks.concat(q.filters.map(asyncFilter))
+    if (Array.isArray(q.filters)) {
+      it = q.filters.reduce((it, f) => filter(it, f), it)
     }
-    if (q.orders != null) {
-      tasks = tasks.concat(q.orders.map(asyncSort))
+    if (Array.isArray(q.orders)) {
+      it = q.orders.reduce((it, f) => sortAll(it, f), it)
     }
@@ -337,10 +315,10 @@
       let i = 0
-      tasks.push(pull.filter(() => i++ >= q.offset))
+      it = filter(it, () => i++ >= q.offset)
     }
     if (q.limit != null) {
-      tasks.push(pull.take(q.limit))
+      it = take(it, q.limit)
     }
-    return pull.apply(null, tasks)
+    return it
   }
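With pull-streams gone, `query` now returns an iterable of `{ key, value }` entries (just `{ key }` when `keysOnly` is set) that is consumed with `for await...of`, as the updated tests below do. A sketch against an assumed already-populated store (placeholder path and prefix):

```js
const FsDatastore = require('datastore-fs')

async function demoQuery () {
  const store = new FsDatastore('./example-datastore')

  // prefix, filters, orders, offset and limit compose lazily over the iterator
  for await (const { key, value } of store.query({ prefix: '/hello' })) {
    console.log(key.toString(), value.length)
  }
}
```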
@@ -350,11 +328,6 @@
   * Close the store.
   *
-  * @param {function(Error)} callback
-  * @returns {void}
   */
-  close (callback /* : (err: ?Error) => void */) /* : void */ {
-    setImmediate(callback)
-  }
+  close () { }
 }
 module.exports = FsDatastore
@@ -1,2 +0,1 @@
-/* @flow */
 /* eslint-env mocha */
@@ -8,10 +7,9 @@ 'use strict'
 const expect = chai.expect
-const pull = require('pull-stream')
 const path = require('path')
+const promisify = require('util').promisify
+const noop = () => {}
 const mkdirp = require('mkdirp')
-const rimraf = require('rimraf')
-const waterfall = require('async/waterfall')
-const parallel = require('async/parallel')
+const rimraf = promisify(require('rimraf'))
 const fs = require('fs')
+const fsReadFile = promisify(require('fs').readFile || noop)
 const Key = require('interface-datastore').Key
@@ -21,2 +19,3 @@ const utils = require('interface-datastore').utils
 const sh = require('datastore-core').shard
+const isNode = require('detect-node')
@@ -26,2 +25,10 @@ const FsStore = require('../src')
 describe('FsDatastore', () => {
+  if (!isNode) {
+    it('only supports node.js', () => {
+    })
+    return
+  }
+
   describe('construction', () => {
@@ -46,3 +53,3 @@ it('defaults - folder missing', () => {
       expect(
-        () => new FsStore(dir, {createIfMissing: false})
+        () => new FsStore(dir, { createIfMissing: false })
       ).to.throw()
@@ -55,3 +62,3 @@ })
       expect(
-        () => new FsStore(dir, {errorIfExists: true})
+        () => new FsStore(dir, { errorIfExists: true })
      ).to.throw()
@@ -79,35 +86,57 @@ })
-  it('sharding files', (done) => {
+  it('deleting files', async () => {
+    const dir = utils.tmpdir()
+    const fs = new FsStore(dir)
+    const key = new Key('1234')
+
+    await fs.put(key, Buffer.from([0, 1, 2, 3]))
+    await fs.delete(key)
+
+    try {
+      await fs.get(key)
+      throw new Error('Should have errored')
+    } catch (err) {
+      expect(err.code).to.equal('ERR_NOT_FOUND')
+    }
+  })
+
+  it('deleting non-existent files', async () => {
+    const dir = utils.tmpdir()
+    const fs = new FsStore(dir)
+    const key = new Key('5678')
+
+    await fs.delete(key)
+
+    try {
+      await fs.get(key)
+      throw new Error('Should have errored')
+    } catch (err) {
+      expect(err.code).to.equal('ERR_NOT_FOUND')
+    }
+  })
+
+  it('sharding files', async () => {
     const dir = utils.tmpdir()
     const fstore = new FsStore(dir)
     const shard = new sh.NextToLast(2)
-    waterfall([
-      (cb) => ShardingStore.create(fstore, shard, cb),
-      (cb) => fs.readFile(path.join(dir, sh.SHARDING_FN), cb),
-      (file, cb) => {
-        expect(file.toString()).to.be.eql('/repo/flatfs/shard/v1/next-to-last/2\n')
-        fs.readFile(path.join(dir, sh.README_FN), cb)
-      },
-      (readme, cb) => {
-        expect(readme.toString()).to.be.eql(sh.readme)
-        cb()
-      },
-      (cb) => rimraf(dir, cb)
-    ], done)
+    await ShardingStore.create(fstore, shard)
+
+    const file = await fsReadFile(path.join(dir, sh.SHARDING_FN))
+    expect(file.toString()).to.be.eql('/repo/flatfs/shard/v1/next-to-last/2\n')
+
+    const readme = await fsReadFile(path.join(dir, sh.README_FN))
+    expect(readme.toString()).to.be.eql(sh.readme)
+
+    await rimraf(dir)
   })
-  it('query', (done) => {
+  it('query', async () => {
     const fs = new FsStore(path.join(__dirname, 'test-repo', 'blocks'))
-    pull(
-      fs.query({}),
-      pull.collect((err, res) => {
-        expect(err).to.not.exist()
-        expect(res).to.have.length(23)
-        done()
-      })
-    )
+    let res = []
+    for await (const q of fs.query({})) {
+      res.push(q)
+    }
+    expect(res).to.have.length(23)
   })
-  it('interop with go', (done) => {
+  it('interop with go', async () => {
     const repodir = path.join(__dirname, '/test-repo/blocks')
@@ -117,19 +146,9 @@ const fstore = new FsStore(repodir)
     const expected = fs.readFileSync(path.join(repodir, 'VO', key.toString() + '.data'))
-    waterfall([
-      (cb) => ShardingStore.open(fstore, cb),
-      (flatfs, cb) => parallel([
-        (cb) => pull(
-          flatfs.query({}),
-          pull.collect(cb)
-        ),
-        (cb) => flatfs.get(key, cb)
-      ], (err, res) => {
-        expect(err).to.not.exist()
-        expect(res[0]).to.have.length(23)
-        expect(res[1]).to.be.eql(expected)
-        cb()
-      })
-    ], done)
+    const flatfs = await ShardingStore.open(fstore)
+    let res = await flatfs.get(key)
+    let queryResult = flatfs.query({})
+    let results = []
+
+    for await (const result of queryResult) results.push(result)
+
+    expect(results).to.have.length(23)
+    expect(res).to.be.eql(expected)
   })
@@ -141,7 +160,7 @@
   require('interface-datastore/src/tests')({
-    setup (callback) {
-      callback(null, new FsStore(dir))
+    setup: () => {
+      return new FsStore(dir)
     },
-    teardown (callback) {
-      rimraf(dir, callback)
+    teardown: () => {
+      return rimraf(dir)
     }
@@ -155,8 +174,8 @@ })
   require('interface-datastore/src/tests')({
-    setup (callback) {
+    setup: () => {
       const shard = new sh.NextToLast(2)
-      ShardingStore.createOrOpen(new FsStore(dir), shard, callback)
+      return ShardingStore.createOrOpen(new FsStore(dir), shard)
     },
-    teardown (callback) {
-      rimraf(dir, callback)
+    teardown: () => {
+      return rimraf(dir)
     }
@@ -163,0 +182,0 @@ })
License Policy Violation
License: This package is not allowed per your license policy. Review the package's license to ensure compliance.
Found 1 instance in 1 package

Major refactor
Supply chain risk: The package has recently undergone a major refactor. It may be unstable or indicate significant internal changes. Use caution when updating to versions that include significant changes.
Found 1 instance in 1 package

New author
Supply chain risk: A new npm collaborator published a version of the package for the first time. New collaborators are usually benign additions to a project, but do indicate a change to the security surface area of a package.
Found 1 instance in 1 package
+ Added datastore-core@0.7.0 (transitive)
+ Added debug@4.3.7 (transitive)
+ Added interface-datastore@0.7.0 (transitive)
+ Added ms@2.1.3 (transitive)
- Removed async@^2.6.1
- Removed graceful-fs@^4.1.11
- Removed pull-stream@^3.6.9
- Removed async@2.6.4 (transitive)
- Removed datastore-core@0.6.1 (transitive)
- Removed graceful-fs@4.2.11 (transitive)
- Removed interface-datastore@0.6.0 (transitive)
- Removed lodash@4.17.21 (transitive)
- Removed pull-defer@0.2.3 (transitive)
- Removed pull-many@1.0.9 (transitive)
- Removed pull-stream@3.7.0 (transitive)

Updated datastore-core@~0.7.0
Updated interface-datastore@~0.7.0