New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in · Demo · Install
Socket

barnard59-core

Package Overview
Dependencies
Maintainers
1
Versions
38
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

barnard59-core - npm Package Compare versions

Comparing version 0.0.5 to 0.0.6

test/forEach.test.js

2

lib/createLoaderRegistry.js

@@ -15,3 +15,3 @@ const jsLoader = require('rdf-loader-code/ecmaScript')

function createLoaderRegistry (additionalLoaders = []) {
const loaders = [ ...defaultLoaders, ...additionalLoaders ]
const loaders = [...defaultLoaders, ...additionalLoaders]

@@ -18,0 +18,0 @@ return loaders.reduce((registry, loader) => {

const { promisify } = require('util')
const { finished, Duplex, Readable } = require('readable-stream')
const { finished, Duplex } = require('readable-stream')
const { isWritable } = require('isstream')
const ReadableToReadable = require('readable-to-readable')
function objectToReadable (content, objectMode) {
const stream = new Readable({
objectMode,
read: () => {}
})
stream.push(content)
stream.push(null)
return stream
}
class ForEach extends Duplex {
constructor (pipeline, master, log, handleChunk) {
constructor ({ pipeline, master, log, variable }) {
super({ objectMode: true })

@@ -25,4 +13,4 @@

this.master = master
this.handleChunk = handleChunk
this.readFrom = null
this.variable = variable
this.pull = null
this.done = false

@@ -33,21 +21,31 @@ }

try {
const current = this.child.clone({
...this.master,
const subPipeline = this.child.clone({
basePath: this.master.basePath,
context: this.master.context,
objectMode: true,
variables: this.master.variables,
log: this.log
})
this.readFrom = ReadableToReadable.readFrom(current, { end: false })
this.pull = ReadableToReadable.readFrom(subPipeline, { end: false })
if (this.handleChunk) {
this.handleChunk.call(undefined, current, chunk)
if (this.variable) {
// if the argument is a function, call it with the chunk as argument
if (typeof this.variable === 'function') {
this.variable.call(undefined, subPipeline, chunk)
}
// if the argument is a string, assign the chunk to the variable with the argument as key
if (typeof this.variable === 'string' && subPipeline.variables) {
subPipeline.variables.set(this.variable, chunk)
}
}
if (isWritable(current)) {
objectToReadable(chunk, current._writableState.objectMode).pipe(current)
if (isWritable(subPipeline)) {
subPipeline.end(chunk)
}
await promisify(finished)(current)
await promisify(finished)(subPipeline)
this.readFrom = null
this.pull = null

@@ -69,7 +67,7 @@ return callback()

if (this.readFrom && !await this.readFrom()) {
if (this.pull && !await this.pull()) {
return
}
setTimeout(() => this._read(), 0)
setImmediate(() => this._read())
}

@@ -82,8 +80,13 @@

}
}
static create (pipeline, handleChunk) {
return new ForEach(pipeline, this.pipeline, this.log, handleChunk)
}
function factory (pipeline, variable) {
return new ForEach({
pipeline,
master: this.pipeline,
log: this.log,
variable
})
}
module.exports = ForEach.create
module.exports = factory
const Transform = require('readable-stream').Transform
const levels = [ 'trace', 'debug', 'info', 'warn', 'error', 'fatal' ]
const levels = ['trace', 'debug', 'info', 'warn', 'error', 'fatal']

@@ -5,0 +5,0 @@ class Logger extends Transform {

@@ -157,3 +157,3 @@ const isStream = require('isstream')

async parseOperation (operation) {
let result = await this.loaderRegistry.load(operation, {
const result = await this.loaderRegistry.load(operation, {
context: this.context,

@@ -223,3 +223,3 @@ variables: this.variables,

variables.push([ variable.name, variable.value ])
variables.push([variable.name, variable.value])
return variables

@@ -226,0 +226,0 @@ }, Promise.resolve([]))

{
"name": "barnard59-core",
"version": "0.0.5",
"version": "0.0.6",
"description": "Core component of Barnard59 Linked Data pipelines",
"main": "index.js",
"scripts": {
"lint": "eslint --ext .js .",
"pretest": "npm run lint",
"test": "jest"
"coverage": "codecov",
"test": "standard && mocha"
},

@@ -34,23 +33,12 @@ "repository": {

"@rdfjs/parser-n3": "^1.1.2",
"@types/expect": "^1.20.3",
"@types/sinon": "^7.0.11",
"barnard59-base": "0.0.1",
"eslint": "^5.15.1",
"eslint-plugin-jest": "^22.3.0",
"expect": "^23.6.0",
"jest": "^23.6.0",
"jest-extended": "^0.11.0",
"jest-fetch-mock": "^2.1.1",
"codecov": "^3.6.5",
"get-stream": "^5.1.0",
"into-stream": "^5.1.1",
"mocha": "^7.1.2",
"nock": "^12.0.3",
"rdf-ext": "^1.1.2",
"sinon": "^7.3.1",
"standard": "^12.0.1"
},
"jest": {
"automock": false,
"collectCoverage": true,
"collectCoverageFrom": [
"lib/**"
],
"setupTestFrameworkScriptFile": "jest-extended"
"standard": "^14.3.4"
}
}

@@ -0,10 +1,11 @@

const { strictEqual } = require('assert')
const { describe, it } = require('mocha')
const { isReadable, isWritable, isDuplex } = require('isstream')
const createBaseStream = require('../lib/createBaseStream')
const eventToPromise = require('../lib/eventToPromise')
const createDummyPipeline = require('./support/createDummyPipeline')
const eventToPromise = require('../lib/eventToPromise')
const expect = require('expect')
const { isReadable, isWritable, isDuplex } = require('isstream')
describe('createBaseStream', () => {
describe('Plain', () => {
test('creates a Readable stream', async () => {
it('should create a Readable stream', async () => {
// given

@@ -18,6 +19,6 @@ const pipeline = createDummyPipeline()

// then
expect(isReadable(stream)).toBe(true)
strictEqual(isReadable(stream), true)
})
test('calls the init function on read', async () => {
it('should call the init function on read', async () => {
// given

@@ -37,6 +38,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('doesn\'t call the pipeline read function', async () => {
it('should not call the pipeline read function', async () => {
// given

@@ -56,3 +57,3 @@ let touched = false

// then
expect(touched).toBe(false)
strictEqual(touched, false)
})

@@ -62,3 +63,3 @@ })

describe('Duplex', () => {
test('creates a Duplex stream', async () => {
it('should create a Duplex stream', async () => {
// given

@@ -71,6 +72,6 @@ const pipeline = createDummyPipeline({ readable: true, writable: true })

// then
expect(isDuplex(stream)).toBe(true)
strictEqual(isDuplex(stream), true)
})
test('calls the init function when read is called first', async () => {
it('should call the init function when read is called first', async () => {
// given

@@ -90,6 +91,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('calls the init function when write is called first', async () => {
it('should call the init function when write is called first', async () => {
// given

@@ -109,6 +110,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('calls the read function on read', async () => {
it('should call the read function on read', async () => {
// given

@@ -128,6 +129,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('calls the write function on write', async () => {
it('should call the write function on write', async () => {
// given

@@ -147,3 +148,3 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})

@@ -153,3 +154,3 @@ })

describe('Readable', () => {
test('creates a Readable stream', async () => {
it('should create a Readable stream', async () => {
// given

@@ -162,6 +163,6 @@ const pipeline = createDummyPipeline({ readable: true })

// then
expect(isReadable(stream)).toBe(true)
strictEqual(isReadable(stream), true)
})
test('calls the init function when read is called', async () => {
it('should call the init function when read is called', async () => {
// given

@@ -181,6 +182,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('calls the read function on read', async () => {
it('should call the read function on read', async () => {
// given

@@ -200,3 +201,3 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})

@@ -206,3 +207,3 @@ })

describe('Writable', () => {
test('creates a Writable stream', async () => {
it('should create a Writable stream', async () => {
// given

@@ -215,6 +216,6 @@ const pipeline = createDummyPipeline({ writable: true })

// then
expect(isWritable(stream)).toBe(true)
strictEqual(isWritable(stream), true)
})
test('calls the init function when write is called', async () => {
it('should call the init function when write is called', async () => {
// given

@@ -234,6 +235,6 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
test('calls the write function on write', async () => {
it('should call the write function on write', async () => {
// given

@@ -253,5 +254,5 @@ let touched = false

// then
expect(touched).toBe(true)
strictEqual(touched, true)
})
})
})

@@ -0,13 +1,14 @@

const { rejects, strictEqual } = require('assert')
const path = require('path')
const clownface = require('clownface')
const Clownface = require('clownface/lib/Clownface')
const { describe, it } = require('mocha')
const isStream = require('isstream')
const rdf = require('rdf-ext')
const createLoaderRegistry = require('../lib/createLoaderRegistry')
const createPipelineStream = require('../lib/createPipelineStream')
const expect = require('expect')
const eventToPromise = require('../lib/eventToPromise')
const isStream = require('isstream')
const Pipeline = require('../lib/Pipeline')
const load = require('./support/load-pipeline')
const path = require('path')
const rdf = require('rdf-ext')
const run = require('./support/run')
const Clownface = require('clownface/lib/Clownface')
const Pipeline = require('../lib/Pipeline')

@@ -17,3 +18,3 @@ const pipelineTerm = rdf.namedNode('http://example.org/pipeline/')

describe('createPipelineStream', () => {
test('returns a stream', async () => {
it('should return a stream', async () => {
// given

@@ -27,6 +28,6 @@ const definition = await load('write.ttl')

// then
expect(isStream(stream)).toBe(true)
strictEqual(isStream(stream), true)
})
test('should handle stream errors', async () => {
it('should handle stream errors', async () => {
// given

@@ -44,6 +45,6 @@ const definition = await load('stream-error.ttl')

// then
await expect(promise).rejects.toBeInstanceOf(Error)
await rejects(promise)
})
test('inner pipeline is assigned to _pipeline property', async () => {
it('should assign the inner pipeline to the _pipeline property', async () => {
// given

@@ -57,6 +58,6 @@ const definition = await load('write.ttl')

// then
expect(stream._pipeline instanceof Pipeline).toBe(true)
strictEqual(stream._pipeline instanceof Pipeline, true)
})
test('clone is a method', async () => {
it('should have a clone method', async () => {
// given

@@ -70,6 +71,6 @@ const definition = await load('write.ttl')

// then
expect(typeof stream.clone).toBe('function')
strictEqual(typeof stream.clone, 'function')
})
test('basePath is a string property', async () => {
it('should have a basePath string property', async () => {
// given

@@ -83,6 +84,6 @@ const definition = await load('write.ttl')

// then
expect(typeof stream.basePath).toBe('string')
strictEqual(typeof stream.basePath, 'string')
})
test('context is a object property', async () => {
it('should have a context object property', async () => {
// given

@@ -96,6 +97,6 @@ const definition = await load('write.ttl')

// then
expect(typeof stream.context).toBe('object')
strictEqual(typeof stream.context, 'object')
})
test('node is a clownface property', async () => {
it('should have a node clownface property', async () => {
// given

@@ -109,6 +110,6 @@ const definition = await load('write.ttl')

// then
expect(stream.node instanceof Clownface).toBe(true)
strictEqual(stream.node instanceof Clownface, true)
})
test('variables is a Map property', async () => {
it('should have a variables Map property', async () => {
// given

@@ -122,6 +123,6 @@ const definition = await load('write.ttl')

// then
expect(stream.variables instanceof Map).toBe(true)
strictEqual(stream.variables instanceof Map, true)
})
test('write on pipeline is forwarded to stream', async () => {
it('should forward data from the last step to the pipeline stream', async () => {
// given

@@ -137,6 +138,6 @@ const definition = await load('write.ttl')

// then
expect(stream.context.content.toString()).toBe('test')
strictEqual(stream.context.content.toString(), 'test')
})
test('read on pipeline is forwarded to stream', async () => {
it('should forward data written to the pipeline stream to the first step', async () => {
// given

@@ -151,6 +152,6 @@ const definition = await load('read.ttl')

// then
expect(content.toString()).toBe('test')
strictEqual(content.toString(), 'test')
})
test('should handle step creating errors', async () => {
it('should handle step creating errors', async () => {
// given

@@ -165,7 +166,7 @@ const definition = await load('step-error.ttl')

// then
await expect(promise).rejects.toBeInstanceOf(Error)
await rejects(promise)
})
describe('PlainPipeline', () => {
test('end event is emitted', async () => {
it('should emit an end event', async () => {
// given

@@ -186,3 +187,3 @@ const definition = await load('plain.ttl')

describe('ReadablePipeline', () => {
test('end event is emitted', async () => {
it('should emit an end event', async () => {
// given

@@ -203,3 +204,3 @@ const definition = await load('read.ttl')

describe('WriteablePipeline', () => {
test('finish event is emitted', async () => {
it('should emit a finish event', async () => {
// given

@@ -219,3 +220,3 @@ const definition = await load('write.ttl')

test('calling end is forwarded to the pipeline steps', async () => {
it('should forward the end of stream event to the step', async () => {
// given

@@ -222,0 +223,0 @@ const definition = await load('write.ttl')

@@ -1,3 +0,4 @@

const expect = require('expect')
const { strictEqual } = require('assert')
const path = require('path')
const { describe, it } = require('mocha')
const Pipeline = require('../lib/pipelineFactory')

@@ -8,3 +9,3 @@ const load = require('./support/load-pipeline')

describe('forEach', () => {
test('executes example correctly', async () => {
it('should execute the example correctly', async () => {
// given

@@ -21,3 +22,3 @@ const definition = await load('../../examples/forEach.ttl')

const outJson = JSON.parse(out)
expect(outJson.length).toBe(24)
strictEqual(outJson.length, 24)
})

@@ -29,3 +30,3 @@

* */
test('variables set in forEach are preserved during execution', async () => {
it('should preserve variables set during forEach execution', async () => {
// given

@@ -45,5 +46,5 @@ const definition = await load('/e2e/foreach-with-handler.ttl')

// then
expect(out.length).toBeGreaterThan(0)
expect(out[0]).not.toBe(out[1])
strictEqual(out.length > 0, true)
strictEqual(out[0] !== out[1], true)
})
})

@@ -27,4 +27,4 @@ /* global describe, test, beforeEach */

const variables = new Map([
[ 'foo', 'bar' ],
[ 'hello', 'world' ]
['foo', 'bar'],
['hello', 'world']
])

@@ -31,0 +31,0 @@

@@ -24,3 +24,3 @@ /* global describe, test, beforeEach */

.addOut(ns.p('name'), 'foo')
const variables = new Map([ [ 'foo', 'bar' ] ])
const variables = new Map([['foo', 'bar']])

@@ -67,3 +67,3 @@ // when

const node = rdf.literal('foo', ns.p('VariableName'))
const variables = new Map([ [ 'foo', 'bar' ] ])
const variables = new Map([['foo', 'bar']])

@@ -70,0 +70,0 @@ // when

@@ -0,11 +1,12 @@

const { strictEqual } = require('assert')
const clownface = require('clownface')
const expect = require('expect')
const { describe, it } = require('mocha')
const rdf = require('rdf-ext')
const { Writable } = require('readable-stream')
const sinon = require('sinon')
const run = require('../lib/run')
const Logger = require('../lib/logger')
const sinon = require('sinon')
const { Writable } = require('readable-stream')
describe('Logger', () => {
test('should unpipe itself from master at end', async () => {
it('should unpipe itself from master at end', async () => {
// given

@@ -22,6 +23,6 @@ const masterNode = clownface(rdf.dataset(), rdf.namedNode('http://example.org/master'))

// then
expect(child._readableState.pipesCount).toBe(0)
strictEqual(child._readableState.pipesCount, 0)
})
test('should not push forwarded logs if has not been piped', () => {
it('should not push forwarded logs if has not been piped', () => {
// given

@@ -38,6 +39,6 @@ const masterNode = clownface(rdf.dataset(), rdf.namedNode('http://example.org/master'))

// then
expect(pushSpy.notCalled).toBeTruthy()
strictEqual(pushSpy.notCalled, true)
})
test('should not push direct log if has not been piped', () => {
it('should not push direct log if has not been piped', () => {
// given

@@ -52,6 +53,6 @@ const masterNode = clownface(rdf.dataset(), rdf.namedNode('http://example.org/master'))

// then
expect(pushSpy.notCalled).toBeTruthy()
strictEqual(pushSpy.notCalled, true)
})
test('should push forwarded logs when piped', () => {
it('should push forwarded logs when piped', () => {
// given

@@ -69,6 +70,6 @@ const masterNode = clownface(rdf.dataset(), rdf.namedNode('http://example.org/master'))

// then
expect(pushSpy.called).toBeTruthy()
strictEqual(pushSpy.called, true)
})
test('should push direct logs when piped', () => {
it('should push direct logs when piped', () => {
// given

@@ -88,4 +89,4 @@ const masterNode = clownface(rdf.dataset(), rdf.namedNode('http://example.org/master'))

// then
expect(pushSpy.called).toBeTruthy()
strictEqual(pushSpy.called, true)
})
})

@@ -1,22 +0,29 @@

const expect = require('expect')
const { deepStrictEqual } = require('assert')
const { beforeEach, describe, it } = require('mocha')
const nock = require('nock')
const Pipeline = require('../lib/pipelineFactory')
const asyncLoaders = require('./support/asyncLoaders')
const load = require('./support/load-pipeline')
const run = require('./support/run')
const asyncLoaders = require('./support/asyncLoaders')
const fetch = require('jest-fetch-mock')
jest.setMock('node-fetch', fetch)
const dateTimeLd = {
'@context': {
date: 'http://purl.org/dc/elements/1.1/date'
},
'@id': 'http://worldtimeapi.org/api/timezone/CET',
date: '2019-03-07T12:58:54.094127+01:00'
}
const dateTime = {
datetime: '2019-03-07T12:58:54.094127+01:00'
}
describe('Pipeline', () => {
beforeEach(() => {
fetch.resetMocks()
const dateTime = {
'datetime': '2019-03-07T12:58:54.094127+01:00'
}
fetch.mockResponse(JSON.stringify(dateTime))
nock('http://worldtimeapi.org')
.get('/api/timezone/CET')
.reply(200, dateTime)
})
test('can load code using node: scheme', async () => {
it('should load code using node: scheme', async () => {
// given

@@ -30,8 +37,6 @@ const definition = await load('e2e/world-clock-node.ttl')

// then
const outJson = JSON.parse(out)
expect(outJson).toContainKey('date')
expect(outJson).toContainValue('http://worldtimeapi.org/api/timezone/CET')
deepStrictEqual(JSON.parse(out), dateTimeLd)
})
test('can load code using file: scheme', async () => {
it('should load code using file: scheme', async () => {
// given

@@ -45,8 +50,6 @@ const definition = await load('e2e/world-clock-file.ttl')

// then
const outJson = JSON.parse(out)
expect(outJson).toContainKey('date')
expect(outJson).toContainValue('http://worldtimeapi.org/api/timezone/CET')
deepStrictEqual(JSON.parse(out), dateTimeLd)
})
test('can load code using async loaders', async () => {
it('should load code using async loaders', async () => {
// given

@@ -65,6 +68,4 @@ const definition = await load('e2e/world-clock-async.ttl')

// then
const outJson = JSON.parse(out)
expect(outJson).toContainKey('@context')
expect(outJson).toContainKey('date')
deepStrictEqual(JSON.parse(out), { abc: 'dfg' })
})
})

@@ -1,4 +0,4 @@

const assert = require('assert')
const expect = require('expect')
const { deepStrictEqual, strictEqual, throws } = require('assert')
const path = require('path')
const { describe, it } = require('mocha')
const Pipeline = require('../lib/pipelineFactory')

@@ -11,3 +11,3 @@ const load = require('./support/load-pipeline')

describe('constructor', () => {
test('loads selected pipeline when there are multiple', async () => {
it('should load the selected pipeline when there are multiple', async () => {
// given

@@ -20,6 +20,6 @@ const definition = await load('multiple.ttl')

// then
assert.ok(pipeline)
strictEqual(Boolean(pipeline), true)
})
test('throws when the iri is missing', async () => {
it('should throw an error when the iri is missing', async () => {
// given

@@ -29,3 +29,3 @@ const definition = await load('multiple.ttl')

// then
assert.throws(() => {
throws(() => {
// when

@@ -36,3 +36,3 @@ Pipeline(definition)

test('throws when the pipeline is not found', async () => {
it('should throw an error when the pipeline is not found', async () => {
// given

@@ -42,3 +42,3 @@ const definition = await load('multiple.ttl')

// then
assert.throws(() => {
throws(() => {
// when

@@ -51,3 +51,3 @@ Pipeline(definition, ns.pipeline('no-such-pipeline'))

describe('variables', () => {
test('should be parsed from definition', async () => {
it('should be parsed from definition', async () => {
// given

@@ -62,6 +62,6 @@ const definition = await load('variables.ttl')

// then
expect(pipeline.variables.get('foo')).toBe('bar')
strictEqual(pipeline.variables.get('foo'), 'bar')
})
test('should combine values from definition and constructor call', async () => {
it('should combine values from definition and constructor call', async () => {
// given

@@ -78,7 +78,7 @@ const definition = await load('variables.ttl')

// then
expect(pipeline.variables.size).toBe(2)
expect(pipeline.variables.get('hello')).toBe('world')
strictEqual(pipeline.variables.size, 2)
strictEqual(pipeline.variables.get('hello'), 'world')
})
test('should get combined from multiple sets', async () => {
it('should get combined from multiple sets', async () => {
// given

@@ -93,8 +93,8 @@ const definition = await load('variables.ttl')

// then
expect(pipeline.variables.size).toBe(2)
expect(pipeline.variables.get('username')).toBe('tpluscode')
expect(pipeline.variables.get('auth')).toBe('http://auth0.com/connect/token')
strictEqual(pipeline.variables.size, 2)
strictEqual(pipeline.variables.get('username'), 'tpluscode')
strictEqual(pipeline.variables.get('auth'), 'http://auth0.com/connect/token')
})
test('should prefer variable from constructor over that from definition', async () => {
it('should prefer variable from constructor over that from definition', async () => {
// given

@@ -111,4 +111,4 @@ const definition = await load('variables.ttl')

// then
expect(pipeline.variables.size).toBe(1)
expect(pipeline.variables.get('foo')).toBe('boar')
strictEqual(pipeline.variables.size, 1)
strictEqual(pipeline.variables.get('foo'), 'boar')
})

@@ -119,3 +119,3 @@ })

describe('arguments', () => {
test('should accept arguments as rdf:List', async () => {
it('should accept arguments as rdf:List', async () => {
// given

@@ -130,6 +130,6 @@ const definition = await load('arguments.ttl')

// then
expect(content).toEqual(['a', 'b'])
deepStrictEqual(content, ['a', 'b'])
})
test('should accept arguments as key value pairs', async () => {
it('should accept arguments as key value pairs', async () => {
// given

@@ -144,3 +144,3 @@ const definition = await load('arguments.ttl')

// then
expect(content).toEqual([{ a: '1', b: '2' }])
deepStrictEqual(content, [{ a: '1', b: '2' }])
})

@@ -151,3 +151,3 @@ })

describe('run', () => {
test('should forward stream errors to the logger', async () => {
it('should forward stream errors to the logger', async () => {
// given

@@ -172,7 +172,7 @@ const definition = await load('stream-error.ttl')

// then
assert.strictEqual(errors.length, 1)
assert.strictEqual(errors[0].message.includes('error in pipeline step http://example.org/pipeline/error'), true)
assert.strictEqual(errors[0].message.includes('at ReadStream'), true)
strictEqual(errors.length, 1)
strictEqual(errors[0].message.includes('error in pipeline step http://example.org/pipeline/error'), true)
strictEqual(errors[0].message.includes('at ReadStream'), true)
})
})
})

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc