csvtojson - npm Package Compare versions

Comparing version 0.3.7 to 0.3.8

libs/core/defaultParsers.js


libs/core/csvConverter.js

@@ -11,3 +11,2 @@ module.exports = csvAdv;

-var eol = os.EOL;
 function csvAdv(params) {

@@ -18,3 +17,4 @@ Transform.call(this);

     "delimiter": ",", // change the delimiter of csv columns
-    "quote": "\"" //quote for a column containing delimiter.
+    "quote": "\"", //quote for a column containing delimiter.
+    "trim": true //trim column's space characters
 }

@@ -39,62 +39,12 @@ if (params && typeof params == "object") {

-    var self = this;
-    var started = false;
-    self.on("record", function(rowStr, index, lastLine) {
-        var quote = self.param.quote;
-        var delimiter = self.param.delimiter;
-        var rowArr = rowStr.split(delimiter);
-        var row = [];
-        var inquote = false;
-        var quoteBuff = "";
-        for (var i = 0; i < rowArr.length; i++) {
-            var ele = rowArr[i];
-            if (self._isToogleQuote(ele)) {
-                if (inquote) {
-                    quoteBuff += delimiter;
-                    inquote = false;
-                    quoteBuff += ele.substr(0, ele.length - 1);
-                    row.push(quoteBuff);
-                    quoteBuff = "";
-                } else {
-                    inquote = true;
-                    quoteBuff += ele.substring(1);
-                }
-            } else {
-                if (inquote) {
-                    quoteBuff += ele;
-                } else {
-                    if (ele.indexOf(quote) === 0 && ele[ele.length - 1] == quote) {
-                        ele = ele.substring(1, ele.length - 1);
-                    }
-                    row.push(ele);
-                }
-            }
-        }
-        if (index == 0) {
-            self._headRowProcess(row);
-            self.push("[" + eol);
-        } else if (rowStr.length > 0) {
-            var resultRow = {};
-            self._rowProcess(row, index, resultRow);
-            self.emit("record_parsed", resultRow, row, index - 1);
-            if (started === true) {
-                self.push("," + eol);
-            }
-            self.push(JSON.stringify(resultRow));
-            started = true;
-        }
-    });
-    self.on("end", function() {
-        var finalResult = self.param.constructResult ? self.resultObject.getBuffer() : {};
-        self.emit("end_parsed", finalResult);
-        if (self._callback && typeof self._callback == "function") {
-            var func = self._callback;
-            self._callback = null;
-            func(null, finalResult);
-        }
-    });
     this._callback = null;
+    this.init();
     return this;
 };
 utils.inherits(csvAdv, Transform);
+csvAdv.prototype.init=function(){
+    require("./init_onend.js").call(this);
+    require("./init_onrecord.js").call(this);
+}
 csvAdv.prototype._isToogleQuote = function(segment) {

@@ -101,0 +51,0 @@ var quote = this.param.quote;

module.exports.Converter=require("./csvConverter.js");
module.exports.Parser=require("./parser.js");
module.exports.parserMgr=require("./parserMgr.js");

@@ -36,2 +36,11 @@ module.exports=Parser;

         params.resultRow[params.head]=params.item;
     }
 }
+Parser.prototype.getHead=function(){
+    return this.head;
+}
+Parser.prototype.clone=function(){
+    return new Parser(this.name,this.regExp,this.parse);
+}
+Parser.prototype.getName=function(){
+    return this.name;
+}

@@ -8,8 +8,2 @@ //module interfaces

 var Parser=require("./parser.js");
-var defaultParsers=[
-    {"name":"array", "regExp":/^\*array\*/, "parserFunc":_arrayParser},
-    {"name":"json", "regExp":/^\*json\*/, "parserFunc":_jsonParser},
-    {"name":"omit", "regExp":/^\*omit\*/, "parserFunc":function(){}},
-    {"name":"jsonarray", "regExp":/^\*jsonarray\*/, "parserFunc":_jsonArrParser}
-];

@@ -29,8 +23,2 @@ function registerParser(parser){

-function initDefaultParsers(){
-    for (var i=0;i<defaultParsers.length;i++){
-        var parserCfg=defaultParsers[i];
-        addParser(parserCfg.name,parserCfg.regExp,parserCfg.parserFunc);
-    }
-}
 function initParsers(row){

@@ -49,48 +37,21 @@ var parsers=[];

     if (parser.test(columnTitle)){
-        return parser;
+        var inst=parser.clone();
+        inst.head=columnTitle;
+        return inst;
     }
 }
-return new Parser();
+return getParserByName("json",columnTitle);
 }
-//default parsers
-function _arrayParser(params){
-    var fieldName=params.head.replace(this.regExp,"");
-    if (params.resultRow[fieldName]==undefined){
-        params.resultRow[fieldName]=[];
-    }
-    params.resultRow[fieldName].push(params.item);
-}
+function getParserByName(parserName,columnTitle){
+    for (var i=0;i<registeredParsers.length;i++){
+        var parser=registeredParsers[i];
+        if (parser.getName()==parserName){
+            var inst=parser.clone();
+            inst.head=columnTitle;
+            return inst;
+        }
+    }
+    return new Parser();
+}
-function _jsonParser(params){
-    var fieldStr=params.head.replace(this.regExp,"");
-    var headArr=fieldStr.split(".");
-    var pointer=params.resultRow;
-    while (headArr.length>1){
-        var headStr=headArr.shift();
-        if (pointer[headStr]==undefined){
-            pointer[headStr]={};
-        }
-        pointer=pointer[headStr];
-    }
-    pointer[headArr.shift()]=params.item;
-}
-function _jsonArrParser(params){
-    var fieldStr=params.head.replace(this.regExp,"");
-    var headArr=fieldStr.split(".");
-    var pointer=params.resultRow;
-    while (headArr.length>1){
-        var headStr=headArr.shift();
-        if (pointer[headStr]==undefined){
-            pointer[headStr]={};
-        }
-        pointer=pointer[headStr];
-    }
-    var arrFieldName=headArr.shift();
-    if (pointer[arrFieldName]==undefined){
-        pointer[arrFieldName]=[];
-    }
-    pointer[arrFieldName].push(params.item);
-}
-initDefaultParsers();
+require("./defaultParsers.js");
package.json

 {
     "name": "csvtojson",
     "description": "A tool concentrating on converting csv data to JSON with customised parser supporting",
     "author": "Keyang Xiang <keyang.xiang@gmail.com>",
     "homepage": "http://keyangxiang.com/blog/csv2json/",
     "bugs": "https://github.com/Keyang/node-csvtojson/issues",
     "repository": {
         "type": "git",
         "url": "https://github.com/Keyang/node-csvtojson.git"
     },
     "contributors": [{
         "name": "Keyang Xiang",
         "email": "keyang.xiang@gmail.com"
     }],
-    "version": "0.3.7",
+    "version": "0.3.8",
     "keywords": [
         "csv",
         "json",
         "convert",
         "parser",
         "exntendible",
         "plugin"
     ],
     "bin": {
         "csvtojson": "./bin/csvtojson"
-    }
+    },
+    "license": [{
+        "type": "MIT",
+        "url": "https://github.com/Keyang/node-csvtojson/blob/master/LICENSE"
+    }]
 }
README.md

 #CSV2JSON
-All you need nodejs csv to json converter. Support big json data, CLI, web server, nested JSON, customised parser, stream, pipe, and more!
+All you need nodejs csv to json converter. Support big json data, CLI, web server, powerful nested JSON, customised parser, stream, pipe, and more!

@@ -30,16 +30,18 @@ #IMPORTANT!!

-* [Example](#example)
-* [Usage](#usage)
-* [CLI Usage](#usage)
-* [Quick Start](#quick-start)
-* [Customised Parser](#parser)
-* [Webserver](#webserver)
-* [Events](#events)
-* [Built-in Parsers](#default-parsers)
-* [Example](#example)
-* [Big CSV File Streaming](#big-csv-file)
-* [Process Big CSV File in CLI](#convert-big-csv-file-with-command-line-tool)
-* [Column Array](#column-array)
-* [Parse String](#parse-string)
+* [CLI Tool](#command-line-tools)
+* [Web Service](#webservice)
+* [API & Library](#api)
+* [Demo Product](#demo-product)
+* [Quick Start](#quick-start)
+* [Parameters](#params)
+* [Customised Parser](#parser)
+* [Webserver](#webserver)
+* [Events](#events)
+* [Built-in Parsers](#default-parsers)
+* [Example](#example)
+* [Big CSV File Streaming](#big-csv-file)
+* [Process Big CSV File in CLI](#convert-big-csv-file-with-command-line-tool)
+* [Column Array](#column-array)
+* [Parse String](#parse-string)
+* [Empowered JSON Parser](#empowered-json-parser)
+* [Change Log](#change-log)

@@ -96,8 +98,24 @@

-### API
+#Demo Product
+I am hosting a free online service to convert CSV to JSON. The tool can be found [here](http://keyangxiang.com/projects.html?_ctl=project&_act=csv2json).
+Just paste your CSV data and it will be converted to JSON for you.
+The product simply uses the csvtojson web interface. The complete source code is shown below:
+```js
+var server=require("csvtojson").interfaces.web;
+server.startWebServer({
+    "port":process.env.VCAP_APP_PORT || 8801
+});
+```
+It uses the HTTP request as a readable stream and the HTTP response as a writable stream.
+# Quick Start
+Use the csvtojson library in your own project.
+Add csvtojson to your package.json or install it through npm:
+>npm install csvtojson
-#### Quick Start
 ~~The core of the tool is the Converter class. It is based on the node-csv library (version 0.3.6). Therefore it has all features of [node-csv](http://www.adaltas.com/projects/node-csv/).~~ To start a parse, simply use the following code:
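The diff elides the quick-start snippet itself. As a stand-in, here is a minimal sketch; the require("csvtojson").core.Converter export path is an assumption (the interfaces.web namespace above suggests this layout), while the end_parsed event comes from the source diff:

```js
// Minimal quick-start sketch; "core.Converter" is an assumed export path.
var Converter = require("csvtojson").core.Converter;
var fs = require("fs");

var csvConverter = new Converter();
// "end_parsed" fires once with the fully constructed JSON result
csvConverter.on("end_parsed", function(jsonObj) {
    console.log(jsonObj);
});
fs.createReadStream("./myCSVFile.csv").pipe(csvConverter);
```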

@@ -124,3 +142,3 @@

 ```
-#### Params
+# Params
 The parameters for the Converter constructor are:

@@ -131,4 +149,5 @@

 * quote: If a column contains the delimiter, the column content can be surrounded with a quote character so that, e.g., "hello, world" won't be split into two columns while parsing. Default: " (double quote)
+* trim: Indicates whether the parser trims spaces surrounding column content, e.g. " content " will be trimmed to "content". Default: true
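To make the parameter list concrete, here is a hedged sketch of a Converter constructed with these options; constructResult appears in the source diff above, and the export path is assumed as before:

```js
var Converter = require("csvtojson").core.Converter; // assumed export path

var csvConverter = new Converter({
    "constructResult": true, // keep the whole result in memory for end_parsed
    "delimiter": ",",        // column delimiter
    "quote": "\"",           // quote character for columns containing the delimiter
    "trim": true             // new in 0.3.8: trim spaces around column content
});
```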
-#### Parser
+# Parser
 CSVTOJSON allows adding customised parsers which concentrate on what to parse and how to parse it.
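The parser API visible in the tests below is parserMgr.addParser(name, regExp, parseFunc), where parseFunc receives {head, item, resultRow}. A hedged sketch of a custom parser follows; the export path and the *upper* prefix are illustrative assumptions:

```js
var parserMgr = require("csvtojson").core.parserMgr; // assumed export path

// Register a parser for column titles starting with *upper* (hypothetical prefix)
parserMgr.addParser("upper", /^\*upper\*/, function(params) {
    // params.head: column title, params.item: cell value, params.resultRow: output row
    var fieldName = params.head.replace(this.regExp, "");
    params.resultRow[fieldName] = params.item.toUpperCase();
});
// A column titled "*upper*name" now yields an upper-cased "name" field in each row.
```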

@@ -189,3 +208,3 @@ It is the main power of the tool that developer only needs to concentrate on how to deal with the data and other concerns like streaming, memory, web, cli etc are done automatically.

-#### WebServer
+# WebServer
 The web server can also be started from code.

@@ -204,3 +223,3 @@

-#### Events
+# Events

@@ -229,3 +248,3 @@ Following events are used for Converter class:
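The event list itself is elided by the diff, but two events are visible in the source above: record_parsed (per row) and end_parsed (once at the end). A short sketch of both, with signatures taken from the emit() calls in the source diff:

```js
var Converter = require("csvtojson").core.Converter; // assumed export path
var csvConverter = new Converter();

csvConverter.on("record_parsed", function(resultRow, rawRow, rowIndex) {
    // fired for every data row as it is parsed
    console.log(rowIndex, resultRow);
});
csvConverter.on("end_parsed", function(jsonResult) {
    // fired once; jsonResult is the constructed result when constructResult is enabled
    console.log(jsonResult);
});
```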

-#### Default Parsers
+# Default Parsers
 There are default parsers in the library; they are:
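The list itself is elided here, but the four built-in parsers and their column-title prefixes can be read off the defaultParsers array in the source diff above; the field names below are illustrative:

```js
// Column titles selecting built-in parsers (prefixes from the defaultParsers array)
var exampleHeaders = [
    "*array*hobbies",       // repeated columns collected into resultRow.hobbies = [...]
    "*json*user.name",      // nested as resultRow.user.name
    "*jsonarray*user.tags", // values appended to resultRow.user.tags = [...]
    "*omit*internalNote"    // column dropped from the result
];
```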

@@ -241,3 +260,3 @@

-####Example:
+#Example:

@@ -276,3 +295,3 @@ Original data:

"age": "28",
"number": "51289",
"number": "51289",
"key": [

@@ -291,3 +310,4 @@ "key3",

```
-#### Big CSV File
+# Big CSV File
 The csvtojson library was designed to convert big CSV files. To avoid heavy memory consumption, it is recommended to use a read stream and a write stream, as in the sketch below.
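A minimal streaming sketch: the Converter inherits from Transform in the source diff above, so it can sit in a pipe. Disabling constructResult here is an assumption based on the constructResult check in the source, to avoid buffering the whole result:

```js
var Converter = require("csvtojson").core.Converter; // assumed export path
var fs = require("fs");

var csvConverter = new Converter({ "constructResult": false }); // assumed option to skip in-memory result
fs.createReadStream("./bigCSVFile.csv")
    .pipe(csvConverter)
    .pipe(fs.createWriteStream("./outputData.json"));
```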

@@ -308,3 +328,3 @@

-#### Convert Big CSV File with Command line tool
+# Convert Big CSV File with Command line tool
 The csvtojson command line tool supports streaming in a big csv file and streaming out a json file.

@@ -328,3 +348,3 @@

-#### Column Array
+# Column Array
 To convert csv data to a column array, you have to construct the result in memory. See the example below.

@@ -379,4 +399,4 @@

-#### Parse String
-To parse a string, simply call fromString(csvString,callback) method.
+# Parse String
+To parse a string, simply call the fromString(csvString, callback) method. The callback parameter is optional.
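A short sketch of the callback style; the (err, result) callback signature follows the _callback handling in the source diff above, and the export path is assumed as before:

```js
var Converter = require("csvtojson").core.Converter; // assumed export path
var csvString = "date,amount\n2014-01-01,100\n2014-01-02,200";

var csvConverter = new Converter();
csvConverter.fromString(csvString, function(err, jsonResult) {
    if (err) { console.error(err); return; }
    console.log(jsonResult); // one object per CSV data row
});
```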

@@ -403,6 +423,76 @@ For example:

#Empowered JSON Parser
Since version 0.3.8, csvtojson can replicate any complex JSON structure.
As we know, a JSON object represents a graph, while CSV contains only 2-dimensional data (a table).
To make JSON and CSV carry the same amount of information, we need to "flatten" some of the structure in the JSON.
Here is an example. Original CSV:
```
fieldA.title, fieldA.children[0].name, fieldA.children[0].id,fieldA.children[1].name, fieldA.children[1].employee[].name,fieldA.children[1].employee[].name, fieldA.address[],fieldA.address[], description
Food Factory, Oscar, 0023, Tikka, Tim, Joe, 3 Lame Road, Grantstown, A fresh new food factory
Kindom Garden, Ceil, 54, Pillow, Amst, Tom, 24 Shaker Street, HelloTown, Awesome castle
```
The data above contains nested JSON, including nested arrays of JSON objects and plain text.
Converting it with csvtojson, the result would be:
```json
[{
"fieldA": {
"title": "Food Factory",
"children": [{
"name": "Oscar",
"id": "0023"
}, {
"name": "Tikka",
"employee": [{
"name": "Tim"
}, {
"name": "Joe"
}]
}],
"address": ["3 Lame Road", "Grantstown"]
},
"description": "A fresh new food factory"
}, {
"fieldA": {
"title": "Kindom Garden",
"children": [{
"name": "Ceil",
"id": "54"
}, {
"name": "Pillow",
"employee": [{
"name": "Amst"
}, {
"name": "Tom"
}]
}],
"address": ["24 Shaker Street", "HelloTown"]
},
"description": "Awesome castle"
}]
```
Here are the rules for CSV data headers:
* Use a dot (.) to represent nested JSON, e.g. field1.field2.field3 will be converted to {field1:{field2:{field3:<value>}}}
* Use square brackets ([]) to represent an array, e.g. field1.field2[< index >] will be converted to {field1:{field2:[<values>]}}. Different columns with the same header name will be added to the same array.
* An array can contain nested JSON objects, e.g. field1.field2[< index >].name will be converted to {field1:{field2:[{name:<value>}]}}
* The index can be omitted in some situations. However, omitting it loses information, so the index should **NOT** be omitted if the array contains JSON objects with more than one field (see the fieldA.children[1].employee field in the example above; omitting the index is still fine if the child JSON contains only one field).
Since 0.3.8, the JSON parser is the default parser. It is no longer necessary to add "\*json\*" to column titles. Theoretically, the JSON parser now covers the functionality of the "Array" parser, the "JSONArray" parser, and the old "JSON" parser.
This mainly prepares for the next few versions, where csvtojson could convert a JSON object back to CSV format without losing information.
It can be used to process JSON data exported from a NoSQL database like MongoDB.
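A hedged sketch of converting a header in this style, reusing fromString and end_parsed from the earlier sections (export path assumed as before); the expected shape follows the header rules above:

```js
var Converter = require("csvtojson").core.Converter; // assumed export path
var csv =
    "fieldA.title,fieldA.children[0].name,fieldA.children[0].id\n" +
    "Food Factory,Oscar,0023";

var csvConverter = new Converter();
csvConverter.on("end_parsed", function(result) {
    // Per the header rules: result[0].fieldA.children[0].name === "Oscar"
    console.log(JSON.stringify(result));
});
csvConverter.fromString(csv);
```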
#Change Log
##0.3.8
* Empowered built-in JSON parser.
* Change: Use JSON parser as default parser.
* Added the trim parameter to the constructor (default: true); it trims spaces surrounding column content.
##0.3.5

@@ -409,0 +499,0 @@ * Added fromString method to support direct string input

@@ -120,2 +120,14 @@ var CSVAdv = require("../libs/core/csvConverter.js");

 });
+it("should be able to convert csv string without callback provided", function(done) {
+    var testData = __dirname + "/data/testData";
+    var data = fs.readFileSync(testData).toString();
+    var result = {}
+    var csvConverter = new CSVAdv();
+    //end_parsed will be emitted once parsing finished
+    csvConverter.on("end_parsed", function(jsonObj) {
+        assert(jsonObj.length === 2);
+        done();
+    });
+    csvConverter.fromString(data);
+});
 it("should be able to handle columns with double quotes", function(done) {

@@ -122,0 +134,0 @@ var testData = __dirname + "/data/dataWithQoutes";

@@ -1,79 +1,206 @@

 var assert = require("assert");
 var parserMgr = require("../libs/core/parserMgr.js");
+var CSVAdv = require("../libs/core/csvConverter.js");
 describe("ParserMgr", function() {
     it("should add a correct parser", function() {
         parserMgr.addParser("myparserName", /myParser.+/, function() {});
     });
     it("should add a parser if regular expression is a string", function() {
         parserMgr.addParser("myparserName", "hello regexp", function() {});
     });
     describe("array parser", function() {
         it("should return an array parser with specific column title", function() {
             var parser = parserMgr.getParser("*array*myArray");
             assert(parser.name === "array");
         });
         it("should parse as an array", function() {
             var parser = parserMgr.getParser("*array*myArray");
             var resultRow = {};
             parser.parse({
                 "head": "*array*myArray",
                 "item": "item1",
                 "resultRow": resultRow
             });
             parser.parse({
                 "head": "*array*myArray",
                 "item": "item2",
                 "resultRow": resultRow
             });
             assert(resultRow.myArray[0] == "item1");
             assert(resultRow.myArray[1] == "item2");
         });
     });
     describe("json parser", function() {
         it("should return an json parser with specific column title", function() {
             var parser = parserMgr.getParser("*json*myJSON.item1");
             assert(parser.name === "json");
         });
         it("should parse as an json", function() {
             var parser1 = parserMgr.getParser("*json*myJSON.item1");
             var parser2 = parserMgr.getParser("*json*myJSON.item2");
             var resultRow = {};
-            parser1.parse({
-                "head": "*json*myJSON.item1",
-                "item": "item1",
-                "resultRow": resultRow
-            });
-            parser2.parse({
-                "head": "*json*myJSON.item2",
-                "item": "item2",
-                "resultRow": resultRow
-            });
+            parser1.parse({
+                "item": "item1",
+                "resultRow": resultRow
+            });
+            parser2.parse({
+                "item": "item2",
+                "resultRow": resultRow
+            });
             assert(resultRow.myJSON.item1 == "item1");
             assert(resultRow.myJSON.item2 == "item2");
         });
+        it("should parse a json containing array", function() {
+            var parser1 = parserMgr.getParser("*json*myJSON.item1[0]");
+            var parser2 = parserMgr.getParser("*json*myJSON.item1[1]");
+            var resultRow = {};
+            parser1.parse({
+                "item": "item1",
+                "resultRow": resultRow
+            });
+            parser2.parse({
+                "item": "item2",
+                "resultRow": resultRow
+            });
+            assert(resultRow.myJSON.item1);
+            assert(resultRow.myJSON.item1.length === 2);
+        });
+        it("should parse a json containing child json with array", function() {
+            var parser1 = parserMgr.getParser("*json*myJSON.item1.arr[0]");
+            var parser2 = parserMgr.getParser("*json*myJSON.item1.arr[1]");
+            var parser3 = parserMgr.getParser("*json*myJSON.item1.title");
+            var resultRow = {};
+            parser1.parse({
+                "item": "item1",
+                "resultRow": resultRow
+            });
+            parser2.parse({
+                "item": "item2",
+                "resultRow": resultRow
+            });
+            parser3.parse({
+                "item": "mytitle",
+                "resultRow": resultRow
+            });
+            assert(resultRow.myJSON.item1);
+            assert(resultRow.myJSON.item1.arr.length === 2);
+            assert(resultRow.myJSON.item1.title == "mytitle");
+        });
+        it("should parse a json containing child json with array containing child json", function() {
+            var parser1 = parserMgr.getParser("*json*myJSON.item1.arr[0].title");
+            var parser2 = parserMgr.getParser("*json*myJSON.item1.arr[1].title");
+            var parser3 = parserMgr.getParser("*json*myJSON.item1.arr[2].title");
+            var resultRow = {};
+            parser1.parse({
+                "item": "item1",
+                "resultRow": resultRow
+            });
+            parser2.parse({
+                "item": "item2",
+                "resultRow": resultRow
+            });
+            parser3.parse({
+                "item": "item3",
+                "resultRow": resultRow
+            });
+            assert(resultRow.myJSON.item1);
+            assert(resultRow.myJSON.item1.arr.length === 3);
+            assert(resultRow.myJSON.item1.arr[0].title == "item1");
+        });
+        it("should parse a json containing child json with dynamic array containing child json", function() {
+            var parser1 = parserMgr.getParser("*json*myJSON.item1.arr[].title");
+            var parser2 = parserMgr.getParser("*json*myJSON.item1.arr[].title");
+            var parser3 = parserMgr.getParser("*json*myJSON.item1.arr[].title");
+            var resultRow = {};
+            parser1.parse({
+                "item": "item1",
+                "resultRow": resultRow
+            });
+            parser2.parse({
+                "item": "item2",
+                "resultRow": resultRow
+            });
+            parser3.parse({
+                "item": "item3",
+                "resultRow": resultRow
+            });
+            assert(resultRow.myJSON.item1);
+            assert(resultRow.myJSON.item1.arr.length === 3);
+            assert(resultRow.myJSON.item1.arr[2].title == "item3");
+        });
+        it("should parse a complex JSON's original CSV file", function(done) {
+            var converter = new CSVAdv();
+            var fs = require("fs");
+            var r = fs.createReadStream(__dirname + "/data/complexJSONCSV");
+            converter.on("end_parsed", function(res) {
+                assert(res);
+                assert(res.length === 2);
+                assert(res[0].fieldA.title == "Food Factory");
+                assert(res[0].fieldA.children.length === 2);
+                assert(res[0].fieldA.children[0].name == "Oscar");
+                assert(res[0].fieldA.children[0].id == "0023");
+                assert(res[0].fieldA.children[1].name == "Tikka");
+                assert(res[0].fieldA.children[1].employee.length === 2);
+                assert(res[0].fieldA.children[1].employee[0].name == "Tim", JSON.stringify(res[0].fieldA.children[1].employee[0]));
+                assert(res[0].fieldA.address.length === 2);
+                assert(res[0].fieldA.address[0] == "3 Lame Road");
+                assert(res[0].fieldA.address[1] == "Grantstown");
+                assert(res[0].description == "A fresh new food factory", res[0].description);
+                done();
+            });
+            r.pipe(converter);
+        });
     });
     describe("json array parser", function() {
         it("should return an json array parser with specific column title", function() {
             var parser = parserMgr.getParser("*jsonarray*myJSON.item");
             assert(parser.name === "jsonarray");
         });
         it("should parse as an json array with multiple columns", function() {
             var parser1 = parserMgr.getParser("*jsonarray*myJSON.item");
             var parser2 = parserMgr.getParser("*jsonarray*myJSON.item");
             var resultRow = {};
             parser1.parse({
                 "head": "*jsonarray*myJSON.item",
                 "item": "item1",
                 "resultRow": resultRow
             });
             parser2.parse({
                 "head": "*jsonarray*myJSON.item",
                 "item": "item2",
                 "resultRow": resultRow
             });
             assert(resultRow.myJSON.item[0] == "item1");
             assert(resultRow.myJSON.item[1] == "item2");
         });
     });
     describe("*omit* parser", function() {
         it("should return an omit parser with specific column title", function() {
             var parser = parserMgr.getParser("*omit*item");
             assert(parser.name === "omit");
         });
         it("should not contain omitted column in result", function() {
             var parser1 = parserMgr.getParser("*omit*column");
             var resultRow = {};
             parser1.parse({
                 "head": "*omit*column",
                 "item": "item1",
                 "resultRow": resultRow
             });
             assert("{}" === JSON.stringify(resultRow));
         });
     });
     it("can parse a csv head to parser array", function() {
         var head = ["*array*myArr", "*json*json.item1"];
         var parsers = parserMgr.initParsers(head);
         assert(parsers[0].name == "array");
         assert(parsers[1].name == "json");
     });
 });

